From a1fa9642fe4bb28328fcd465a6a8f4700ee67ee8 Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Mon, 21 Apr 2025 22:27:02 -0400 Subject: [PATCH 01/38] Add initial release configuration for gh-aspnet-webapp --- .release.yml | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .release.yml diff --git a/.release.yml b/.release.yml new file mode 100644 index 0000000..ec6045a --- /dev/null +++ b/.release.yml @@ -0,0 +1,2 @@ +name: "gh-aspnet-webapp" +version: "0.0.1" From 1c3f037c6f6a1fbbc0613cacc5fc9b7de01f7564 Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Mon, 21 Apr 2025 22:37:30 -0400 Subject: [PATCH 02/38] Add container context and name to CI/CD workflow for container image build --- .github/workflows/cicd.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index 0a7bf83..f236485 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -81,4 +81,6 @@ jobs: with: # This is used for tagging the container image version: v1.0.0 - container-file: ./src/webapp01/Dockerfile \ No newline at end of file + container-file: ./src/webapp01/Dockerfile + container-context: ./src/webapp01 + container-name: webapp01 \ No newline at end of file From 653f246a15397ba319846210b5dc430989324a76 Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Mon, 21 Apr 2025 22:52:18 -0400 Subject: [PATCH 03/38] Update container name in CI/CD workflow to include repository context --- .github/workflows/cicd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index f236485..aab3ad0 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -83,4 +83,4 @@ jobs: version: v1.0.0 container-file: ./src/webapp01/Dockerfile container-context: ./src/webapp01 - container-name: webapp01 \ No newline at end of file + container-name: "${{ github.repository }}/webapp01" \ No newline at end of file From c8b536cb0d5f302a5c6f9dfc81db974149b44826 Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Mon, 21 Apr 2025 23:04:01 -0400 Subject: [PATCH 04/38] Re-enable ZAP Scan step in DAST workflow --- .github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml b/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml index 76ee348..1f05c08 100644 --- a/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml +++ b/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml @@ -51,10 +51,10 @@ jobs: with: allow_issue_writing: false target: "${{ env.ZAP_TARGET }}" - # - name: ZAP Scan - # uses: zaproxy/action-baseline@v0.4.0 - # with: - # target: "${{ env.ZAP_TARGET }}" + - name: ZAP Scan + uses: zaproxy/action-baseline@v0.4.0 + with: + target: "${{ env.ZAP_TARGET }}" - uses: SvanBoxel/zaproxy-to-ghas@main - name: Upload SARIF file uses: github/codeql-action/upload-sarif@v3 From 6ee8ed04ea0894c92477e8d3f247e547569f2ae7 Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Mon, 21 Apr 2025 23:13:27 -0400 Subject: [PATCH 05/38] Update ZAP Scan action to version 0.14.0 for improved functionality --- .github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml b/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml index 1f05c08..10be73e 100644 --- a/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml +++ 
b/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml @@ -52,7 +52,7 @@ jobs: allow_issue_writing: false target: "${{ env.ZAP_TARGET }}" - name: ZAP Scan - uses: zaproxy/action-baseline@v0.4.0 + uses: zaproxy/action-baseline@v0.14.0 with: target: "${{ env.ZAP_TARGET }}" - uses: SvanBoxel/zaproxy-to-ghas@main From 0b4c85f159c57c836f9192a20f5be0b3c2f2991a Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Mon, 21 Apr 2025 23:30:18 -0400 Subject: [PATCH 06/38] Add allow_issue_writing parameter to ZAP Scan step for issue management --- .github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml b/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml index 10be73e..3b74d29 100644 --- a/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml +++ b/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml @@ -54,6 +54,7 @@ jobs: - name: ZAP Scan uses: zaproxy/action-baseline@v0.14.0 with: + allow_issue_writing: false target: "${{ env.ZAP_TARGET }}" - uses: SvanBoxel/zaproxy-to-ghas@main - name: Upload SARIF file From 840155b8c30d542eb4eb961d01731698cafc657e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 22 Apr 2025 10:21:17 +0000 Subject: [PATCH 07/38] Bump azure/webapps-deploy from 2 to 3 Bumps [azure/webapps-deploy](https://github.com/azure/webapps-deploy) from 2 to 3. - [Release notes](https://github.com/azure/webapps-deploy/releases) - [Commits](https://github.com/azure/webapps-deploy/compare/v2...v3) --- updated-dependencies: - dependency-name: azure/webapps-deploy dependency-version: '3' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/cicd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index aab3ad0..422821e 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -64,7 +64,7 @@ jobs: docker push crdevsecopscldev.azurecr.io/webapp01:${{ github.sha }} - name: Disabled Azure Web Apps Deploy - uses: azure/webapps-deploy@v2 + uses: azure/webapps-deploy@v3 with: app-name: ${{ env.AZURE_WEBAPP_NAME }} images: 'crdevsecopscldev.azurecr.io/webapp01:${{ github.sha }}' From 804f7eabd4ab02bbb6d4c51d90aa91d27ad8e0be Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 22 Apr 2025 10:21:19 +0000 Subject: [PATCH 08/38] Bump docker/build-push-action from 4 to 6 Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 4 to 6. - [Release notes](https://github.com/docker/build-push-action/releases) - [Commits](https://github.com/docker/build-push-action/compare/v4...v6) --- updated-dependencies: - dependency-name: docker/build-push-action dependency-version: '6' dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/CIS-Anchore-Grype.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/CIS-Anchore-Grype.yml b/.github/workflows/CIS-Anchore-Grype.yml index 1cd623d..a041261 100644 --- a/.github/workflows/CIS-Anchore-Grype.yml +++ b/.github/workflows/CIS-Anchore-Grype.yml @@ -33,7 +33,7 @@ jobs: uses: actions/checkout@v4 - name: Build an image from Dockerfile - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v6 with: context: ./src/webapp01 file: ./src/webapp01/Dockerfile From c057366905f15b7748b5dd1b9da9a506ad8e6aad Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 22 Apr 2025 10:21:23 +0000 Subject: [PATCH 09/38] Bump aquasecurity/tfsec-sarif-action from 0.1.0 to 0.1.4 Bumps [aquasecurity/tfsec-sarif-action](https://github.com/aquasecurity/tfsec-sarif-action) from 0.1.0 to 0.1.4. - [Release notes](https://github.com/aquasecurity/tfsec-sarif-action/releases) - [Commits](https://github.com/aquasecurity/tfsec-sarif-action/compare/v0.1.0...v0.1.4) --- updated-dependencies: - dependency-name: aquasecurity/tfsec-sarif-action dependency-version: 0.1.4 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/IACS-AquaSecurity-tfsec.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/IACS-AquaSecurity-tfsec.yml b/.github/workflows/IACS-AquaSecurity-tfsec.yml index 7407311..12d81d5 100644 --- a/.github/workflows/IACS-AquaSecurity-tfsec.yml +++ b/.github/workflows/IACS-AquaSecurity-tfsec.yml @@ -26,7 +26,7 @@ jobs: uses: actions/checkout@v4 - name: Run tfsec - uses: aquasecurity/tfsec-sarif-action@v0.1.0 + uses: aquasecurity/tfsec-sarif-action@v0.1.4 with: sarif_file: tfsec.sarif From 9ecd0723139be5f284f64643de304309c60d2874 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 22 Apr 2025 10:21:25 +0000 Subject: [PATCH 10/38] Bump aquasecurity/trivy-action from 0.29.0 to 0.30.0 Bumps [aquasecurity/trivy-action](https://github.com/aquasecurity/trivy-action) from 0.29.0 to 0.30.0. - [Release notes](https://github.com/aquasecurity/trivy-action/releases) - [Commits](https://github.com/aquasecurity/trivy-action/compare/0.29.0...0.30.0) --- updated-dependencies: - dependency-name: aquasecurity/trivy-action dependency-version: 0.30.0 dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- .github/workflows/CIS-Trivy-AquaSecurity.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/CIS-Trivy-AquaSecurity.yml b/.github/workflows/CIS-Trivy-AquaSecurity.yml index 476d5d5..a257ace 100644 --- a/.github/workflows/CIS-Trivy-AquaSecurity.yml +++ b/.github/workflows/CIS-Trivy-AquaSecurity.yml @@ -37,7 +37,7 @@ jobs: docker build ./src/webapp01 --file ./src/webapp01/Dockerfile --tag ${{ env.imageName }}:${{ env.tag }} - name: Run Trivy vulnerability scanner - uses: aquasecurity/trivy-action@0.29.0 + uses: aquasecurity/trivy-action@0.30.0 with: image-ref: "${{ env.imageName }}:${{ env.tag }}" format: "sarif" From 5722cfdd3a4d7336b38d35bc87fe27e923805fdb Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Tue, 22 Apr 2025 08:45:02 -0400 Subject: [PATCH 11/38] Add artifact names for ZAP scan steps in workflow --- .github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml b/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml index 3b74d29..9f7bbca 100644 --- a/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml +++ b/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml @@ -51,11 +51,13 @@ jobs: with: allow_issue_writing: false target: "${{ env.ZAP_TARGET }}" + artifact_name: "zap-full-scan" - name: ZAP Scan uses: zaproxy/action-baseline@v0.14.0 with: allow_issue_writing: false target: "${{ env.ZAP_TARGET }}" + artifact_name: "zap-baseline-scan" - uses: SvanBoxel/zaproxy-to-ghas@main - name: Upload SARIF file uses: github/codeql-action/upload-sarif@v3 From 91ad3865f3921b263ee7653ca9ac73d02e786520 Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Tue, 22 Apr 2025 11:16:36 -0400 Subject: [PATCH 12/38] Update ZAP baseline scan step to include results display --- .github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml b/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml index 9f7bbca..6f2e652 100644 --- a/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml +++ b/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml @@ -56,8 +56,10 @@ jobs: uses: zaproxy/action-baseline@v0.14.0 with: allow_issue_writing: false - target: "${{ env.ZAP_TARGET }}" - artifact_name: "zap-baseline-scan" + target: "${{ env.ZAP_TARGET }}" + - name: Show results + run: | + ls - uses: SvanBoxel/zaproxy-to-ghas@main - name: Upload SARIF file uses: github/codeql-action/upload-sarif@v3 From 7e135eea89524dbf2aedec05871314aee66a575c Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Tue, 22 Apr 2025 11:49:24 -0400 Subject: [PATCH 13/38] Update ZAP integration to use the correct GitHub Actions repository --- .github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml b/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml index 6f2e652..ce1e662 100644 --- a/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml +++ b/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml @@ -60,7 +60,8 @@ jobs: - name: Show results run: | ls - - uses: SvanBoxel/zaproxy-to-ghas@main + - uses: githubabcs-devops/zap-to-ghas@main + - name: Upload SARIF file uses: github/codeql-action/upload-sarif@v3 with: From 92a514db69dfe2b85a1bafae89e394e44b339704 Mon Sep 17 00:00:00 2001 From: 
Calin Lupas Date: Tue, 22 Apr 2025 14:04:43 -0400 Subject: [PATCH 14/38] Add security-events permission for SARIF results upload --- .github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml b/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml index ce1e662..63b50e5 100644 --- a/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml +++ b/.github/workflows/DAST-ZAP-Zed-Attach-Proxy-Checkmarx.yml @@ -22,6 +22,7 @@ env: permissions: contents: read + security-events: write # for github/codeql-action/upload-sarif to upload SARIF results #issues: write # to create issues for alerts jobs: From fe8bca498144cc4b8fd18804017f1339d136cf91 Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Thu, 24 Apr 2025 16:43:07 -0400 Subject: [PATCH 15/38] Add infrastructure and application code for cloud services This commit introduces several new files to support cloud infrastructure and application development. Key additions include: - `azuredeploy.json`: ARM template for Azure Web App deployment. - `eks.tf`: Terraform configuration for Amazon EKS cluster setup. - `example.tf`: Terraform resources for Azure networking and VM creation. - `gke.tf`: GKE cluster configuration with logging and monitoring. - `index.js`: Utility functions for file handling in Node.js. - `main-sample1.tf`: AWS CloudFront distribution setup. - `mongodb.go`: MongoDB connection handling with environment variables. - `network.tf`: Azure Application Gateway and networking resources. - `python-routes.py`: Flask routes for querying a book database. - `RegexDoS.aspx.cs`: Web form demonstrating a ReDoS vulnerability. - `server.Dockerfile`: Docker setup for a Python application. - `SqliteDbProvider.cs`: SQLite database provider implementation. - `Util.cs`: Utility functions for process management and logging. 
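Note: the files under `samples/` are intentionally vulnerable demo code in the WebGoat.NET / TerraGoat style, added so that the repository's SAST, IaC, and container scanning workflows have findings to report; they are not production patterns. For contrast with the `RegexDoS.aspx.cs` sample, a minimal hardened sketch follows. It is illustrative only: it assumes .NET 4.5 or later (where the `Regex` constructor accepts a match timeout) and reuses the sample's control names.

using System;
using System.Text.RegularExpressions;

// Sketch: escape the user-controlled pattern and bound matching time so a
// crafted username cannot trigger catastrophic backtracking (ReDoS).
protected void btnCreate_Click(object sender, EventArgs e)
{
    string userName = txtUsername.Text;
    string password = txtPassword.Text;

    // Regex.Escape neutralizes regex metacharacters in the username; the
    // TimeSpan caps how long a single match attempt may run.
    Regex testPassword = new Regex(
        Regex.Escape(userName),
        RegexOptions.None,
        TimeSpan.FromMilliseconds(200));

    try
    {
        lblError.Text = testPassword.IsMatch(password)
            ? "Do not include name in password."
            : "Good password.";
    }
    catch (RegexMatchTimeoutException)
    {
        // Fail closed instead of letting the request hang.
        lblError.Text = "Unable to validate password.";
    }
}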
--- samples/RegexDoS.aspx.cs | 36 +++ samples/SqliteDbProvider.cs | 590 ++++++++++++++++++++++++++++++++++++ samples/Util.cs | 94 ++++++ samples/azuredeploy.json | 109 +++++++ samples/eks.tf | 85 ++++++ samples/example.tf | 189 ++++++++++++ samples/gke.tf | 31 ++ samples/index.js | 345 +++++++++++++++++++++ samples/main-sample1.tf | 9 + samples/mongodb.go | 75 +++++ samples/network.tf | 183 +++++++++++ samples/python-routes.py | 30 ++ samples/server.Dockerfile | 20 ++ 13 files changed, 1796 insertions(+) create mode 100644 samples/RegexDoS.aspx.cs create mode 100644 samples/SqliteDbProvider.cs create mode 100644 samples/Util.cs create mode 100644 samples/azuredeploy.json create mode 100644 samples/eks.tf create mode 100644 samples/example.tf create mode 100644 samples/gke.tf create mode 100644 samples/index.js create mode 100644 samples/main-sample1.tf create mode 100644 samples/mongodb.go create mode 100644 samples/network.tf create mode 100644 samples/python-routes.py create mode 100644 samples/server.Dockerfile diff --git a/samples/RegexDoS.aspx.cs b/samples/RegexDoS.aspx.cs new file mode 100644 index 0000000..0941e99 --- /dev/null +++ b/samples/RegexDoS.aspx.cs @@ -0,0 +1,36 @@ +using System; +using System.Web; +using System.Web.UI; +using System.Text.RegularExpressions; + +namespace OWASP.WebGoat.NET +{ + public partial class RegexDoS : System.Web.UI.Page + { + protected void Page_Load(object sender, EventArgs e) + { + + } + + /// + /// Code from https://www.owasp.org/index.php/Regular_expression_Denial_of_Service_-_ReDoS + /// + protected void btnCreate_Click(object sender, EventArgs e) + { + string userName = txtUsername.Text; + string password = txtPassword.Text; + + Regex testPassword = new Regex(userName); + Match match = testPassword.Match(password); + if (match.Success) + { + lblError.Text = "Do not include name in password."; + } + else + { + lblError.Text = "Good password."; + } + } + } +} + diff --git a/samples/SqliteDbProvider.cs b/samples/SqliteDbProvider.cs new file mode 100644 index 0000000..d22b626 --- /dev/null +++ b/samples/SqliteDbProvider.cs @@ -0,0 +1,590 @@ +using System; +using System.Data; +using Mono.Data.Sqlite; +using log4net; +using System.Reflection; +using System.IO; +using System.Diagnostics; +using System.Threading; + +namespace OWASP.WebGoat.NET.App_Code.DB +{ + public class SqliteDbProvider : IDbProvider + { + private string _connectionString = string.Empty; + private string _clientExec; + private string _dbFileName; + + ILog log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); + + public string Name { get { return DbConstants.DB_TYPE_SQLITE; } } + + public SqliteDbProvider(ConfigFile configFile) + { + _connectionString = string.Format("Data Source={0};Version=3", configFile.Get(DbConstants.KEY_FILE_NAME)); + + _clientExec = configFile.Get(DbConstants.KEY_CLIENT_EXEC); + _dbFileName = configFile.Get(DbConstants.KEY_FILE_NAME); + + if (!File.Exists(_dbFileName)) + SqliteConnection.CreateFile(_dbFileName); + } + + public bool TestConnection() + { + try + { + using (SqliteConnection conn = new SqliteConnection(_connectionString)) + { + conn.Open(); + + using (SqliteCommand cmd = conn.CreateCommand()) + { + + cmd.CommandText = "SELECT date('now')"; + cmd.CommandType = CommandType.Text; + cmd.ExecuteReader(); + } + } + return true; + } + catch (Exception ex) + { + log.Error("Error testing DB", ex); + return false; + } + } + + public DataSet GetCatalogData() + { + using (SqliteConnection connection = new 
SqliteConnection(_connectionString)) + { + connection.Open(); + + SqliteDataAdapter da = new SqliteDataAdapter("select * from Products", connection); + DataSet ds = new DataSet(); + + da.Fill(ds); + + return ds; + } + } + + public bool IsValidCustomerLogin(string email, string password) + { + //encode password + string encoded_password = Encoder.Encode(password); + + //check email/password + string sql = "select * from CustomerLogin where email = '" + email + "' and password = '" + + encoded_password + "';"; + + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + + SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); + + //TODO: User reader instead (for all calls) + DataSet ds = new DataSet(); + + da.Fill(ds); + + try + { + return ds.Tables[0].Rows.Count == 0; + } + catch (Exception ex) + { + //Log this and pass the ball along. + log.Error("Error checking login", ex); + + throw new Exception("Error checking login", ex); + } + } + } + + public bool RecreateGoatDb() + { + try + { + log.Info("Running recreate"); + string args = string.Format("\"{0}\"", _dbFileName); + string script = Path.Combine(Settings.RootDir, DbConstants.DB_CREATE_SQLITE_SCRIPT); + int retVal1 = Math.Abs(Util.RunProcessWithInput(_clientExec, args, script)); + + script = Path.Combine(Settings.RootDir, DbConstants.DB_LOAD_SQLITE_SCRIPT); + int retVal2 = Math.Abs(Util.RunProcessWithInput(_clientExec, args, script)); + + return Math.Abs(retVal1) + Math.Abs(retVal2) == 0; + } + catch (Exception ex) + { + log.Error("Error rebulding DB", ex); + return false; + } + } + + //Find the bugs! + public string CustomCustomerLogin(string email, string password) + { + string error_message = null; + try + { + //get data + string sql = "select * from CustomerLogin where email = '" + email + "';"; + + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + + SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); + DataSet ds = new DataSet(); + da.Fill(ds); + + //check if email address exists + if (ds.Tables[0].Rows.Count == 0) + { + error_message = "Email Address Not Found!"; + return error_message; + } + + string encoded_password = ds.Tables[0].Rows[0]["Password"].ToString(); + string decoded_password = Encoder.Decode(encoded_password); + + if (password.Trim().ToLower() != decoded_password.Trim().ToLower()) + { + error_message = "Password Not Valid For This Email Address!"; + } + else + { + //login successful + error_message = null; + } + } + + } + catch (SqliteException ex) + { + log.Error("Error with custom customer login", ex); + error_message = ex.Message; + } + catch (Exception ex) + { + log.Error("Error with custom customer login", ex); + } + + return error_message; + } + + public string GetCustomerEmail(string customerNumber) + { + string output = null; + try + { + + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + + string sql = "select email from CustomerLogin where customerNumber = " + customerNumber; + SqliteCommand command = new SqliteCommand(sql, connection); + output = command.ExecuteScalar().ToString(); + } + } + catch (Exception ex) + { + output = ex.Message; + } + return output; + } + + public DataSet GetCustomerDetails(string customerNumber) + { + string sql = "select Customers.customerNumber, Customers.customerName, Customers.logoFileName, Customers.contactLastName, Customers.contactFirstName, " + + "Customers.phone, Customers.addressLine1, 
Customers.addressLine2, Customers.city, Customers.state, Customers.postalCode, Customers.country, " + + "Customers.salesRepEmployeeNumber, Customers.creditLimit, CustomerLogin.email, CustomerLogin.password, CustomerLogin.question_id, CustomerLogin.answer " + + "From Customers, CustomerLogin where Customers.customerNumber = CustomerLogin.customerNumber and Customers.customerNumber = " + customerNumber; + + DataSet ds = new DataSet(); + try + { + + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + + SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); + da.Fill(ds); + } + + } + catch (Exception ex) + { + log.Error("Error getting customer details", ex); + + throw new ApplicationException("Error getting customer details", ex); + } + return ds; + + } + + public DataSet GetOffice(string city) + { + + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + + string sql = "select * from Offices where city = @city"; + SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); + da.SelectCommand.Parameters.AddWithValue("@city", city); + DataSet ds = new DataSet(); + da.Fill(ds); + return ds; + } + } + + public DataSet GetComments(string productCode) + { + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + + string sql = "select * from Comments where productCode = @productCode"; + SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); + da.SelectCommand.Parameters.AddWithValue("@productCode", productCode); + DataSet ds = new DataSet(); + da.Fill(ds); + return ds; + } + } + + public string AddComment(string productCode, string email, string comment) + { + string sql = "insert into Comments(productCode, email, comment) values ('" + productCode + "','" + email + "','" + comment + "');"; + string output = null; + + try + { + + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + SqliteCommand command = new SqliteCommand(sql, connection); + command.ExecuteNonQuery(); + } + } + catch (Exception ex) + { + log.Error("Error adding comment", ex); + output = ex.Message; + } + + return output; + } + + public string UpdateCustomerPassword(int customerNumber, string password) + { + string sql = "update CustomerLogin set password = '" + Encoder.Encode(password) + "' where customerNumber = " + customerNumber; + string output = null; + try + { + + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + + SqliteCommand command = new SqliteCommand(sql, connection); + + int rows_added = command.ExecuteNonQuery(); + + log.Info("Rows Added: " + rows_added + " to comment table"); + } + } + catch (Exception ex) + { + log.Error("Error updating customer password", ex); + output = ex.Message; + } + return output; + } + + public string[] GetSecurityQuestionAndAnswer(string email) + { + string sql = "select SecurityQuestions.question_text, CustomerLogin.answer from CustomerLogin, " + + "SecurityQuestions where CustomerLogin.email = '" + email + "' and CustomerLogin.question_id = " + + "SecurityQuestions.question_id;"; + + string[] qAndA = new string[2]; + + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + + SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); + + DataSet ds = new DataSet(); + da.Fill(ds); + + if (ds.Tables[0].Rows.Count > 0) + { + DataRow row = ds.Tables[0].Rows[0]; + 
qAndA[0] = row[0].ToString(); + qAndA[1] = row[1].ToString(); + } + } + + return qAndA; + } + + public string GetPasswordByEmail(string email) + { + string result = string.Empty; + try + { + + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + + //get data + string sql = "select * from CustomerLogin where email = '" + email + "';"; + SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); + DataSet ds = new DataSet(); + da.Fill(ds); + + //check if email address exists + if (ds.Tables[0].Rows.Count == 0) + { + result = "Email Address Not Found!"; + } + + string encoded_password = ds.Tables[0].Rows[0]["Password"].ToString(); + string decoded_password = Encoder.Decode(encoded_password); + result = decoded_password; + } + } + catch (Exception ex) + { + result = ex.Message; + } + return result; + } + + public DataSet GetUsers() + { + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + + string sql = "select * from CustomerLogin;"; + SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); + DataSet ds = new DataSet(); + da.Fill(ds); + return ds; + } + } + + public DataSet GetOrders(int customerID) + { + + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + + string sql = "select * from Orders where customerNumber = " + customerID; + SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); + DataSet ds = new DataSet(); + da.Fill(ds); + + if (ds.Tables[0].Rows.Count == 0) + return null; + else + return ds; + } + } + + public DataSet GetProductDetails(string productCode) + { + string sql = string.Empty; + SqliteDataAdapter da; + DataSet ds = new DataSet(); + + + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + + sql = "select * from Products where productCode = '" + productCode + "'"; + da = new SqliteDataAdapter(sql, connection); + da.Fill(ds, "products"); + + sql = "select * from Comments where productCode = '" + productCode + "'"; + da = new SqliteDataAdapter(sql, connection); + da.Fill(ds, "comments"); + + DataRelation dr = new DataRelation("prod_comments", + ds.Tables["products"].Columns["productCode"], //category table + ds.Tables["comments"].Columns["productCode"], //product table + false); + + ds.Relations.Add(dr); + return ds; + } + } + + public DataSet GetOrderDetails(int orderNumber) + { + + string sql = "select Customers.customerName, Orders.customerNumber, Orders.orderNumber, Products.productName, " + + "OrderDetails.quantityOrdered, OrderDetails.priceEach, Products.productImage " + + "from OrderDetails, Products, Orders, Customers where " + + "Customers.customerNumber = Orders.customerNumber " + + "and OrderDetails.productCode = Products.productCode " + + "and Orders.orderNumber = OrderDetails.orderNumber " + + "and OrderDetails.orderNumber = " + orderNumber; + + + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + + SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); + DataSet ds = new DataSet(); + da.Fill(ds); + + if (ds.Tables[0].Rows.Count == 0) + return null; + else + return ds; + } + } + + public DataSet GetPayments(int customerNumber) + { + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + + string sql = "select * from Payments where customerNumber = " + customerNumber; + SqliteDataAdapter da = new SqliteDataAdapter(sql, 
connection); + DataSet ds = new DataSet(); + da.Fill(ds); + + if (ds.Tables[0].Rows.Count == 0) + return null; + else + return ds; + } + } + + public DataSet GetProductsAndCategories() + { + return GetProductsAndCategories(0); + } + + public DataSet GetProductsAndCategories(int catNumber) + { + //TODO: Rerun the database script. + string sql = string.Empty; + SqliteDataAdapter da; + DataSet ds = new DataSet(); + + //catNumber is optional. If it is greater than 0, add the clause to both statements. + string catClause = string.Empty; + if (catNumber >= 1) + catClause += " where catNumber = " + catNumber; + + + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + + sql = "select * from Categories" + catClause; + da = new SqliteDataAdapter(sql, connection); + da.Fill(ds, "categories"); + + sql = "select * from Products" + catClause; + da = new SqliteDataAdapter(sql, connection); + da.Fill(ds, "products"); + + + //category / products relationship + DataRelation dr = new DataRelation("cat_prods", + ds.Tables["categories"].Columns["catNumber"], //category table + ds.Tables["products"].Columns["catNumber"], //product table + false); + + ds.Relations.Add(dr); + return ds; + } + } + + public DataSet GetEmailByName(string name) + { + string sql = "select firstName, lastName, email from Employees where firstName like '" + name + "%' or lastName like '" + name + "%'"; + + + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + + SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); + DataSet ds = new DataSet(); + da.Fill(ds); + + if (ds.Tables[0].Rows.Count == 0) + return null; + else + return ds; + } + } + + public string GetEmailByCustomerNumber(string num) + { + string output = ""; + try + { + + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + + string sql = "select email from CustomerLogin where customerNumber = " + num; + SqliteCommand cmd = new SqliteCommand(sql, connection); + output = (string)cmd.ExecuteScalar(); + } + + } + catch (Exception ex) + { + log.Error("Error getting email by customer number", ex); + output = ex.Message; + } + + return output; + } + + public DataSet GetCustomerEmails(string email) + { + string sql = "select email from CustomerLogin where email like '" + email + "%'"; + + + using (SqliteConnection connection = new SqliteConnection(_connectionString)) + { + connection.Open(); + + SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); + DataSet ds = new DataSet(); + da.Fill(ds); + + if (ds.Tables[0].Rows.Count == 0) + return null; + else + return ds; + } + } + + } +} \ No newline at end of file diff --git a/samples/Util.cs b/samples/Util.cs new file mode 100644 index 0000000..6ce836b --- /dev/null +++ b/samples/Util.cs @@ -0,0 +1,94 @@ +using System; +using System.Diagnostics; +using log4net; +using System.Reflection; +using System.IO; +using System.Threading; + +namespace OWASP.WebGoat.NET.App_Code +{ + public class Util + { + private static readonly ILog log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); + + public static int RunProcessWithInput(string cmd, string args, string input) + { + ProcessStartInfo startInfo = new ProcessStartInfo + { + WorkingDirectory = Settings.RootDir, + FileName = cmd, + Arguments = args, + UseShellExecute = false, + RedirectStandardInput = true, + RedirectStandardError = true, + RedirectStandardOutput = true, + }; + + using (Process process = 
new Process()) + { + process.EnableRaisingEvents = true; + process.StartInfo = startInfo; + + process.OutputDataReceived += (sender, e) => { + if (e.Data != null) + log.Info(e.Data); + }; + + process.ErrorDataReceived += (sender, e) => + { + if (e.Data != null) + log.Error(e.Data); + }; + + AutoResetEvent are = new AutoResetEvent(false); + + process.Exited += (sender, e) => + { + Thread.Sleep(1000); + are.Set(); + log.Info("Process exited"); + + }; + + process.Start(); + + using (StreamReader reader = new StreamReader(new FileStream(input, FileMode.Open))) + { + string line; + string replaced; + while ((line = reader.ReadLine()) != null) + { + if (Environment.OSVersion.Platform == PlatformID.Win32NT) + replaced = line.Replace("DB_Scripts/datafiles/", "DB_Scripts\\\\datafiles\\\\"); + else + replaced = line; + + log.Debug("Line: " + replaced); + + process.StandardInput.WriteLine(replaced); + } + } + + process.StandardInput.Close(); + + + process.BeginOutputReadLine(); + process.BeginErrorReadLine(); + + //NOTE: Looks like we have a mono bug: https://bugzilla.xamarin.com/show_bug.cgi?id=6291 + //have a wait time for now. + + are.WaitOne(10 * 1000); + + if (process.HasExited) + return process.ExitCode; + else //WTF? Should have exited dammit! + { + process.Kill(); + return 1; + } + } + } + } +} + diff --git a/samples/azuredeploy.json b/samples/azuredeploy.json new file mode 100644 index 0000000..0c70ee9 --- /dev/null +++ b/samples/azuredeploy.json @@ -0,0 +1,109 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "metadata": { + "_generator": { + "name": "bicep", + "version": "0.15.31.15270", + "templateHash": "11556880252039009077" + } + }, + "parameters": { + "hostingPlanName": { + "type": "string", + "minLength": 1, + "metadata": { + "description": "Name of the hosting plan to use in Azure." + } + }, + "webSiteName": { + "type": "string", + "minLength": 1, + "metadata": { + "description": "Name of the Azure Web app to create." + } + }, + "skuName": { + "type": "string", + "defaultValue": "F1", + "allowedValues": [ + "F1", + "D1", + "B1", + "B2", + "B3", + "S1", + "S2", + "S3", + "P1", + "P2", + "P3", + "P4" + ], + "metadata": { + "description": "Describes plan's pricing tier and instance size. Check details at https://azure.microsoft.com/en-us/pricing/details/app-service/" + } + }, + "skuCapacity": { + "type": "int", + "defaultValue": 1, + "maxValue": 3, + "minValue": 1, + "metadata": { + "description": "Describes plan's instance count" + } + }, + "location": { + "type": "string", + "defaultValue": "[resourceGroup().location]", + "metadata": { + "description": "Location for all resources." 
+ } + } + }, + "resources": [ + { + "type": "Microsoft.Web/serverfarms", + "apiVersion": "2022-03-01", + "name": "[parameters('hostingPlanName')]", + "location": "[parameters('location')]", + "tags": { + "displayName": "HostingPlan" + }, + "sku": { + "name": "[parameters('skuName')]", + "capacity": "[parameters('skuCapacity')]" + }, + "properties": {} + }, + { + "type": "Microsoft.Web/sites", + "apiVersion": "2022-03-01", + "name": "[parameters('webSiteName')]", + "location": "[parameters('location')]", + "tags": { + "[format('hidden-related:{0}/providers/Microsoft.Web/serverfarms/{1}', resourceGroup().id, parameters('hostingPlanName'))]": "Resource", + "displayName": "Website" + }, + "properties": { + "serverFarmId": "[resourceId('Microsoft.Web/serverfarms', parameters('hostingPlanName'))]" + }, + "dependsOn": [ + "[resourceId('Microsoft.Web/serverfarms', parameters('hostingPlanName'))]" + ] + }, + { + "type": "Microsoft.Web/sites/config", + "apiVersion": "2022-03-01", + "name": "[format('{0}/{1}', parameters('webSiteName'), 'web')]", + "properties": { + "javaVersion": "1.8", + "javaContainer": "TOMCAT", + "javaContainerVersion": "9.0" + }, + "dependsOn": [ + "[resourceId('Microsoft.Web/sites', parameters('webSiteName'))]" + ] + } + ] +} \ No newline at end of file diff --git a/samples/eks.tf b/samples/eks.tf new file mode 100644 index 0000000..99659f3 --- /dev/null +++ b/samples/eks.tf @@ -0,0 +1,85 @@ +locals { + eks_name = { + value = "${local.resource_prefix.value}-eks" + } +} + +data aws_iam_policy_document "iam_policy_eks" { + statement { + effect = "Allow" + actions = ["sts:AssumeRole"] + principals { + type = "Service" + identifiers = ["eks.amazonaws.com"] + } + } +} + +resource aws_iam_role "iam_for_eks" { + name = "${local.resource_prefix.value}-iam-for-eks" + assume_role_policy = data.aws_iam_policy_document.iam_policy_eks.json +} + +resource aws_iam_role_policy_attachment "policy_attachment-AmazonEKSClusterPolicy" { + policy_arn = "arn:aws:iam::aws:policy/AmazonEKSClusterPolicy" + role = aws_iam_role.iam_for_eks.name +} + +resource aws_iam_role_policy_attachment "policy_attachment-AmazonEKSServicePolicy" { + policy_arn = "arn:aws:iam::aws:policy/AmazonEKSServicePolicy" + role = aws_iam_role.iam_for_eks.name +} + +resource aws_vpc "eks_vpc" { + cidr_block = "10.10.0.0/16" + enable_dns_hostnames = true + enable_dns_support = true + tags = { + Name = "${local.resource_prefix.value}-eks-vpc" + } +} + +resource aws_subnet "eks_subnet1" { + vpc_id = aws_vpc.eks_vpc.id + cidr_block = "10.10.10.0/24" + availability_zone = var.availability_zone + map_public_ip_on_launch = true + tags = { + Name = "${local.resource_prefix.value}-eks-subnet" + "kubernetes.io/cluster/${local.eks_name.value}" = "shared" + } +} + +resource aws_subnet "eks_subnet2" { + vpc_id = aws_vpc.eks_vpc.id + cidr_block = "10.10.11.0/24" + availability_zone = var.availability_zone2 + map_public_ip_on_launch = true + tags = { + Name = "${local.resource_prefix.value}-eks-subnet2" + "kubernetes.io/cluster/${local.eks_name.value}" = "shared" + } +} + +resource aws_eks_cluster "eks_cluster" { + name = local.eks_name.value + role_arn = "${aws_iam_role.iam_for_eks.arn}" + + vpc_config { + endpoint_private_access = true + subnet_ids = ["${aws_subnet.eks_subnet1.id}", "${aws_subnet.eks_subnet2.id}"] + } + + depends_on = [ + "aws_iam_role_policy_attachment.policy_attachment-AmazonEKSClusterPolicy", + "aws_iam_role_policy_attachment.policy_attachment-AmazonEKSServicePolicy", + ] +} + +output "endpoint" { + value = 
"${aws_eks_cluster.eks_cluster.endpoint}" +} + +output "kubeconfig-certificate-authority-data" { + value = "${aws_eks_cluster.eks_cluster.certificate_authority.0.data}" +} diff --git a/samples/example.tf b/samples/example.tf new file mode 100644 index 0000000..e83411f --- /dev/null +++ b/samples/example.tf @@ -0,0 +1,189 @@ +resource "azurerm_resource_group" "myresourcegroup" { + name = "${var.prefix}-workshop" + location = var.location + + tags = { + environment = "Production" + } +} + +resource "azurerm_virtual_network" "vnet" { + name = "${var.prefix}-vnet" + location = azurerm_resource_group.myresourcegroup.location + address_space = [var.address_space] + resource_group_name = azurerm_resource_group.myresourcegroup.name +} + +resource "azurerm_subnet" "subnet" { + name = "${var.prefix}-subnet" + virtual_network_name = azurerm_virtual_network.vnet.name + resource_group_name = azurerm_resource_group.myresourcegroup.name + address_prefixes = [var.subnet_prefix] +} + +resource "azurerm_network_security_group" "catapp-sg" { + name = "${var.prefix}-sg" + location = var.location + resource_group_name = azurerm_resource_group.myresourcegroup.name + + security_rule { + name = "HTTP" + priority = 100 + direction = "Inbound" + access = "Allow" + protocol = "Tcp" + source_port_range = "*" + destination_port_range = "80" + source_address_prefix = "*" + destination_address_prefix = "*" + } + + security_rule { + name = "HTTPS" + priority = 102 + direction = "Inbound" + access = "Allow" + protocol = "Tcp" + source_port_range = "*" + destination_port_range = "443" + source_address_prefix = "*" + destination_address_prefix = "*" + } + + security_rule { + name = "SSH" + priority = 101 + direction = "Inbound" + access = "Allow" + protocol = "Tcp" + source_port_range = "*" + destination_port_range = "22" + source_address_prefix = "*" + destination_address_prefix = "*" + } +} + +resource "azurerm_network_interface" "catapp-nic" { + name = "${var.prefix}-catapp-nic" + location = var.location + resource_group_name = azurerm_resource_group.myresourcegroup.name + + ip_configuration { + name = "${var.prefix}ipconfig" + subnet_id = azurerm_subnet.subnet.id + private_ip_address_allocation = "Dynamic" + public_ip_address_id = azurerm_public_ip.catapp-pip.id + } +} + +resource "azurerm_network_interface_security_group_association" "catapp-nic-sg-ass" { + network_interface_id = azurerm_network_interface.catapp-nic.id + network_security_group_id = azurerm_network_security_group.catapp-sg.id +} + +resource "azurerm_public_ip" "catapp-pip" { + name = "${var.prefix}-ip" + location = var.location + resource_group_name = azurerm_resource_group.myresourcegroup.name + allocation_method = "Dynamic" + domain_name_label = "${var.prefix}-meow" +} + +resource "azurerm_virtual_machine" "catapp" { + name = "${var.prefix}-meow" + location = var.location + resource_group_name = azurerm_resource_group.myresourcegroup.name + vm_size = var.vm_size + + network_interface_ids = [azurerm_network_interface.catapp-nic.id] + delete_os_disk_on_termination = "true" + + storage_image_reference { + publisher = var.image_publisher + offer = var.image_offer + sku = var.image_sku + version = var.image_version + } + + storage_os_disk { + name = "${var.prefix}-osdisk" + managed_disk_type = "Standard_LRS" + caching = "ReadWrite" + create_option = "FromImage" + } + + os_profile { + computer_name = var.prefix + admin_username = var.admin_username + admin_password = var.admin_password + } + + os_profile_linux_config { + disable_password_authentication = 
false + } + + tags = {} + + # Added to allow destroy to work correctly. + depends_on = [azurerm_network_interface_security_group_association.catapp-nic-sg-ass] +} + +# We're using a little trick here so we can run the provisioner without +# destroying the VM. Do not do this in production. + +# If you need ongoing management (Day N) of your virtual machines a tool such +# as Chef or Puppet is a better choice. These tools track the state of +# individual files and can keep them in the correct configuration. + +# Here we do the following steps: +# Sync everything in files/ to the remote VM. +# Set up some environment variables for our script. +# Add execute permissions to our scripts. +# Run the deploy_app.sh script. +resource "null_resource" "configure-cat-app" { + depends_on = [ + azurerm_virtual_machine.catapp, + ] + + # Terraform 0.11 + # triggers { + # build_number = "${timestamp()}" + # } + + # Terraform 0.12 + triggers = { + build_number = timestamp() + } + + provisioner "file" { + source = "files/" + destination = "/home/${var.admin_username}/" + + connection { + type = "ssh" + user = var.admin_username + password = var.admin_password + host = azurerm_public_ip.catapp-pip.fqdn + } + } + + provisioner "remote-exec" { + inline = [ + "sudo apt -y update", + "sleep 15", + "sudo apt -y update", + "sudo apt -y install apache2", + "sudo systemctl start apache2", + "sudo chown -R ${var.admin_username}:${var.admin_username} /var/www/html", + "chmod +x *.sh", + "PLACEHOLDER=${var.placeholder} WIDTH=${var.width} HEIGHT=${var.height} PREFIX=${var.prefix} ./deploy_app.sh", + ] + + connection { + type = "ssh" + user = var.admin_username + password = var.admin_password + host = azurerm_public_ip.catapp-pip.fqdn + } + } +} diff --git a/samples/gke.tf b/samples/gke.tf new file mode 100644 index 0000000..63edc2f --- /dev/null +++ b/samples/gke.tf @@ -0,0 +1,31 @@ +data "google_compute_zones" "available_zones" { + project = var.project + region = var.region +} + +resource "google_container_cluster" "workload_cluster" { + name = "terragoat-${var.environment}-cluster" + logging_service = "none" + location = var.region + initial_node_count = 1 + + enable_legacy_abac = true + monitoring_service = "none" + remove_default_node_pool = true + network = google_compute_network.vpc.name + subnetwork = google_compute_subnetwork.public-subnetwork.name + master_authorized_networks_config { + cidr_blocks { + cidr_block = "0.0.0.0/0" + } + } +} + +resource google_container_node_pool "custom_node_pool" { + cluster = google_container_cluster.workload_cluster.name + location = var.region + + node_config { + image_type = "Ubuntu" + } +} diff --git a/samples/index.js b/samples/index.js new file mode 100644 index 0000000..2e1ae6d --- /dev/null +++ b/samples/index.js @@ -0,0 +1,345 @@ +var chownr = require('chownr') +var tar = require('tar-stream') +var pump = require('pump') +var mkdirp = require('mkdirp') +var fs = require('fs') +var path = require('path') +var os = require('os') + +var win32 = os.platform() === 'win32' + +var noop = function () {} + +var echo = function (name) { + return name +} + +var normalize = !win32 ? 
echo : function (name) { + return name.replace(/\\/g, '/').replace(/[:?<>|]/g, '_') +} + +var statAll = function (fs, stat, cwd, ignore, entries, sort) { + var queue = entries || ['.'] + + return function loop (callback) { + if (!queue.length) return callback() + var next = queue.shift() + var nextAbs = path.join(cwd, next) + + stat(nextAbs, function (err, stat) { + if (err) return callback(err) + + if (!stat.isDirectory()) return callback(null, next, stat) + + fs.readdir(nextAbs, function (err, files) { + if (err) return callback(err) + + if (sort) files.sort() + for (var i = 0; i < files.length; i++) { + if (!ignore(path.join(cwd, next, files[i]))) queue.push(path.join(next, files[i])) + } + + callback(null, next, stat) + }) + }) + } +} + +var strip = function (map, level) { + return function (header) { + header.name = header.name.split('/').slice(level).join('/') + + var linkname = header.linkname + if (linkname && (header.type === 'link' || path.isAbsolute(linkname))) { + header.linkname = linkname.split('/').slice(level).join('/') + } + + return map(header) + } +} + +exports.pack = function (cwd, opts) { + if (!cwd) cwd = '.' + if (!opts) opts = {} + + var xfs = opts.fs || fs + var ignore = opts.ignore || opts.filter || noop + var map = opts.map || noop + var mapStream = opts.mapStream || echo + var statNext = statAll(xfs, opts.dereference ? xfs.stat : xfs.lstat, cwd, ignore, opts.entries, opts.sort) + var strict = opts.strict !== false + var umask = typeof opts.umask === 'number' ? ~opts.umask : ~processUmask() + var dmode = typeof opts.dmode === 'number' ? opts.dmode : 0 + var fmode = typeof opts.fmode === 'number' ? opts.fmode : 0 + var pack = opts.pack || tar.pack() + var finish = opts.finish || noop + + if (opts.strip) map = strip(map, opts.strip) + + if (opts.readable) { + dmode |= parseInt(555, 8) + fmode |= parseInt(444, 8) + } + if (opts.writable) { + dmode |= parseInt(333, 8) + fmode |= parseInt(222, 8) + } + + var onsymlink = function (filename, header) { + xfs.readlink(path.join(cwd, filename), function (err, linkname) { + if (err) return pack.destroy(err) + header.linkname = normalize(linkname) + pack.entry(header, onnextentry) + }) + } + + var onstat = function (err, filename, stat) { + if (err) return pack.destroy(err) + if (!filename) { + if (opts.finalize !== false) pack.finalize() + return finish(pack) + } + + if (stat.isSocket()) return onnextentry() // tar does not support sockets... + + var header = { + name: normalize(filename), + mode: (stat.mode | (stat.isDirectory() ? dmode : fmode)) & umask, + mtime: stat.mtime, + size: stat.size, + type: 'file', + uid: stat.uid, + gid: stat.gid + } + + if (stat.isDirectory()) { + header.size = 0 + header.type = 'directory' + header = map(header) || header + return pack.entry(header, onnextentry) + } + + if (stat.isSymbolicLink()) { + header.size = 0 + header.type = 'symlink' + header = map(header) || header + return onsymlink(filename, header) + } + + // TODO: add fifo etc... 
+ + header = map(header) || header + + if (!stat.isFile()) { + if (strict) return pack.destroy(new Error('unsupported type for ' + filename)) + return onnextentry() + } + + var entry = pack.entry(header, onnextentry) + if (!entry) return + + var rs = mapStream(xfs.createReadStream(path.join(cwd, filename)), header) + + rs.on('error', function (err) { // always forward errors on destroy + entry.destroy(err) + }) + + pump(rs, entry) + } + + var onnextentry = function (err) { + if (err) return pack.destroy(err) + statNext(onstat) + } + + onnextentry() + + return pack +} + +var head = function (list) { + return list.length ? list[list.length - 1] : null +} + +var processGetuid = function () { + return process.getuid ? process.getuid() : -1 +} + +var processUmask = function () { + return process.umask ? process.umask() : 0 +} + +exports.extract = function (cwd, opts) { + if (!cwd) cwd = '.' + if (!opts) opts = {} + + var xfs = opts.fs || fs + var ignore = opts.ignore || opts.filter || noop + var map = opts.map || noop + var mapStream = opts.mapStream || echo + var own = opts.chown !== false && !win32 && processGetuid() === 0 + var extract = opts.extract || tar.extract() + var stack = [] + var now = new Date() + var umask = typeof opts.umask === 'number' ? ~opts.umask : ~processUmask() + var dmode = typeof opts.dmode === 'number' ? opts.dmode : 0 + var fmode = typeof opts.fmode === 'number' ? opts.fmode : 0 + var strict = opts.strict !== false + + if (opts.strip) map = strip(map, opts.strip) + + if (opts.readable) { + dmode |= parseInt(555, 8) + fmode |= parseInt(444, 8) + } + if (opts.writable) { + dmode |= parseInt(333, 8) + fmode |= parseInt(222, 8) + } + + var utimesParent = function (name, cb) { // we just set the mtime on the parent dir again everytime we write an entry + var top + while ((top = head(stack)) && name.slice(0, top[0].length) !== top[0]) stack.pop() + if (!top) return cb() + xfs.utimes(top[0], now, top[1], cb) + } + + var utimes = function (name, header, cb) { + if (opts.utimes === false) return cb() + + if (header.type === 'directory') return xfs.utimes(name, now, header.mtime, cb) + if (header.type === 'symlink') return utimesParent(name, cb) // TODO: how to set mtime on link? + + xfs.utimes(name, now, header.mtime, function (err) { + if (err) return cb(err) + utimesParent(name, cb) + }) + } + + var chperm = function (name, header, cb) { + var link = header.type === 'symlink' + var chmod = link ? xfs.lchmod : xfs.chmod + var chown = link ? xfs.lchown : xfs.chown + + if (!chmod) return cb() + + var mode = (header.mode | (header.type === 'directory' ? 
dmode : fmode)) & umask + chmod(name, mode, function (err) { + if (err) return cb(err) + if (!own) return cb() + if (!chown) return cb() + chown(name, header.uid, header.gid, cb) + }) + } + + extract.on('entry', function (header, stream, next) { + header = map(header) || header + header.name = normalize(header.name) + var name = path.join(cwd, path.join('/', header.name)) + + if (ignore(name, header)) { + stream.resume() + return next() + } + + var stat = function (err) { + if (err) return next(err) + utimes(name, header, function (err) { + if (err) return next(err) + if (win32) return next() + chperm(name, header, next) + }) + } + + var onsymlink = function () { + if (win32) return next() // skip symlinks on win for now before it can be tested + xfs.unlink(name, function () { + xfs.symlink(header.linkname, name, stat) + }) + } + + var onlink = function () { + if (win32) return next() // skip links on win for now before it can be tested + xfs.unlink(name, function () { + var srcpath = path.resolve(cwd, header.linkname) + + xfs.link(srcpath, name, function (err) { + if (err && err.code === 'EPERM' && opts.hardlinkAsFilesFallback) { + stream = xfs.createReadStream(srcpath) + return onfile() + } + + stat(err) + }) + }) + } + + var onfile = function () { + var ws = xfs.createWriteStream(name) + var rs = mapStream(stream, header) + + ws.on('error', function (err) { // always forward errors on destroy + rs.destroy(err) + }) + + pump(rs, ws, function (err) { + if (err) return next(err) + ws.on('close', stat) + }) + } + + if (header.type === 'directory') { + stack.push([name, header.mtime]) + return mkdirfix(name, { + fs: xfs, own: own, uid: header.uid, gid: header.gid + }, stat) + } + + var dir = path.dirname(name) + + validate(xfs, dir, path.join(cwd, '.'), function (err, valid) { + if (err) return next(err) + if (!valid) return next(new Error(dir + ' is not a valid path')) + + mkdirfix(dir, { + fs: xfs, own: own, uid: header.uid, gid: header.gid + }, function (err) { + if (err) return next(err) + + switch (header.type) { + case 'file': return onfile() + case 'link': return onlink() + case 'symlink': return onsymlink() + } + + if (strict) return next(new Error('unsupported type for ' + name + ' (' + header.type + ')')) + + stream.resume() + next() + }) + }) + }) + + if (opts.finish) extract.on('finish', opts.finish) + + return extract +} + +function validate (fs, name, root, cb) { + if (name === root) return cb(null, true) + fs.lstat(name, function (err, st) { + if (err && err.code !== 'ENOENT') return cb(err) + if (err || st.isDirectory()) return validate(fs, path.join(name, '..'), root, cb) + cb(null, false) + }) +} + +function mkdirfix (name, opts, cb) { + mkdirp(name, {fs: opts.xfs}, function (err, made) { + if (!err && made && opts.own) { + chownr(made, opts.uid, opts.gid, cb) + } else { + cb(err) + } + }) +} diff --git a/samples/main-sample1.tf b/samples/main-sample1.tf new file mode 100644 index 0000000..bd9b786 --- /dev/null +++ b/samples/main-sample1.tf @@ -0,0 +1,9 @@ +resource "aws_cloudfront_distribution" "bad_example" { + default_cache_behavior { + viewer_protocol_policy = "allow-all" + } + viewer_certificate { + cloudfront_default_certificate = true + minimum_protocol_version = "TLSv1.0" + } +} diff --git a/samples/mongodb.go b/samples/mongodb.go new file mode 100644 index 0000000..656c78b --- /dev/null +++ b/samples/mongodb.go @@ -0,0 +1,75 @@ +package main + +import ( + "context" + "crypto/tls" + "log" + "net/http" + "os" + + "go.mongodb.org/mongo-driver/mongo" + 
"go.mongodb.org/mongo-driver/mongo/options" +) + +func connectToMongoDB() (*mongo.Collection, error) { + // Get database uri from environment variable + mongoUri := os.Getenv("ORDER_DB_URI") + if mongoUri == "" { + log.Printf("ORDER_DB_URI is not set") + return nil, http.ErrAbortHandler + } + + // get database name from environment variable + mongoDb := os.Getenv("ORDER_DB_NAME") + if mongoDb == "" { + log.Printf("ORDER_DB_NAME is not set") + return nil, http.ErrAbortHandler + } + + // get database collection name from environment variable + mongoCollection := os.Getenv("ORDER_DB_COLLECTION_NAME") + if mongoCollection == "" { + log.Printf("ORDER_DB_COLLECTION_NAME is not set") + return nil, http.ErrAbortHandler + } + + // get database username from environment variable + mongoUser := os.Getenv("ORDER_DB_USERNAME") + + // get database password from environment variable + mongoPassword := os.Getenv("ORDER_DB_PASSWORD") + + // create a context + ctx := context.Background() + + // create a mongo client + var clientOptions *options.ClientOptions + if mongoUser == "" && mongoPassword == "" { + clientOptions = options.Client().ApplyURI(mongoUri) + } else { + clientOptions = options.Client().ApplyURI(mongoUri). + SetAuth(options.Credential{ + Username: mongoUser, + Password: mongoPassword, + }). + SetTLSConfig(&tls.Config{InsecureSkipVerify: true}) + } + + mongoClient, err := mongo.Connect(ctx, clientOptions) + if err != nil { + log.Printf("failed to connect to mongodb: %s", err) + return nil, err + } + + err = mongoClient.Ping(ctx, nil) + if err != nil { + log.Printf("failed to ping database: %s", err) + } else { + log.Printf("pong from database") + } + + // get a handle for the collection + collection := mongoClient.Database(mongoDb).Collection(mongoCollection) + + return collection, nil +} diff --git a/samples/network.tf b/samples/network.tf new file mode 100644 index 0000000..d707b2a --- /dev/null +++ b/samples/network.tf @@ -0,0 +1,183 @@ +# +# Creates a network, subnets and network security rules before building an Application Gateway +# that we can use to front the services in the Kubernetes Cluster. 
+# + +resource "azurerm_network_security_group" "app_gateway" { + name = "${var.cluster_name}-app-gateway" + location = var.region + resource_group_name = azurerm_resource_group.aks_cluster_resource_group.name +} + + +resource "azurerm_network_security_rule" "gateway_manager" { + name = "${var.cluster_name}-app-gateway-gateway-mananger" + priority = 200 + direction = "Inbound" + access = "Allow" + protocol = "Tcp" + + source_address_prefix = "GatewayManager" + source_port_range = "*" + + destination_address_prefix = "*" + destination_port_range = "65200-65535" + + resource_group_name = azurerm_resource_group.aks_cluster_resource_group.name + network_security_group_name = azurerm_network_security_group.app_gateway.name +} + + +resource "azurerm_network_security_rule" "gateway_cidr" { + name = "${var.cluster_name}-app-gateway-gateway-cidr" + priority = 201 + direction = "Inbound" + access = "Allow" + protocol = "Tcp" + + source_address_prefix = var.app_gateway_gateway_subnet_address_prefix + source_port_range = "*" + + destination_address_prefix = "*" + destination_port_range = "65200-65535" + + resource_group_name = azurerm_resource_group.aks_cluster_resource_group.name + network_security_group_name = azurerm_network_security_group.app_gateway.name +} + +resource "azurerm_network_security_rule" "azure_loadbalancer" { + name = "${var.cluster_name}-app-gateway-loadbalancer" + priority = 210 + direction = "Inbound" + access = "Allow" + protocol = "*" + + source_address_prefix = "AzureLoadBalancer" + source_port_range = "*" + + destination_address_prefix = "*" + destination_port_range = "*" + + resource_group_name = azurerm_resource_group.aks_cluster_resource_group.name + network_security_group_name = azurerm_network_security_group.app_gateway.name +} + + +resource "azurerm_public_ip" "gateway" { + name = "gateway-pip" + location = azurerm_resource_group.aks_cluster_resource_group.location + resource_group_name = azurerm_resource_group.aks_cluster_resource_group.name + allocation_method = "Static" + + sku = var.app_gateway_static_ip_sku + + tags = merge( + local.default_tags, + var.resource_tags, + ) +} + + +resource "azurerm_virtual_network" "application_gateway" { + name = "k8s-app-gateway-network" + location = azurerm_resource_group.aks_cluster_resource_group.location + resource_group_name = azurerm_resource_group.aks_cluster_resource_group.name + address_space = [var.app_gateway_vnet_address_prefix] + + tags = merge( + local.default_tags, + var.resource_tags, + ) +} + + +resource "azurerm_subnet" "aks_cluster" { + name = "akscluster" + resource_group_name = azurerm_resource_group.aks_cluster_resource_group.name + virtual_network_name = azurerm_virtual_network.application_gateway.name + address_prefixes = [var.app_gateway_aks_subnet_address_prefix] +} + + +resource "azurerm_subnet" "app_gateway" { + name = "appgw" + resource_group_name = azurerm_resource_group.aks_cluster_resource_group.name + virtual_network_name = azurerm_virtual_network.application_gateway.name + address_prefixes = [var.app_gateway_gateway_subnet_address_prefix] +} + + +resource "azurerm_subnet_network_security_group_association" "app_gateway" { + subnet_id = azurerm_subnet.app_gateway.id + network_security_group_id = azurerm_network_security_group.app_gateway.id +} + + +resource "azurerm_application_gateway" "network" { + name = "k8s-app-gateway" + location = azurerm_resource_group.aks_cluster_resource_group.location + resource_group_name = azurerm_resource_group.aks_cluster_resource_group.name + + sku { + name = 
var.app_gateway_sku + tier = var.app_gateway_sku_tier + capacity = 2 + } + + gateway_ip_configuration { + name = "appGatewayIpConfiguration" + subnet_id = azurerm_subnet.app_gateway.id + } + + frontend_port { + name = local.frontend_port_name + port = 80 + } + + frontend_port { + name = "httpsPort" + port = 443 + } + + frontend_ip_configuration { + name = local.frontend_ip_configuration_name + public_ip_address_id = azurerm_public_ip.gateway.id + } + + backend_address_pool { + name = local.backend_address_pool_name + } + + backend_http_settings { + name = local.http_setting_name + cookie_based_affinity = "Disabled" + port = 80 + protocol = "Http" + request_timeout = 1 + } + + http_listener { + name = local.listener_name + frontend_ip_configuration_name = local.frontend_ip_configuration_name + frontend_port_name = local.frontend_port_name + protocol = "Http" + } + + request_routing_rule { + name = local.request_routing_rule_name + rule_type = "Basic" + http_listener_name = local.listener_name + backend_address_pool_name = local.backend_address_pool_name + backend_http_settings_name = local.http_setting_name + } + + tags = merge( + local.default_tags, + var.resource_tags, + ) + + depends_on = [ + azurerm_virtual_network.application_gateway, + azurerm_public_ip.gateway + ] +} diff --git a/samples/python-routes.py b/samples/python-routes.py new file mode 100644 index 0000000..bab7594 --- /dev/null +++ b/samples/python-routes.py @@ -0,0 +1,30 @@ + +from flask import request, render_template, make_response + +from server.webapp import flaskapp, cursor +from server.models import Book + + +@flaskapp.route('/') +def index(): + name = request.args.get('name') + author = request.args.get('author') + read = bool(request.args.get('read')) + + if name: + cursor.execute( + "SELECT * FROM books WHERE name LIKE '%" + name + "%'" + ) + books = [Book(*row) for row in cursor] + + elif author: + cursor.execute( + "SELECT * FROM books WHERE author LIKE '%" + author + "%'" + ) + books = [Book(*row) for row in cursor] + + else: + cursor.execute("SELECT name, author, read FROM books") + books = [Book(*row) for row in cursor] + + return render_template('books.html', books=books) diff --git a/samples/server.Dockerfile b/samples/server.Dockerfile new file mode 100644 index 0000000..dc1411a --- /dev/null +++ b/samples/server.Dockerfile @@ -0,0 +1,20 @@ +FROM python:3.7-alpine + +ARG user=python +ARG home=/home/$user + +RUN adduser \ + --disabled-password \ + --home $home \ + $user + +WORKDIR /myapp +ENV PYTHONPATH "${PYTHONPATH}:/myapp" + +COPY server . + +RUN python3 -m pip install pipenv && \ + python3 -m pipenv install --system + +CMD ["python", "-m", "myapp"] + From b5a250e4fded07518745eb014b53d02f8aa51987 Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Thu, 24 Apr 2025 17:11:04 -0400 Subject: [PATCH 16/38] Refactor authentication and database handling Removed the `RegexDoS` class for password validation, indicating a shift in user authentication methods. Significant database operation methods were deleted from `SqliteDbProvider.cs`, suggesting a redesign for improved security and performance, including a move towards parameterized queries. The `Util` class was also removed, which handled external process execution, reflecting a potential focus on security and code simplification. 
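For reference, the parameterized pattern this message points to looks roughly like the sketch below. It is illustrative only: the `CustomerLogin` table and the `Mono.Data.Sqlite` types appear in the deleted `SqliteDbProvider.cs`, while the helper class, its corrected match logic, and the connection-string parameter are assumptions rather than code from this repository.

    using System;
    using Mono.Data.Sqlite;

    public static class LoginChecks
    {
        // Hypothetical parameterized rewrite of IsValidCustomerLogin.
        public static bool IsValidCustomerLogin(string connectionString, string email, string encodedPassword)
        {
            using (var conn = new SqliteConnection(connectionString))
            {
                conn.Open();
                using (var cmd = conn.CreateCommand())
                {
                    // User input is bound as parameters, never concatenated into the SQL text.
                    cmd.CommandText = "SELECT COUNT(*) FROM CustomerLogin " +
                                      "WHERE email = @email AND password = @password";
                    cmd.Parameters.AddWithValue("@email", email);
                    cmd.Parameters.AddWithValue("@password", encodedPassword);

                    // One matching row means the credentials are valid.
                    return Convert.ToInt64(cmd.ExecuteScalar()) > 0;
                }
            }
        }
    }

Binding values through `AddWithValue` (already used by the deleted `GetOffice` and `GetComments` methods) keeps user input out of the SQL text, which is the injection fix the removal hints at.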
--- samples/RegexDoS.aspx.cs | 36 --- samples/SqliteDbProvider.cs | 590 ------------------------------------ samples/Util.cs | 94 ------ 3 files changed, 720 deletions(-) delete mode 100644 samples/RegexDoS.aspx.cs delete mode 100644 samples/SqliteDbProvider.cs delete mode 100644 samples/Util.cs diff --git a/samples/RegexDoS.aspx.cs b/samples/RegexDoS.aspx.cs deleted file mode 100644 index 0941e99..0000000 --- a/samples/RegexDoS.aspx.cs +++ /dev/null @@ -1,36 +0,0 @@ -using System; -using System.Web; -using System.Web.UI; -using System.Text.RegularExpressions; - -namespace OWASP.WebGoat.NET -{ - public partial class RegexDoS : System.Web.UI.Page - { - protected void Page_Load(object sender, EventArgs e) - { - - } - - /// - /// Code from https://www.owasp.org/index.php/Regular_expression_Denial_of_Service_-_ReDoS - /// - protected void btnCreate_Click(object sender, EventArgs e) - { - string userName = txtUsername.Text; - string password = txtPassword.Text; - - Regex testPassword = new Regex(userName); - Match match = testPassword.Match(password); - if (match.Success) - { - lblError.Text = "Do not include name in password."; - } - else - { - lblError.Text = "Good password."; - } - } - } -} - diff --git a/samples/SqliteDbProvider.cs b/samples/SqliteDbProvider.cs deleted file mode 100644 index d22b626..0000000 --- a/samples/SqliteDbProvider.cs +++ /dev/null @@ -1,590 +0,0 @@ -using System; -using System.Data; -using Mono.Data.Sqlite; -using log4net; -using System.Reflection; -using System.IO; -using System.Diagnostics; -using System.Threading; - -namespace OWASP.WebGoat.NET.App_Code.DB -{ - public class SqliteDbProvider : IDbProvider - { - private string _connectionString = string.Empty; - private string _clientExec; - private string _dbFileName; - - ILog log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); - - public string Name { get { return DbConstants.DB_TYPE_SQLITE; } } - - public SqliteDbProvider(ConfigFile configFile) - { - _connectionString = string.Format("Data Source={0};Version=3", configFile.Get(DbConstants.KEY_FILE_NAME)); - - _clientExec = configFile.Get(DbConstants.KEY_CLIENT_EXEC); - _dbFileName = configFile.Get(DbConstants.KEY_FILE_NAME); - - if (!File.Exists(_dbFileName)) - SqliteConnection.CreateFile(_dbFileName); - } - - public bool TestConnection() - { - try - { - using (SqliteConnection conn = new SqliteConnection(_connectionString)) - { - conn.Open(); - - using (SqliteCommand cmd = conn.CreateCommand()) - { - - cmd.CommandText = "SELECT date('now')"; - cmd.CommandType = CommandType.Text; - cmd.ExecuteReader(); - } - } - return true; - } - catch (Exception ex) - { - log.Error("Error testing DB", ex); - return false; - } - } - - public DataSet GetCatalogData() - { - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - SqliteDataAdapter da = new SqliteDataAdapter("select * from Products", connection); - DataSet ds = new DataSet(); - - da.Fill(ds); - - return ds; - } - } - - public bool IsValidCustomerLogin(string email, string password) - { - //encode password - string encoded_password = Encoder.Encode(password); - - //check email/password - string sql = "select * from CustomerLogin where email = '" + email + "' and password = '" + - encoded_password + "';"; - - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); - - //TODO: User reader instead (for all calls) - DataSet ds = new 
DataSet(); - - da.Fill(ds); - - try - { - return ds.Tables[0].Rows.Count == 0; - } - catch (Exception ex) - { - //Log this and pass the ball along. - log.Error("Error checking login", ex); - - throw new Exception("Error checking login", ex); - } - } - } - - public bool RecreateGoatDb() - { - try - { - log.Info("Running recreate"); - string args = string.Format("\"{0}\"", _dbFileName); - string script = Path.Combine(Settings.RootDir, DbConstants.DB_CREATE_SQLITE_SCRIPT); - int retVal1 = Math.Abs(Util.RunProcessWithInput(_clientExec, args, script)); - - script = Path.Combine(Settings.RootDir, DbConstants.DB_LOAD_SQLITE_SCRIPT); - int retVal2 = Math.Abs(Util.RunProcessWithInput(_clientExec, args, script)); - - return Math.Abs(retVal1) + Math.Abs(retVal2) == 0; - } - catch (Exception ex) - { - log.Error("Error rebulding DB", ex); - return false; - } - } - - //Find the bugs! - public string CustomCustomerLogin(string email, string password) - { - string error_message = null; - try - { - //get data - string sql = "select * from CustomerLogin where email = '" + email + "';"; - - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); - DataSet ds = new DataSet(); - da.Fill(ds); - - //check if email address exists - if (ds.Tables[0].Rows.Count == 0) - { - error_message = "Email Address Not Found!"; - return error_message; - } - - string encoded_password = ds.Tables[0].Rows[0]["Password"].ToString(); - string decoded_password = Encoder.Decode(encoded_password); - - if (password.Trim().ToLower() != decoded_password.Trim().ToLower()) - { - error_message = "Password Not Valid For This Email Address!"; - } - else - { - //login successful - error_message = null; - } - } - - } - catch (SqliteException ex) - { - log.Error("Error with custom customer login", ex); - error_message = ex.Message; - } - catch (Exception ex) - { - log.Error("Error with custom customer login", ex); - } - - return error_message; - } - - public string GetCustomerEmail(string customerNumber) - { - string output = null; - try - { - - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - string sql = "select email from CustomerLogin where customerNumber = " + customerNumber; - SqliteCommand command = new SqliteCommand(sql, connection); - output = command.ExecuteScalar().ToString(); - } - } - catch (Exception ex) - { - output = ex.Message; - } - return output; - } - - public DataSet GetCustomerDetails(string customerNumber) - { - string sql = "select Customers.customerNumber, Customers.customerName, Customers.logoFileName, Customers.contactLastName, Customers.contactFirstName, " + - "Customers.phone, Customers.addressLine1, Customers.addressLine2, Customers.city, Customers.state, Customers.postalCode, Customers.country, " + - "Customers.salesRepEmployeeNumber, Customers.creditLimit, CustomerLogin.email, CustomerLogin.password, CustomerLogin.question_id, CustomerLogin.answer " + - "From Customers, CustomerLogin where Customers.customerNumber = CustomerLogin.customerNumber and Customers.customerNumber = " + customerNumber; - - DataSet ds = new DataSet(); - try - { - - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); - da.Fill(ds); - } - - } - catch (Exception ex) - { - log.Error("Error getting customer details", ex); - - throw new 
ApplicationException("Error getting customer details", ex); - } - return ds; - - } - - public DataSet GetOffice(string city) - { - - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - string sql = "select * from Offices where city = @city"; - SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); - da.SelectCommand.Parameters.AddWithValue("@city", city); - DataSet ds = new DataSet(); - da.Fill(ds); - return ds; - } - } - - public DataSet GetComments(string productCode) - { - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - string sql = "select * from Comments where productCode = @productCode"; - SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); - da.SelectCommand.Parameters.AddWithValue("@productCode", productCode); - DataSet ds = new DataSet(); - da.Fill(ds); - return ds; - } - } - - public string AddComment(string productCode, string email, string comment) - { - string sql = "insert into Comments(productCode, email, comment) values ('" + productCode + "','" + email + "','" + comment + "');"; - string output = null; - - try - { - - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - SqliteCommand command = new SqliteCommand(sql, connection); - command.ExecuteNonQuery(); - } - } - catch (Exception ex) - { - log.Error("Error adding comment", ex); - output = ex.Message; - } - - return output; - } - - public string UpdateCustomerPassword(int customerNumber, string password) - { - string sql = "update CustomerLogin set password = '" + Encoder.Encode(password) + "' where customerNumber = " + customerNumber; - string output = null; - try - { - - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - SqliteCommand command = new SqliteCommand(sql, connection); - - int rows_added = command.ExecuteNonQuery(); - - log.Info("Rows Added: " + rows_added + " to comment table"); - } - } - catch (Exception ex) - { - log.Error("Error updating customer password", ex); - output = ex.Message; - } - return output; - } - - public string[] GetSecurityQuestionAndAnswer(string email) - { - string sql = "select SecurityQuestions.question_text, CustomerLogin.answer from CustomerLogin, " + - "SecurityQuestions where CustomerLogin.email = '" + email + "' and CustomerLogin.question_id = " + - "SecurityQuestions.question_id;"; - - string[] qAndA = new string[2]; - - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); - - DataSet ds = new DataSet(); - da.Fill(ds); - - if (ds.Tables[0].Rows.Count > 0) - { - DataRow row = ds.Tables[0].Rows[0]; - qAndA[0] = row[0].ToString(); - qAndA[1] = row[1].ToString(); - } - } - - return qAndA; - } - - public string GetPasswordByEmail(string email) - { - string result = string.Empty; - try - { - - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - //get data - string sql = "select * from CustomerLogin where email = '" + email + "';"; - SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); - DataSet ds = new DataSet(); - da.Fill(ds); - - //check if email address exists - if (ds.Tables[0].Rows.Count == 0) - { - result = "Email Address Not Found!"; - } - - string encoded_password = ds.Tables[0].Rows[0]["Password"].ToString(); - string decoded_password = 
Encoder.Decode(encoded_password); - result = decoded_password; - } - } - catch (Exception ex) - { - result = ex.Message; - } - return result; - } - - public DataSet GetUsers() - { - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - string sql = "select * from CustomerLogin;"; - SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); - DataSet ds = new DataSet(); - da.Fill(ds); - return ds; - } - } - - public DataSet GetOrders(int customerID) - { - - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - string sql = "select * from Orders where customerNumber = " + customerID; - SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); - DataSet ds = new DataSet(); - da.Fill(ds); - - if (ds.Tables[0].Rows.Count == 0) - return null; - else - return ds; - } - } - - public DataSet GetProductDetails(string productCode) - { - string sql = string.Empty; - SqliteDataAdapter da; - DataSet ds = new DataSet(); - - - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - sql = "select * from Products where productCode = '" + productCode + "'"; - da = new SqliteDataAdapter(sql, connection); - da.Fill(ds, "products"); - - sql = "select * from Comments where productCode = '" + productCode + "'"; - da = new SqliteDataAdapter(sql, connection); - da.Fill(ds, "comments"); - - DataRelation dr = new DataRelation("prod_comments", - ds.Tables["products"].Columns["productCode"], //category table - ds.Tables["comments"].Columns["productCode"], //product table - false); - - ds.Relations.Add(dr); - return ds; - } - } - - public DataSet GetOrderDetails(int orderNumber) - { - - string sql = "select Customers.customerName, Orders.customerNumber, Orders.orderNumber, Products.productName, " + - "OrderDetails.quantityOrdered, OrderDetails.priceEach, Products.productImage " + - "from OrderDetails, Products, Orders, Customers where " + - "Customers.customerNumber = Orders.customerNumber " + - "and OrderDetails.productCode = Products.productCode " + - "and Orders.orderNumber = OrderDetails.orderNumber " + - "and OrderDetails.orderNumber = " + orderNumber; - - - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); - DataSet ds = new DataSet(); - da.Fill(ds); - - if (ds.Tables[0].Rows.Count == 0) - return null; - else - return ds; - } - } - - public DataSet GetPayments(int customerNumber) - { - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - string sql = "select * from Payments where customerNumber = " + customerNumber; - SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); - DataSet ds = new DataSet(); - da.Fill(ds); - - if (ds.Tables[0].Rows.Count == 0) - return null; - else - return ds; - } - } - - public DataSet GetProductsAndCategories() - { - return GetProductsAndCategories(0); - } - - public DataSet GetProductsAndCategories(int catNumber) - { - //TODO: Rerun the database script. - string sql = string.Empty; - SqliteDataAdapter da; - DataSet ds = new DataSet(); - - //catNumber is optional. If it is greater than 0, add the clause to both statements. 
- string catClause = string.Empty; - if (catNumber >= 1) - catClause += " where catNumber = " + catNumber; - - - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - sql = "select * from Categories" + catClause; - da = new SqliteDataAdapter(sql, connection); - da.Fill(ds, "categories"); - - sql = "select * from Products" + catClause; - da = new SqliteDataAdapter(sql, connection); - da.Fill(ds, "products"); - - - //category / products relationship - DataRelation dr = new DataRelation("cat_prods", - ds.Tables["categories"].Columns["catNumber"], //category table - ds.Tables["products"].Columns["catNumber"], //product table - false); - - ds.Relations.Add(dr); - return ds; - } - } - - public DataSet GetEmailByName(string name) - { - string sql = "select firstName, lastName, email from Employees where firstName like '" + name + "%' or lastName like '" + name + "%'"; - - - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); - DataSet ds = new DataSet(); - da.Fill(ds); - - if (ds.Tables[0].Rows.Count == 0) - return null; - else - return ds; - } - } - - public string GetEmailByCustomerNumber(string num) - { - string output = ""; - try - { - - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - string sql = "select email from CustomerLogin where customerNumber = " + num; - SqliteCommand cmd = new SqliteCommand(sql, connection); - output = (string)cmd.ExecuteScalar(); - } - - } - catch (Exception ex) - { - log.Error("Error getting email by customer number", ex); - output = ex.Message; - } - - return output; - } - - public DataSet GetCustomerEmails(string email) - { - string sql = "select email from CustomerLogin where email like '" + email + "%'"; - - - using (SqliteConnection connection = new SqliteConnection(_connectionString)) - { - connection.Open(); - - SqliteDataAdapter da = new SqliteDataAdapter(sql, connection); - DataSet ds = new DataSet(); - da.Fill(ds); - - if (ds.Tables[0].Rows.Count == 0) - return null; - else - return ds; - } - } - - } -} \ No newline at end of file diff --git a/samples/Util.cs b/samples/Util.cs deleted file mode 100644 index 6ce836b..0000000 --- a/samples/Util.cs +++ /dev/null @@ -1,94 +0,0 @@ -using System; -using System.Diagnostics; -using log4net; -using System.Reflection; -using System.IO; -using System.Threading; - -namespace OWASP.WebGoat.NET.App_Code -{ - public class Util - { - private static readonly ILog log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); - - public static int RunProcessWithInput(string cmd, string args, string input) - { - ProcessStartInfo startInfo = new ProcessStartInfo - { - WorkingDirectory = Settings.RootDir, - FileName = cmd, - Arguments = args, - UseShellExecute = false, - RedirectStandardInput = true, - RedirectStandardError = true, - RedirectStandardOutput = true, - }; - - using (Process process = new Process()) - { - process.EnableRaisingEvents = true; - process.StartInfo = startInfo; - - process.OutputDataReceived += (sender, e) => { - if (e.Data != null) - log.Info(e.Data); - }; - - process.ErrorDataReceived += (sender, e) => - { - if (e.Data != null) - log.Error(e.Data); - }; - - AutoResetEvent are = new AutoResetEvent(false); - - process.Exited += (sender, e) => - { - Thread.Sleep(1000); - are.Set(); - log.Info("Process exited"); - - }; - - process.Start(); - - using (StreamReader 
reader = new StreamReader(new FileStream(input, FileMode.Open))) - { - string line; - string replaced; - while ((line = reader.ReadLine()) != null) - { - if (Environment.OSVersion.Platform == PlatformID.Win32NT) - replaced = line.Replace("DB_Scripts/datafiles/", "DB_Scripts\\\\datafiles\\\\"); - else - replaced = line; - - log.Debug("Line: " + replaced); - - process.StandardInput.WriteLine(replaced); - } - } - - process.StandardInput.Close(); - - - process.BeginOutputReadLine(); - process.BeginErrorReadLine(); - - //NOTE: Looks like we have a mono bug: https://bugzilla.xamarin.com/show_bug.cgi?id=6291 - //have a wait time for now. - - are.WaitOne(10 * 1000); - - if (process.HasExited) - return process.ExitCode; - else //WTF? Should have exited dammit! - { - process.Kill(); - return 1; - } - } - } - } -} - From abb5754cad4f02938717af8c35c0b96ccbffb7c4 Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Thu, 24 Apr 2025 21:14:37 -0400 Subject: [PATCH 17/38] Update MSDO workflow to specify tools for analysis --- .github/workflows/MSDO-Microsoft-Security-DevOps.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/MSDO-Microsoft-Security-DevOps.yml b/.github/workflows/MSDO-Microsoft-Security-DevOps.yml index b8bedf4..a28c17b 100644 --- a/.github/workflows/MSDO-Microsoft-Security-DevOps.yml +++ b/.github/workflows/MSDO-Microsoft-Security-DevOps.yml @@ -33,12 +33,12 @@ jobs: - name: Run Microsoft Security DevOps uses: microsoft/security-devops-action@v1.12.0 id: msdo - # with: + with: # config: string. Optional. A file path to an MSDO configuration file ('*.gdnconfig'). # policy: 'GitHub' | 'microsoft' | 'none'. Optional. The name of a well-known Microsoft policy. If no configuration file or list of tools is provided, the policy may instruct MSDO which tools to run. Default: GitHub. # categories: string. Optional. A comma-separated list of analyzer categories to run. Values: 'code', 'artifacts', 'IaC', 'containers'. Example: 'IaC, containers'. Defaults to all. # languages: string. Optional. A comma-separated list of languages to analyze. Example: 'javascript,typescript'. Defaults to all. - # tools: string. Optional. A comma-separated list of analyzer tools to run. Values: 'bandit', 'binskim', 'checkov', 'eslint', 'templateanalyzer', 'terrascan', 'trivy'. + tools: 'bandit, binskim, checkov, container-mapping, templateanalyzer, terrascan, trivy' # Upload alerts to the Security tab - required for MSDO results to appear in the codeQL security alerts tab on GitHub (Requires GHAS) - name: Upload results to Security tab From 56a38508e48573eabb27ce7b2a0ad139252a0519 Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Thu, 24 Apr 2025 21:22:48 -0400 Subject: [PATCH 18/38] Remove 'container-mapping' from the list of tools in MSDO workflow --- .github/workflows/MSDO-Microsoft-Security-DevOps.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/MSDO-Microsoft-Security-DevOps.yml b/.github/workflows/MSDO-Microsoft-Security-DevOps.yml index a28c17b..f88cba4 100644 --- a/.github/workflows/MSDO-Microsoft-Security-DevOps.yml +++ b/.github/workflows/MSDO-Microsoft-Security-DevOps.yml @@ -38,7 +38,7 @@ jobs: # policy: 'GitHub' | 'microsoft' | 'none'. Optional. The name of a well-known Microsoft policy. If no configuration file or list of tools is provided, the policy may instruct MSDO which tools to run. Default: GitHub. # categories: string. Optional. A comma-separated list of analyzer categories to run. 
Values: 'code', 'artifacts', 'IaC', 'containers'. Example: 'IaC, containers'. Defaults to all. # languages: string. Optional. A comma-separated list of languages to analyze. Example: 'javascript,typescript'. Defaults to all. - tools: 'bandit, binskim, checkov, container-mapping, templateanalyzer, terrascan, trivy' + tools: 'bandit, binskim, checkov, templateanalyzer, terrascan, trivy' # Upload alerts to the Security tab - required for MSDO results to appear in the codeQL security alerts tab on GitHub (Requires GHAS) - name: Upload results to Security tab From 99582e4705c9f427d1fc444eac2fd1029b3e72b9 Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Thu, 24 Apr 2025 21:28:27 -0400 Subject: [PATCH 19/38] Remove 'binskim' from the list of tools in MSDO workflow --- .github/workflows/MSDO-Microsoft-Security-DevOps.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/MSDO-Microsoft-Security-DevOps.yml b/.github/workflows/MSDO-Microsoft-Security-DevOps.yml index f88cba4..facdd02 100644 --- a/.github/workflows/MSDO-Microsoft-Security-DevOps.yml +++ b/.github/workflows/MSDO-Microsoft-Security-DevOps.yml @@ -38,7 +38,7 @@ jobs: # policy: 'GitHub' | 'microsoft' | 'none'. Optional. The name of a well-known Microsoft policy. If no configuration file or list of tools is provided, the policy may instruct MSDO which tools to run. Default: GitHub. # categories: string. Optional. A comma-separated list of analyzer categories to run. Values: 'code', 'artifacts', 'IaC', 'containers'. Example: 'IaC, containers'. Defaults to all. # languages: string. Optional. A comma-separated list of languages to analyze. Example: 'javascript,typescript'. Defaults to all. - tools: 'bandit, binskim, checkov, templateanalyzer, terrascan, trivy' + tools: 'bandit, checkov, templateanalyzer, terrascan, trivy' # Upload alerts to the Security tab - required for MSDO results to appear in the codeQL security alerts tab on GitHub (Requires GHAS) - name: Upload results to Security tab From 08be4e5695452768b19cf233f770abf525e92b3c Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Thu, 24 Apr 2025 21:48:38 -0400 Subject: [PATCH 20/38] Update System.Text.Json package version Changed the version of the `System.Text.Json` package from `9.0.4` to `8.0.4` in the `webapp01.csproj` file. --- src/webapp01/webapp01.csproj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/webapp01/webapp01.csproj b/src/webapp01/webapp01.csproj index 54dfb41..7bae97c 100644 --- a/src/webapp01/webapp01.csproj +++ b/src/webapp01/webapp01.csproj @@ -13,7 +13,7 @@ - + From 118c6056676acb81a786d331dc8b9c72079ac0c5 Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Thu, 24 Apr 2025 22:01:09 -0400 Subject: [PATCH 21/38] Add Python support to CodeQL analysis configuration This commit updates the `SAST-GitHubAdvancedSecurity-CodeQL.yml` file to include Python as a supported language for analysis. A new entry for `language: python` with `build-mode: none` has been added, enabling CodeQL to analyze Python code alongside existing languages. 
--- .github/workflows/SAST-GitHubAdvancedSecurity-CodeQL.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/SAST-GitHubAdvancedSecurity-CodeQL.yml b/.github/workflows/SAST-GitHubAdvancedSecurity-CodeQL.yml index 1d49fa8..de81b1e 100644 --- a/.github/workflows/SAST-GitHubAdvancedSecurity-CodeQL.yml +++ b/.github/workflows/SAST-GitHubAdvancedSecurity-CodeQL.yml @@ -34,6 +34,8 @@ jobs: build-mode: none - language: csharp build-mode: none + - language: python + build-mode: none - language: javascript-typescript build-mode: none # CodeQL supports the following values keywords for 'language': 'actions', 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' From c3b4b0ec76e9ec661555dcff339af328f938e670 Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Thu, 24 Apr 2025 22:07:48 -0400 Subject: [PATCH 22/38] Enhance hashing and error handling in insecure.py --- samples/insecure.py | 46 ++++++++++++++++++++++----------------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/samples/insecure.py b/samples/insecure.py index aa9007d..41920cf 100644 --- a/samples/insecure.py +++ b/samples/insecure.py @@ -1,26 +1,26 @@ -# Commented out sample to pass scanning -# -#import hashlib -# print("I am very insecure. Bandit thinks so too.") -# #B110 -# xs=[1,2,3,4,5,6,7,8] -# try: -# print(xs[7]) -# print(xs[8]) -# except: pass +Commented out sample to pass scanning -# ys=[1, 2, None, None] -# for y in ys: -# try: -# print(str(y+3)) #TypeErrors ahead -# except: continue #not how to handle them +import hashlib +print("I am very insecure. Bandit thinks so too.") +#B110 +xs=[1,2,3,4,5,6,7,8] +try: + print(xs[7]) + print(xs[8]) +except: pass -# #some imports -# import telnetlib -# import ftplib +ys=[1, 2, None, None] +for y in ys: + try: + print(str(y+3)) #TypeErrors ahead + except: continue #not how to handle them -# #B303 and B324 -# s = b"I am a string" -# print("MD5: " +hashlib.md5(s).hexdigest()) -# print("SHA1: " +hashlib.sha1(s).hexdigest()) -# print("SHA256: " +hashlib.sha256(s).hexdigest()) +#some imports +import telnetlib +import ftplib + +#B303 and B324 +s = b"I am a string" +print("MD5: " +hashlib.md5(s).hexdigest()) +print("SHA1: " +hashlib.sha1(s).hexdigest()) +print("SHA256: " +hashlib.sha256(s).hexdigest()) From e8c219756cec7db91fa7a288652f676fe997e522 Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Thu, 24 Apr 2025 22:16:47 -0400 Subject: [PATCH 23/38] Create SCA-OpenSSF-Scorecard.yml --- .github/workflows/SCA-OpenSSF-Scorecard.yml | 78 +++++++++++++++++++++ 1 file changed, 78 insertions(+) create mode 100644 .github/workflows/SCA-OpenSSF-Scorecard.yml diff --git a/.github/workflows/SCA-OpenSSF-Scorecard.yml b/.github/workflows/SCA-OpenSSF-Scorecard.yml new file mode 100644 index 0000000..3590ecc --- /dev/null +++ b/.github/workflows/SCA-OpenSSF-Scorecard.yml @@ -0,0 +1,78 @@ +# OpenSSF Scorecard +# https://github.com/ossf/scorecard +# https://scorecard.dev/ + +name: OpenSSF Scorecard supply-chain security +on: + # For Branch-Protection check. Only the default branch is supported. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection + branch_protection_rule: + # To guarantee Maintained check is occasionally updated. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained + schedule: + - cron: '25 23 * * 3' + push: + branches: [ "main" ] + +# Declare default permissions as read only. 
+permissions: read-all + +jobs: + analysis: + name: Scorecard analysis + runs-on: ubuntu-latest + # `publish_results: true` only works when run from the default branch. conditional can be removed if disabled. + if: github.event.repository.default_branch == github.ref_name || github.event_name == 'pull_request' + permissions: + # Needed to upload the results to code-scanning dashboard. + security-events: write + # Needed to publish results and get a badge (see publish_results below). + id-token: write + # Uncomment the permissions below if installing in a private repository. + # contents: read + # actions: read + + steps: + - name: "Checkout code" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + + - name: "Run analysis" + uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186 # v2.4.1 + with: + results_file: results.sarif + results_format: sarif + # (Optional) "write" PAT token. Uncomment the `repo_token` line below if: + # - you want to enable the Branch-Protection check on a *public* repository, or + # - you are installing Scorecard on a *private* repository + # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action?tab=readme-ov-file#authentication-with-fine-grained-pat-optional. + # repo_token: ${{ secrets.SCORECARD_TOKEN }} + + # Public repositories: + # - Publish results to OpenSSF REST API for easy access by consumers + # - Allows the repository to include the Scorecard badge. + # - See https://github.com/ossf/scorecard-action#publishing-results. + # For private repositories: + # - `publish_results` will always be set to `false`, regardless + # of the value entered here. + publish_results: true + + # (Optional) Uncomment file_mode if you have a .gitattributes with files marked export-ignore + # file_mode: git + + # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF + # format to the repository Actions tab. + - name: "Upload artifact" + uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 + with: + name: SARIF file + path: results.sarif + retention-days: 30 + + # Upload the results to GitHub's code scanning dashboard (optional). + # Commenting out will disable upload of results to your repo's Code Scanning dashboard + - name: "Upload to code-scanning" + uses: github/codeql-action/upload-sarif@v3 + with: + sarif_file: results.sarif From d59081563a3340c95c03dc290205368d6f789bae Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Thu, 24 Apr 2025 22:18:26 -0400 Subject: [PATCH 24/38] Update SCA-OpenSSF-Scorecard.yml --- .github/workflows/SCA-OpenSSF-Scorecard.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/SCA-OpenSSF-Scorecard.yml b/.github/workflows/SCA-OpenSSF-Scorecard.yml index 3590ecc..79b0d20 100644 --- a/.github/workflows/SCA-OpenSSF-Scorecard.yml +++ b/.github/workflows/SCA-OpenSSF-Scorecard.yml @@ -2,7 +2,7 @@ # https://github.com/ossf/scorecard # https://scorecard.dev/ -name: OpenSSF Scorecard supply-chain security +name: SCA - OpenSSF Scorecard supply-chain security on: # For Branch-Protection check. Only the default branch is supported. 
See # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection From b291f10bcadec007cf0bedfd6f799a4783278d6b Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Thu, 24 Apr 2025 22:20:09 -0400 Subject: [PATCH 25/38] Fix issue with the comment line --- samples/insecure.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/insecure.py b/samples/insecure.py index 41920cf..da0b2b9 100644 --- a/samples/insecure.py +++ b/samples/insecure.py @@ -1,4 +1,4 @@ -Commented out sample to pass scanning +#Commented out sample to pass scanning import hashlib print("I am very insecure. Bandit thinks so too.") From ca1378e67e315c1eb7e7d50de8222d70236c56fe Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Thu, 24 Apr 2025 22:29:30 -0400 Subject: [PATCH 26/38] Update README.md --- README.md | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 38a2df0..c5d9911 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,5 @@ -# gh-aspnet-webapp-01 -ASP.NET web app Demo e2e +# DevSecOps Guidelines +> DevSecOps end-2-end Demo + +![image](https://github.com/user-attachments/assets/945085e8-c403-4c20-a1ab-3bf3acf9de2e) + From 4f4aec55d0612ddea70c4e689d4a6d0a80aa7151 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 25 Apr 2025 10:54:11 +0000 Subject: [PATCH 27/38] Bump azure/docker-login from 1 to 2 Bumps [azure/docker-login](https://github.com/azure/docker-login) from 1 to 2. - [Release notes](https://github.com/azure/docker-login/releases) - [Commits](https://github.com/azure/docker-login/compare/v1...v2) --- updated-dependencies: - dependency-name: azure/docker-login dependency-version: '2' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/cicd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index 422821e..e60a6b3 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -54,7 +54,7 @@ jobs: app-name: ${{ env.AZURE_WEBAPP_NAME }} # Replace with your app name package: '${{ env.AZURE_WEBAPP_PACKAGE_PATH }}/webapp01/bin/publish' - - uses: azure/docker-login@v1 + - uses: azure/docker-login@v2 with: login-server: crdevsecopscldev.azurecr.io username: ${{ secrets.REGISTRY_USERNAME }} From 40b34d0c2b89b4f083b4c72be79923bb76ad8415 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 25 Apr 2025 10:54:15 +0000 Subject: [PATCH 28/38] Bump anchore/sbom-action from 0.11.0 to 0.19.0 Bumps [anchore/sbom-action](https://github.com/anchore/sbom-action) from 0.11.0 to 0.19.0. - [Release notes](https://github.com/anchore/sbom-action/releases) - [Changelog](https://github.com/anchore/sbom-action/blob/main/RELEASE.md) - [Commits](https://github.com/anchore/sbom-action/compare/bb716408e75840bbb01e839347cd213767269d4a...9f7302141466aa6482940f15371237e9d9f4c34a) --- updated-dependencies: - dependency-name: anchore/sbom-action dependency-version: 0.19.0 dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- .github/workflows/SCA-Anchore-Syft-SBOM.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/SCA-Anchore-Syft-SBOM.yml b/.github/workflows/SCA-Anchore-Syft-SBOM.yml index 26a945c..6293176 100644 --- a/.github/workflows/SCA-Anchore-Syft-SBOM.yml +++ b/.github/workflows/SCA-Anchore-Syft-SBOM.yml @@ -32,7 +32,7 @@ jobs: run: docker build ./src/webapp01 --file ./src/webapp01/Dockerfile --tag ${{ env.imageName }}:${{ env.tag }} - name: Scan the image and upload dependency results - uses: anchore/sbom-action@bb716408e75840bbb01e839347cd213767269d4a + uses: anchore/sbom-action@9f7302141466aa6482940f15371237e9d9f4c34a continue-on-error: true with: image: "${{ env.imageName }}:${{ env.tag }}" From 9dcf1a9c7a25616c11123a1c1a7315dba8e5572b Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Fri, 25 Apr 2025 07:19:07 -0400 Subject: [PATCH 29/38] Remove Docker build step; add Azure Web Apps deploy This commit removes the Docker build and push steps from the `cicd.yml` file. In their place, a new deployment step has been added using the `azure/webapps-deploy@v3` action, which specifies the application name and the image tagged with the current GitHub SHA. --- .github/workflows/cicd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index 422821e..becd830 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -63,7 +63,7 @@ jobs: docker build ./src/webapp01 --file ./src/webapp01/Dockerfile -t crdevsecopscldev.azurecr.io/webapp01:${{ github.sha }} docker push crdevsecopscldev.azurecr.io/webapp01:${{ github.sha }} - - name: Disabled Azure Web Apps Deploy + - name: Azure Web Apps Deploy uses: azure/webapps-deploy@v3 with: app-name: ${{ env.AZURE_WEBAPP_NAME }} From e7cd762245eefcd4e5c06216fe1fb7b5ce471feb Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Fri, 25 Apr 2025 07:52:06 -0400 Subject: [PATCH 30/38] Add About page and update navigation to include GHAS information --- src/webapp01/Pages/About.cshtml | 71 ++++++++++++++++++++++++ src/webapp01/Pages/About.cshtml.cs | 12 ++++ src/webapp01/Pages/Index.cshtml | 1 + src/webapp01/Pages/Shared/_Layout.cshtml | 3 + 4 files changed, 87 insertions(+) create mode 100644 src/webapp01/Pages/About.cshtml create mode 100644 src/webapp01/Pages/About.cshtml.cs diff --git a/src/webapp01/Pages/About.cshtml b/src/webapp01/Pages/About.cshtml new file mode 100644 index 0000000..a5cb288 --- /dev/null +++ b/src/webapp01/Pages/About.cshtml @@ -0,0 +1,71 @@ +@page +@model AboutModel +@{ + ViewData["Title"] = "About GitHub Advanced Security"; +} + +
+

@ViewData["Title"]

+
+ +
+
+

What is GitHub Advanced Security (GHAS)?

+

+ GitHub Advanced Security (GHAS) is a suite of security features that help you identify and fix vulnerabilities in your code. + It provides tools to improve your security posture and protect your software development lifecycle. +

+ +

Key Features of GHAS:

+
    +
  • + Code Scanning: Automatically identify vulnerabilities in your code using CodeQL, the semantic code analysis engine. +
  • +
  • + Secret Scanning: Detect secrets and credentials committed to your repositories. +
  • +
  • + Dependency Review: Get insights about dependency changes and their security impact when reviewing pull requests. +
  • +
  • + Dependabot: Receive automatic alerts and pull requests to update vulnerable dependencies. +
  • +
  • + Security Overview: Get a comprehensive view of your security alerts across repositories and organizations. +
  • +
+ +

Benefits

+

+ With GitHub Advanced Security, teams can: +

+
    +
  • Find security vulnerabilities before they reach production
  • +
  • Shift security left in the development lifecycle
  • +
  • Meet compliance requirements with built-in reports
  • +
  • Automate security checks in CI/CD pipelines
  • +
  • Reduce the risk of data breaches and security incidents
  • +
+ +

+ Learn more about GHAS at GitHub's security features page. +

+
+ + +
+
\ No newline at end of file diff --git a/src/webapp01/Pages/About.cshtml.cs b/src/webapp01/Pages/About.cshtml.cs new file mode 100644 index 0000000..6e03480 --- /dev/null +++ b/src/webapp01/Pages/About.cshtml.cs @@ -0,0 +1,12 @@ +using Microsoft.AspNetCore.Mvc; +using Microsoft.AspNetCore.Mvc.RazorPages; + +namespace webapp01.Pages +{ + public class AboutModel : PageModel + { + public void OnGet() + { + } + } +} \ No newline at end of file diff --git a/src/webapp01/Pages/Index.cshtml b/src/webapp01/Pages/Index.cshtml index b47ad33..f6c6665 100644 --- a/src/webapp01/Pages/Index.cshtml +++ b/src/webapp01/Pages/Index.cshtml @@ -8,5 +8,6 @@
.NET 💜 Azure v4

Learn about building Web apps with ASP.NET Core.

+

Visit our About GHAS page to learn about GitHub Advanced Security features.

diff --git a/src/webapp01/Pages/Shared/_Layout.cshtml b/src/webapp01/Pages/Shared/_Layout.cshtml index b897a49..f8bf480 100644 --- a/src/webapp01/Pages/Shared/_Layout.cshtml +++ b/src/webapp01/Pages/Shared/_Layout.cshtml @@ -23,6 +23,9 @@ + From 4d2a31d07f5e71441bd9ef3afb8fb17935452915 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Apr 2025 11:55:11 +0000 Subject: [PATCH 31/38] Bump Azure.Identity, Microsoft.Data.SqlClient and System.Text.Json Bumps [Azure.Identity](https://github.com/Azure/azure-sdk-for-net), [Microsoft.Data.SqlClient](https://github.com/dotnet/sqlclient) and [System.Text.Json](https://github.com/dotnet/runtime). These dependencies needed to be updated together. Updates `Azure.Identity` from 1.13.2 to 1.11.4 - [Release notes](https://github.com/Azure/azure-sdk-for-net/releases) - [Commits](https://github.com/Azure/azure-sdk-for-net/compare/Azure.Identity_1.13.2...Azure.Identity_1.11.4) Updates `Microsoft.Data.SqlClient` from 5.1.3 to 6.0.2 - [Release notes](https://github.com/dotnet/sqlclient/releases) - [Changelog](https://github.com/dotnet/SqlClient/blob/main/CHANGELOG.md) - [Commits](https://github.com/dotnet/sqlclient/compare/v5.1.3...v6.0.2) Updates `System.Text.Json` from 9.0.0 to 9.0.0 - [Release notes](https://github.com/dotnet/runtime/releases) - [Commits](https://github.com/dotnet/runtime/compare/v9.0.0...v9.0.0) --- updated-dependencies: - dependency-name: Azure.Identity dependency-version: 1.11.4 dependency-type: direct:production update-type: version-update:semver-minor - dependency-name: Microsoft.Data.SqlClient dependency-version: 6.0.2 dependency-type: direct:production update-type: version-update:semver-major - dependency-name: System.Text.Json dependency-version: 9.0.0 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- src/webapp01/webapp01.csproj | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/webapp01/webapp01.csproj b/src/webapp01/webapp01.csproj index 7bae97c..6621cab 100644 --- a/src/webapp01/webapp01.csproj +++ b/src/webapp01/webapp01.csproj @@ -11,9 +11,9 @@ - + - + From 4077a8b3b403ed49cd6e516e7b13e8e22d127937 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Apr 2025 12:10:38 +0000 Subject: [PATCH 32/38] Bump System.Text.Json from 9.0.0 to 9.0.4 in /src/webapp01 Bumps [System.Text.Json](https://github.com/dotnet/runtime) from 9.0.0 to 9.0.4. - [Release notes](https://github.com/dotnet/runtime/releases) - [Commits](https://github.com/dotnet/runtime/compare/v9.0.0...v9.0.4) --- updated-dependencies: - dependency-name: System.Text.Json dependency-version: 9.0.4 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- src/webapp01/webapp01.csproj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/webapp01/webapp01.csproj b/src/webapp01/webapp01.csproj index 7bae97c..54dfb41 100644 --- a/src/webapp01/webapp01.csproj +++ b/src/webapp01/webapp01.csproj @@ -13,7 +13,7 @@ - + From 834ebc53bea6f284766f0b0c6cfaace483cd6938 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 15 May 2025 10:57:28 +0000 Subject: [PATCH 33/38] Bump anchore/sbom-action from 0.19.0 to 0.20.0 Bumps [anchore/sbom-action](https://github.com/anchore/sbom-action) from 0.19.0 to 0.20.0. 
- [Release notes](https://github.com/anchore/sbom-action/releases) - [Changelog](https://github.com/anchore/sbom-action/blob/main/RELEASE.md) - [Commits](https://github.com/anchore/sbom-action/compare/9f7302141466aa6482940f15371237e9d9f4c34a...e11c554f704a0b820cbf8c51673f6945e0731532) --- updated-dependencies: - dependency-name: anchore/sbom-action dependency-version: 0.20.0 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/SCA-Anchore-Syft-SBOM.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/SCA-Anchore-Syft-SBOM.yml b/.github/workflows/SCA-Anchore-Syft-SBOM.yml index 6293176..7f0c46d 100644 --- a/.github/workflows/SCA-Anchore-Syft-SBOM.yml +++ b/.github/workflows/SCA-Anchore-Syft-SBOM.yml @@ -32,7 +32,7 @@ jobs: run: docker build ./src/webapp01 --file ./src/webapp01/Dockerfile --tag ${{ env.imageName }}:${{ env.tag }} - name: Scan the image and upload dependency results - uses: anchore/sbom-action@9f7302141466aa6482940f15371237e9d9f4c34a + uses: anchore/sbom-action@e11c554f704a0b820cbf8c51673f6945e0731532 continue-on-error: true with: image: "${{ env.imageName }}:${{ env.tag }}" From 134fa6c217a0388bf5159973e20783bba37d0967 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 28 May 2025 10:49:01 +0000 Subject: [PATCH 34/38] Bump checkmarx/kics-github-action from 2.1.7 to 2.1.9 Bumps [checkmarx/kics-github-action](https://github.com/checkmarx/kics-github-action) from 2.1.7 to 2.1.9. - [Release notes](https://github.com/checkmarx/kics-github-action/releases) - [Commits](https://github.com/checkmarx/kics-github-action/compare/v2.1.7...v2.1.9) --- updated-dependencies: - dependency-name: checkmarx/kics-github-action dependency-version: 2.1.9 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/IACS-Checkmarx-kics.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/IACS-Checkmarx-kics.yml b/.github/workflows/IACS-Checkmarx-kics.yml index 037bb46..dab9703 100644 --- a/.github/workflows/IACS-Checkmarx-kics.yml +++ b/.github/workflows/IACS-Checkmarx-kics.yml @@ -31,7 +31,7 @@ jobs: run: mkdir -p results-dir - name: Run kics Scan - uses: checkmarx/kics-github-action@v2.1.7 + uses: checkmarx/kics-github-action@v2.1.9 with: path: 'terraform' # when provided with a directory on output_path From cc930730abe1b9b442911aa2eba72c606d58ab15 Mon Sep 17 00:00:00 2001 From: Calin Lupas Date: Wed, 28 May 2025 16:04:11 -0400 Subject: [PATCH 35/38] Update Azure version in Index page to v5 --- src/webapp01/Pages/Index.cshtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/webapp01/Pages/Index.cshtml b/src/webapp01/Pages/Index.cshtml index f6c6665..394a289 100644 --- a/src/webapp01/Pages/Index.cshtml +++ b/src/webapp01/Pages/Index.cshtml @@ -6,7 +6,7 @@
-
.NET 💜 Azure v4
+
.NET 💜 Azure v5

Learn about building Web apps with ASP.NET Core.

Visit our About GHAS page to learn about GitHub Advanced Security features.

From 422b0d6efc3ecd916e352702e09b67bff1117a2a Mon Sep 17 00:00:00 2001
From: Calin Lupas
Date: Wed, 28 May 2025 22:22:21 -0400
Subject: [PATCH 36/38] Add coding standards guidelines to copilot instructions

---
 .github/copilot-instructions.md | 11 +++++++++++
 1 file changed, 11 insertions(+)
 create mode 100644 .github/copilot-instructions.md

diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md
new file mode 100644
index 0000000..fd318af
--- /dev/null
+++ b/.github/copilot-instructions.md
@@ -0,0 +1,11 @@
+## Coding Standards
+
+- Avoid generating code verbatim from public code examples. Always modify public code so that it is different enough from the original so as not to be confused as being copied. When you do so, provide a footnote to the user informing them.
+- Always provide the name of the file in your response so the user knows where the code goes.
+- Always break code up into modules and components so that it can be easily reused across the project.
+- All code you write MUST use safe and secure coding practices. ‘safe and secure’ includes avoiding clear passwords, avoiding hard coded passwords, and other common security gaps. If the code is not deemed safe and secure, you will be put in the corner til you learn your lesson.
+- All code you write MUST be fully optimized. ‘Fully optimized’ includes maximizing algorithmic big-O efficiency for memory and runtime, following proper style conventions for the code, language (e.g. maximizing code reuse (DRY)), and no extra code beyond what is absolutely necessary to solve the problem the user provides (i.e. no technical debt). If the code is not fully optimized, you will be fined $100.
+- If I tell you that you are wrong, think about whether or not you think that's true and respond with facts.
+- Avoid apologizing or making conciliatory statements.
+- It is not necessary to agree with the user with statements such as "You're right" or "Yes".
+- Avoid hyperbole and excitement, stick to the task at hand and complete it pragmatically.
\ No newline at end of file

From 67536eb38191a0dbaf6690758e178e6b2276effd Mon Sep 17 00:00:00 2001
From: Calin Lupas
Date: Thu, 29 May 2025 10:03:08 -0400
Subject: [PATCH 37/38] feat: Add DevSecOps demo page with GHAS features and intentional vulnerabilities

- Added new DevSecOps.cshtml page with latest GitHub Advanced Security news
- Implemented ILogger for backend logging in DevSecOpsModel
- Added intentional security vulnerabilities for GHAS demo:
  * Log forging vulnerability with user input injection
  * Vulnerable regex pattern susceptible to ReDoS attacks
  * Hardcoded database credentials
  * Potential JSON deserialization issues
- Updated package dependencies to specific versions with known vulnerabilities:
  * Microsoft.Data.SqlClient v5.0.2 (has known high severity vulnerability)
  * System.Text.Json v8.0.4 (has known high severity vulnerability)
  * Added Newtonsoft.Json v12.0.2 (has known high severity vulnerability)
- Added navigation links to DevSecOps page in main layout and index page
- Enhanced index page with prominent link to new DevSecOps demo

This implementation demonstrates various security issues that GitHub Advanced Security tools should detect, including code scanning alerts for vulnerable patterns, secret scanning for hardcoded credentials, and dependency alerts for vulnerable packages.
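For contrast, hardened counterparts to two of the intentional flaws would look roughly like the sketch below. The class and method names are illustrative and deliberately absent from the demo, which keeps the vulnerable forms in place so the scanners have findings to report.

    using System;
    using System.Text.RegularExpressions;

    public static class HardenedExamples
    {
        // Strip newline characters before logging so user input cannot
        // forge additional log entries (the log forging case above).
        public static string SanitizeForLog(string input) =>
            (input ?? string.Empty).Replace("\r", string.Empty).Replace("\n", string.Empty);

        // Matches the same strings as the demo's ^(a+)+$ but without the
        // nested quantifier, plus a match timeout as a backstop against ReDoS.
        private static readonly Regex SaferRegex =
            new Regex(@"^a+$", RegexOptions.Compiled, TimeSpan.FromMilliseconds(250));

        public static bool IsMatchSafely(string candidate)
        {
            try
            {
                return SaferRegex.IsMatch(candidate ?? string.Empty);
            }
            catch (RegexMatchTimeoutException)
            {
                // Fail closed instead of letting a crafted input hang the request.
                return false;
            }
        }
    }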
--- src/webapp01/Pages/DevSecOps.cshtml | 181 +++++++++++++++++++++++ src/webapp01/Pages/DevSecOps.cshtml.cs | 105 +++++++++++++ src/webapp01/Pages/Index.cshtml | 4 + src/webapp01/Pages/Shared/_Layout.cshtml | 6 +- src/webapp01/webapp01.csproj | 6 +- 5 files changed, 297 insertions(+), 5 deletions(-) create mode 100644 src/webapp01/Pages/DevSecOps.cshtml create mode 100644 src/webapp01/Pages/DevSecOps.cshtml.cs diff --git a/src/webapp01/Pages/DevSecOps.cshtml b/src/webapp01/Pages/DevSecOps.cshtml new file mode 100644 index 0000000..19f5d71 --- /dev/null +++ b/src/webapp01/Pages/DevSecOps.cshtml @@ -0,0 +1,181 @@ +@page +@model DevSecOpsModel +@{ + ViewData["Title"] = "DevSecOps with GitHub Advanced Security"; +} + +
+
+
+

@ViewData["Title"]

+

Discover the latest features and capabilities of GitHub Advanced Security (GHAS)

+
+
+
+ + + @if (TempData["RegexResult"] != null) + { + + } + + @if (TempData["RegexError"] != null) + { + + } + +
+ +
+
+
+

+ Latest GitHub Advanced Security News +

+
+
+ @if (Model.LatestNews.Any()) + { +
+ @foreach (var newsItem in Model.LatestNews) + { +
+ NEW +
+

@newsItem

+ Updated: @DateTime.Now.ToString("MMM dd, yyyy") +
+
+ } +
+ } + else + { +

No news available at this time.

+ } +
+
+ + +
+
+

Core GHAS Features

+
+
+
+
+
Code Scanning
+

Automated vulnerability detection using the CodeQL semantic analysis engine.

+ +
Secret Scanning
+

Detect and prevent secrets from being committed to repositories.

+
+
+
Dependency Review
+

Understand the security impact of dependency changes in pull requests.

+ +
Security Overview
+

Organization-wide security posture visibility and compliance tracking.

+
+
+
+
+
+ + +
+ +
+
+

+ Security Demo +

+
+
+

+ This page contains intentionally vulnerable code for demonstration purposes. + These vulnerabilities should be detected by GHAS code scanning. +

+ + +
+
+ + +
+ ⚠️ This uses a vulnerable regex pattern susceptible to ReDoS attacks. +
+
+ +
+
+
+ + + +
+
+ + +
+
+ +
+
+
+ +@section Scripts { + +} diff --git a/src/webapp01/Pages/DevSecOps.cshtml.cs b/src/webapp01/Pages/DevSecOps.cshtml.cs new file mode 100644 index 0000000..acff4fc --- /dev/null +++ b/src/webapp01/Pages/DevSecOps.cshtml.cs @@ -0,0 +1,105 @@ +using Microsoft.AspNetCore.Mvc; +using Microsoft.AspNetCore.Mvc.RazorPages; +using System.Text.RegularExpressions; +using Microsoft.Data.SqlClient; +using Newtonsoft.Json; +using System.Text.Json; + +namespace webapp01.Pages +{ + public class DevSecOpsModel : PageModel + { + private readonly ILogger _logger; + + // Hardcoded credentials for demo purposes - INSECURE + private const string CONNECTION_STRING = "Server=localhost;Database=TestDB;User Id=admin;Password=SecretPassword123!;"; + + // Weak regex pattern - vulnerable to ReDoS + private static readonly Regex VulnerableRegex = new Regex(@"^(a+)+$", RegexOptions.Compiled); + + public DevSecOpsModel(ILogger logger) + { + _logger = logger; + } + + public List LatestNews { get; set; } = new(); public void OnGet() + { + // Log forging vulnerability - user input directly in logs + string userInput = Request.Query.ContainsKey("user") ? Request.Query["user"].ToString() ?? "anonymous" : "anonymous"; + _logger.LogInformation($"User accessed DevSecOps page: {userInput}"); + + // Simulate getting latest news about GitHub Advanced Security + LoadLatestGHASNews(); + + // Demonstrate potential ReDoS vulnerability + string testPattern = Request.Query.ContainsKey("pattern") ? Request.Query["pattern"].ToString() ?? "aaa" : "aaa"; + try + { + bool isMatch = VulnerableRegex.IsMatch(testPattern); + _logger.LogInformation($"Regex pattern match result: {isMatch} for input: {testPattern}"); + } + catch (Exception ex) + { + // Log forging in exception handling + _logger.LogError($"Regex evaluation failed for pattern: {testPattern}. 
Error: {ex.Message}"); + } + + // Simulate database connection with hardcoded credentials + try + { + using var connection = new SqlConnection(CONNECTION_STRING); + _logger.LogInformation("Attempting database connection..."); + // Don't actually open connection for demo purposes + } + catch (Exception ex) + { + _logger.LogError($"Database connection failed: {ex.Message}"); + } + } + + private void LoadLatestGHASNews() + { + LatestNews = new List + { + "GitHub Advanced Security now supports enhanced code scanning with CodeQL 2.20", + "New secret scanning patterns added for over 200 service providers", + "Dependency review alerts now include detailed remediation guidance", + "Security advisories integration improved for better vulnerability management", + "Custom CodeQL queries can now be shared across organizations", + "AI-powered security suggestions available in GitHub Copilot for Security", + "New compliance frameworks supported in security overview dashboard", + "Enhanced SARIF support for third-party security tools integration" + }; + + // Potential JSON deserialization vulnerability + string jsonData = JsonConvert.SerializeObject(LatestNews); + var deserializedData = JsonConvert.DeserializeObject>(jsonData); + + _logger.LogInformation($"Loaded {LatestNews.Count} news items about GitHub Advanced Security"); + } + + public IActionResult OnPostTestRegex(string pattern) + { + if (string.IsNullOrEmpty(pattern)) + return BadRequest("Pattern cannot be empty"); + + // Log forging vulnerability in POST handler + _logger.LogInformation($"Testing regex pattern submitted by user: {pattern}"); + + try + { + // Vulnerable regex that could cause ReDoS + bool result = VulnerableRegex.IsMatch(pattern); + TempData["RegexResult"] = $"Pattern '{pattern}' match result: {result}"; + } + catch (Exception ex) + { + // Logging sensitive information + _logger.LogError($"Regex test failed for pattern: {pattern}. Exception: {ex}"); + TempData["RegexError"] = "Pattern evaluation failed"; + } + + return RedirectToPage(); + } + } +} diff --git a/src/webapp01/Pages/Index.cshtml b/src/webapp01/Pages/Index.cshtml index 394a289..e0db7f6 100644 --- a/src/webapp01/Pages/Index.cshtml +++ b/src/webapp01/Pages/Index.cshtml @@ -9,5 +9,9 @@
.NET 💜 Azure v5

Learn about building Web apps with ASP.NET Core.

Visit our About GHAS page to learn about GitHub Advanced Security features.

+

+ New! Check out our DevSecOps Demo + page to see the latest GHAS features and security demonstrations. +

diff --git a/src/webapp01/Pages/Shared/_Layout.cshtml b/src/webapp01/Pages/Shared/_Layout.cshtml index f8bf480..bcaf503 100644 --- a/src/webapp01/Pages/Shared/_Layout.cshtml +++ b/src/webapp01/Pages/Shared/_Layout.cshtml @@ -18,14 +18,16 @@ aria-expanded="false" aria-label="Toggle navigation"> -