Add apache spark exploit

This commit is contained in:
JD
2023-02-11 17:46:38 +00:00
parent 52658c518d
commit 1f4950ed7a
17 changed files with 202 additions and 0 deletions

View File

@@ -0,0 +1,5 @@
# What's needed?
- A Scala 2.12.10 .deb file
- Apache Spark 3.1.2 files
- Exploit using the Metasploit module apache_spark_rce_cve_2022_33891

View File

@@ -0,0 +1,11 @@
# apache spark rce init
# Wires the three phases of this module together: install the packages,
# drop the (deliberately vulnerable) configuration, then start the service.
#
# References:
#   https://archive.apache.org/dist/spark/spark-3.1.2/spark-3.1.2-bin-hadoop3.2.tgz
#   https://spark.apache.org/docs/3.1.2/
#   https://packetstormsecurity.com/files/168309/Apache-Spark-Unauthenticated-Command-Injection.html
#   https://github.com/rapid7/metasploit-framework/blob/master/modules/exploits/linux/http/apache_spark_rce_cve_2022_33891.rb
contain apache_spark_rce::install
contain apache_spark_rce::configure
contain apache_spark_rce::service

# Explicit pairwise ordering: install before configure, configure before service.
Class['apache_spark_rce::install'] -> Class['apache_spark_rce::configure']
Class['apache_spark_rce::configure'] -> Class['apache_spark_rce::service']

View File

@@ -0,0 +1 @@
spark.acls.enable true

View File

@@ -0,0 +1,15 @@
# systemd unit that launches the Spark standalone master at boot.
# NOTE(review): the service runs as root, which is what lets the
# CVE-2022-33891 command injection execute as root.
[Unit]
Description=Apache Spark Shell
After=network.target
[Service]
# start-master.sh daemonizes itself, hence Type=forking.
Type=forking
User=root
# PATH must include /usr/local/spark/sbin so the start script's helpers resolve.
Environment="PATH=/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games:/usr/local/spark/sbin"
WorkingDirectory=/usr/local/spark/bin/
ExecStart=/usr/local/spark/sbin/start-master.sh
# Restart only on abnormal exit, after a 1s delay.
Restart=on-abort
RestartSec=1
[Install]
WantedBy=multi-user.target

View File

@@ -0,0 +1,12 @@
# Function: apache_spark_rce::cpandbuild
# Stages the split parts of a large archive from the module's files/ directory
# into /tmp and concatenates them back into the original file.
#
# @param collection The list of part filenames (e.g. foo.partaa, foo.partab).
# @param filename   The name of the reassembled output file in /tmp.
# @return Undef
function apache_spark_rce::cpandbuild(Array $collection, String $filename) >> Undef {
  # Copy every split part into /tmp.
  $collection.each |String $item| {
    file { "/tmp/${item}":
      ensure => file,
      source => "puppet:///modules/apache_spark_rce/${item}",
    }
  }
  # Reassemble the parts. The original interpolations were broken
  # ("$(unknown)" instead of ${filename}); restore them, give the exec an
  # explicit path, make it idempotent via `creates`, and require the staged
  # parts so ordering is guaranteed.
  exec { "rebuild-${filename}":
    cwd     => '/tmp/',
    path    => ['/bin', '/usr/bin'],
    command => "cat ${filename}.parta* > ${filename}",
    creates => "/tmp/${filename}",
    require => $collection.map |String $part| { File["/tmp/${part}"] },
  }
}

View File

@@ -0,0 +1,25 @@
# Class: apache_spark_rce::configure
# Drops the vulnerable Spark configuration and plants the SecGen flag files.
#
class apache_spark_rce::configure {
  # Default binary search path for any exec in this class.
  Exec { path => [ '/bin/', '/sbin/' , '/usr/bin/', '/usr/sbin/' ] }

  $conf_name        = 'spark-defaults.conf'
  $leaked_filenames = ['flagtest'] ##$secgen_parameters['leaked_filenames']
  $strings_to_leak  = ['this is a list of strings that are secrets / flags','another secret'] ##$secgen_parameters['strings_to_leak']

  # Ship a config that sets the acls flag — this is what makes the
  # instance vulnerable.
  file { "/usr/local/spark/conf/${conf_name}":
    ensure => file,
    source => "puppet:///modules/apache_spark_rce/${conf_name}",
  }

  # Write the flag/secret files that the attacker can read post-exploit.
  ::secgen_functions::leak_files { 'spark-flag-leak':
    storage_directory => '/usr/local/spark/bin/',
    leaked_filenames  => $leaked_filenames,
    strings_to_leak   => $strings_to_leak,
    owner             => 'root',
    mode              => '0750',
    leaked_from       => 'apache_spark_rce',
  }
}

View File

@@ -0,0 +1,49 @@
# Class: apache_spark_rce::install
# Installs OpenJDK 11, Scala (from a locally reassembled .deb) and unpacks
# Apache Spark 3.1.2 into /usr/local/spark.
# https://archive.apache.org/dist/spark/spark-3.1.2/spark-3.1.2-bin-hadoop3.2.tgz
# https://www.scala-lang.org/download/2.12.10.html
class apache_spark_rce::install {
  Exec { path => [ '/bin/', '/sbin/' , '/usr/bin/', '/usr/sbin/' ] }
  $modulename = 'apache_spark_rce'
  # Install required packages
  # NOTE: once Debian updates insert scala 2.12+ into statement
  ensure_packages(['openjdk-11-jdk'], { ensure => 'installed'})
  $scaladeb = 'scala-2.12.10.deb'
  $releasename = 'spark-3.1.2-bin-hadoop3.2.tgz'
  $shortrelease = 'spark-3.1.2-bin-hadoop3.2'
  $scalapart = ["${scaladeb}.partaa",
    "${scaladeb}.partab",
    "${scaladeb}.partac"]
  $sparkpart = ["${releasename}.partaa",
    "${releasename}.partab",
    "${releasename}.partac",
    "${releasename}.partad",
    "${releasename}.partae"]
  # Reassemble both archives in /tmp from their split parts.
  $pkgtobuild = [[$scalapart, $scaladeb], [$sparkpart, $releasename]]
  $pkgtobuild.each |Array $pkg| {
    apache_spark_rce::cpandbuild($pkg[0], $pkg[1])
  }
  # We run older versions of debian, for now source from local deb file.
  # The apt provider cannot install from a local file `source`; dpkg can,
  # and dpkg does not support `ensure => latest`, so pin to installed.
  package { 'scala':
    ensure   => installed,
    provider => dpkg,
    source   => "/tmp/${scaladeb}",
    require  => Exec["rebuild-${scaladeb}"],
  }
  exec { 'unpack-spark':
    cwd     => '/tmp',
    command => "tar -xf ${releasename}",
    # tar extracts to /tmp/${shortrelease} (not /tmp/spark), and the
    # directory is moved away below — guard on the final install location
    # so this exec is idempotent across agent runs.
    creates => '/usr/local/spark',
    require => Exec["rebuild-${releasename}"],
  }
  -> exec { 'move-spark':
    cwd     => '/tmp',
    command => "mv /tmp/${shortrelease} /usr/local/spark/",
    creates => '/usr/local/spark',
  }
}

View File

@@ -0,0 +1,15 @@
# Class: apache_spark_rce::service
# Installs the systemd unit and keeps the Spark master service running.
#
class apache_spark_rce::service {
  # Unit files should be 0644: systemd warns about (and may ignore)
  # world-writable/executable units, and 0777 weakens the box beyond the
  # intended Spark RCE vulnerability.
  file { '/etc/systemd/system/spark.service':
    source => 'puppet:///modules/apache_spark_rce/spark.service',
    owner  => 'root',
    mode   => '0644',
  }
  # Subscribe so the service is (re)started after the unit file is
  # created or changed, and ordering against the file is guaranteed.
  service { 'spark':
    ensure    => running,
    enable    => true,
    subscribe => File['/etc/systemd/system/spark.service'],
  }
}

View File

@@ -0,0 +1,69 @@
<?xml version="1.0"?>
<vulnerability xmlns="http://www.github/cliffe/SecGen/vulnerability"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.github/cliffe/SecGen/vulnerability">
<name>Apache Spark Unauthenticated Command Injection</name>
<author>James Davis</author>
<module_license>MIT</module_license>
<description>The Apache Spark UI offers the possibility to enable ACLs via the configuration
option spark.acls.enable.
With an authentication filter, this checks whether a user has access permissions to view or
modify the application.
If ACLs are enabled, a code path in HttpSecurityFilter can allow someone to perform
impersonation by providing an arbitrary user name.
A malicious user might then be able to reach a permission check function that will ultimately
build a Unix shell command based on their input, and execute it.
This will result in arbitrary shell command execution as the user Spark is currently running as.
This affects Apache Spark versions 3.0.3 and earlier, versions 3.1.1 to 3.1.2, and versions
3.2.0 to 3.2.1.</description>
<type>http</type>
<privilege>root_rwx</privilege>
<access>remote</access>
<platform>linux</platform>
<difficulty>low</difficulty>
<read_fact>port</read_fact>
<read_fact>strings_to_leak</read_fact>
<read_fact>leaked_filenames</read_fact>
<default_input into="port">
<value>8080</value>
</default_input>
<!-- flags or other secrets exposed after exploitation -->
<default_input into="strings_to_leak">
<generator type="message_generator" />
</default_input>
<default_input into="leaked_filenames">
<generator type="filename_generator" />
</default_input>
<!--optional vulnerability details-->
<cve>CVE-2022-33891</cve>
<cvss_base_score>8.8</cvss_base_score>
<cvss_vector>CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H</cvss_vector>
<software_name>Apache Spark</software_name>
<software_license>Apache License 2.0</software_license>
<reference>
https://packetstormsecurity.com/files/168309/Apache-Spark-Unauthenticated-Command-Injection.html</reference>
<reference>
https://github.com/rapid7/metasploit-framework/blob/master/modules/exploits/linux/http/apache_spark_rce_cve_2022_33891.rb</reference>
<reference>https://spark.apache.org/docs/3.1.2/</reference>
<reference>https://archive.apache.org/dist/spark/spark-3.1.2/spark-3.1.2-bin-hadoop3.2.tgz</reference>
<!--optional hints-->
<hint>Scan the servers for other machines </hint>
<requires>
<module_path>.*java 11 (OpenJDK)*</module_path>
</requires>
<requires>
<module_path>.*scala 2.12*</module_path>
</requires>
</vulnerability>