MokiBox 2 лет назад
Commit
7eba4d5eac
7 измененных файлов с 213 добавлено и 0 удалено
  1. 42 0
      Dockerfile
  2. 8 0
      Main.py
  3. 120 0
      crawl.py
  4. 1 0
      data.json
  5. 35 0
      reply.py
  6. BIN
      requirements.txt
  7. 7 0
      start.sh

+ 42 - 0
Dockerfile

@@ -0,0 +1,42 @@
+FROM ubuntu
+
+# Set the container timezone to UTC+8 (Asia/Shanghai).
+ENV TZ=Asia/Shanghai
+RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
+
+# Install nginx.
+RUN apt-get update && \
+    apt-get install -y nginx && \
+    rm -rf /var/lib/apt/lists/*
+
+# Install Python 3.10 from the deadsnakes PPA and expose it as `python`.
+RUN apt-get update && \
+    apt-get install -y software-properties-common && \
+    add-apt-repository ppa:deadsnakes/ppa && \
+    apt-get update && \
+    apt-get install -y python3.10 python3-pip && \
+    ln -s /usr/bin/python3.10 /usr/local/bin/python && \
+    rm -rf /var/lib/apt/lists/*
+
+# Switch pip to the Tsinghua mirror (faster inside mainland China).
+RUN pip install -i https://pypi.tuna.tsinghua.edu.cn/simple pip -U && \
+    pip config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple && \
+    pip config set install.trusted-host pypi.tuna.tsinghua.edu.cn
+
+# Create and switch to the working directory.
+WORKDIR /app
+
+# Copy the application source (including the start script).
+COPY . /app
+
+# Make the start script executable.
+RUN chmod +x /app/start.sh
+
+# Install Python dependencies.
+# NOTE(review): `pip` here is the distro python3's pip, which is not
+# necessarily Python 3.10's — confirm the deps land in the interpreter
+# that actually runs Main.py (`/usr/local/bin/python` -> python3.10).
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Create runtime directories.
+RUN mkdir -p /app/app /app/html
+
+# Launch via the start script.
+ENTRYPOINT ["/app/start.sh"]

+ 8 - 0
Main.py

@@ -0,0 +1,8 @@
+import threading
+
+import crawl
+import reply
+
+# Entry point: starts the blocking HTTP reply server.
+# NOTE(review): `threading` is imported but never used, and the crawl
+# call below is commented out — confirm whether a background crawl
+# thread was intended here.
+if __name__ == '__main__':
+    # crawl.getData()
+    reply.reply()

+ 120 - 0
crawl.py

@@ -0,0 +1,120 @@
+import time
+
+import requests
+from bs4 import BeautifulSoup
+import re
+import json
+import datetime
+import socket
+import socks
+
+# Session cookies, loaded from data.json at import time and refreshed
+# by flushCookie(). NOTE(review): `re` is imported but unused here.
+cookies = {}
+
+with open('data.json', 'r') as f:
+    jsonData = json.load(f)
+    cookies = jsonData['cookies']
+
+# Set the SOCKS5 proxy server's IP and port.
+# NOTE(review): the proxy address is hard-coded — consider reading it
+# from an environment variable so the image works outside this LAN.
+socks.set_default_proxy(socks.SOCKS5, "172.16.8.1", 1080)
+
+# Monkey-patch the socket module so every TCP connection in this
+# process is routed through the proxy.
+socket.socket = socks.socksocket
+
+def flushCookie():
+    """Log in to the site and refresh the cached cookies in data.json.
+
+    Side effects: updates the module-level `cookies` and `jsonData`,
+    and persists them via flushJson().
+
+    NOTE(review): retries by unbounded recursion — a persistent failure
+    will eventually raise RecursionError; a bounded loop with backoff
+    would be safer.
+    SECURITY(review): account credentials are hard-coded in source (and
+    published in this commit) — rotate them and load from env/secrets.
+    """
+    reqData = {
+        'email': 'ftebox@qq.com',
+        'passwd': '369958Na',
+        'code': ''
+    }
+    headers = {
+        'User-Agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Mobile Safari/537.36',
+        'Referer': 'https://www.freewhale.co/auth/login',
+        'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
+    }
+
+    try:
+        # Send the login request.
+        # NOTE(review): verify=False disables TLS certificate checks.
+        response = requests.post('https://www.freewhale.co/auth/login', data=reqData, headers=headers, verify=False)
+        # Check whether the login succeeded.
+        if response.status_code == 200 and '"ret":1' in response.text:
+            global cookies
+            global jsonData
+            # Read the cookies from the response.
+            cookies = response.cookies.get_dict()
+            # Store the cookies in jsonData and persist to disk.
+            jsonData['cookies'] = cookies;
+            flushJson("cookie已刷新!");
+        else:
+            # On failure, retry the login (recursive, no limit).
+            flushCookie();
+    except requests.exceptions.RequestException as e:
+        # On a request exception, log it and retry the login.
+        print(e)
+        flushCookie();
+
+
+def getData():
+    """Scrape the user dashboard and cache the parsed fields in data.json.
+
+    Side effects: may refresh cookies via flushCookie(); updates the
+    module-level `jsonData` and persists it via flushJson().
+
+    NOTE(review): retries by unbounded recursion on redirects and
+    request exceptions — no retry limit or backoff.
+    """
+    url = 'https://www.freewhale.co/user'  # Replace with the actual link to scrape.
+    global cookies
+    try:
+        # NOTE(review): verify=False disables TLS certificate checks.
+        response = requests.get(url, cookies=cookies, verify=False)
+        # A redirect means the session expired: refresh the cookie and retry.
+        if response.history:
+            flushCookie();
+            getData();
+    # On a request exception, log it and recurse to retry.
+    except requests.exceptions.RequestException as e:
+        print(e)
+        getData();
+    else:
+        html = response.content
+        soup = BeautifulSoup(html, 'html.parser')
+
+        # NOTE(review): index-based scraping (item[2], item2[4], ...) is
+        # brittle — any page layout change silently breaks these lookups.
+        item = soup.findAll(class_='nodemain')
+        item1 = soup.findAll('code')
+        item2 = soup.findAll('input')
+        # Number of online devices.
+        zxsb = item[2].find('dd').text.strip()
+        # Last-used timestamp.
+        sysj = item[2].findAll('div')[4].text[6:].strip()
+        # Remaining traffic quota.
+        syll = item1[2].text.strip()
+        # Expiry date.
+        dqsj = item[0].findAll('div')[4].text[7:].strip()
+        # SSR subscription links (primary and backup).
+        ssr = item2[0]['value'].strip()
+        ssr_bak = item2[1]['value'].strip()
+        # Clash subscription link.
+        clash = item2[2]['value'].strip()
+        # V2Ray subscription links (primary and backup).
+        v2ray = item2[4]['value'].strip()
+        v2ray_bak = item2[5]['value'].strip()
+        global jsonData
+        jsonData['data'] = {
+            'code': 1,
+            'message': 'success!',
+            'data': {
+                'zxsb': zxsb,
+                'sysj': sysj,
+                'syll': syll,
+                'dqsj': dqsj,
+                'v2rayurl': v2ray,
+                'v2rayBakurl': v2ray_bak,
+                'SSRurl': ssr,
+                'SSRBakurl': ssr_bak,
+                'Clash': clash
+            }
+        }
+        # Record when the snapshot was taken (epoch seconds); readJson()
+        # in reply.py uses this to decide when to refresh.
+        jsonData['saveTime'] = time.time();
+        flushJson("数据已刷新");
+
+
+def flushJson(msg):
+    """Persist the module-level jsonData to data.json and print a
+    timestamped log line containing *msg*.
+    """
+    global jsonData;
+    with open('data.json', 'w') as f:
+        json.dump(jsonData, f)
+    # Current date and time.
+    now = datetime.datetime.now()
+    # Human-readable timestamp for the log line.
+    formatted_time = now.strftime('%Y-%m-%d %H:%M:%S')
+    print('{} '.format(formatted_time) + msg)

+ 1 - 0
data.json

@@ -0,0 +1 @@
+{"cookies": {"email": "ftebox%40qq.com", "expire_in": "1683358780", "ip": "70f0573a59d0c8815863803521ad9830", "key": "a7a02dfa4e72850aaae94d9f27073a1a7236eee8d4a29", "uid": "213332"}, "data": {"code": 1, "message": "success!", "data": {"zxsb": "7 / 15", "sysj": "2023-05-06 10:22:38", "syll": "895.95GB", "dqsj": "2025-04-22 22:05:42", "v2rayurl": "https://api.qiduo.eu.org/link/yMxLpBzlvJ4EOVRi?sub=3", "v2rayBakurl": "https://api-beta.ohmy.cat/link/yMxLpBzlvJ4EOVRi?sub=3", "SSRurl": "https://sub.qiduo.eu.org/link/yMxLpBzlvJ4EOVRi?mu=1", "SSRBakurl": "https://sub-beta.ohmy.cat/link/yMxLpBzlvJ4EOVRi?mu=1", "Clash": "https://sub-api.ohmy.cat/sub?target=clash&url=https%3A%2F%2Fsub.qiduo.eu.org%2Flink%2FyMxLpBzlvJ4EOVRi%3Fmu%3D1%7Chttps%3A%2F%2Fapi.qiduo.eu.org%2Flink%2FyMxLpBzlvJ4EOVRi%3Fsub%3D3&insert=false&config=https%3A%2F%2Fraw.githubusercontent.com%2FACL4SSR%2FACL4SSR%2Fmaster%2FClash%2Fconfig%2FACL4SSR_NoAuto.ini&emoji=true&list=false&udp=true&tfo=false&expand=true&scv=false&fdn=false&sort=false&clash.doh=true&new_name=true"}}, "saveTime": 1683339765.206976}

+ 35 - 0
reply.py

@@ -0,0 +1,35 @@
+import json
+from http.server import HTTPServer, BaseHTTPRequestHandler
+import crawl
+import schedule
+import time
+import datetime
+
+# Cached payload served to HTTP clients; populated by readJson().
+# NOTE(review): `schedule` is imported but unused in this file.
+data = {}
+
+def readJson():
+    """Load the cached payload from data.json into the module-level
+    `data`, triggering a re-crawl when the cache is older than 5 minutes.
+    """
+    with open('data.json', 'r') as f:
+        global data
+        jsonData = json.load(f)
+        data = jsonData['data']
+        saveTime = jsonData['saveTime']
+    # Refresh if the cached data is more than 5 minutes old.
+    # NOTE(review): getData() rewrites data.json, but `data` here still
+    # holds the stale payload for the current request — confirm whether
+    # the response is meant to reflect the refreshed snapshot.
+    if (time.time() - saveTime)/60 > 5:
+        crawl.getData();
+    # Current date and time.
+    now = datetime.datetime.now()
+    # Log the read with a formatted timestamp.
+    formatted_time = now.strftime('%Y-%m-%d %H:%M:%S')
+    print('{} 读取数据!'.format(formatted_time))
+
+class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
+    """Serves the cached crawl payload as JSON text on every GET request."""
+
+    def do_GET(self):
+        # Refresh the module-level `data` cache before responding.
+        readJson();
+        self.send_response(200)
+        self.send_header('Content-type', 'text/plain; charset=utf-8')
+        self.end_headers()
+        # `json.dumps` already returns str; the extra str() is a no-op.
+        self.wfile.write(str(json.dumps(data)).encode())
+
+def reply():
+    """Start a blocking HTTP server on 0.0.0.0:8456 (never returns)."""
+    httpd = HTTPServer(('0.0.0.0', 8456), SimpleHTTPRequestHandler)
+    httpd.serve_forever()

BIN
requirements.txt


+ 7 - 0
start.sh

@@ -0,0 +1,7 @@
+#!/bin/bash
+
+# Start the Nginx service.
+service nginx start
+
+# Start the Python application in the foreground.
+cd /app/app && python Main.py