
add jobs (to be completed)

xubo, 1 year ago
commit 3f7065f772
7 changed files with 241 additions and 4 deletions
  1. config.yaml (+5, -0)
  2. livepocket/302_poc_2.py (+6, -4)
  3. tools/__init__.py (+10, -0)
  4. tools/logger_util.py (+90, -0)
  5. tools/redis_pool.py (+31, -0)
  6. tools/redis_util.py (+84, -0)
  7. tools/utils.py (+15, -0)

+ 5 - 0
config.yaml

@@ -0,0 +1,5 @@
+redis:
+  host: 'localhost'
+  port: 6379
+  db: 0
+  timeout: 600

+ 6 - 4
livepocket/302_poc_2.py

@@ -11,7 +11,7 @@ from urllib import parse
 
 import requests
 
-from tools.utils import get_random_browser, get_proxy_ip, FileWriter
+from tools.utils import get_random_browser, get_proxy_ip, FileWriter, get_random_proxy_at_redis
 
 
 def poc_2(account, passwd, target_url, log_name, single_proxy):
@@ -227,10 +227,12 @@ def process_running():
     threads = []
     for account in account_list:
        # Randomly chosen pagination parameters
-        page = random.randint(1, 5)
-        page_size = random.randint(10, 50)
-        single_proxy = get_proxy_ip(page, page_size)
+        # page = random.randint(1, 5)
+        # page_size = random.randint(10, 50)
+        # single_proxy = get_proxy_ip(page, page_size)
 
+        # Fetch the proxy directly from Redis
+        single_proxy = get_random_proxy_at_redis()
         thread = threading.Thread(target=process_account, args=(account, log_name, single_proxy))
         threads.append(thread)
         thread.start()
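
For orientation, a minimal sketch of the flow this change assumes: save_proxy_ip (added to tools/utils.py below) is expected to have pre-stored proxies under keys named proxy_0, proxy_1, …, and each worker thread now draws one at random instead of calling the webshare API per account. The connection parameters mirror config.yaml; the key pattern follows get_random_proxy_at_redis.

import redis

# Quick check of how many proxies the pre-storing job has left in Redis
r = redis.Redis(host='localhost', port=6379, db=0)
print(f"{len(r.keys('proxy_*'))} proxies available for the worker threads")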

+ 10 - 0
tools/__init__.py

@@ -0,0 +1,10 @@
+import yaml
+
+from tools.logger_util import Logger
+from tools.redis_util import redis_util
+
+# Shared logger instance
+loggerKit = Logger(None).get_logger
+
+# Shared Redis client
+redis_client = redis_util()
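
A minimal usage sketch of the two module-level singletons exposed here (the log message and key name are illustrative):

from tools import loggerKit, redis_client

# Log through the shared loguru instance configured in tools/logger_util.py
loggerKit.info("proxy worker started")

# Cache a value through the shared Redis client (redis_util.set applies a 6-hour expiry by default)
redis_client.set("jobs:last_started", "302_poc_2")
loggerKit.info("last started job: {}", redis_client.get("jobs:last_started"))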

+ 90 - 0
tools/logger_util.py

@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+"""
+Wrapper around loguru; import this module and use it directly.
+# Current file: logger_util.py
+"""
+
+import datetime
+import os
+from functools import wraps
+
+import loguru
+
+
+# Decorator that turns a class into a singleton
+def singleton_class_decorator(cls):
+    """
+    Singleton class decorator.
+    """
+    # Dictionary defined inside the decorator to hold class instances.
+    _instance = {}
+
+    # Wrapper around the decorated class
+    @wraps(cls)
+    def wrapper_class(*args, **kwargs):
+        # If no instance of the class exists yet, create one
+        if cls not in _instance:
+            # and store the newly created instance in the dictionary
+            _instance[cls] = cls(*args, **kwargs)
+        # Otherwise return the cached instance directly
+        return _instance[cls]
+
+    # Return the wrapped class
+    return wrapper_class
+
+
+@singleton_class_decorator
+class Logger:
+    def __init__(self, log_path):
+        self.logger_add(log_path)
+
+    @staticmethod
+    def get_project_path(project_path=None):
+        if project_path is None:
+            # Absolute, resolved path of the current project;
+            # '.' is the current directory, '..' its parent
+            project_path = os.path.realpath('.')
+        # Return the project path
+        return project_path
+
+    def get_log_path(self, log_path):
+        # Project directory
+        project_path = self.get_project_path(log_path)
+        # Project log directory
+        project_log_dir = os.path.join(project_path, 'logs')
+        # Log file name
+        project_log_filename = 'rpa-terminal_{}.log'.format(datetime.date.today())
+        # Full log file path
+        project_log_path = os.path.join(project_log_dir, project_log_filename)
+        # Return the log path
+        return project_log_path
+
+    def logger_add(self, log_path):
+        loguru.logger.add(
+            sink=self.get_log_path(log_path),
+            # Rotate the log file daily at midnight
+            rotation='00:00',
+            # Keep rotated files for one week
+            retention='1 week',
+            # Compress rotated files as zip
+            compression='zip',
+            # File encoding
+            encoding="utf-8",
+            # Makes logging calls non-blocking
+            enqueue=True,
+            # Extended traceback on errors
+            backtrace=True,
+            # Include variable values in tracebacks
+            diagnose=True,
+            # Catch errors raised inside the sink
+            catch=True,
+            # Serialize each record as JSON
+            serialize=True,
+            # Log record format
+            format="{time:YYYY-MM-DD HH:mm:ss} {level} From {module}.{function} : {message}"
+        )
+
+    # With @property the method can be read like an attribute, without ().
+    @property
+    def get_logger(self):
+        return loguru.logger
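
As a quick illustration of the singleton decorator above (the second path is illustrative; repeat constructions return the cached instance, so the loguru sink is only added once):

from tools.logger_util import Logger

first = Logger(None)               # adds the sink under ./logs/rpa-terminal_<date>.log
second = Logger('/tmp/elsewhere')  # returns the cached instance; no second sink is added
assert first is second

first.get_logger.info("singleton logger ready")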

+ 31 - 0
tools/redis_pool.py

@@ -0,0 +1,31 @@
+import redis
+import yaml
+
+
+class redis_pool:
+    instance = None
+
+    def __init__(self):
+        # Read the YAML config file
+        with open('config.yaml', 'r') as file:
+            config = yaml.load(file, Loader=yaml.FullLoader)
+
+        pool_timeout = config['redis']['timeout']
+
+        self.pool = redis.BlockingConnectionPool(host=config['redis']['host'],
+                                                 port=config['redis']['port'],
+                                                 password=config['redis'].get('password'),  # optional; config.yaml may omit it
+                                                 db=config['redis']['db'],
+                                                 decode_responses=True,
+                                                 max_connections=6,
+                                                 timeout=pool_timeout)
+
+    def __get_connection(self):
+        conn = redis.StrictRedis(connection_pool=self.pool)
+        return conn
+
+    @classmethod
+    def get_conn(cls):
+        if redis_pool.instance is None:
+            redis_pool.instance = redis_pool()
+        return redis_pool.instance.__get_connection()
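
For reference, a short sketch of how the pooled connection is obtained (key and value are illustrative; every call shares the single BlockingConnectionPool created on first use):

from tools.redis_pool import redis_pool

conn = redis_pool.get_conn()
conn.set("healthcheck", "ok")
print(conn.get("healthcheck"))  # "ok" -- decode_responses=True returns str rather than bytes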

+ 84 - 0
tools/redis_util.py

@@ -0,0 +1,84 @@
+"""
+redis工具类
+"""
+import redis
+
+from tools import redis_pool
+
+
+class redis_util(object):
+
+    def __init__(self):
+        # 连接redis
+        # self.__redis = redis.StrictRedis(host=host, password=password, port=port)
+        self.__redis = redis_pool.redis_pool.get_conn()
+
+    # 设置key-value
+    def set(self, key, value, timeout=6 * 60 * 60):
+        self.__redis.set(key, value)
+        # key超时时间
+        self.__redis.expire(name=key, time=timeout)
+
+    # 获取key-value
+    def get(self, key):
+        return self.__redis.get(key)
+
+    # 删除key-value
+    def delete(self, key):
+        return self.__redis.delete(key)
+
+    # 判断key是否存在
+    def is_exists_key(self, key):
+        # 返回1存在,0不存在
+        return self.__redis.exists(key)
+
+    # 添加集合操作
+    def add_set(self, key, value):
+        # 集合中存在该元素则返回0,不存在则添加进集合中,并返回1
+        # 如果key不存在,则创建key集合,并添加元素进去,返回1
+        return self.__redis.sadd(key, value)
+
+    # 判断value是否在key集合中
+    def is_insert(self, key, value):
+        """判断value是否在key集合中,返回布尔值"""
+        return self.__redis.sismember(key, value)
+
+    # 模糊匹配key_prefix
+    def match_pattern_prefix(self, key_prefix):
+        """模糊匹配key_prefix"""
+        return self.__redis.keys(pattern=f"{key_prefix}*")
+
+    # 模糊匹配key_suffix
+    def match_pattern_suffix(self, key_suffix):
+        """模糊匹配key_suffix"""
+        return self.__redis.keys(pattern=f"*{key_suffix}")
+
+    # 模糊匹配key
+    def match_pattern_key(self, key):
+        """模糊匹配key"""
+        return self.__redis.keys(pattern=f"*{key}*")
+
+    # 自增
+    def incr(self, name, amount):
+        """模糊匹配key"""
+        return self.__redis.incr(name, amount)
+
+    # 对list类型进行push
+    def left_push(self, name, value):
+        """模糊匹配key"""
+        return self.__redis.lpush(name, value)
+
+    # 获取list 对应key
+    def lrange(self, key, start_index, end_index):
+        return self.__redis.lrange(key, start_index, end_index)
+
+    # 获取list 对应key
+    def set_nx(self, key, value, timeout=6 * 60 * 60):
+        set_true = self.__redis.setnx(key, value)
+        if set_true:
+            self.__redis.expire(name=key, time=timeout)
+        return set_true
+
+    # 关闭连接
+    def close(self):
+        self.__redis.close()
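
A hedged usage sketch of the wrapper, with set_nx acting as a simple expiring lock (key names are illustrative):

from tools.redis_util import redis_util

r = redis_util()
# set_nx only writes when the key is absent, so it can guard a once-per-run job
if r.set_nx("lock:save_proxy_ip", "1", timeout=60):
    r.left_push("jobs:done", "save_proxy_ip")
    print(r.lrange("jobs:done", 0, -1))
r.close()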

+ 15 - 0
tools/utils.py

@@ -7,6 +7,7 @@ from random import Random
 
 import redis
 import requests
+import schedule
 
 
 # Randomly generate a token
@@ -135,6 +136,11 @@ def get_proxy_ip(page, page_size):
         return None, None
 
 
+"""
+Pre-store the proxies half an hour in advance each day (scheduled to run at 08:30 every morning).
+"""
+
+
 def save_proxy_ip(page, page_size):
     response = requests.get(
         f"https://proxy.webshare.io/api/v2/proxy/list/?mode=direct&page={page}&page_size={page_size}",
@@ -160,6 +166,15 @@ def save_proxy_ip(page, page_size):
                 i = i + 1
 
 
+def get_random_proxy_at_redis(total=None):
+    # Connect to Redis and pick a pre-stored proxy at random; with no total given (as in 302_poc_2.py), count the proxy_* keys
+    r = redis.Redis(host='localhost', port=6379, db=0)
+    random_index = random.randint(0, (total or len(r.keys('proxy_*'))) - 1)
+    single_proxy = r.get(f'proxy_{str(random_index)}')
+
+    return single_proxy
+
+
 def process_account(account_number):
     print(f"Processing account {account_number}")
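
The docstring added above says the proxy list should be pre-stored every morning at 08:30, and schedule is now imported but not yet wired up (the commit message marks this as still to be completed). A hedged sketch of how that job could look with the schedule library; the page and page_size values are illustrative:

import time

import schedule

from tools.utils import save_proxy_ip


def refresh_proxy_pool():
    # Illustrative pagination; the real job presumably reuses the project's own ranges
    save_proxy_ip(page=1, page_size=50)


# Pre-store the proxy list every day at 08:30, as the docstring suggests
schedule.every().day.at("08:30").do(refresh_proxy_pool)

while True:
    schedule.run_pending()
    time.sleep(30)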