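"""Fix correlations for MISP events in parallel.

MISPProcessor.search_events() lists the events to handle, and a thread pool
runs fix_correlation() on each of them against a local SQLite database
(data/procesados.db). Activity is logged to logs/misp_fix.log.
"""
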
import os
import logging
from logging.handlers import RotatingFileHandler
from concurrent.futures import ThreadPoolExecutor, as_completed
from sqlalchemy import event
from db_setup import get_engine, set_sqlite_pragma, create_tables, get_session
from defs import MISPProcessor
import urllib3

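# Disable urllib3 warnings (typically InsecureRequestWarning from unverified HTTPS requests).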
urllib3.disable_warnings()

# Adjust according to your config
import config

os.makedirs(os.path.join(os.getcwd(), "logs"), exist_ok=True)

logging.basicConfig(
    level=logging.INFO,
    handlers=[
        # Rotate misp_fix.log at 250 MiB, keeping up to 10 old files.
        RotatingFileHandler(
            os.path.join(os.getcwd(), "logs", "misp_fix.log"),
            maxBytes=262144000,
            backupCount=10,
        )
    ],
    format='%(asctime)s - %(levelname)s - %(message)s',
)


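# SQLAlchemy sessions are not thread-safe, so each worker opens its own
# short-lived session per event and always closes it afterwards.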
def process_one_event(processor, engine, ev):
    session = get_session(engine)
    try:
        processor.fix_correlation(
            session,
            ev["id"],
            ev["uuid"],
            ev["attribute_count"]
        )
    finally:
        session.close()


def main():
    db_dir = os.path.join(os.getcwd(), "data")
    os.makedirs(db_dir, exist_ok=True)
    db_path = os.path.join(db_dir, "procesados.db")

    # Use connect_args={"check_same_thread": False} inside get_engine() if you want
    # to use threads (this script does run a thread pool).
    engine = get_engine(db_path)
    event.listen(engine, "connect", set_sqlite_pragma)
    create_tables(engine)

    processor = MISPProcessor()

    events_list = processor.search_events()

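    # Process events concurrently; NUM_WORKERS comes from the config module.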
    with ThreadPoolExecutor(max_workers=config.NUM_WORKERS) as executor:
        futures = []
        for ev in events_list:
            f = executor.submit(process_one_event, processor, engine, ev)
            futures.append(f)

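        # Wait until every future has completed. Exceptions raised inside the
        # workers are not re-raised here because Future.result() is never called.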
        for _ in as_completed(futures):
            pass


if __name__ == "__main__":
    main()