# -*- coding: utf-8 -*-
"""
    proxy.py
    ~~~~~~~~
    ⚡⚡⚡ Fast, Lightweight, Pluggable, TLS interception capable proxy server focused on
    Network monitoring, controls & Application development, testing, debugging.

    :copyright: (c) 2013-present by Abhinav Singh and contributors.
    :license: BSD, see LICENSE for more details.
"""
import time
import argparse
import threading
import multiprocessing
from typing import Any

from proxy.core.work import (
    Work, ThreadlessPool, BaseLocalExecutor, BaseRemoteExecutor,
)
from proxy.common.flag import FlagParser
from proxy.common.backports import NonBlockingQueue


class Task:
    """A unit of work carrying an opaque byte payload.

    Printing on construction is deliberate: it makes dispatched
    payloads visible on stdout so the demo shows work arriving.
    """

    def __init__(self, payload: bytes) -> None:
        # Echo first, then retain the payload on the instance.
        print(payload)
        self.payload = payload
| 30 | + |
| 31 | + |
class TaskWork(Work[Task]):
    """Per-work handler; the framework instantiates one of these for
    each payload received by an executor."""

    @staticmethod
    def create(*args: Any) -> Task:
        """Factory hook: the work core cannot construct our work
        objects itself, so it delegates construction to this method."""
        task = Task(*args)
        return task
| 40 | + |
| 41 | + |
class LocalTaskExecutor(BaseLocalExecutor):
    """Executor variant that receives payloads (e.g. log lines) over a
    non-blocking, in-process queue."""

    def work(self, *args: Any) -> None:
        """Create a work object for the received payload, keyed by its
        creation timestamp.

        NOTE(review): ``int(time.time())`` collides for tasks created
        within the same second, overwriting earlier ``self.works``
        entries — presumably acceptable for this demo; confirm.
        """
        created_at = int(time.time())
        uid = f'{self.iid}-{created_at}'
        self.works[created_at] = self.create(uid, *args)
| 50 | + |
| 51 | + |
class RemoteTaskExecutor(BaseRemoteExecutor):
    """Executor variant that receives payloads over a multiprocessing
    pipe from the dispatching (parent) process."""

    def work(self, *args: Any) -> None:
        """Create a work object for the received payload, keyed by its
        creation timestamp (same scheme as LocalTaskExecutor)."""
        created_at = int(time.time())
        uid = f'{self.iid}-{created_at}'
        self.works[created_at] = self.create(uid, *args)
| 58 | + |
| 59 | + |
def start_local(flags: argparse.Namespace) -> None:
    """Run a LocalTaskExecutor on a daemon thread and keep feeding it
    increasing integer payloads until interrupted with Ctrl+C."""
    work_queue = NonBlockingQueue()
    executor = LocalTaskExecutor(iid=1, work_queue=work_queue, flags=flags)

    worker = threading.Thread(target=executor.run, daemon=True)
    worker.start()

    counter = 0
    try:
        while True:
            work_queue.put(str(counter).encode('utf-8'))
            counter += 1
    except KeyboardInterrupt:
        pass
    finally:
        # Signal the executor loop to stop, then wait for the thread.
        executor.running.set()
        worker.join()
| 78 | + |
| 79 | + |
def start_remote(flags: argparse.Namespace) -> None:
    """Run a RemoteTaskExecutor in a child process and keep feeding it
    increasing integer payloads over a multiprocessing pipe until
    interrupted with Ctrl+C.

    :param flags: Parsed proxy.py flags, forwarded to the executor.
    """
    # Parent sends work down one end of the duplex pipe; the child
    # executor reads it from the other end.
    parent_end, child_end = multiprocessing.Pipe()
    executor = RemoteTaskExecutor(iid=1, work_queue=child_end, flags=flags)

    proc = multiprocessing.Process(target=executor.run)
    proc.daemon = True
    proc.start()

    try:
        i = 0
        while True:
            parent_end.send(('%d' % i).encode('utf-8'))
            i += 1
    except KeyboardInterrupt:
        pass
    finally:
        # Signal the executor loop to stop, then wait for the child.
        executor.running.set()
        proc.join()
        # Fix: close both Connection handles held by this process;
        # previously they were leaked on shutdown.
        parent_end.close()
        child_end.close()
| 99 | + |
| 100 | + |
def start_remote_pool(flags: argparse.Namespace) -> None:
    """Distribute increasing integer payloads round-robin across a pool
    of remote executors until interrupted with Ctrl+C."""
    with ThreadlessPool(flags=flags, executor_klass=RemoteTaskExecutor) as pool:
        counter = 0
        try:
            while True:
                # Round-robin across the per-worker pipes.
                pool.work_queues[counter % flags.num_workers].send(
                    str(counter).encode('utf-8'),
                )
                counter += 1
        except KeyboardInterrupt:
            pass
| 111 | + |
| 112 | + |
if __name__ == '__main__':
    # HTTP proxying is unnecessary for this demo, so disable it.
    demo_flags = FlagParser.initialize(
        ['--disable-http-proxy'],
        work_klass=TaskWork,
    )
    # Pick one of the three demo entry points:
    start_remote_pool(demo_flags)
    # start_remote(demo_flags)
    # start_local(demo_flags)