add: complete documentation

fix: unused options in redis
fix: compatibility between python 2-3
fix: forgot vars declarations
fix: carry conditions in listing redis keys
fix: listing redis list keys
fix: removed duplicated tasks when they are listed
fix: index number when redis DB are listed
fix: some error levels in log
fix: renamed *proc* -> *tasks* files
fix: added the process manager backend option for 'tasks', with a view to supporting new process managers in the future
This commit is contained in:
cr0hn
2016-02-29 19:14:20 +01:00
parent d1f583af2d
commit ff2abc7b65
22 changed files with 1447 additions and 91 deletions

View File

@@ -8,11 +8,11 @@ from .. import IModule
from ...libs.core.structs import CommonData
from ...libs.core.models import StringField, SelectField
from .cmd_actions import parser_proc_raw_dump, parser_proc_list_process, parser_proc_inject_process
from .proc_remove import action_proc_remove
from .proc_raw_dump import action_proc_raw_dump
from .proc_list_process import action_proc_list_process
from .proc_inject_process import action_proc_inject_process
from .cmd_actions import parser_proc_raw_dump, parser_proc_list_tasks, parser_taks_inject_process
from .tasks_remove import action_proc_remove
from .tasks_raw_dump import action_proc_raw_dump
from .tasks_list_process import action_proc_list_tasks
from .tasks_inject_process import action_task_inject_process
log = logging.getLogger()
@@ -21,6 +21,8 @@ log = logging.getLogger()
class ModuleModel(CommonData):
target = StringField(required=True)
db = StringField(default=None, label="only for Redis: database to use")
process_manager = SelectField(default="celery", choices=[("celery", "Celery")],
label="process manager running in backend")
broker_type = SelectField(default="redis", choices=[
("redis", "Redis server"),
("zmq", "ZeroMQ"),
@@ -40,15 +42,15 @@ class RemoteProcessModule(IModule):
cmd_args=parser_proc_raw_dump,
action=action_proc_raw_dump
),
'list-process': dict(
help="list remote process and their params",
cmd_args=parser_proc_list_process,
action=action_proc_list_process
'list-tasks': dict(
help="list remote tasks and their params",
cmd_args=parser_proc_list_tasks,
action=action_proc_list_tasks
),
'inject': dict(
help="list remote process and their params",
cmd_args=parser_proc_inject_process,
action=action_proc_inject_process
help="inject a new task into broker",
cmd_args=parser_taks_inject_process,
action=action_task_inject_process
),
'remove': dict(
help="remove remote processes in server",

View File

@@ -13,10 +13,11 @@ def parser_proc_raw_dump(parser):
help="although all information be dumped do not stop")
gr.add_argument("-I", dest="interval", type=float, default=4,
help="timeout interval between tow connections")
gr.add_argument("--output", dest="output", help="store dumped information into file")
# ----------------------------------------------------------------------
def parser_proc_list_process(parser):
def parser_proc_list_tasks(parser):
parser.add_argument("-N", "--no-stream", dest="no_stream", action="store_true", default=False,
help="force to not listen until message is received")
@@ -29,7 +30,7 @@ def parser_proc_list_process(parser):
# ----------------------------------------------------------------------
def parser_proc_inject_process(parser):
def parser_taks_inject_process(parser):
gr = parser.add_argument_group("process importing options")
gr.add_argument("-f", "--function-file", dest="function_files", type=str, required=True,

View File

@@ -1,42 +0,0 @@
# -*- coding: utf-8 -*-
import six
import logging
from time import sleep
from kombu import Connection
from .utils import list_remote_process
log = logging.getLogger()
# ----------------------------------------------------------------------
def action_proc_raw_dump(config):
    """Continuously dump raw task information from the remote broker queue.

    Connects to the broker at ``config.broker_type``://``config.target``,
    reads the 'celery' queue and logs every remote process name together
    with its positional arguments.  In streaming mode it polls forever,
    sleeping ``config.interval`` seconds between empty reads; otherwise it
    returns after the first empty read.

    :param config: parsed module options; this function reads
        ``broker_type``, ``target``, ``streaming_mode`` and ``interval``.
        NOTE(review): the arg parser defines ``no_stream``/``interval`` —
        presumably ``streaming_mode`` is derived from them upstream; confirm.
    """
    log.warning(" - Trying to connect with server...")
    # Broker URL, e.g. "redis://host:port" — scheme comes straight from config.
    url = '%s://%s' % (config.broker_type, config.target)
    # with Connection('redis://%s' % REDIS) as conn:
    with Connection(url) as conn:
        # Celery publishes tasks on the 'celery' queue by default.
        in_queue = conn.SimpleQueue('celery')
        while 1:
            for remote_process, remote_args in list_remote_process(config, in_queue):
                # Show info
                log.error("Found process information:")
                log.error("   - Remote process name: '%s'" % remote_process)
                log.error("   - Input parameters:")
                for i, x in enumerate(remote_args):
                    # P<index>: positional argument value
                    log.error("     -> P%s: %s" % (i, x))
            # Queue is empty -> wait
            if config.streaming_mode:
                log.error("No more messages from server. Waiting for %s seconds and try again.." % config.interval)
                sleep(config.interval)
            else:
                log.error("No more messages from server. Exiting...")
                return

View File

@@ -14,10 +14,10 @@ log = logging.getLogger()
# ----------------------------------------------------------------------
def action_proc_inject_process(config):
def action_task_inject_process(config):
if config.function_files is None:
log.warning(" - input .json file with process files is needed")
log.error(" - input .json file with process files is needed")
return
# --------------------------------------------------------------------------
@@ -26,7 +26,7 @@ def action_proc_inject_process(config):
with open(config.function_files, "r") as f:
f_info = json.load(f)
log.warning(" - Building process...")
log.error(" - Building process...")
# Search and inject process
injections = []
@@ -68,7 +68,7 @@ def action_proc_inject_process(config):
with Connection(url) as conn:
in_queue = conn.SimpleQueue('celery')
log.warning(" - Sending processes to '%s'" % config.target)
log.error(" - Sending processes to '%s'" % config.target)
for i, e in enumerate(injections, 1):
log.warning(" %s) %s" % (i, e['task']))

View File

@@ -14,7 +14,7 @@ log = logging.getLogger()
# ----------------------------------------------------------------------
def action_proc_list_process(config):
def action_proc_list_tasks(config):
log.warning(" - Trying to connect with server...")
@@ -29,7 +29,7 @@ def action_proc_list_process(config):
# Get remote process
first_msg = True
while 1:
for remote_process, remote_args in list_remote_process(config, in_queue):
for remote_process, remote_args, _ in list_remote_process(config, in_queue):
if remote_process not in process_info:
process_info[remote_process] = remote_args

View File

@@ -0,0 +1,83 @@
# -*- coding: utf-8 -*-
import six
import csv
import logging
from time import sleep
from kombu import Connection
from .utils import list_remote_process
log = logging.getLogger()
# ----------------------------------------------------------------------
def action_proc_raw_dump(config):
    """Dump raw task information from the remote broker, optionally to CSV.

    Connects to the broker at ``config.broker_type``://``config.target``,
    reads the 'celery' queue and logs each not-yet-seen task (deduplicated
    by message id) with its positional arguments.  If ``config.output`` is
    set, results are appended to a CSV file as well.  In streaming mode it
    polls forever, sleeping ``config.interval`` seconds between empty
    reads; otherwise it returns after the first empty read.

    :param config: parsed module options; this function reads
        ``broker_type``, ``target``, ``output``, ``streaming_mode`` and
        ``interval``.
    """
    log.warning(" - Trying to connect with server...")
    url = '%s://%s' % (config.broker_type, config.target)
    f_output = None
    csv_output = None
    if config.output is not None:
        # Ensure a .csv extension unless the user already provided one.
        fixed_f = "%s.csv" % config.output if ".csv" not in config.output else config.output
        f_output = open(fixed_f, "a")
        csv_output = csv.writer(f_output)
        log.error("   - Storing results at '%s'" % fixed_f)
        # Write first col
        csv_output.writerow([
            "# Task name",
            "Parameters (position#value)"
        ])
    # Message ids already reported, so repeated reads don't duplicate output.
    already_processed = set()
    try:
        # with Connection('redis://%s' % REDIS) as conn:
        with Connection(url) as conn:
            in_queue = conn.SimpleQueue('celery')
            while 1:
                for remote_task, remote_args, task_id in list_remote_process(config, in_queue):
                    # Task already processed?
                    if task_id in already_processed:
                        continue
                    # Track
                    already_processed.add(task_id)
                    # Show info
                    log.error("   Found process information:")
                    log.error("   - Remote tasks name: '%s'" % remote_task)
                    log.error("   - Input parameters:")
                    to_csv = [remote_task]
                    for i, x in enumerate(remote_args):
                        log.error("     -> P%s: %s" % (i, x))
                        # Prepare to store JSON
                        to_csv.append("%s#%s" % (i, x))
                    # Store
                    if csv_output is not None:
                        csv_output.writerow(to_csv)
                # Queue is empty -> wait
                if config.streaming_mode:
                    log.error("     -> No more messages from server. Waiting for %s seconds and try again.." % config.interval)
                    sleep(config.interval)
                else:
                    log.error("     -> No more messages from server. Exiting...")
                    return
    finally:
        # Close file descriptor.  BUG FIX: the original closed the file only
        # after the loop (unreachable — the non-streaming path returns from
        # inside the `with`), and called csv_output.close(), which would
        # raise AttributeError: csv.writer objects have no close() method.
        # Closing the underlying file is sufficient.
        if f_output is not None:
            f_output.close()

View File

@@ -23,4 +23,4 @@ def action_proc_remove(config):
for _ in get_remote_messages(config, in_queue, False):
pass
log.error(" - All processes removed from '%s'" % config.target)
log.error(" - All tasks removed from '%s'" % config.target)

View File

@@ -38,7 +38,7 @@ def get_param_type(value):
except Exception:
return "str"
elif type(value) == str:
elif type(value) in (str, unicode if six.PY2 else ""):
return "str"
else:
return "object"
@@ -138,11 +138,10 @@ def list_remote_process(config, queue):
# Read info
if msg_id not in already_processed:
# remote_process = deserialized['task'].split(".")[-1]
remote_process = deserialized['task']
remote_args = deserialized['args']
# Store as processed
already_processed.add(msg_id)
yield remote_process, remote_args
yield remote_process, remote_args, msg_id