Merge pull request #43 from filecoin-project/feat/composer
Add "composer" configuration UI
Commit: f72616981e
.gitignore (vendored) — 3 additions

@@ -1 +1,4 @@
 lotus
+venv/
+__pycache__/
+.ipynb_checkpoints/
composer/Dockerfile (new file) — 29 additions

FROM golang:1.14.4-buster as tg-build

ARG TESTGROUND_REF="oni"
WORKDIR /usr/src
RUN git clone https://github.com/testground/testground.git
RUN cd testground && git checkout $TESTGROUND_REF && go build .

FROM python:3.8-buster

WORKDIR /usr/src/app

COPY --from=tg-build /usr/src/testground/testground /usr/bin/testground

RUN mkdir /composer && chmod 777 /composer
RUN mkdir /testground && chmod 777 /testground

ENV HOME /composer
ENV TESTGROUND_HOME /testground
ENV LISTEN_PORT 5006
ENV TESTGROUND_DAEMON_HOST host.docker.internal

VOLUME /testground/plans

COPY requirements.txt ./
RUN pip install -r requirements.txt
COPY . .

CMD panel serve --address 0.0.0.0 --port $LISTEN_PORT composer.ipynb
composer/Makefile (new file) — 4 additions

all: docker

docker:
	docker build -t "iptestground/composer:latest" .
composer/README.md (new file) — 63 additions

# Testground Composer

This is a work-in-progress UI for configuring and running testground compositions.

The app code lives in [./app](./app), and there's a thin Jupyter notebook shell in [composer.ipynb](./composer.ipynb).

## Running

You can run the app either in docker or in a local python virtualenv. Docker is recommended unless you're hacking on the code for Composer itself.

### Running with docker

Run the `./composer.sh` script to build a container with the latest source and run it. The first build will take a little while, since it needs to build testground and fetch a number of python dependencies.

You can skip the build by setting `SKIP_BUILD` to any non-empty value (e.g. `SKIP_BUILD=true`) when running `composer.sh`, and you can rebuild manually with `make docker`.

The contents of `$TESTGROUND_HOME/plans` will be synced to a temporary directory and mounted read-only into the container.

After building and starting the container, the script will open a browser to the composer UI.

You should be able to load an existing composition or create a new one from one of the plans in `$TESTGROUND_HOME/plans`.

Right now docker only supports the standalone webapp UI; to run the UI in a Jupyter notebook, see below.

### Running with local python

To run without docker, make a python3 virtual environment somewhere and activate it:

```shell
# make a virtualenv called "venv" in the current directory
python3 -m venv ./venv

# activate (bash/zsh):
source ./venv/bin/activate

# activate (fish):
source ./venv/bin/activate.fish
```

Then install the python dependencies:

```shell
pip install -r requirements.txt
```

And start the UI:

```shell
panel serve composer.ipynb
```

That will start the standalone webapp UI. If you want a Jupyter notebook instead, run:

```shell
jupyter notebook
```

and open `composer.ipynb` in the Jupyter file picker.
composer/app/app.py (new file) — 94 additions

import param
import panel as pn
import toml

from .util import get_plans, get_manifest
from .composition import Composition
from .runner import TestRunner

STAGE_WELCOME = 'Welcome'
STAGE_CONFIG_COMPOSITION = 'Configure'
STAGE_RUN_TEST = 'Run'


class Welcome(param.Parameterized):
    composition = param.Parameter()
    composition_picker = pn.widgets.FileInput(accept='.toml')
    plan_picker = param.Selector()
    ready = param.Boolean()

    def __init__(self, **params):
        super().__init__(**params)
        self.composition_picker.param.watch(self._composition_updated, 'value')
        self.param.watch(self._plan_selected, 'plan_picker')
        self.param['plan_picker'].objects = ['Select a Plan'] + get_plans()

    def panel(self):
        tabs = pn.Tabs(
            ('New Composition', self.param['plan_picker']),
            ('Existing Composition', self.composition_picker),
        )

        return pn.Column(
            "Either choose an existing composition or select a plan to create a new composition:",
            tabs,
        )

    def _composition_updated(self, *args):
        print('composition updated')
        content = self.composition_picker.value.decode('utf8')
        comp_toml = toml.loads(content)
        manifest = get_manifest(comp_toml['global']['plan'])
        self.composition = Composition.from_dict(comp_toml, manifest=manifest)
        print('existing composition: {}'.format(self.composition))
        self.ready = True

    def _plan_selected(self, evt):
        if evt.new == 'Select a Plan':
            return
        print('plan selected: {}'.format(evt.new))
        manifest = get_manifest(evt.new)
        self.composition = Composition(manifest=manifest, add_default_group=True)
        print('new composition: ', self.composition)
        self.ready = True


class ConfigureComposition(param.Parameterized):
    composition = param.Parameter()

    @param.depends('composition')
    def panel(self):
        if self.composition is None:
            return pn.Pane("no composition :(")
        print('composition: ', self.composition)
        return self.composition.panel()


class WorkflowPipeline(object):
    def __init__(self):
        stages = [
            (STAGE_WELCOME, Welcome(), dict(ready_parameter='ready')),
            (STAGE_CONFIG_COMPOSITION, ConfigureComposition()),
            (STAGE_RUN_TEST, TestRunner()),
        ]

        self.pipeline = pn.pipeline.Pipeline(debug=True, stages=stages)

    def panel(self):
        return pn.Column(
            pn.Row(
                self.pipeline.title,
                self.pipeline.network,
                self.pipeline.prev_button,
                self.pipeline.next_button,
            ),
            self.pipeline.stage,
            sizing_mode='stretch_width',
        )


class App(object):
    def __init__(self):
        self.workflow = WorkflowPipeline()

    def ui(self):
        return self.workflow.panel().servable("Testground Composer")
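For anyone poking at the app module outside of `panel serve`, here is a minimal sketch (not part of this PR) of launching the same UI from a plain Python script. It assumes the dependencies from `requirements.txt` are installed, `$TESTGROUND_HOME/plans` is populated, and the hypothetical script lives next to the `app` package (e.g. `composer/serve.py`):

```python
import panel as pn

from app.app import App

pn.extension()  # load panel's frontend resources

if __name__ == '__main__':
    # pn.serve starts a Bokeh/Tornado server, roughly equivalent to
    # `panel serve composer.ipynb` but driven from a script.
    pn.serve(App().ui(), port=5006, show=True)
```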
composer/app/composition.py (new file) — 328 additions

import param
import panel as pn
import toml

from .util import get_manifest, print_err


def value_dict(parameterized, renames=None, stringify=False):
    d = dict()
    if renames is None:
        renames = dict()
    for name, p in parameterized.param.objects().items():
        if name == 'name':
            continue
        if name in renames:
            name = renames[name]
        val = p.__get__(parameterized, type(p))
        if isinstance(val, param.Parameterized):
            try:
                val = val.to_dict()
            except:
                val = value_dict(val, renames=renames)
        if stringify:
            val = str(val)
        d[name] = val
    return d


def make_group_params_class(testcase):
    """Returns a subclass of param.Parameterized whose params are defined by the
    'params' dict inside of the given testcase dict"""
    tc_params = dict()
    for name, p in testcase.get('params', {}).items():
        tc_params[name] = make_param(p)

    name = 'Test Params for testcase {}'.format(testcase.get('name', ''))
    cls = param.parameterized_class(name, tc_params, GroupParamsBase)
    return cls


def make_param(pdef):
    """
    :param pdef: a parameter definition dict from a testground plan manifest
    :return: a param.Parameter that has the type, bounds, default value, etc from the definition
    """
    typ = pdef['type'].lower()
    if typ == 'int':
        return num_param(pdef, cls=param.Integer)
    elif typ == 'float':
        return num_param(pdef)
    elif typ.startswith('bool'):
        return bool_param(pdef)
    else:
        return str_param(pdef)


def num_param(pdef, cls=param.Number):
    lo = pdef.get('min', None)
    hi = pdef.get('max', None)
    bounds = (lo, hi)
    if lo == hi and lo is not None:
        bounds = None

    default_val = pdef.get('default', None)
    if default_val is not None:
        if cls == param.Integer:
            default_val = int(default_val)
        else:
            default_val = float(default_val)
    return cls(default=default_val, bounds=bounds, doc=pdef.get('desc', ''))


def bool_param(pdef):
    default_val = str(pdef.get('default', 'false')).lower() == 'true'
    return param.Boolean(
        doc=pdef.get('desc', ''),
        default=default_val
    )


def str_param(pdef):
    return param.String(
        default=pdef.get('default', ''),
        doc=pdef.get('desc', ''),
    )


class Base(param.Parameterized):
    @classmethod
    def from_dict(cls, d):
        return cls(**d)

    def to_dict(self):
        return value_dict(self)


class GroupParamsBase(Base):
    def to_dict(self):
        return value_dict(self, stringify=True)


class Metadata(Base):
    composition_name = param.String()
    author = param.String()

    @classmethod
    def from_dict(cls, d):
        d['composition_name'] = d.get('name', '')
        del d['name']
        return Metadata(**d)

    def to_dict(self):
        return value_dict(self, {'composition_name': 'name'})


class Global(Base):
    plan = param.String()
    case = param.Selector()
    builder = param.String()
    runner = param.String()

    # TODO: link to instance counts in groups
    total_instances = param.Integer()
    # TODO: add ui widget for key/value maps instead of using Dict param type
    build_config = param.Dict(default={}, allow_None=True)
    run_config = param.Dict(default={}, allow_None=True)

    def set_manifest(self, manifest):
        if manifest is None:
            return
        print('manifest:', manifest)
        self.plan = manifest['name']
        cases = [tc['name'] for tc in manifest['testcases']]
        self.param['case'].objects = cases
        print('global config updated manifest. cases:', self.param['case'].objects)
        if len(cases) != 0:
            self.case = cases[0]

        if 'defaults' in manifest:
            print('manifest defaults', manifest['defaults'])
            if self.builder == '':
                self.builder = manifest['defaults'].get('builder', '')
            if self.runner == '':
                self.runner = manifest['defaults'].get('runner', '')


class Resources(Base):
    memory = param.String(allow_None=True)
    cpu = param.String(allow_None=True)


class Instances(Base):
    count = param.Integer(allow_None=True)
    percentage = param.Number(allow_None=True)


class Dependency(Base):
    module = param.String()
    version = param.String()


class Build(Base):
    selectors = param.List(class_=str, allow_None=True)
    dependencies = param.List(allow_None=True)


class Run(Base):
    artifact = param.String(allow_None=True)
    test_params = param.Parameter(instantiate=True)

    def __init__(self, params_class=None, **params):
        super().__init__(**params)
        if params_class is not None:
            self.test_params = params_class()

    @classmethod
    def from_dict(cls, d, params_class=None):
        return Run(artifact=d.get('artifact', None), params_class=params_class)

    def panel(self):
        return pn.Column(
            self.param['artifact'],
            pn.Param(self.test_params)
        )


class Group(Base):
    id = param.String()
    instances = param.Parameter(Instances(), instantiate=True)
    resources = param.Parameter(Resources(), allow_None=True, instantiate=True)
    build = param.Parameter(Build(), instantiate=True)
    run = param.Parameter(Run(), instantiate=True)

    def __init__(self, params_class=None, **params):
        super().__init__(**params)
        if params_class is not None:
            self.run = Run(params_class=params_class)
        self._set_name(self.id)

    @classmethod
    def from_dict(cls, d, params_class=None):
        return Group(
            id=d['id'],
            resources=Resources.from_dict(d.get('resources', {})),
            instances=Instances.from_dict(d.get('instances', {})),
            build=Build.from_dict(d.get('build', {})),
            run=Run.from_dict(d.get('params', {}), params_class=params_class),
        )

    def panel(self):
        print('rendering groups panel for ' + self.id)
        return pn.Column(
            "**Group: {}**".format(self.id),
            self.param['id'],
            self.instances,
            self.resources,
            self.build,
            self.run.panel(),
        )


class Composition(param.Parameterized):
    metadata = param.Parameter(Metadata(), instantiate=True)
    global_config = param.Parameter(Global(), instantiate=True)

    groups = param.List(precedence=-1)
    group_tabs = pn.Tabs()
    groups_ui = None

    def __init__(self, manifest=None, add_default_group=False, **params):
        super(Composition, self).__init__(**params)
        self.manifest = manifest
        self.testcase_param_classes = dict()
        self._set_manifest(manifest)
        if add_default_group:
            self._add_group()

    @classmethod
    def from_dict(cls, d, manifest=None):
        if manifest is None:
            try:
                manifest = get_manifest(d['global']['plan'])
            except FileNotFoundError:
                print_err("Unable to find manifest for test plan {}. Please import into $TESTGROUND_HOME/plans and try again".format(d['global']['plan']))

        c = Composition(
            manifest=manifest,
            metadata=Metadata.from_dict(d.get('metadata', {})),
            global_config=Global.from_dict(d.get('global', {})),
        )
        params_class = c._params_class_for_current_testcase()
        c.groups = [Group.from_dict(g, params_class=params_class) for g in d.get('groups', [])]

        return c

    @classmethod
    def from_toml_file(cls, filename, manifest=None):
        with open(filename, 'rt') as f:
            d = toml.load(f)
            return cls.from_dict(d, manifest=manifest)

    @param.depends('groups', watch=True)
    def panel(self):
        add_group_button = pn.widgets.Button(name='Add Group')
        add_group_button.on_click(self._add_group)

        self._refresh_tabs()

        if self.groups_ui is None:
            self.groups_ui = pn.Column(
                add_group_button,
                self.group_tabs,
            )

        return pn.Row(
            pn.Column(self.metadata, self.global_config),
            self.groups_ui,
        )

    def _set_manifest(self, manifest):
        if manifest is None:
            return

        g = self.global_config
        print('global config: ', g)
        g.set_manifest(manifest)
        for tc in manifest.get('testcases', []):
            self.testcase_param_classes[tc['name']] = make_group_params_class(tc)

    def _params_class_for_current_testcase(self):
        case = self.global_config.case
        cls = self.testcase_param_classes.get(case, None)
        if cls is None:
            print_err("No testcase found in manifest named " + case)
        return cls

    def _add_group(self, *args):
        group_id = 'group-{}'.format(len(self.groups) + 1)
        g = Group(id=group_id, params_class=self._params_class_for_current_testcase())
        g.param.watch(self._refresh_tabs, 'id')
        groups = self.groups
        groups.append(g)
        self.groups = groups
        self.group_tabs.active = len(groups) - 1

    @param.depends("global_config.case", watch=True)
    def _test_case_changed(self):
        print('test case changed', self.global_config.case)
        cls = self._params_class_for_current_testcase()
        for g in self.groups:
            g.run.test_params = cls()
        self._refresh_tabs()

    def _refresh_tabs(self, *args):
        self.group_tabs[:] = [(g.id, g.panel()) for g in self.groups]

    def to_dict(self):
        return {
            'metadata': value_dict(self.metadata, renames={'composition_name': 'name'}),
            'global': value_dict(self.global_config),
            'groups': [g.to_dict() for g in self.groups]
        }

    def to_toml(self):
        return toml.dumps(self.to_dict())

    def write_to_file(self, filename):
        with open(filename, 'wt') as f:
            toml.dump(self.to_dict(), f)
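As a rough illustration of how `Composition` can be used outside the UI, here is a sketch that round-trips one of the fixture files included in this PR. It assumes the referenced plan (here, "network") has been imported into `$TESTGROUND_HOME/plans` so its manifest can be found:

```python
from app.composition import Composition

# load an existing composition; the plan manifest is looked up under
# $TESTGROUND_HOME/plans/<plan>/manifest.toml
comp = Composition.from_toml_file('fixtures/ping-pong-local.toml')

# tweak it programmatically...
comp.metadata.author = 'someone-else'

# ...and serialize it back out to TOML
print(comp.to_toml())
comp.write_to_file('/tmp/ping-pong-edited.toml')
```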
composer/app/runner.py (new file) — 111 additions

import os
import panel as pn
import param
from panel.io.server import unlocked
from tornado.ioloop import IOLoop, PeriodicCallback
from tornado.process import Subprocess
from subprocess import STDOUT
from bokeh.models.widgets import Div
from ansi2html import Ansi2HTMLConverter

from .composition import Composition

TESTGROUND = 'testground'


class AnsiColorText(pn.widgets.Widget):
    style = param.Dict(default=None, doc="""
        Dictionary of CSS property:value pairs to apply to this Div.""")

    value = param.Parameter(default=None)

    _format = '<div>{value}</div>'

    _rename = {'name': None, 'value': 'text'}

    # _target_transforms = {'value': 'target.text.split(": ")[0]+": "+value'}
    #
    # _source_transforms = {'value': 'value.split(": ")[1]'}

    _widget_type = Div

    _converter = Ansi2HTMLConverter(inline=True)

    def _process_param_change(self, msg):
        msg = super(AnsiColorText, self)._process_property_change(msg)
        if 'value' in msg:
            text = str(msg.pop('value'))
            text = self._converter.convert(text)
            msg['text'] = text
        return msg

    def scroll_down(self):
        # TODO: figure out how to automatically scroll down as text is added
        pass


class CommandRunner(param.Parameterized):
    command_output = param.String()

    def __init__(self, **params):
        super().__init__(**params)
        self._output_lines = []
        self.proc = None
        self._updater = PeriodicCallback(self._refresh_output, callback_time=1000)

    @pn.depends('command_output')
    def panel(self):
        return pn.Param(self.param, show_name=False, sizing_mode='stretch_width', widgets={
            'command_output': dict(
                type=AnsiColorText,
                sizing_mode='stretch_width',
                height=800)
        })

    def run(self, *cmd):
        self.command_output = ''
        self._output_lines = []
        self.proc = Subprocess(cmd, stdout=Subprocess.STREAM, stderr=STDOUT)
        self._get_next_line()
        self._updater.start()

    def _get_next_line(self):
        if self.proc is None:
            return
        loop = IOLoop.current()
        loop.add_future(self.proc.stdout.read_until(bytes('\n', encoding='utf8')), self._append_output)

    def _append_output(self, future):
        self._output_lines.append(future.result().decode('utf8'))
        self._get_next_line()

    def _refresh_output(self):
        text = ''.join(self._output_lines)
        if len(text) != len(self.command_output):
            with unlocked():
                self.command_output = text


class TestRunner(param.Parameterized):
    composition = param.ClassSelector(class_=Composition, precedence=-1)
    testground_daemon_endpoint = param.String(default="{}:8042".format(os.environ.get('TESTGROUND_DAEMON_HOST', 'localhost')))
    run_test = param.Action(lambda self: self.run())
    runner = CommandRunner()

    def __init__(self, **params):
        super().__init__(**params)

    def run(self):
        # TODO: temp file management - maybe we should mount a volume and save there?
        filename = '/tmp/composition.toml'
        self.composition.write_to_file(filename)

        self.runner.run(TESTGROUND, '--endpoint', self.testground_daemon_endpoint, 'run', 'composition', '-f', filename)

    def panel(self):
        return pn.Column(
            self.param['testground_daemon_endpoint'],
            self.param['run_test'],
            self.runner.panel(),
            sizing_mode='stretch_width',
        )
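For reference, the command that `TestRunner.run()` hands to `CommandRunner` is equivalent to invoking the testground CLI by hand; a sketch with `subprocess` (assuming a daemon reachable on `localhost:8042` and a composition already written to disk):

```python
import subprocess

# the same arguments TestRunner.run() passes to CommandRunner.run()
subprocess.run(
    ['testground', '--endpoint', 'localhost:8042',
     'run', 'composition', '-f', '/tmp/composition.toml'],
    check=True,
)
```

The app uses tornado's Subprocess instead so it can stream output into the UI while the test runs.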
composer/app/util.py (new file) — 26 additions

import toml
import os
import sys


def parse_manifest(manifest_path):
    with open(manifest_path, 'rt') as f:
        return toml.load(f)


def tg_home():
    return os.environ.get('TESTGROUND_HOME',
                          os.path.join(os.environ['HOME'], 'testground'))


def get_plans():
    return list(os.listdir(os.path.join(tg_home(), 'plans')))


def get_manifest(plan_name):
    manifest_path = os.path.join(tg_home(), 'plans', plan_name, 'manifest.toml')
    return parse_manifest(manifest_path)


def print_err(*args):
    print(*args, file=sys.stderr)
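A quick sketch of how these helpers behave (assuming `$TESTGROUND_HOME` points at a testground home with at least one plan imported; the example plan names are illustrative):

```python
from app.util import get_plans, get_manifest, print_err

plans = get_plans()  # e.g. ['dht', 'network']
if not plans:
    print_err('no plans found under $TESTGROUND_HOME/plans')
else:
    manifest = get_manifest(plans[0])  # parsed manifest.toml as a dict
    print(manifest['name'], [tc['name'] for tc in manifest['testcases']])
```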
composer/composer.ipynb (new file) — 45 additions

{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "import param\n",
    "import panel as pn\n",
    "import app.app as app\n",
    "import importlib\n",
    "importlib.reload(app)\n",
    "\n",
    "pn.extension()\n",
    "\n",
    "a = app.App()\n",
    "a.ui()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
composer/composer.sh (new executable file) — 134 additions

#!/bin/bash

# this script runs jupyter inside a docker container and copies
# plan manifests from the user's local filesystem into a temporary
# directory that's bind-mounted into the container.

set -o errexit
set -o pipefail

set -e

err_report() {
  echo "Error on line $1"
}

trap 'err_report $LINENO' ERR


image_name="iptestground/composer"
image_tag="latest"
image_full_name="$image_name:$image_tag"
tg_home=${TESTGROUND_HOME:-$HOME/testground}
container_plans_dir="/testground/plans"
jupyter_port=${JUPYTER_PORT:-8888}
panel_port=${PANEL_PORT:-5006}

poll_interval=30

exists() {
  command -v "$1" >/dev/null 2>&1
}

require_cmds() {
  for cmd in $@; do
    exists $cmd || { echo "This script requires the $cmd command. Please install it and try again." >&2; exit 1; }
  done
}

update_plans() {
  local dest_dir=$1
  rsync -avzh --quiet --copy-links "${tg_home}/plans/" ${dest_dir}
}

watch_plans() {
  local plans_dest=$1
  while true; do
    update_plans ${plans_dest}
    sleep $poll_interval
  done
}

open_url() {
  local url=$1
  if exists cmd.exe; then
    cmd.exe /c start ${url} >/dev/null 2>&1
  elif exists xdg-open; then
    xdg-open ${url} >/dev/null 2>&1 &
  elif exists open; then
    open ${url}
  else
    echo "unable to automatically open url. copy/paste this into a browser: $url"
  fi
}

# delete temp dir and stop docker container
cleanup () {
  if [[ "$container_id" != "" ]]; then
    docker stop ${container_id} >/dev/null
  fi

  if [[ -d "$temp_plans_dir" ]]; then
    rm -rf ${temp_plans_dir}
  fi
}

get_host_ip() {
  # get interface of default route
  local net_if=$(netstat -rn | awk '/^0.0.0.0/ {thif=substr($0,74,10); print thif;} /^default.*UG/ {thif=substr($0,65,10); print thif;}')
  # use ifconfig to get addr of that interface
  detected_host_ip=`ifconfig ${net_if} | grep -Eo 'inet (addr:)?([0-9]*\.){3}[0-9]*' | grep -Eo '([0-9]*\.){3}[0-9]*' | grep -v '127.0.0.1'`

  if [ -z "$detected_host_ip" ]
  then
    detected_host_ip="host.docker.internal"
  fi

  echo $detected_host_ip
}

# run cleanup on exit
trap "{ cleanup; }" EXIT

# make sure we have the commands we need
require_cmds jq docker rsync

if [[ "$SKIP_BUILD" == "" ]]; then
  echo "Building latest docker image. Set SKIP_BUILD env var to any value to bypass."
  require_cmds make
  make docker
fi

# make temp dir for manifests
temp_base="/tmp"
if [[ "$TEMP" != "" ]]; then
  temp_base=$TEMP
fi

temp_plans_dir="$(mktemp -d ${temp_base}/testground-composer-XXXX)"
echo "temp plans dir: $temp_plans_dir"

# copy testplans from $TESTGROUND_HOME/plans to the temp dir
update_plans ${temp_plans_dir}

# run the container in detached mode and grab the id
container_id=$(docker run -d \
  -e TESTGROUND_DAEMON_HOST=$(get_host_ip) \
  --user $(id -u):$(id -g) \
  -p ${panel_port}:5006 \
  -v ${temp_plans_dir}:${container_plans_dir}:ro \
  $image_full_name)

echo "container $container_id started"
# print the log output
docker logs -f ${container_id} &

# sleep for a couple seconds to let the server start up
sleep 2

# open a browser to the app url
panel_url="http://localhost:${panel_port}"
open_url $panel_url

# poll & sync testplan changes every few seconds
watch_plans ${temp_plans_dir}
composer/fixtures/all-both-k8s.toml (new file) — 214 additions

[metadata]
name = "all-both"
author = "adin"

[global]
plan = "dht"
case = "all"
total_instances = 1000
builder = "docker:go"
runner = "cluster:k8s"

[global.build_config]
push_registry = true
registry_type = "aws"

[[groups]]
id = "balsam-undialable-provider"

[groups.instances]
count = 5
percentage = 0.0

[groups.build]
selectors = ["balsam"]

[groups.run]
artifact = "909427826938.dkr.ecr.us-east-1.amazonaws.com/testground-us-east-1-dht:701251a63b92"

[groups.run.test_params]
bs_strategy = "7"
bucket_size = "10"
expect_dht = "false"
group_order = "4"
latency = "100"
record_count = "1"
timeout_secs = "600"
undialable = "true"

[[groups]]
id = "balsam-undialable-searcher"

[groups.instances]
count = 5
percentage = 0.0

[groups.build]
selectors = ["balsam"]

[groups.run]
artifact = "909427826938.dkr.ecr.us-east-1.amazonaws.com/testground-us-east-1-dht:701251a63b92"

[groups.run.test_params]
bs_strategy = "7"
bucket_size = "10"
expect_dht = "false"
group_order = "5"
latency = "100"
search_records = "true"
timeout_secs = "600"
undialable = "true"

[[groups]]
id = "balsam-dialable-passive"

[groups.instances]
count = 780
percentage = 0.0

[groups.build]
selectors = ["balsam"]

[groups.run]
artifact = "909427826938.dkr.ecr.us-east-1.amazonaws.com/testground-us-east-1-dht:701251a63b92"

[groups.run.test_params]
bs_strategy = "7"
bucket_size = "10"
expect_dht = "false"
group_order = "6"
latency = "100"
timeout_secs = "600"
undialable = "false"

[[groups]]
id = "balsam-dialable-provider"

[groups.instances]
count = 5
percentage = 0.0

[groups.build]
selectors = ["balsam"]

[groups.run]
artifact = "909427826938.dkr.ecr.us-east-1.amazonaws.com/testground-us-east-1-dht:701251a63b92"

[groups.run.test_params]
bs_strategy = "7"
bucket_size = "10"
expect_dht = "false"
group_order = "7"
latency = "100"
record_count = "1"
timeout_secs = "600"
undialable = "false"

[[groups]]
id = "balsam-dialable-searcher"

[groups.instances]
count = 5
percentage = 0.0

[groups.build]
selectors = ["balsam"]

[groups.run]
artifact = "909427826938.dkr.ecr.us-east-1.amazonaws.com/testground-us-east-1-dht:701251a63b92"

[groups.run.test_params]
bs_strategy = "7"
bucket_size = "10"
expect_dht = "false"
group_order = "8"
latency = "100"
search_records = "true"
timeout_secs = "600"
undialable = "false"

[[groups]]
id = "cypress-passive"

[groups.instances]
count = 185
percentage = 0.0

[groups.build]
selectors = ["cypress"]

[[groups.build.dependencies]]
module = "github.com/libp2p/go-libp2p-kad-dht"
version = "180be07b8303d536e39809bc39c58be5407fedd9"

[[groups.build.dependencies]]
module = "github.com/libp2p/go-libp2p-xor"
version = "df24f5b04bcbdc0059b27989163a6090f4f6dc7a"

[groups.run]
artifact = "909427826938.dkr.ecr.us-east-1.amazonaws.com/testground-us-east-1-dht:ca78473d669d"

[groups.run.test_params]
alpha = "6"
beta = "3"
bs_strategy = "7"
bucket_size = "10"
group_order = "1"
latency = "100"
timeout_secs = "600"

[[groups]]
id = "cypress-provider"

[groups.instances]
count = 5
percentage = 0.0

[groups.build]
selectors = ["cypress"]

[[groups.build.dependencies]]
module = "github.com/libp2p/go-libp2p-kad-dht"
version = "180be07b8303d536e39809bc39c58be5407fedd9"

[[groups.build.dependencies]]
module = "github.com/libp2p/go-libp2p-xor"
version = "df24f5b04bcbdc0059b27989163a6090f4f6dc7a"

[groups.run]
artifact = "909427826938.dkr.ecr.us-east-1.amazonaws.com/testground-us-east-1-dht:ca78473d669d"

[groups.run.test_params]
alpha = "6"
beta = "3"
bs_strategy = "7"
bucket_size = "10"
group_order = "2"
latency = "100"
record_count = "1"
timeout_secs = "600"

[[groups]]
id = "cypress-searcher"

[groups.instances]
count = 5
percentage = 0.0

[groups.build]
selectors = ["cypress"]

[[groups.build.dependencies]]
module = "github.com/libp2p/go-libp2p-kad-dht"
version = "180be07b8303d536e39809bc39c58be5407fedd9"

[[groups.build.dependencies]]
module = "github.com/libp2p/go-libp2p-xor"
version = "df24f5b04bcbdc0059b27989163a6090f4f6dc7a"

[groups.run]
artifact = "909427826938.dkr.ecr.us-east-1.amazonaws.com/testground-us-east-1-dht:ca78473d669d"

[groups.run.test_params]
alpha = "6"
beta = "3"
bs_strategy = "7"
bucket_size = "10"
group_order = "3"
latency = "100"
search_records = "true"
timeout_secs = "600"

[[groups]]
id = "cypress-bs"

[groups.instances]
count = 5
percentage = 0.0

[groups.build]
selectors = ["cypress"]

[[groups.build.dependencies]]
module = "github.com/libp2p/go-libp2p-kad-dht"
version = "180be07b8303d536e39809bc39c58be5407fedd9"

[[groups.build.dependencies]]
module = "github.com/libp2p/go-libp2p-xor"
version = "df24f5b04bcbdc0059b27989163a6090f4f6dc7a"

[groups.run]
artifact = "909427826938.dkr.ecr.us-east-1.amazonaws.com/testground-us-east-1-dht:ca78473d669d"

[groups.run.test_params]
alpha = "6"
beta = "3"
bootstrapper = "true"
bs_strategy = "7"
bucket_size = "10"
group_order = "0"
latency = "100"
timeout_secs = "600"
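The `[groups.run.test_params]` tables in this fixture are what `make_group_params_class` turns into editable widgets in the UI. A small sketch of that mechanism using a hand-written testcase definition (hypothetical names and values, shaped like the entries in a plan's `manifest.toml`):

```python
from app.composition import make_group_params_class

# a toy testcase entry, not taken from a real manifest
testcase = {
    'name': 'ping-pong',
    'params': {
        'timeout_secs': {'type': 'int', 'default': 600, 'desc': 'test timeout'},
        'latency': {'type': 'int', 'default': 100, 'desc': 'link latency (ms)'},
    },
}

ParamsClass = make_group_params_class(testcase)
params = ParamsClass()
print(params.timeout_secs)  # -> 600
print(params.to_dict())     # values stringified for the composition TOML
```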
composer/fixtures/ping-pong-local.toml (new file) — 14 additions

[metadata]
name = "ping-pong-local"
author = "yusef"

[global]
plan = "network"
case = "ping-pong"
total_instances = 2
builder = "docker:go"
runner = "local:docker"

[[groups]]
id = "nodes"
instances = { count = 2 }
composer/requirements.txt (new file) — 6 additions

param
toml
jupyter
panel
holoviews
ansi2html