Diffstat (limited to 'docs/development')
-rw-r--r--  docs/development/devtools/apex-s3p-results/apex_metrics_after_72h.txt        | 173
-rw-r--r--  docs/development/devtools/apex-s3p-results/apex_metrics_before_72h.txt       | 173
-rw-r--r--  docs/development/devtools/apex-s3p-results/apex_perf_jmeter_results.JPG      | bin 0 -> 139518 bytes
-rw-r--r--  docs/development/devtools/apex-s3p-results/apex_s3p_logs.zip                 | bin 8364 -> 0 bytes
-rw-r--r--  docs/development/devtools/apex-s3p-results/apex_s3p_results.zip              | bin 2245749 -> 0 bytes
-rw-r--r--  docs/development/devtools/apex-s3p-results/apex_stability_jmeter_results.JPG | bin 0 -> 171549 bytes
-rw-r--r--  docs/development/devtools/apex-s3p-results/apex_top_after_72h.JPG            | bin 0 -> 51328 bytes
-rw-r--r--  docs/development/devtools/apex-s3p-results/apex_top_before_72h.JPG           | bin 0 -> 72787 bytes
-rw-r--r--  docs/development/devtools/apex-s3p.rst                                       | 219
-rw-r--r--  docs/development/devtools/clamp-s3p-results/Stability_after_stats.png        | bin 0 -> 24482 bytes
-rw-r--r--  docs/development/devtools/clamp-s3p-results/Stability_before_stats.png       | bin 0 -> 24290 bytes
-rw-r--r--  docs/development/devtools/clamp-s3p-results/cl-s3p-performance-result-jmeter.png | bin 0 -> 62523 bytes
-rw-r--r--  docs/development/devtools/clamp-s3p-results/controlloop_stability_jmeter.png | bin 0 -> 81751 bytes
-rw-r--r--  docs/development/devtools/clamp-s3p-results/controlloop_stability_table.png  | bin 0 -> 216697 bytes
-rw-r--r--  docs/development/devtools/clamp-s3p.rst                                      | 197
-rw-r--r--  docs/development/devtools/devtools.rst                                       | 1
-rw-r--r--  docs/development/devtools/distribution-s3p.rst                               | 216
-rwxr-xr-x  docs/development/devtools/images/20201020-1730-distr-performance-20201020T2025-monitor.png | bin 50243 -> 0 bytes
-rwxr-xr-x  docs/development/devtools/images/20201020-1730-distr-performance-20201020T2025-threads.png | bin 68058 -> 0 bytes
-rw-r--r--  docs/development/devtools/images/apex-s3p-vvm-sample.jpg                     | bin 147909 -> 0 bytes
-rw-r--r--  docs/development/devtools/images/apex_perf_jm_1.PNG                          | bin 91204 -> 0 bytes
-rw-r--r--  docs/development/devtools/images/apex_perf_jm_2.PNG                          | bin 222814 -> 0 bytes
-rw-r--r--  docs/development/devtools/images/apex_s3p_jm-1.png                           | bin 39898 -> 0 bytes
-rw-r--r--  docs/development/devtools/images/apex_s3p_jm-2.png                           | bin 166975 -> 0 bytes
-rw-r--r--  docs/development/devtools/images/dist_perf_statistics.PNG                    | bin 48205 -> 0 bytes
-rw-r--r--  docs/development/devtools/images/dist_perf_threshold.PNG                     | bin 26252 -> 0 bytes
-rw-r--r--  docs/development/devtools/images/distribution/dist_stability_monitor.PNG (renamed from docs/development/devtools/images/dist_stability_monitor.PNG) | bin 81517 -> 81517 bytes
-rw-r--r--  docs/development/devtools/images/distribution/dist_stability_statistics.PNG (renamed from docs/development/devtools/images/dist_stability_statistics.PNG) | bin 53205 -> 53205 bytes
-rw-r--r--  docs/development/devtools/images/distribution/dist_stability_threads.PNG (renamed from docs/development/devtools/images/dist_stability_threads.PNG) | bin 77025 -> 77025 bytes
-rw-r--r--  docs/development/devtools/images/distribution/dist_stability_threshold.PNG (renamed from docs/development/devtools/images/dist_stability_threshold.PNG) | bin 28238 -> 28238 bytes
-rw-r--r--  docs/development/devtools/images/distribution/distribution-s3p-testplan.png (renamed from docs/development/devtools/images/distribution-s3p-testplan.png) | bin 53609 -> 53609 bytes
-rw-r--r--  docs/development/devtools/images/distribution/distribution-s3p-vvm-sample.png (renamed from docs/development/devtools/images/distribution-s3p-vvm-sample.png) | bin 139259 -> 139259 bytes
-rw-r--r--  docs/development/devtools/images/distribution/performance-monitor.png        | bin 0 -> 27349 bytes
-rw-r--r--  docs/development/devtools/images/distribution/performance-statistics.png     | bin 0 -> 93384 bytes
-rwxr-xr-x  docs/development/devtools/images/distribution/performance-threads.png       | bin 0 -> 43635 bytes
-rw-r--r--  docs/development/devtools/images/distribution/performance-threshold.png     | bin 0 -> 62947 bytes
-rw-r--r--  docs/development/devtools/images/frankfurt/apex_s3p_jm-1.png                 | bin 149789 -> 0 bytes
-rw-r--r--  docs/development/devtools/images/frankfurt/apex_s3p_jm-2.png                 | bin 311321 -> 0 bytes
-rw-r--r--  docs/development/devtools/images/frankfurt/apex_s3p_vm-1.png                 | bin 176069 -> 0 bytes
-rw-r--r--  docs/development/devtools/images/frankfurt/apex_s3p_vm-2.png                 | bin 146018 -> 0 bytes
40 files changed, 805 insertions, 174 deletions
diff --git a/docs/development/devtools/apex-s3p-results/apex_metrics_after_72h.txt b/docs/development/devtools/apex-s3p-results/apex_metrics_after_72h.txt
new file mode 100644
index 00000000..38fe5054
--- /dev/null
+++ b/docs/development/devtools/apex-s3p-results/apex_metrics_after_72h.txt
@@ -0,0 +1,173 @@
+# HELP jvm_info VM version info
+# TYPE jvm_info gauge
+jvm_info{runtime="OpenJDK Runtime Environment",vendor="Alpine",version="11.0.9+11-alpine-r1",} 1.0
+# HELP jvm_memory_objects_pending_finalization The number of objects waiting in the finalizer queue.
+# TYPE jvm_memory_objects_pending_finalization gauge
+jvm_memory_objects_pending_finalization 0.0
+# HELP jvm_memory_bytes_used Used bytes of a given JVM memory area.
+# TYPE jvm_memory_bytes_used gauge
+jvm_memory_bytes_used{area="heap",} 1.8570308E8
+jvm_memory_bytes_used{area="nonheap",} 1.1990352E8
+# HELP jvm_memory_bytes_committed Committed (bytes) of a given JVM memory area.
+# TYPE jvm_memory_bytes_committed gauge
+jvm_memory_bytes_committed{area="heap",} 2.55655936E8
+jvm_memory_bytes_committed{area="nonheap",} 1.65216256E8
+# HELP jvm_memory_bytes_max Max (bytes) of a given JVM memory area.
+# TYPE jvm_memory_bytes_max gauge
+jvm_memory_bytes_max{area="heap",} 4.064673792E9
+jvm_memory_bytes_max{area="nonheap",} -1.0
+# HELP jvm_memory_bytes_init Initial bytes of a given JVM memory area.
+# TYPE jvm_memory_bytes_init gauge
+jvm_memory_bytes_init{area="heap",} 2.64241152E8
+jvm_memory_bytes_init{area="nonheap",} 7667712.0
+# HELP jvm_memory_pool_bytes_used Used bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_bytes_used gauge
+jvm_memory_pool_bytes_used{pool="CodeHeap 'non-nmethods'",} 1357312.0
+jvm_memory_pool_bytes_used{pool="Metaspace",} 8.2427648E7
+jvm_memory_pool_bytes_used{pool="Tenured Gen",} 1.5998668E8
+jvm_memory_pool_bytes_used{pool="CodeHeap 'profiled nmethods'",} 4413696.0
+jvm_memory_pool_bytes_used{pool="Eden Space",} 2.1659816E7
+jvm_memory_pool_bytes_used{pool="Survivor Space",} 4056584.0
+jvm_memory_pool_bytes_used{pool="Compressed Class Space",} 8293152.0
+jvm_memory_pool_bytes_used{pool="CodeHeap 'non-profiled nmethods'",} 2.3411712E7
+# HELP jvm_memory_pool_bytes_committed Committed bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_bytes_committed gauge
+jvm_memory_pool_bytes_committed{pool="CodeHeap 'non-nmethods'",} 2555904.0
+jvm_memory_pool_bytes_committed{pool="Metaspace",} 9.4896128E7
+jvm_memory_pool_bytes_committed{pool="Tenured Gen",} 1.76160768E8
+jvm_memory_pool_bytes_committed{pool="CodeHeap 'profiled nmethods'",} 3.2309248E7
+jvm_memory_pool_bytes_committed{pool="Eden Space",} 7.0713344E7
+jvm_memory_pool_bytes_committed{pool="Survivor Space",} 8781824.0
+jvm_memory_pool_bytes_committed{pool="Compressed Class Space",} 1.0223616E7
+jvm_memory_pool_bytes_committed{pool="CodeHeap 'non-profiled nmethods'",} 2.523136E7
+# HELP jvm_memory_pool_bytes_max Max bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_bytes_max gauge
+jvm_memory_pool_bytes_max{pool="CodeHeap 'non-nmethods'",} 5828608.0
+jvm_memory_pool_bytes_max{pool="Metaspace",} -1.0
+jvm_memory_pool_bytes_max{pool="Tenured Gen",} 2.803236864E9
+jvm_memory_pool_bytes_max{pool="CodeHeap 'profiled nmethods'",} 1.22912768E8
+jvm_memory_pool_bytes_max{pool="Eden Space",} 1.12132096E9
+jvm_memory_pool_bytes_max{pool="Survivor Space",} 1.40115968E8
+jvm_memory_pool_bytes_max{pool="Compressed Class Space",} 1.073741824E9
+jvm_memory_pool_bytes_max{pool="CodeHeap 'non-profiled nmethods'",} 1.22916864E8
+# HELP jvm_memory_pool_bytes_init Initial bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_bytes_init gauge
+jvm_memory_pool_bytes_init{pool="CodeHeap 'non-nmethods'",} 2555904.0
+jvm_memory_pool_bytes_init{pool="Metaspace",} 0.0
+jvm_memory_pool_bytes_init{pool="Tenured Gen",} 1.76160768E8
+jvm_memory_pool_bytes_init{pool="CodeHeap 'profiled nmethods'",} 2555904.0
+jvm_memory_pool_bytes_init{pool="Eden Space",} 7.0516736E7
+jvm_memory_pool_bytes_init{pool="Survivor Space",} 8781824.0
+jvm_memory_pool_bytes_init{pool="Compressed Class Space",} 0.0
+jvm_memory_pool_bytes_init{pool="CodeHeap 'non-profiled nmethods'",} 2555904.0
+# HELP jvm_memory_pool_collection_used_bytes Used bytes after last collection of a given JVM memory pool.
+# TYPE jvm_memory_pool_collection_used_bytes gauge
+jvm_memory_pool_collection_used_bytes{pool="Tenured Gen",} 1.00182296E8
+jvm_memory_pool_collection_used_bytes{pool="Eden Space",} 0.0
+jvm_memory_pool_collection_used_bytes{pool="Survivor Space",} 4056584.0
+# HELP jvm_memory_pool_collection_committed_bytes Committed after last collection bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_collection_committed_bytes gauge
+jvm_memory_pool_collection_committed_bytes{pool="Tenured Gen",} 1.76357376E8
+jvm_memory_pool_collection_committed_bytes{pool="Eden Space",} 7.0713344E7
+jvm_memory_pool_collection_committed_bytes{pool="Survivor Space",} 8781824.0
+# HELP jvm_memory_pool_collection_max_bytes Max bytes after last collection of a given JVM memory pool.
+# TYPE jvm_memory_pool_collection_max_bytes gauge
+jvm_memory_pool_collection_max_bytes{pool="Tenured Gen",} 2.803236864E9
+jvm_memory_pool_collection_max_bytes{pool="Eden Space",} 1.12132096E9
+jvm_memory_pool_collection_max_bytes{pool="Survivor Space",} 1.40115968E8
+# HELP jvm_memory_pool_collection_init_bytes Initial after last collection bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_collection_init_bytes gauge
+jvm_memory_pool_collection_init_bytes{pool="Tenured Gen",} 1.76160768E8
+jvm_memory_pool_collection_init_bytes{pool="Eden Space",} 7.0516736E7
+jvm_memory_pool_collection_init_bytes{pool="Survivor Space",} 8781824.0
+# HELP jvm_memory_pool_allocated_bytes_total Total bytes allocated in a given JVM memory pool. Only updated after GC, not continuously.
+# TYPE jvm_memory_pool_allocated_bytes_total counter
+jvm_memory_pool_allocated_bytes_total{pool="Eden Space",} 7.81369756592E11
+jvm_memory_pool_allocated_bytes_total{pool="CodeHeap 'profiled nmethods'",} 5.1833472E7
+jvm_memory_pool_allocated_bytes_total{pool="CodeHeap 'non-profiled nmethods'",} 3.6370048E7
+jvm_memory_pool_allocated_bytes_total{pool="Compressed Class Space",} 8986056.0
+jvm_memory_pool_allocated_bytes_total{pool="Metaspace",} 8.6383264E7
+jvm_memory_pool_allocated_bytes_total{pool="Tenured Gen",} 3.84983864E8
+jvm_memory_pool_allocated_bytes_total{pool="Survivor Space",} 1.506631592E9
+jvm_memory_pool_allocated_bytes_total{pool="CodeHeap 'non-nmethods'",} 1439232.0
+# HELP jvm_gc_collection_seconds Time spent in a given JVM garbage collector in seconds.
+# TYPE jvm_gc_collection_seconds summary
+jvm_gc_collection_seconds_count{gc="Copy",} 11053.0
+jvm_gc_collection_seconds_sum{gc="Copy",} 90.383
+jvm_gc_collection_seconds_count{gc="MarkSweepCompact",} 6.0
+jvm_gc_collection_seconds_sum{gc="MarkSweepCompact",} 1.44
+# HELP jvm_threads_current Current thread count of a JVM
+# TYPE jvm_threads_current gauge
+jvm_threads_current 31.0
+# HELP jvm_threads_daemon Daemon thread count of a JVM
+# TYPE jvm_threads_daemon gauge
+jvm_threads_daemon 16.0
+# HELP jvm_threads_peak Peak thread count of a JVM
+# TYPE jvm_threads_peak gauge
+jvm_threads_peak 80.0
+# HELP jvm_threads_started_total Started thread count of a JVM
+# TYPE jvm_threads_started_total counter
+jvm_threads_started_total 320599.0
+# HELP jvm_threads_deadlocked Cycles of JVM-threads that are in deadlock waiting to acquire object monitors or ownable synchronizers
+# TYPE jvm_threads_deadlocked gauge
+jvm_threads_deadlocked 0.0
+# HELP jvm_threads_deadlocked_monitor Cycles of JVM-threads that are in deadlock waiting to acquire object monitors
+# TYPE jvm_threads_deadlocked_monitor gauge
+jvm_threads_deadlocked_monitor 0.0
+# HELP jvm_threads_state Current count of threads by state
+# TYPE jvm_threads_state gauge
+jvm_threads_state{state="BLOCKED",} 0.0
+jvm_threads_state{state="WAITING",} 13.0
+jvm_threads_state{state="RUNNABLE",} 7.0
+jvm_threads_state{state="TIMED_WAITING",} 11.0
+jvm_threads_state{state="TERMINATED",} 0.0
+jvm_threads_state{state="NEW",} 0.0
+# HELP jvm_buffer_pool_used_bytes Used bytes of a given JVM buffer pool.
+# TYPE jvm_buffer_pool_used_bytes gauge
+jvm_buffer_pool_used_bytes{pool="mapped",} 0.0
+jvm_buffer_pool_used_bytes{pool="direct",} 3.385029E7
+# HELP jvm_buffer_pool_capacity_bytes Bytes capacity of a given JVM buffer pool.
+# TYPE jvm_buffer_pool_capacity_bytes gauge
+jvm_buffer_pool_capacity_bytes{pool="mapped",} 0.0
+jvm_buffer_pool_capacity_bytes{pool="direct",} 3.3850289E7
+# HELP jvm_buffer_pool_used_buffers Used buffers of a given JVM buffer pool.
+# TYPE jvm_buffer_pool_used_buffers gauge
+jvm_buffer_pool_used_buffers{pool="mapped",} 0.0
+jvm_buffer_pool_used_buffers{pool="direct",} 16.0
+# HELP jvm_classes_loaded The number of classes that are currently loaded in the JVM
+# TYPE jvm_classes_loaded gauge
+jvm_classes_loaded 11238.0
+# HELP jvm_classes_loaded_total The total number of classes that have been loaded since the JVM has started execution
+# TYPE jvm_classes_loaded_total counter
+jvm_classes_loaded_total 12239.0
+# HELP jvm_classes_unloaded_total The total number of classes that have been unloaded since the JVM has started execution
+# TYPE jvm_classes_unloaded_total counter
+jvm_classes_unloaded_total 1001.0
+# HELP process_cpu_seconds_total Total user and system CPU time spent in seconds.
+# TYPE process_cpu_seconds_total counter
+process_cpu_seconds_total 9308.0
+# HELP process_start_time_seconds Start time of the process since unix epoch in seconds.
+# TYPE process_start_time_seconds gauge
+process_start_time_seconds 1.633111663176E9
+# HELP process_open_fds Number of open file descriptors.
+# TYPE process_open_fds gauge
+process_open_fds 354.0
+# HELP process_max_fds Maximum number of open file descriptors.
+# TYPE process_max_fds gauge
+process_max_fds 1048576.0
+# HELP process_virtual_memory_bytes Virtual memory size in bytes.
+# TYPE process_virtual_memory_bytes gauge
+process_virtual_memory_bytes 5.933633536E9
+# HELP process_resident_memory_bytes Resident memory size in bytes.
+# TYPE process_resident_memory_bytes gauge
+process_resident_memory_bytes 5.2523008E8
+# HELP jvm_memory_pool_allocated_bytes_created Total bytes allocated in a given JVM memory pool. Only updated after GC, not continuously.
+# TYPE jvm_memory_pool_allocated_bytes_created gauge
+jvm_memory_pool_allocated_bytes_created{pool="Eden Space",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="CodeHeap 'profiled nmethods'",} 1.633111669942E9
+jvm_memory_pool_allocated_bytes_created{pool="CodeHeap 'non-profiled nmethods'",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="Compressed Class Space",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="Metaspace",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="Tenured Gen",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="Survivor Space",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="CodeHeap 'non-nmethods'",} 1.633111669968E9
diff --git a/docs/development/devtools/apex-s3p-results/apex_metrics_before_72h.txt b/docs/development/devtools/apex-s3p-results/apex_metrics_before_72h.txt
new file mode 100644
index 00000000..551bd824
--- /dev/null
+++ b/docs/development/devtools/apex-s3p-results/apex_metrics_before_72h.txt
@@ -0,0 +1,173 @@
+# HELP jvm_info VM version info
+# TYPE jvm_info gauge
+jvm_info{runtime="OpenJDK Runtime Environment",vendor="Alpine",version="11.0.9+11-alpine-r1",} 1.0
+# HELP jvm_memory_objects_pending_finalization The number of objects waiting in the finalizer queue.
+# TYPE jvm_memory_objects_pending_finalization gauge
+jvm_memory_objects_pending_finalization 0.0
+# HELP jvm_memory_bytes_used Used bytes of a given JVM memory area.
+# TYPE jvm_memory_bytes_used gauge
+jvm_memory_bytes_used{area="heap",} 4.7930528E7
+jvm_memory_bytes_used{area="nonheap",} 5.3899512E7
+# HELP jvm_memory_bytes_committed Committed (bytes) of a given JVM memory area.
+# TYPE jvm_memory_bytes_committed gauge
+jvm_memory_bytes_committed{area="heap",} 2.555904E8
+jvm_memory_bytes_committed{area="nonheap",} 5.8589184E7
+# HELP jvm_memory_bytes_max Max (bytes) of a given JVM memory area.
+# TYPE jvm_memory_bytes_max gauge
+jvm_memory_bytes_max{area="heap",} 4.064673792E9
+jvm_memory_bytes_max{area="nonheap",} -1.0
+# HELP jvm_memory_bytes_init Initial bytes of a given JVM memory area.
+# TYPE jvm_memory_bytes_init gauge
+jvm_memory_bytes_init{area="heap",} 2.64241152E8
+jvm_memory_bytes_init{area="nonheap",} 7667712.0
+# HELP jvm_memory_pool_bytes_used Used bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_bytes_used gauge
+jvm_memory_pool_bytes_used{pool="CodeHeap 'non-nmethods'",} 1271680.0
+jvm_memory_pool_bytes_used{pool="Metaspace",} 4.0208712E7
+jvm_memory_pool_bytes_used{pool="Tenured Gen",} 2.8176296E7
+jvm_memory_pool_bytes_used{pool="CodeHeap 'profiled nmethods'",} 5948416.0
+jvm_memory_pool_bytes_used{pool="Eden Space",} 1.780424E7
+jvm_memory_pool_bytes_used{pool="Survivor Space",} 1949992.0
+jvm_memory_pool_bytes_used{pool="Compressed Class Space",} 4855216.0
+jvm_memory_pool_bytes_used{pool="CodeHeap 'non-profiled nmethods'",} 1615488.0
+# HELP jvm_memory_pool_bytes_committed Committed bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_bytes_committed gauge
+jvm_memory_pool_bytes_committed{pool="CodeHeap 'non-nmethods'",} 2555904.0
+jvm_memory_pool_bytes_committed{pool="Metaspace",} 4.194304E7
+jvm_memory_pool_bytes_committed{pool="Tenured Gen",} 1.76160768E8
+jvm_memory_pool_bytes_committed{pool="CodeHeap 'profiled nmethods'",} 6029312.0
+jvm_memory_pool_bytes_committed{pool="Eden Space",} 7.0647808E7
+jvm_memory_pool_bytes_committed{pool="Survivor Space",} 8781824.0
+jvm_memory_pool_bytes_committed{pool="Compressed Class Space",} 5505024.0
+jvm_memory_pool_bytes_committed{pool="CodeHeap 'non-profiled nmethods'",} 2555904.0
+# HELP jvm_memory_pool_bytes_max Max bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_bytes_max gauge
+jvm_memory_pool_bytes_max{pool="CodeHeap 'non-nmethods'",} 5828608.0
+jvm_memory_pool_bytes_max{pool="Metaspace",} -1.0
+jvm_memory_pool_bytes_max{pool="Tenured Gen",} 2.803236864E9
+jvm_memory_pool_bytes_max{pool="CodeHeap 'profiled nmethods'",} 1.22912768E8
+jvm_memory_pool_bytes_max{pool="Eden Space",} 1.12132096E9
+jvm_memory_pool_bytes_max{pool="Survivor Space",} 1.40115968E8
+jvm_memory_pool_bytes_max{pool="Compressed Class Space",} 1.073741824E9
+jvm_memory_pool_bytes_max{pool="CodeHeap 'non-profiled nmethods'",} 1.22916864E8
+# HELP jvm_memory_pool_bytes_init Initial bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_bytes_init gauge
+jvm_memory_pool_bytes_init{pool="CodeHeap 'non-nmethods'",} 2555904.0
+jvm_memory_pool_bytes_init{pool="Metaspace",} 0.0
+jvm_memory_pool_bytes_init{pool="Tenured Gen",} 1.76160768E8
+jvm_memory_pool_bytes_init{pool="CodeHeap 'profiled nmethods'",} 2555904.0
+jvm_memory_pool_bytes_init{pool="Eden Space",} 7.0516736E7
+jvm_memory_pool_bytes_init{pool="Survivor Space",} 8781824.0
+jvm_memory_pool_bytes_init{pool="Compressed Class Space",} 0.0
+jvm_memory_pool_bytes_init{pool="CodeHeap 'non-profiled nmethods'",} 2555904.0
+# HELP jvm_memory_pool_collection_used_bytes Used bytes after last collection of a given JVM memory pool.
+# TYPE jvm_memory_pool_collection_used_bytes gauge
+jvm_memory_pool_collection_used_bytes{pool="Tenured Gen",} 2.8176296E7
+jvm_memory_pool_collection_used_bytes{pool="Eden Space",} 0.0
+jvm_memory_pool_collection_used_bytes{pool="Survivor Space",} 1949992.0
+# HELP jvm_memory_pool_collection_committed_bytes Committed after last collection bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_collection_committed_bytes gauge
+jvm_memory_pool_collection_committed_bytes{pool="Tenured Gen",} 1.76160768E8
+jvm_memory_pool_collection_committed_bytes{pool="Eden Space",} 7.0647808E7
+jvm_memory_pool_collection_committed_bytes{pool="Survivor Space",} 8781824.0
+# HELP jvm_memory_pool_collection_max_bytes Max bytes after last collection of a given JVM memory pool.
+# TYPE jvm_memory_pool_collection_max_bytes gauge
+jvm_memory_pool_collection_max_bytes{pool="Tenured Gen",} 2.803236864E9
+jvm_memory_pool_collection_max_bytes{pool="Eden Space",} 1.12132096E9
+jvm_memory_pool_collection_max_bytes{pool="Survivor Space",} 1.40115968E8
+# HELP jvm_memory_pool_collection_init_bytes Initial after last collection bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_collection_init_bytes gauge
+jvm_memory_pool_collection_init_bytes{pool="Tenured Gen",} 1.76160768E8
+jvm_memory_pool_collection_init_bytes{pool="Eden Space",} 7.0516736E7
+jvm_memory_pool_collection_init_bytes{pool="Survivor Space",} 8781824.0
+# HELP jvm_memory_pool_allocated_bytes_total Total bytes allocated in a given JVM memory pool. Only updated after GC, not continuously.
+# TYPE jvm_memory_pool_allocated_bytes_total counter
+jvm_memory_pool_allocated_bytes_total{pool="Eden Space",} 1.62790696E8
+jvm_memory_pool_allocated_bytes_total{pool="CodeHeap 'profiled nmethods'",} 5150080.0
+jvm_memory_pool_allocated_bytes_total{pool="CodeHeap 'non-profiled nmethods'",} 1428864.0
+jvm_memory_pool_allocated_bytes_total{pool="Compressed Class Space",} 4785752.0
+jvm_memory_pool_allocated_bytes_total{pool="Metaspace",} 3.9463568E7
+jvm_memory_pool_allocated_bytes_total{pool="Tenured Gen",} 2.8176296E7
+jvm_memory_pool_allocated_bytes_total{pool="Survivor Space",} 5422680.0
+jvm_memory_pool_allocated_bytes_total{pool="CodeHeap 'non-nmethods'",} 1271680.0
+# HELP jvm_gc_collection_seconds Time spent in a given JVM garbage collector in seconds.
+# TYPE jvm_gc_collection_seconds summary
+jvm_gc_collection_seconds_count{gc="Copy",} 3.0
+jvm_gc_collection_seconds_sum{gc="Copy",} 0.076
+jvm_gc_collection_seconds_count{gc="MarkSweepCompact",} 2.0
+jvm_gc_collection_seconds_sum{gc="MarkSweepCompact",} 0.126
+# HELP jvm_threads_current Current thread count of a JVM
+# TYPE jvm_threads_current gauge
+jvm_threads_current 31.0
+# HELP jvm_threads_daemon Daemon thread count of a JVM
+# TYPE jvm_threads_daemon gauge
+jvm_threads_daemon 16.0
+# HELP jvm_threads_peak Peak thread count of a JVM
+# TYPE jvm_threads_peak gauge
+jvm_threads_peak 31.0
+# HELP jvm_threads_started_total Started thread count of a JVM
+# TYPE jvm_threads_started_total counter
+jvm_threads_started_total 32.0
+# HELP jvm_threads_deadlocked Cycles of JVM-threads that are in deadlock waiting to acquire object monitors or ownable synchronizers
+# TYPE jvm_threads_deadlocked gauge
+jvm_threads_deadlocked 0.0
+# HELP jvm_threads_deadlocked_monitor Cycles of JVM-threads that are in deadlock waiting to acquire object monitors
+# TYPE jvm_threads_deadlocked_monitor gauge
+jvm_threads_deadlocked_monitor 0.0
+# HELP jvm_threads_state Current count of threads by state
+# TYPE jvm_threads_state gauge
+jvm_threads_state{state="BLOCKED",} 0.0
+jvm_threads_state{state="WAITING",} 13.0
+jvm_threads_state{state="RUNNABLE",} 7.0
+jvm_threads_state{state="TIMED_WAITING",} 11.0
+jvm_threads_state{state="TERMINATED",} 0.0
+jvm_threads_state{state="NEW",} 0.0
+# HELP jvm_buffer_pool_used_bytes Used bytes of a given JVM buffer pool.
+# TYPE jvm_buffer_pool_used_bytes gauge
+jvm_buffer_pool_used_bytes{pool="mapped",} 0.0
+jvm_buffer_pool_used_bytes{pool="direct",} 187392.0
+# HELP jvm_buffer_pool_capacity_bytes Bytes capacity of a given JVM buffer pool.
+# TYPE jvm_buffer_pool_capacity_bytes gauge
+jvm_buffer_pool_capacity_bytes{pool="mapped",} 0.0
+jvm_buffer_pool_capacity_bytes{pool="direct",} 187392.0
+# HELP jvm_buffer_pool_used_buffers Used buffers of a given JVM buffer pool.
+# TYPE jvm_buffer_pool_used_buffers gauge
+jvm_buffer_pool_used_buffers{pool="mapped",} 0.0
+jvm_buffer_pool_used_buffers{pool="direct",} 8.0
+# HELP jvm_classes_loaded The number of classes that are currently loaded in the JVM
+# TYPE jvm_classes_loaded gauge
+jvm_classes_loaded 7287.0
+# HELP jvm_classes_loaded_total The total number of classes that have been loaded since the JVM has started execution
+# TYPE jvm_classes_loaded_total counter
+jvm_classes_loaded_total 7287.0
+# HELP jvm_classes_unloaded_total The total number of classes that have been unloaded since the JVM has started execution
+# TYPE jvm_classes_unloaded_total counter
+jvm_classes_unloaded_total 0.0
+# HELP process_cpu_seconds_total Total user and system CPU time spent in seconds.
+# TYPE process_cpu_seconds_total counter
+process_cpu_seconds_total 19.23
+# HELP process_start_time_seconds Start time of the process since unix epoch in seconds.
+# TYPE process_start_time_seconds gauge
+process_start_time_seconds 1.633111663176E9
+# HELP process_open_fds Number of open file descriptors.
+# TYPE process_open_fds gauge
+process_open_fds 350.0
+# HELP process_max_fds Maximum number of open file descriptors.
+# TYPE process_max_fds gauge
+process_max_fds 1048576.0
+# HELP process_virtual_memory_bytes Virtual memory size in bytes.
+# TYPE process_virtual_memory_bytes gauge
+process_virtual_memory_bytes 5.825941504E9
+# HELP process_resident_memory_bytes Resident memory size in bytes.
+# TYPE process_resident_memory_bytes gauge
+process_resident_memory_bytes 2.13909504E8
+# HELP jvm_memory_pool_allocated_bytes_created Total bytes allocated in a given JVM memory pool. Only updated after GC, not continuously.
+# TYPE jvm_memory_pool_allocated_bytes_created gauge
+jvm_memory_pool_allocated_bytes_created{pool="Eden Space",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="CodeHeap 'profiled nmethods'",} 1.633111669942E9
+jvm_memory_pool_allocated_bytes_created{pool="CodeHeap 'non-profiled nmethods'",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="Compressed Class Space",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="Metaspace",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="Tenured Gen",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="Survivor Space",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="CodeHeap 'non-nmethods'",} 1.633111669968E9
diff --git a/docs/development/devtools/apex-s3p-results/apex_perf_jmeter_results.JPG b/docs/development/devtools/apex-s3p-results/apex_perf_jmeter_results.JPG
new file mode 100644
index 00000000..89447874
--- /dev/null
+++ b/docs/development/devtools/apex-s3p-results/apex_perf_jmeter_results.JPG
Binary files differ
diff --git a/docs/development/devtools/apex-s3p-results/apex_s3p_logs.zip b/docs/development/devtools/apex-s3p-results/apex_s3p_logs.zip
deleted file mode 100644
index cdeca387..00000000
--- a/docs/development/devtools/apex-s3p-results/apex_s3p_logs.zip
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/apex-s3p-results/apex_s3p_results.zip b/docs/development/devtools/apex-s3p-results/apex_s3p_results.zip
deleted file mode 100644
index a4f268e5..00000000
--- a/docs/development/devtools/apex-s3p-results/apex_s3p_results.zip
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/apex-s3p-results/apex_stability_jmeter_results.JPG b/docs/development/devtools/apex-s3p-results/apex_stability_jmeter_results.JPG
new file mode 100644
index 00000000..1cdabb7e
--- /dev/null
+++ b/docs/development/devtools/apex-s3p-results/apex_stability_jmeter_results.JPG
Binary files differ
diff --git a/docs/development/devtools/apex-s3p-results/apex_top_after_72h.JPG b/docs/development/devtools/apex-s3p-results/apex_top_after_72h.JPG
new file mode 100644
index 00000000..e51b942a
--- /dev/null
+++ b/docs/development/devtools/apex-s3p-results/apex_top_after_72h.JPG
Binary files differ
diff --git a/docs/development/devtools/apex-s3p-results/apex_top_before_72h.JPG b/docs/development/devtools/apex-s3p-results/apex_top_before_72h.JPG
new file mode 100644
index 00000000..cc2da54f
--- /dev/null
+++ b/docs/development/devtools/apex-s3p-results/apex_top_before_72h.JPG
Binary files differ
diff --git a/docs/development/devtools/apex-s3p.rst b/docs/development/devtools/apex-s3p.rst
index f5c0e7cf..bfed24e0 100644
--- a/docs/development/devtools/apex-s3p.rst
+++ b/docs/development/devtools/apex-s3p.rst
@@ -10,143 +10,200 @@
Policy APEX PDP component
~~~~~~~~~~~~~~~~~~~~~~~~~
-Setting up Stability Tests in APEX
-++++++++++++++++++++++++++++++++++
+Both the Stability and the Performance tests were executed in a full ONAP OOM deployment in the Nordix lab.
-Introduction
-------------
-The 72 hour Stability Test for apex-pdp has the goal of introducing a steady flow of transactions initiated from a test client server running JMeter. The pdp is configured to start a rest server inside it and take input from rest clients (JMeter) and send back output to the rest clients (JMeter).
+Setup Details
++++++++++++++
-The input events will be submitted through rest interface of apex-pdp and the results are verified using the rest responses coming out from apex-pdp.
+- APEX-PDP along with all policy components deployed as part of a full ONAP OOM deployment.
+- Policy-models-simulator is deployed to use CDS and DMaaP simulators during policy execution.
+ Simulator configurations used are available in apex-pdp repository:
+ testsuites/apex-pdp-stability/src/main/resources/simulatorConfig/
+- Two APEX policies are executed in the APEX-PDP engine, and are triggered by multiple threads during the tests.
+- Both tests were run via JMeter.
-The test will be performed in a multi-threaded environment where 5 threads running in JMeter will keep sending events to apex-pdp for the duration of 72 hours.
+ Stability test script is available in apex-pdp repository:
+ testsuites/apex-pdp-stability/src/main/resources/apexPdpStabilityTestPlan.jmx
-Setup details
--------------
+ Performance test script is available in apex-pdp repository:
+ testsuites/performance/performance-benchmark-test/src/main/resources/apexPdpPerformanceTestPlan.jmx
-Stability test is performed on VM's running in OpenStack cloud environment. APEX-PDP along with other components in Policy Framework is deployed in an OOM ONAP installation. JMeter runs on a separate VM to simulate a steady flow of transactions.
+.. Note::
+ Policy executions are validated in a stricter fashion during the tests.
+ There are test cases where up to 80 events are expected on the DMaaP topic.
+ The DMaaP simulator is used to keep the setup simple and to avoid message pickup timing issues.
+Stability Test of APEX-PDP
+++++++++++++++++++++++++++
Test Plan
---------
-The 72 hours stability test will run the following steps in a 5 threaded loop.
+The 72-hour stability test ran the following steps.
-Setup stage (these calls run only once - at the beginning)
-- **Create Policy** - creates a policy using the policy/api component
-- **Deploy Policy** - deploys the policy in the existing PdpGroup
-Test stage (these calls run over and over again)
-- **Check Health** - checks the health status of apex
-- **Send Input Event** - triggers 'unauthenticated.DCAE_CL_OUTPUT' event of DMaaP.
-- **Get Output Event Response** - checks for the triggered output event.
-Teardown stage (this calls run only once - at the end)
-- **Undeploy Policy** - undeploys the policy from PdpGroup
-- **Delete Policy** - deletes the policy using the policy/api component
+Setup Phase
+"""""""""""
-The following steps can be used to configure the parameters of the test plan.
+Policies are created and deployed to APEX-PDP during this phase. Only one thread is active, and this step runs only once.
-- **HTTP Header Manager** - used to store headers which will be used for making HTTP requests.
-- **HTTP Request Defaults** - used to store HTTP request details like Server Name or IP, Port, Protocol etc.
-- **User Defined Variables** - used to store the following user defined parameters:
+- **Create Policy onap.policies.apex.Simplecontrolloop** - creates the first APEX policy using the policy/api component.
+  This is a sample policy used for PNF testing.
+- **Create Policy onap.policies.apex.Example** - creates the second APEX policy using the policy/api component.
+  This is a sample policy used for VNF testing.
+- **Deploy Policies** - deploys both created policies to APEX-PDP using the policy/pap component (a request sketch follows this list).
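+
+A minimal sketch of these setup calls, assuming the default ONAP policy healthcheck
+credentials and the HOSTNAME/API_PORT/PAP_PORT variables from the test plan (the policy
+file name is illustrative):
+
+.. code-block:: bash
+
+    # create an APEX policy via policy/api
+    curl -sk -u 'healthcheck:zb!XztG34' -X POST \
+        "https://${HOSTNAME}:${API_PORT}/policy/api/v1/policies" \
+        -H 'Content-Type: application/json' -d @onap.policies.apex.Example.json
+
+    # deploy the created policies to APEX-PDP via policy/pap
+    curl -sk -u 'healthcheck:zb!XztG34' -X POST \
+        "https://${HOSTNAME}:${PAP_PORT}/policy/pap/v1/pdps/policies" \
+        -H 'Content-Type: application/json' \
+        -d '{"policies": [{"policy-id": "onap.policies.apex.Example", "policy-version": "1.0.0"}]}'
+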
-================== ============================================================================ ============================
-**Name** **Description** **Default Value**
-================== ============================================================================ ============================
-wait Wait time after each request (in milliseconds) 10000
-threads Number of threads to run test cases in parallel. 5
-threadsTimeOutInMs Synchronization timer for threads running in parallel (in milliseconds). 5000
-PAP_PORT Port number of PAP for making REST API calls
-API_PORT Port number of API for making REST API calls
-APEX_PORT Port number of APEX for making REST API calls
-DMAAP_PORT Port number of DMAAP for making REST API calls
-HOSTNAME Server IP address for making REST API calls
-================== ============================================================================ ============================
+Main Phase
+""""""""""
+Once the policies are created and deployed to APEX-PDP by the setup thread, five threads execute the tests below for 72 hours.
-Download and update the jmx file presented in the apex-pdp git repository - `jmx file path <https://gerrit.onap.org/r/gitweb?p=policy/apex-pdp.git;a=tree;f=testsuites/apex-pdp-stability/src/main/resources;h=99d373033a190a690d4e05012bc3a656cae7bc3f;hb=refs/heads/master>`_.
+- **Healthcheck** - checks the health status of APEX-PDP (see the sketch after this list).
+- **Prometheus Metrics** - checks that APEX-PDP is exposing Prometheus metrics (see the sketch after this list).
+- **Test Simplecontrolloop policy success case** - sends a trigger event to the *unauthenticated.DCAE_CL_OUTPUT* DMaaP topic.
+  If the policy execution is successful, 3 different notification events are sent to the *APEX-CL-MGT* topic by each of the 5 threads,
+  so the test checks that 15 notification messages with the relevant content are received in total on the *APEX-CL-MGT* topic.
+- **Test Simplecontrolloop policy failure case** - sends a trigger event with an invalid pnfName to the *unauthenticated.DCAE_CL_OUTPUT* DMaaP topic.
+  The policy execution is expected to fail due to an AAI failure response, with 2 notification events expected on the *APEX-CL-MGT* topic per thread,
+  so the test checks that 10 notification messages with the relevant content are received in total on the *APEX-CL-MGT* topic.
+- **Test Example policy success case** - sends a trigger event to the *unauthenticated.DCAE_POLICY_EXAMPLE_OUTPUT* DMaaP topic.
+  If the policy execution is successful, 4 different notification events are sent to the *APEX-CL-MGT* topic by each of the 5 threads,
+  so the test checks that 20 notification messages with the relevant content are received in total on the *APEX-CL-MGT* topic.
+- **Test Example policy failure case** - sends a trigger event with an invalid vnfName to the *unauthenticated.DCAE_POLICY_EXAMPLE_OUTPUT* DMaaP topic.
+  The policy execution is expected to fail due to an AAI failure response, with 2 notification events expected on the *APEX-CL-MGT* topic per thread,
+  so the test checks that 10 notification messages with the relevant content are received in total on the *APEX-CL-MGT* topic.
+- **Clean up DMaaP notification topic** - the *APEX-CL-MGT* notification topic is cleaned up after each test to make sure that one failure does not lead to cascading errors.
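+
+A minimal sketch of the healthcheck and metrics checks above, assuming the default
+healthcheck credentials; the metrics path is an assumption:
+
+.. code-block:: bash
+
+    # healthcheck - expects HTTP 200 with "healthy": true in the response body
+    curl -sk -u 'healthcheck:zb!XztG34' "https://${HOSTNAME}:${APEX_PORT}/policy/apex-pdp/v1/healthcheck"
+
+    # prometheus metrics - expects text-format metrics such as jvm_info
+    curl -sk -u 'healthcheck:zb!XztG34' "https://${HOSTNAME}:${APEX_PORT}/metrics"
+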
-- ThreadGroup.duration - Set the duration to 259200 seconds (72 hours)
-Use the CLI mode to start the test
+Teardown Phase
+""""""""""""""
-.. code-block:: bash
+Policies are undeployed from APEX-PDP and deleted during this phase.
+Only one thread is active, and this step runs only once, after the Main phase is complete.
- nohup ./jmeter.sh -n -t ~/apexPdpStabilityTestPlan.jmx -Jusers=1 -l ~/stability.log
+- **Undeploy Policies** - undeploys both policies from APEX-PDP using the policy/pap component.
+- **Delete Policy onap.policies.apex.Simplecontrolloop** - deletes the first APEX policy using the policy/api component.
+- **Delete Policy onap.policies.apex.Example** - deletes the second APEX policy, also using the policy/api component.
-Stability Test Results
-----------------------
+The following steps can be used to configure the parameters of the test plan.
-The stability test plan was triggered for 72 hours, injecting input events to apex-pdp pod from 5 client threads running in JMeter.
+- **HTTP Authorization Manager** - used to store user/password authentication details.
+- **HTTP Header Manager** - used to store headers which will be used for making HTTP requests.
+- **User Defined Variables** - used to store the following user-defined parameters.
+
+=================== ===============================================================================
+ **Name** **Description**
+=================== ===============================================================================
+ HOSTNAME IP Address or host name to access the components
+ PAP_PORT Port number of PAP for making REST API calls such as deploy/undeploy of policy
+ API_PORT Port number of API for making REST API calls such as create/ delete of policy
+ APEX_PORT Port number of APEX for making REST API calls such as healthcheck/metrics
+ wait Wait time if required after a request (in milliseconds)
+ threads Number of threads to run test cases in parallel
+ threadsTimeOutInMs Synchronization timer for threads running in parallel (in milliseconds)
+=================== ===============================================================================
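+
+If the plan wires these variables to JMeter properties via the __P() function (an
+assumption about the test plan), they can be overridden on the command line:
+
+.. code-block:: bash
+
+    ./apache-jmeter-5.4.1/bin/jmeter.sh -n -t apexPdpStabilityTestPlan.jmx \
+        -JHOSTNAME=10.10.10.10 -JAPEX_PORT=30001 -Jthreads=5
+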
-The stability tests were executed as part of a full ONAP OOM deployment in Nordix lab.
+Run Test
+--------
-Once the tests complete, we can generate an HTML test report via the command:
+The test was run in the background via "nohup", to prevent it from being interrupted:
.. code-block:: bash
- ~/jMeter/apache-jmeter-5.2.1/bin/jmeter -g stability.log -o ./result/
+ nohup ./apache-jmeter-5.4.1/bin/jmeter.sh -n -t apexPdpStabilityTestPlan.jmx -l stabilityTestResults.jtl
+
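+As in earlier releases, an HTML report can be generated from the results file once the run
+completes:
+
+.. code-block:: bash
+
+    ./apache-jmeter-5.4.1/bin/jmeter -g stabilityTestResults.jtl -o ./report/
+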
+Test Results
+------------
+
+**Summary**
-============================================== ================================ ============= ============ ============================
-**Number of Client Threads running in JMeter** **Total number of input events** **Success %** **Error %** **Average Time per Request**
-============================================== ================================ ============= ============ ============================
-5 129326 100% 0% 6716.12
-============================================== ================================ ============= ============ ============================
+The stability test plan was triggered for 72 hours.
+**Test Statistics**
+
+======================= ================= ================== ==================================
+**Total # of requests** **Success %** **Error %** **Average time taken per request**
+======================= ================= ================== ==================================
+428661 100 % 0.00 % 162 ms
+======================= ================= ================== ==================================
+
+.. Note::
+
+ There were no failures during the 72-hour test.
+
**JMeter Screenshot**
-.. image:: images/apex_s3p_jm-1.png
-.. image:: images/apex_s3p_jm-2.png
+.. image:: apex-s3p-results/apex_stability_jmeter_results.JPG
-:download:`result.zip <apex-s3p-results/apex_s3p_results.zip>`
+**Memory and CPU usage**
-Setting up Performance Tests in APEX
-++++++++++++++++++++++++++++++++++++
+The memory and CPU usage can be monitored by running the "top" command in the APEX-PDP pod.
+A snapshot is taken before and after test execution to monitor the changes in resource utilization.
+Prometheus metrics are also collected before and after the test execution.
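+
+A sketch of how these snapshots can be collected, assuming the deployment runs in the
+"onap" namespace with an app=policy-apex-pdp pod label (label, namespace and metrics
+path are assumptions):
+
+.. code-block:: bash
+
+    # one batch iteration of top inside the APEX-PDP pod
+    APEX_POD=$(kubectl -n onap get pods -l app=policy-apex-pdp -o jsonpath='{.items[0].metadata.name}')
+    kubectl -n onap exec "${APEX_POD}" -- top -b -n 1 > apex_top_before_72h.txt
+
+    # snapshot of the Prometheus metrics
+    curl -sk -u 'healthcheck:zb!XztG34' "https://${HOSTNAME}:${APEX_PORT}/metrics" > apex_metrics_before_72h.txt
+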
-The Performance test is performed on a similar setup to the Stability test. JMeter will send a large number of REST requests and will then retrieve those requests.
+Memory and CPU usage before test execution:
-Performance test plan will be the same as the stability test plan except for some differences listed below:
+.. image:: apex-s3p-results/apex_top_before_72h.JPG
-- Increase the number of threads from 5 to 20.
-- Reduce test time to ninety minutes. (ThreadGroup.duration - Set the duration to 5400 seconds)
+:download:`Prometheus metrics before 72h test <apex-s3p-results/apex_metrics_before_72h.txt>`
-Download and update the jmx file presented in the apex-pdp git repository - `jmx file path <https://gerrit.onap.org/r/gitweb?p=policy/apex-pdp.git;a=tree;f=testsuites/performance/performance-benchmark-test/src/main/resources;h=b0ed1058b11f82b42fb5be1a07009114e1e8b593;hb=refs/heads/master>`_.
+Memory and CPU usage after test execution:
+.. image:: apex-s3p-results/apex_top_after_72h.JPG
-Run Test
---------
+:download:`Prometheus metrics after 72h test <apex-s3p-results/apex_metrics_after_72h.txt>`
-Running the performance test will be the same as the stability test. That is, launch JMeter pointing to corresponding *.jmx* test plan. The *API_HOST* , *API_PORT* , *PAP_HOST* , *PAP_PORT* are already set up in *.jmx*.
+Performance Test of APEX-PDP
+++++++++++++++++++++++++++++
+
+Introduction
+------------
+
+Performance test of APEX-PDP is done similarly to the stability test, but in a more extreme manner, using a higher thread count.
+
+Setup Details
+-------------
+
+The performance test is performed on a setup similar to that of the stability test.
-.. code-block:: bash
- nohup ./jmeter.sh -n -t ~/performance.jmx -Jusers=1 -l ~/perf.log
+Test Plan
+---------
+
+The performance test plan is the same as the stability test plan above, except for the few differences listed below.
-Once the tests have completed, run the following the gather results.
+- Increase the number of threads used in the Main Phase from 5 to 20.
+- Reduce the test time to 2 hours.
+
+Run Test
+--------
.. code-block:: bash
- ~/jMeter/apache-jmeter-5.2.1/bin/jmeter -g perf.log -o ./performance_result/
+ nohup ./apache-jmeter-5.4.1/bin/jmeter.sh -n -t apexPdpPerformanceTestPlan.jmx -l perftestresults.jtl
-Performance Test Result
------------------------
-**Summary**
+Test Results
+------------
-Performance test was triggered for 90 minutes. The results are shown below.
+Test results are shown below.
**Test Statistics**
-============================ =========== ========= ==================================
-**Total Number of Requests** **Success** **Error** **Average Time Taken per Request**
-============================ =========== ========= ==================================
-32304 99,99 % 0.01 % 8746.50 ms
-============================ =========== ========= ==================================
+======================= ================= ================== ==================================
+**Total # of requests** **Success %** **Error %** **Average time taken per request**
+======================= ================= ================== ==================================
+46946 100 % 0.00 % 198 ms
+======================= ================= ================== ==================================
**JMeter Screenshot**
-.. image:: images/apex_perf_jm_1.PNG
+.. image:: apex-s3p-results/apex_perf_jmeter_results.JPG
+
+Summary
++++++++
-.. image:: images/apex_perf_jm_2.PNG
+Multiple policies were executed in a multi-threaded fashion for both the stability and performance tests.
+Both tests ran smoothly without any issues.
diff --git a/docs/development/devtools/clamp-s3p-results/Stability_after_stats.png b/docs/development/devtools/clamp-s3p-results/Stability_after_stats.png
new file mode 100644
index 00000000..8370f0df
--- /dev/null
+++ b/docs/development/devtools/clamp-s3p-results/Stability_after_stats.png
Binary files differ
diff --git a/docs/development/devtools/clamp-s3p-results/Stability_before_stats.png b/docs/development/devtools/clamp-s3p-results/Stability_before_stats.png
new file mode 100644
index 00000000..46eaddf6
--- /dev/null
+++ b/docs/development/devtools/clamp-s3p-results/Stability_before_stats.png
Binary files differ
diff --git a/docs/development/devtools/clamp-s3p-results/cl-s3p-performance-result-jmeter.png b/docs/development/devtools/clamp-s3p-results/cl-s3p-performance-result-jmeter.png
new file mode 100644
index 00000000..30fc4bba
--- /dev/null
+++ b/docs/development/devtools/clamp-s3p-results/cl-s3p-performance-result-jmeter.png
Binary files differ
diff --git a/docs/development/devtools/clamp-s3p-results/controlloop_stability_jmeter.png b/docs/development/devtools/clamp-s3p-results/controlloop_stability_jmeter.png
new file mode 100644
index 00000000..058b98ae
--- /dev/null
+++ b/docs/development/devtools/clamp-s3p-results/controlloop_stability_jmeter.png
Binary files differ
diff --git a/docs/development/devtools/clamp-s3p-results/controlloop_stability_table.png b/docs/development/devtools/clamp-s3p-results/controlloop_stability_table.png
new file mode 100644
index 00000000..0289a289
--- /dev/null
+++ b/docs/development/devtools/clamp-s3p-results/controlloop_stability_table.png
Binary files differ
diff --git a/docs/development/devtools/clamp-s3p.rst b/docs/development/devtools/clamp-s3p.rst
new file mode 100644
index 00000000..e01848da
--- /dev/null
+++ b/docs/development/devtools/clamp-s3p.rst
@@ -0,0 +1,197 @@
+.. This work is licensed under a
+.. Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+
+.. _controlloop-s3p-label:
+
+.. toctree::
+ :maxdepth: 2
+
+Policy Clamp Controlloop
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+Both the Performance and the Stability tests were executed by performing requests
+against controlloop components installed as docker images in a local environment.
+
+Setup Details
++++++++++++++
+
+- Controlloop runtime component docker image is started and running.
+- Participant docker images policy-clamp-cl-pf-ppnt, policy-clamp-cl-http-ppnt, policy-clamp-cl-k8s-ppnt are started and running.
+- DMaaP simulator for communication between components.
+- mariadb docker container for the policy and controlloop databases.
+- policy-api for communication between the policy participant and the policy framework.
+- Both tests were run via JMeter, which was installed on a separate VM.
+
+Stability Test of Controlloop components
+++++++++++++++++++++++++++++++++++++++++
+
+Test Plan
+---------
+The 72-hour stability test ran the following steps sequentially in a single-threaded loop.
+
+- **Create Policy defaultDomain** - creates an operational policy using policy/api component
+- **Delete Policy sampleDomain** - deletes the operational policy sampleDomain using policy/api component
+- **Commission Controlloop definition** - commissions the controlloop definition in the runtime (a request sketch for these runtime calls follows this list)
+- **Instantiate controlloop** - Instantiate the controlloop towards participants
+- **Check controlloop state** - check the current state of controlloop
+- **Change State to PASSIVE** - change the state of the controlloop to PASSIVE
+- **Check controlloop state** - check the current state of controlloop
+- **Change State to UNINITIALISED** - change the state of the controlloop to UNINITIALISED
+- **Check controlloop state** - check the current state of controlloop
+- **Delete instantiated controlloop** - delete the instantiated controlloop from all participants
+- **Delete ControlLoop Definition** - delete the controlloop definition on runtime
+
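+A hedged sketch of the runtime calls behind these steps; the endpoint paths, credentials
+and payload below are assumptions about the controlloop runtime REST API and should be
+checked against the component's OpenAPI description:
+
+.. code-block:: bash
+
+    BASE="http://${RUNTIME_HOST}:${RUNTIME_PORT}/onap/controlloop/v2"
+
+    # commission a controlloop definition (service template file name is illustrative)
+    curl -s -u 'runtimeUser:password' -X POST "${BASE}/commission" \
+        -H 'Content-Type: application/json' -d @controlloop-definition.json
+
+    # change the state of an instantiated controlloop, e.g. to PASSIVE
+    curl -s -u 'runtimeUser:password' -X PUT "${BASE}/instantiation/command" \
+        -H 'Content-Type: application/json' \
+        -d '{"orderedState": "PASSIVE", "controlLoopIdentifierList": [{"name": "MyControlLoop", "version": "1.0.0"}]}'
+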
+The following steps can be used to configure the parameters of the test plan.
+
+- **HTTP Authorization Manager** - used to store user/password authentication details.
+- **HTTP Header Manager** - used to store headers which will be used for making HTTP requests.
+- **User Defined Variables** - used to store the following user-defined parameters.
+
+============================= ===========================================================================
+ **Name**                     **Description**
+============================= ===========================================================================
+ RUNTIME_HOST                 IP address or host name of the controlloop runtime component
+ RUNTIME_PORT                 Port number of the controlloop runtime component for making REST API calls
+ POLICY_PARTICIPANT_HOST      IP address or host name of the policy participant
+ POLICY_PARTICIPANT_HOST_PORT Port number of the policy participant
+============================= ===========================================================================
+
+The test was run in the background via "nohup", to prevent it from being interrupted:
+
+.. code-block:: bash
+
+ nohup ./jMeter/apache-jmeter-5.2.1/bin/jmeter -n -t stability.jmx -l testresults.jtl
+
+Test Results
+------------
+
+**Summary**
+
+The stability test plan was triggered for 72 hours.
+
+.. Note::
+
+ .. container:: paragraph
+
+ The assertions of state changes are not completely covered, as the stability test is run with the controlloop components
+ alone rather than a complete Policy Framework deployment, which makes it difficult for actual state changes from
+ PASSIVE to RUNNING etc. to happen.
+
+**Test Statistics**
+
+======================= ================= ================== ==================================
+**Total # of requests** **Success %** **Error %** **Average time taken per request**
+======================= ================= ================== ==================================
+99992 100.00 % 0.00 % 192 ms
+======================= ================= ================== ==================================
+
+**Controlloop component Setup**
+
+================ ========================================================== ============================================ =========================
+**CONTAINER ID** **IMAGE**                                                  **PORTS**                                    **NAMES**
+================ ========================================================== ============================================ =========================
+a9cb0cd103cf     onap/policy-clamp-cl-runtime:latest                        6969/tcp                                     policy-clamp-cl-runtime
+886e572b8438     onap/policy-clamp-cl-pf-ppnt:latest                        6973/tcp                                     policy-clamp-cl-pf-ppnt
+035707b1b95f     nexus3.onap.org:10001/onap/policy-api:latest               6969/tcp                                     policy-api
+d34204f95ff3     onap/policy-clamp-cl-http-ppnt:latest                      6971/tcp                                     policy-clamp-cl-http-ppnt
+4470e608c9a8     onap/policy-clamp-cl-k8s-ppnt:latest                       6972/tcp, 8083/tcp                           policy-clamp-cl-k8s-ppnt
+62229d46b79c     nexus3.onap.org:10001/onap/policy-models-simulator:latest  3905/tcp, 6666/tcp, 6668-6670/tcp, 6680/tcp  simulator
+efaf0ca5e1f0     nexus3.onap.org:10001/mariadb:10.5.8                       3306/tcp                                     mariadb
+================ ========================================================== ============================================ =========================
+
+.. Note::
+
+ .. container:: paragraph
+
+ There were no failures during the 72-hour test.
+
+**JMeter Screenshot**
+
+.. image:: clamp-s3p-results/controlloop_stability_jmeter.png
+
+**JMeter Screenshot**
+
+.. image:: clamp-s3p-results/controlloop_stability_table.png
+
+**Memory and CPU usage**
+
+The memory and CPU usage can be monitored by running the "docker stats" command. A snapshot is taken before and after test execution to monitor the changes in resource utilization.
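+
+For example, a non-interactive snapshot can be captured before and after the test:
+
+.. code-block:: bash
+
+    # one-shot snapshot of CPU/memory usage for all running containers
+    docker stats --no-stream > stability_before_stats.txt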
+
+Memory and CPU usage before test execution:
+
+.. image:: clamp-s3p-results/Stability_before_stats.png
+
+Memory and CPU usage after test execution:
+
+.. image:: clamp-s3p-results/Stability_after_stats.png
+
+
+Performance Test of Controlloop components
+++++++++++++++++++++++++++++++++++++++++++
+
+Introduction
+------------
+
+Performance test of Controlloop components has the goal of testing the min/avg/max processing time and REST call throughput for all requests when multiple requests are sent at the same time.
+
+Setup Details
+-------------
+
+The performance test is performed on a setup similar to that of the stability test. The JMeter VM sends a large number of REST requests to the runtime component and collects the statistics.
+
+
+Test Plan
+---------
+
+The performance test plan is the same as the stability test plan above, except for the few differences listed below.
+
+- Increase the number of threads up to 5 (simulating 5 users' behaviours at the same time).
+- Reduce the test time to 2 hours.
+
+Run Test
+--------
+
+Running/Triggering the performance test will be the same as the stability test. That is, launch JMeter pointing to the corresponding *.jmx* test plan. The *RUNTIME_HOST*, *RUNTIME_PORT*, *POLICY_PARTICIPANT_HOST*, and *POLICY_PARTICIPANT_HOST_PORT* are already set up in the *.jmx* file.
+
+.. code-block:: bash
+
+ nohup ./jMeter/apache-jmeter-5.2.1/bin/jmeter -n -t performance.jmx -l testresults.jtl
+
+Once the test execution is completed, execute the below script to get the statistics:
+
+.. code-block:: bash
+
+ $ cd ./clamp/testsuites/performance/src/main/resources/testplans
+ $ ./results.sh resultTree.log
+
+Test Results
+------------
+
+Test results are shown below.
+
+**Test Statistics**
+
+======================= ================= ================== ==================================
+**Total # of requests** **Success %** **Error %** **Average time taken per request**
+======================= ================= ================== ==================================
+13809 100 % 0.00 % 206 ms
+======================= ================= ================== ==================================
+
+**Controlloop component Setup**
+
+================ ========================================================== ============================================ =========================
+**CONTAINER ID** **IMAGE**                                                  **PORTS**                                    **NAMES**
+================ ========================================================== ============================================ =========================
+a9cb0cd103cf     onap/policy-clamp-cl-runtime:latest                        6969/tcp                                     policy-clamp-cl-runtime
+886e572b8438     onap/policy-clamp-cl-pf-ppnt:latest                        6973/tcp                                     policy-clamp-cl-pf-ppnt
+035707b1b95f     nexus3.onap.org:10001/onap/policy-api:latest               6969/tcp                                     policy-api
+d34204f95ff3     onap/policy-clamp-cl-http-ppnt:latest                      6971/tcp                                     policy-clamp-cl-http-ppnt
+4470e608c9a8     onap/policy-clamp-cl-k8s-ppnt:latest                       6972/tcp, 8083/tcp                           policy-clamp-cl-k8s-ppnt
+62229d46b79c     nexus3.onap.org:10001/onap/policy-models-simulator:latest  3905/tcp, 6666/tcp, 6668-6670/tcp, 6680/tcp  simulator
+efaf0ca5e1f0     nexus3.onap.org:10001/mariadb:10.5.8                       3306/tcp                                     mariadb
+================ ========================================================== ============================================ =========================
+
+**JMeter Screenshot**
+
+.. image:: clamp-s3p-results/cl-s3p-performance-result-jmeter.png
diff --git a/docs/development/devtools/devtools.rst b/docs/development/devtools/devtools.rst
index 1a001b02..0cf11a4c 100644
--- a/docs/development/devtools/devtools.rst
+++ b/docs/development/devtools/devtools.rst
@@ -312,6 +312,7 @@ familiar with the Policy Framework components and test any local changes.
drools-s3p.rst
xacml-s3p.rst
distribution-s3p.rst
+ clamp-s3p.rst
Running the Pairwise Tests
**************************
diff --git a/docs/development/devtools/distribution-s3p.rst b/docs/development/devtools/distribution-s3p.rst
index 13c47924..015e10bb 100644
--- a/docs/development/devtools/distribution-s3p.rst
+++ b/docs/development/devtools/distribution-s3p.rst
@@ -13,7 +13,10 @@ Policy Distribution component
VM Details
----------
-The stability and performance tests are performed on VM's running in the OpenStack cloud environment in the ONAP integration lab. There are two separate VMs, one for running backend policy services which policy distribution needs, and the other for the policy distribution service itself and Jmeter.
+The stability and performance tests are performed on VMs running in the OpenStack cloud
+environment in the ONAP integration lab. There are two separate VMs: one for running the
+backend policy services which policy distribution needs, and the other for the policy
+distribution service itself and JMeter.
**OpenStack environment details**
@@ -28,20 +31,9 @@ The stability and performance tests are performed on VM's running in the OpenSta
- Docker version 19.03.8, build afacb8b7f0
- Java: openjdk 11.0.8 2020-07-14
-**JMeter and Distribution VM details (VM2)**
-- OS: Ubuntu 18.04.5 LTS
-- CPU: 8 core, Intel Xeon E3-12xx v2 (Ivy Bridge), 2693.668 MHz, 16384 kB cache
-- RAM: 32 GB
-- HardDisk: 200 GB
-- Docker version 19.03.8, build afacb8b7f0
-- Java: openjdk 11.0.8 2020-07-14
-- JMeter: 5.1.1
-
-
-VM1 & VM2: Common Setup
------------------------
-Make sure to execute below commands on both VM1 & VM2
+Common Setup
+------------
Update the ubuntu software installer
@@ -64,20 +56,20 @@ Ensure that the Java version that is executing is OpenJDK version 11
OpenJDK Runtime Environment (build 11.0.8+10-post-Ubuntu-0ubuntu118.04.1)
OpenJDK 64-Bit Server VM (build 11.0.8+10-post-Ubuntu-0ubuntu118.04.1, mixed mode, sharing)
-Install Docker
+Install Docker and Docker Compose
.. code-block:: bash
# Add docker repository
- curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
- sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
- sudo apt update
+ curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
+
+ echo "deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \
+ $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
- # Check available docker versions (if necessary)
- apt-cache policy docker-ce
+ sudo apt update
# Install docker
- sudo apt install -y docker-ce=5:19.03.8~3-0~ubuntu-bionic docker-ce-cli=5:19.03.8~3-0~ubuntu-bionic containerd.io
+ sudo apt-get install -y docker-ce docker-ce-cli containerd.io
Change the permissions of the Docker socket file
@@ -89,81 +81,85 @@ Check the status of the Docker service and ensure it is running correctly
.. code-block:: bash
- $ systemctl status --no-pager docker
+ systemctl status --no-pager docker
docker.service - Docker Application Container Engine
Loaded: loaded (/lib/systemd/system/docker.service; enabled; vendor preset: enabled)
Active: active (running) since Wed 2020-10-14 13:59:40 UTC; 1 weeks 0 days ago
# ... (truncated for brevity)
- $ docker ps
+ docker ps
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
-Clone the policy-distribution repo to access the test scripts
+Install and verify docker-compose
.. code-block:: bash
- git clone https://gerrit.onap.org/r/policy/distribution
+ # Install compose
+ sudo curl -L "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
+ sudo chmod +x /usr/local/bin/docker-compose
+ # Check if install was successful
+ docker-compose --version
-VM1 Only: Install Simulators, Policy-PAP, Policy-API and MariaDB
-----------------------------------------------------------------
+Clone the policy-distribution repo to access the test scripts
-Modify the setup_components.sh script located at:
+.. code-block:: bash
-- ~/distribution/testsuites/stability/src/main/resources/simulatorsetup/setup_components.sh
+ git clone https://gerrit.onap.org/r/policy/distribution
-Ensure the correct docker image versions are specified - e.g. for Guilin-RC0
+.. _setup-distribution-s3p-components:
-- nexus3.onap.org:10001/onap/policy-api:2.3.2
-- nexus3.onap.org:10001/onap/policy-pap:2.3.2
+Start services for MariaDB, Policy API, PAP and Distribution
+------------------------------------------------------------
-Run the setup_components.sh script to start the test support components:
+Navigate to the main folder for scripts to setup services:
.. code-block:: bash
- ~/distribution/testsuites/stability/src/main/resources/simulatorsetup/setup_components.sh
+ cd ~/distribution/testsuites/stability/src/main/resources/setup
-After installation, ensure the following docker containers are up and running:
+Modify the versions.sh script to match all the versions being tested.
.. code-block:: bash
- $ docker ps
- CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
- a187cb0ff08a nexus3.onap.org:10001/onap/policy-pap:2.3.2 "bash ./policy-pap.sh" 4 days ago Up 4 days 0.0.0.0:7000->6969/tcp policy-pap
- 2f7632fe90c3 nexus3.onap.org:10001/onap/policy-api:2.3.2 "bash ./policy-api.sh" 4 days ago Up 4 days 0.0.0.0:6969->6969/tcp policy-api
- 70fa27d6d992 pdp/simulator:latest "bash pdp-sim.sh" 4 days ago Up 4 days pdp-simulator
- 3c9ff28ba050 dmaap/simulator:latest "bash dmaap-sim.sh" 4 days ago Up 4 days 0.0.0.0:3904->3904/tcp message-router
- 60cfcf8cfe65 mariadb:10.2.14 "docker-entrypoint.s…" 4 days ago Up 4 days 0.0.0.0:3306->3306/tcp mariadb
-
+ vi ~/distribution/testsuites/stability/src/main/resources/setup/versions.sh
-VM2 Only: Install Distribution
-------------------------------
+Ensure the correct docker image versions are specified - e.g. for Istanbul-M4:
-Modify the setup_distribution.sh script located at:
+- export POLICY_DIST_VERSION=2.6.1-SNAPSHOT
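+
+For illustration, versions.sh is a set of such exports. Variable names other than
+POLICY_DIST_VERSION are hypothetical here; check them against the actual script:
+
+.. code-block:: bash
+
+   # Hypothetical sketch of versions.sh (versions taken from the docker ps output below)
+   export POLICY_DIST_VERSION=2.6.1-SNAPSHOT
+   export POLICY_API_VERSION=2.5.1-SNAPSHOT
+   export POLICY_PAP_VERSION=2.5.1-SNAPSHOT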
-- ~/distribution/testsuites/stability/src/main/resources/distributionsetup/setup_distribution.sh
-
-Ensure the correct docker image version is specified - e.g. for Guilin-RC0:
-
-- nexus3.onap.org:10001/onap/policy-distribution:2.4.2
-
-Run the setup_distribution.sh script to install the distribution service, provide the IP of VM1 (twice) as the arguments to the script:
+Run the start.sh script to start the components. After installation, the script will execute
+``docker ps`` and show the running containers.
.. code-block:: bash
- ~/distribution/testsuites/stability/src/main/resources/distributionsetup/setup_distribution.sh <vm1-ipaddr> <vm1-ipaddr>
+ ./start.sh
-Ensure the distribution container is running.
+ Creating network "setup_default" with the default driver
+ Creating policy-distribution ... done
+ Creating mariadb ... done
+ Creating simulator ... done
+ Creating policy-db-migrator ... done
+ Creating policy-api ... done
+ Creating policy-pap ... done
+
+ CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
+ f91be98ad1f4 nexus3.onap.org:10001/onap/policy-pap:2.5.1-SNAPSHOT "/opt/app/policy/pap…" 1 second ago Up Less than a second 6969/tcp policy-pap
+ d92cdbe971d4 nexus3.onap.org:10001/onap/policy-api:2.5.1-SNAPSHOT "/opt/app/policy/api…" 1 second ago Up Less than a second 6969/tcp policy-api
+ 9a019f5d641e nexus3.onap.org:10001/onap/policy-db-migrator:2.3.1-SNAPSHOT "/opt/app/policy/bin…" 2 seconds ago Up 1 second 6824/tcp policy-db-migrator
+ 108ba238edeb nexus3.onap.org:10001/mariadb:10.5.8 "docker-entrypoint.s…" 3 seconds ago Up 1 second 3306/tcp mariadb
+ bec9b223e79f nexus3.onap.org:10001/onap/policy-models-simulator:2.5.1-SNAPSHOT "simulators.sh" 3 seconds ago Up 1 second 3905/tcp simulator
+ 74aa5abeeb08 nexus3.onap.org:10001/onap/policy-distribution:2.6.1-SNAPSHOT "/opt/app/policy/bin…" 3 seconds ago Up 1 second 6969/tcp, 9090/tcp policy-distribution
-.. code-block:: bash
- $ docker ps
- CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
- 9a8db2bad156 nexus3.onap.org:10001/onap/policy-distribution:2.4.2 "bash ./policy-dist.…" 29 hours ago Up 29 hours 0.0.0.0:6969->6969/tcp, 0.0.0.0:9090->9090/tcp policy-distribution
+.. note::
+    The containers in this docker-compose setup run with an HTTP configuration. For HTTPS, the
+    ports and configurations will need to be changed, and certificates and keys must be generated
+    for security.
-VM2 Only: Install JMeter
-------------------------
+Install JMeter
+--------------
Download and install JMeter
@@ -174,15 +170,17 @@ Download and install JMeter
# Install JMeter
mkdir -p jmeter
- wget https://archive.apache.org/dist/jmeter/binaries/apache-jmeter-5.1.1.zip
- unzip -qd jmeter apache-jmeter-5.1.1.zip
- rm apache-jmeter-5.1.1.zip
+ cd jmeter
+ wget https://dlcdn.apache.org/jmeter/binaries/apache-jmeter-5.4.1.zip
+ unzip -q apache-jmeter-5.4.1.zip
+ rm apache-jmeter-5.4.1.zip
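+
+Optionally verify the installation (a quick sanity check; the path assumes the folder created
+above):
+
+.. code-block:: bash
+
+   ~/jmeter/apache-jmeter-5.4.1/bin/jmeter --version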
-VM2 Only: Install & configure visualVM
+Install & configure visualVM
--------------------------------------
-VisualVM needs to be installed in the virtual machine running Distribution (VM2). It will be used to monitor CPU, Memory and GC for Distribution while the stability tests are running.
+VisualVM needs to be installed in the virtual machine running Distribution. It will be used to
+monitor CPU, Memory and GC for Distribution while the stability tests are running.
.. code-block:: bash
@@ -192,6 +190,9 @@ Run these commands to configure permissions
.. code-block:: bash
+ # Create an empty policy file and set globally accessible permissions on it, so that
+ # the redirection below (which runs as the non-root user) can write to it
+ sudo touch /usr/lib/jvm/java-11-openjdk-amd64/bin/visualvm.policy
+ sudo chmod 777 /usr/lib/jvm/java-11-openjdk-amd64/bin/visualvm.policy
+
# Create Java security policy file for VisualVM
sudo cat > /usr/lib/jvm/java-11-openjdk-amd64/bin/visualvm.policy << EOF
grant codebase "jrt:/jdk.jstatd" {
@@ -202,19 +203,20 @@ Run these commands to configure permissions
};
EOF
- # Set globally accessable permissions on policy file
- sudo chmod 777 /usr/lib/jvm/java-11-openjdk-amd64/bin/visualvm.policy
-
Run the following command to start jstatd using port 1111
.. code-block:: bash
/usr/lib/jvm/java-11-openjdk-amd64/bin/jstatd -p 1111 -J-Djava.security.policy=/usr/lib/jvm/java-11-openjdk-amd64/bin/visualvm.policy &
-Run visualVM to connect to localhost:9090
+Run visualVM to connect to POLICY_DISTRIBUTION_IP:9090
.. code-block:: bash
+ # Get the Policy Distribution container IP
+ echo $(docker inspect -f '{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}' policy-distribution)
+
+ # Start visual vm
visualvm &
This will load up the visualVM GUI
@@ -222,12 +224,13 @@ This will load up the visualVM GUI
Connect to Distribution JMX Port.
1. Right click on "Local" in the left panel of the screen and select "Add JMX Connection"
- 2. Enter the Port 9090. this is the JMX port exposed by the distribution container
+ 2. Enter the Distribution container IP and Port 9090. This is the JMX port exposed by the
+ distribution container
3. Double click on the newly added nodes under "Local" to start monitoring CPU, Memory & GC.
Example Screenshot of visualVM
-.. image:: images/distribution-s3p-vvm-sample.png
+.. image:: images/distribution/distribution-s3p-vvm-sample.png
Stability Test of Policy Distribution
@@ -236,9 +239,17 @@ Stability Test of Policy Distribution
Introduction
------------
-The 72 hour Stability Test for policy distribution has the goal of introducing a steady flow of transactions initiated from a test client server running JMeter. The policy distribution is configured with a special FileSystemReception plugin to monitor a local directory for newly added csar files to be processed by itself. The input CSAR will be added/removed by the test client(JMeter) and the result will be pulled from the backend (PAP and PolicyAPI) by the test client (JMeter).
+The 72 hour Stability Test for policy distribution has the goal of introducing a steady flow of
+transactions initiated from a test client server running JMeter. Policy distribution is
+configured with a special FileSystemReception plugin that monitors a local directory for newly
+added CSAR files, which it then processes. The input CSAR will be added and removed by the test
+client (JMeter), and the results will be pulled from the backend (PAP and Policy API) by the
+test client (JMeter).
-The test will be performed in an environment where Jmeter will continuously add/remove a test csar into the special directory where policy distribution is monitoring and will then get the processed results from PAP and PolicyAPI to verify the successful deployment of the policy. The policy will then be undeployed and the test will loop continuously until 72 hours have elapsed.
+The test will be performed in an environment where JMeter continuously adds and removes a test
+CSAR in the special directory that policy distribution is monitoring, and then gets the processed
+results from PAP and the Policy API to verify the successful deployment of the policy. The policy
+is then undeployed, and the test loops continuously until 72 hours have elapsed.
Test Plan Sequence
@@ -274,24 +285,33 @@ The following steps can be used to configure the parameters of the test plan.
Screenshot of Distribution stability test plan
-.. image:: images/distribution-s3p-testplan.png
+.. image:: images/distribution/distribution-s3p-testplan.png
Running the Test Plan
---------------------
-Edit the /tmp folder permissions to allow the testplan to insert the CSAR into the /tmp/policydistribution/distributionmount folder
+Check if the /tmp/policydistribution/distributionmount folder exists, as it should have been
+created during the start.sh script execution. If not, run the following commands to create the
+folder and change its permissions to allow the test plan to insert the CSAR into it.
.. code-block:: bash
sudo mkdir -p /tmp/policydistribution/distributionmount
sudo chmod -R a+trwx /tmp
-From the apache JMeter folder run the test for 72h, pointing it towards the stability.jmx file inside the testplans folder and specifying a logfile to collect the results
+
+Navigate to the stability test folder.
+
+.. code-block:: bash
+
+ cd ~/distribution/testsuites/stability/src/main/resources/testplans/
+
+Execute the run_test.sh script:
.. code-block:: bash
- ~/jmeter/apache-jmeter-5.1.1/bin/jmeter -n -t ~/distribution/testsuites/stability/src/main/resources/testplans/stability.jmx -Jduration=259200 -l ~/distr-stability.jtl &
+ ./run_test.sh
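+
+For reference, run_test.sh is roughly equivalent to invoking JMeter directly against the
+stability test plan (a sketch; the exact flags live in run_test.sh):
+
+.. code-block:: bash
+
+   # 72h = 259200 seconds; results are collected into a JTL logfile
+   ~/jmeter/apache-jmeter-5.4.1/bin/jmeter -n -t stability.jmx -Jduration=259200 -l stability.jtl
+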
Test Results
@@ -304,13 +324,13 @@ Test Results
**Test Statistics**
-.. image:: images/dist_stability_statistics.PNG
-.. image:: images/dist_stability_threshold.PNG
+.. image:: images/distribution/dist_stability_statistics.PNG
+.. image:: images/distribution/dist_stability_threshold.PNG
**VisualVM Screenshots**
-.. image:: images/dist_stability_monitor.PNG
-.. image:: images/dist_stability_threads.PNG
+.. image:: images/distribution/dist_stability_monitor.PNG
+.. image:: images/distribution/dist_stability_threads.PNG
Performance Test of Policy Distribution
@@ -319,9 +339,12 @@ Performance Test of Policy Distribution
Introduction
------------
-The 4h Performance Test of Policy Distribution has the goal of testing the min/avg/max processing time and rest call throughput for all the requests when the number of requests are large enough to saturate the resource and find the bottleneck.
+The 4h Performance Test of Policy Distribution has the goal of testing the min/avg/max processing
+time and REST call throughput for all the requests when the number of requests is large enough to
+saturate the resources and find the bottleneck.
-It also tests that distribution can handle multiple policy CSARs and that these are deployed within 30 seconds consistently.
+It also tests that distribution can handle multiple policy CSARs and that these are deployed within
+30 seconds consistently.
Setup Details
@@ -335,26 +358,33 @@ Test Plan Sequence
Performance test plan is different from the stability test plan.
-- Instead of handling one policy csar at a time, multiple csar's are deployed within the watched folder at the exact same time.
+- Instead of handling one policy CSAR at a time, multiple CSARs are deployed within the watched
+  folder at the exact same time.
- We expect all policies from these csar's to be deployed within 30 seconds.
-- There are also multithreaded tests running towards the healthcheck and statistics endpoints of the distribution service.
+- There are also multithreaded tests running against the healthcheck and statistics endpoints of
+  the distribution service (see the sketch below).
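+
+As an illustration of the flow above, a minimal sketch of what the test plan exercises; the
+endpoint paths, port, credentials and CSAR locations here are assumptions for illustration only:
+
+.. code-block:: bash
+
+   # Hypothetical sketch only - the real logic lives in the JMeter test plan
+   WATCHED_DIR=/tmp/policydistribution/distributionmount
+
+   # Drop several CSARs into the watched folder at the same time
+   cp ~/csars/sample_*.csar "${WATCHED_DIR}/"
+
+   # Concurrently poll the distribution healthcheck and statistics endpoints
+   curl -s -u '<user>:<pass>' http://<distribution-ip>:6969/healthcheck &
+   curl -s -u '<user>:<pass>' http://<distribution-ip>:6969/statistics &
+   wait
+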
Running the Test Plan
---------------------
-Edit the /tmp folder permissions to allow the Testplan to insert the CSAR into the /tmp/policydistribution/distributionmount folder.
+Check that the /tmp folder permissions allow the test plan to insert the CSAR into the
+/tmp/policydistribution/distributionmount folder.
+
+Clean up from any previous run. If necessary, bring the containers down with the ``down.sh``
+script from the setup folder mentioned in :ref:`Setup components <setup-distribution-s3p-components>`;
+a sketch is shown after the commands below.
.. code-block:: bash
sudo mkdir -p /tmp/policydistribution/distributionmount
sudo chmod -R a+trwx /tmp
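+
+A cleanup sketch using the ``down.sh`` script mentioned above (the path assumes the stability
+setup folder from :ref:`Setup components <setup-distribution-s3p-components>`):
+
+.. code-block:: bash
+
+   cd ~/distribution/testsuites/stability/src/main/resources/setup
+   ./down.sh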
-From the apache JMeter folder run the test for 4h, pointing it towards the performance.jmx file inside the testplans folder and specifying a logfile to collect the results
+Navigate to the testplan folder and execute the test script:
.. code-block:: bash
- ~/jmeter/apache-jmeter-5.1.1/bin/jmeter -n -t ~/distribution/testsuites/performance/src/main/resources/testplans/performance.jmx -Jduration=14400 -l ~/distr-performance.jtl &
+ cd ~/distribution/testsuites/performance/src/main/resources/testplans/
+ ./run_test.sh
+
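+Similarly, this run_test.sh roughly corresponds to a direct 4h JMeter invocation (a sketch; the
+exact flags live in run_test.sh):
+
+.. code-block:: bash
+
+   # 4h = 14400 seconds; results are collected into a JTL logfile
+   ~/jmeter/apache-jmeter-5.4.1/bin/jmeter -n -t performance.jmx -Jduration=14400 -l performance.jtl
+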
Test Results
------------
@@ -366,10 +396,10 @@ Test Results
**Test Statistics**
-.. image:: images/dist_perf_statistics.PNG
-.. image:: images/dist_perf_threshold.PNG
+.. image:: images/distribution/performance-statistics.png
+.. image:: images/distribution/performance-threshold.png
**VisualVM Screenshots**
-.. image:: images/20201020-1730-distr-performance-20201020T2025-monitor.png
-.. image:: images/20201020-1730-distr-performance-20201020T2025-threads.png
+.. image:: images/distribution/performance-monitor.png
+.. image:: images/distribution/performance-threads.png
diff --git a/docs/development/devtools/images/20201020-1730-distr-performance-20201020T2025-monitor.png b/docs/development/devtools/images/20201020-1730-distr-performance-20201020T2025-monitor.png
deleted file mode 100755
index 8ef443b1..00000000
--- a/docs/development/devtools/images/20201020-1730-distr-performance-20201020T2025-monitor.png
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/20201020-1730-distr-performance-20201020T2025-threads.png b/docs/development/devtools/images/20201020-1730-distr-performance-20201020T2025-threads.png
deleted file mode 100755
index 8f4731c4..00000000
--- a/docs/development/devtools/images/20201020-1730-distr-performance-20201020T2025-threads.png
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/apex-s3p-vvm-sample.jpg b/docs/development/devtools/images/apex-s3p-vvm-sample.jpg
deleted file mode 100644
index 20fac3cc..00000000
--- a/docs/development/devtools/images/apex-s3p-vvm-sample.jpg
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/apex_perf_jm_1.PNG b/docs/development/devtools/images/apex_perf_jm_1.PNG
deleted file mode 100644
index a1852be6..00000000
--- a/docs/development/devtools/images/apex_perf_jm_1.PNG
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/apex_perf_jm_2.PNG b/docs/development/devtools/images/apex_perf_jm_2.PNG
deleted file mode 100644
index d91ec4a3..00000000
--- a/docs/development/devtools/images/apex_perf_jm_2.PNG
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/apex_s3p_jm-1.png b/docs/development/devtools/images/apex_s3p_jm-1.png
deleted file mode 100644
index 92ca5765..00000000
--- a/docs/development/devtools/images/apex_s3p_jm-1.png
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/apex_s3p_jm-2.png b/docs/development/devtools/images/apex_s3p_jm-2.png
deleted file mode 100644
index 8cd24c89..00000000
--- a/docs/development/devtools/images/apex_s3p_jm-2.png
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/dist_perf_statistics.PNG b/docs/development/devtools/images/dist_perf_statistics.PNG
deleted file mode 100644
index eeefeeee..00000000
--- a/docs/development/devtools/images/dist_perf_statistics.PNG
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/dist_perf_threshold.PNG b/docs/development/devtools/images/dist_perf_threshold.PNG
deleted file mode 100644
index 58fbffd1..00000000
--- a/docs/development/devtools/images/dist_perf_threshold.PNG
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/dist_stability_monitor.PNG b/docs/development/devtools/images/distribution/dist_stability_monitor.PNG
index 83eae8cc..83eae8cc 100644
--- a/docs/development/devtools/images/dist_stability_monitor.PNG
+++ b/docs/development/devtools/images/distribution/dist_stability_monitor.PNG
Binary files differ
diff --git a/docs/development/devtools/images/dist_stability_statistics.PNG b/docs/development/devtools/images/distribution/dist_stability_statistics.PNG
index dce9b7cc..dce9b7cc 100644
--- a/docs/development/devtools/images/dist_stability_statistics.PNG
+++ b/docs/development/devtools/images/distribution/dist_stability_statistics.PNG
Binary files differ
diff --git a/docs/development/devtools/images/dist_stability_threads.PNG b/docs/development/devtools/images/distribution/dist_stability_threads.PNG
index 13e27c99..13e27c99 100644
--- a/docs/development/devtools/images/dist_stability_threads.PNG
+++ b/docs/development/devtools/images/distribution/dist_stability_threads.PNG
Binary files differ
diff --git a/docs/development/devtools/images/dist_stability_threshold.PNG b/docs/development/devtools/images/distribution/dist_stability_threshold.PNG
index d65e8cc3..d65e8cc3 100644
--- a/docs/development/devtools/images/dist_stability_threshold.PNG
+++ b/docs/development/devtools/images/distribution/dist_stability_threshold.PNG
Binary files differ
diff --git a/docs/development/devtools/images/distribution-s3p-testplan.png b/docs/development/devtools/images/distribution/distribution-s3p-testplan.png
index 7a8559ce..7a8559ce 100644
--- a/docs/development/devtools/images/distribution-s3p-testplan.png
+++ b/docs/development/devtools/images/distribution/distribution-s3p-testplan.png
Binary files differ
diff --git a/docs/development/devtools/images/distribution-s3p-vvm-sample.png b/docs/development/devtools/images/distribution/distribution-s3p-vvm-sample.png
index 4b2aa663..4b2aa663 100644
--- a/docs/development/devtools/images/distribution-s3p-vvm-sample.png
+++ b/docs/development/devtools/images/distribution/distribution-s3p-vvm-sample.png
Binary files differ
diff --git a/docs/development/devtools/images/distribution/performance-monitor.png b/docs/development/devtools/images/distribution/performance-monitor.png
new file mode 100644
index 00000000..e7a12ed7
--- /dev/null
+++ b/docs/development/devtools/images/distribution/performance-monitor.png
Binary files differ
diff --git a/docs/development/devtools/images/distribution/performance-statistics.png b/docs/development/devtools/images/distribution/performance-statistics.png
new file mode 100644
index 00000000..6530a1bc
--- /dev/null
+++ b/docs/development/devtools/images/distribution/performance-statistics.png
Binary files differ
diff --git a/docs/development/devtools/images/distribution/performance-threads.png b/docs/development/devtools/images/distribution/performance-threads.png
new file mode 100755
index 00000000..b59b7db6
--- /dev/null
+++ b/docs/development/devtools/images/distribution/performance-threads.png
Binary files differ
diff --git a/docs/development/devtools/images/distribution/performance-threshold.png b/docs/development/devtools/images/distribution/performance-threshold.png
new file mode 100644
index 00000000..df15ba0f
--- /dev/null
+++ b/docs/development/devtools/images/distribution/performance-threshold.png
Binary files differ
diff --git a/docs/development/devtools/images/frankfurt/apex_s3p_jm-1.png b/docs/development/devtools/images/frankfurt/apex_s3p_jm-1.png
deleted file mode 100644
index 07b28590..00000000
--- a/docs/development/devtools/images/frankfurt/apex_s3p_jm-1.png
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/frankfurt/apex_s3p_jm-2.png b/docs/development/devtools/images/frankfurt/apex_s3p_jm-2.png
deleted file mode 100644
index cb68c897..00000000
--- a/docs/development/devtools/images/frankfurt/apex_s3p_jm-2.png
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/frankfurt/apex_s3p_vm-1.png b/docs/development/devtools/images/frankfurt/apex_s3p_vm-1.png
deleted file mode 100644
index 7ecbbea9..00000000
--- a/docs/development/devtools/images/frankfurt/apex_s3p_vm-1.png
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/frankfurt/apex_s3p_vm-2.png b/docs/development/devtools/images/frankfurt/apex_s3p_vm-2.png
deleted file mode 100644
index 548f2b72..00000000
--- a/docs/development/devtools/images/frankfurt/apex_s3p_vm-2.png
+++ /dev/null
Binary files differ