aboutsummaryrefslogtreecommitdiffstats
path: root/docs
diff options
context:
space:
mode:
authorLiam Fallon <liam.fallon@est.tech>2021-10-13 13:59:47 +0000
committerGerrit Code Review <gerrit@onap.org>2021-10-13 13:59:47 +0000
commitf41b4025e6da865556db59d333669f8b7b35d602 (patch)
tree3781b64e5fe4eaaaf86fd9630a01565dc670ef5e /docs
parent2f20721bc5d4b03fb528ca47944c4a3d38849ec2 (diff)
parent2201d0a0e2fcfe0ac16a90b64667f2511f695ba8 (diff)
Merge "APEX S3P documentation updates for Istanbul"
Diffstat (limited to 'docs')
-rw-r--r--docs/development/devtools/apex-s3p-results/apex_metrics_after_72h.txt173
-rw-r--r--docs/development/devtools/apex-s3p-results/apex_metrics_before_72h.txt173
-rw-r--r--docs/development/devtools/apex-s3p-results/apex_perf_jmeter_results.JPGbin0 -> 139518 bytes
-rw-r--r--docs/development/devtools/apex-s3p-results/apex_s3p_logs.zipbin8364 -> 0 bytes
-rw-r--r--docs/development/devtools/apex-s3p-results/apex_s3p_results.zipbin2245749 -> 0 bytes
-rw-r--r--docs/development/devtools/apex-s3p-results/apex_stability_jmeter_results.JPGbin0 -> 171549 bytes
-rw-r--r--docs/development/devtools/apex-s3p-results/apex_top_after_72h.JPGbin0 -> 51328 bytes
-rw-r--r--docs/development/devtools/apex-s3p-results/apex_top_before_72h.JPGbin0 -> 72787 bytes
-rw-r--r--docs/development/devtools/apex-s3p.rst219
-rw-r--r--docs/development/devtools/images/apex-s3p-vvm-sample.jpgbin147909 -> 0 bytes
-rw-r--r--docs/development/devtools/images/apex_perf_jm_1.PNGbin91204 -> 0 bytes
-rw-r--r--docs/development/devtools/images/apex_perf_jm_2.PNGbin222814 -> 0 bytes
-rw-r--r--docs/development/devtools/images/apex_s3p_jm-1.pngbin39898 -> 0 bytes
-rw-r--r--docs/development/devtools/images/apex_s3p_jm-2.pngbin166975 -> 0 bytes
-rw-r--r--docs/development/devtools/images/frankfurt/apex_s3p_jm-1.pngbin149789 -> 0 bytes
-rw-r--r--docs/development/devtools/images/frankfurt/apex_s3p_jm-2.pngbin311321 -> 0 bytes
-rw-r--r--docs/development/devtools/images/frankfurt/apex_s3p_vm-1.pngbin176069 -> 0 bytes
-rw-r--r--docs/development/devtools/images/frankfurt/apex_s3p_vm-2.pngbin146018 -> 0 bytes
18 files changed, 484 insertions, 81 deletions
diff --git a/docs/development/devtools/apex-s3p-results/apex_metrics_after_72h.txt b/docs/development/devtools/apex-s3p-results/apex_metrics_after_72h.txt
new file mode 100644
index 00000000..38fe5054
--- /dev/null
+++ b/docs/development/devtools/apex-s3p-results/apex_metrics_after_72h.txt
@@ -0,0 +1,173 @@
+# HELP jvm_info VM version info
+# TYPE jvm_info gauge
+jvm_info{runtime="OpenJDK Runtime Environment",vendor="Alpine",version="11.0.9+11-alpine-r1",} 1.0
+# HELP jvm_memory_objects_pending_finalization The number of objects waiting in the finalizer queue.
+# TYPE jvm_memory_objects_pending_finalization gauge
+jvm_memory_objects_pending_finalization 0.0
+# HELP jvm_memory_bytes_used Used bytes of a given JVM memory area.
+# TYPE jvm_memory_bytes_used gauge
+jvm_memory_bytes_used{area="heap",} 1.8570308E8
+jvm_memory_bytes_used{area="nonheap",} 1.1990352E8
+# HELP jvm_memory_bytes_committed Committed (bytes) of a given JVM memory area.
+# TYPE jvm_memory_bytes_committed gauge
+jvm_memory_bytes_committed{area="heap",} 2.55655936E8
+jvm_memory_bytes_committed{area="nonheap",} 1.65216256E8
+# HELP jvm_memory_bytes_max Max (bytes) of a given JVM memory area.
+# TYPE jvm_memory_bytes_max gauge
+jvm_memory_bytes_max{area="heap",} 4.064673792E9
+jvm_memory_bytes_max{area="nonheap",} -1.0
+# HELP jvm_memory_bytes_init Initial bytes of a given JVM memory area.
+# TYPE jvm_memory_bytes_init gauge
+jvm_memory_bytes_init{area="heap",} 2.64241152E8
+jvm_memory_bytes_init{area="nonheap",} 7667712.0
+# HELP jvm_memory_pool_bytes_used Used bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_bytes_used gauge
+jvm_memory_pool_bytes_used{pool="CodeHeap 'non-nmethods'",} 1357312.0
+jvm_memory_pool_bytes_used{pool="Metaspace",} 8.2427648E7
+jvm_memory_pool_bytes_used{pool="Tenured Gen",} 1.5998668E8
+jvm_memory_pool_bytes_used{pool="CodeHeap 'profiled nmethods'",} 4413696.0
+jvm_memory_pool_bytes_used{pool="Eden Space",} 2.1659816E7
+jvm_memory_pool_bytes_used{pool="Survivor Space",} 4056584.0
+jvm_memory_pool_bytes_used{pool="Compressed Class Space",} 8293152.0
+jvm_memory_pool_bytes_used{pool="CodeHeap 'non-profiled nmethods'",} 2.3411712E7
+# HELP jvm_memory_pool_bytes_committed Committed bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_bytes_committed gauge
+jvm_memory_pool_bytes_committed{pool="CodeHeap 'non-nmethods'",} 2555904.0
+jvm_memory_pool_bytes_committed{pool="Metaspace",} 9.4896128E7
+jvm_memory_pool_bytes_committed{pool="Tenured Gen",} 1.76160768E8
+jvm_memory_pool_bytes_committed{pool="CodeHeap 'profiled nmethods'",} 3.2309248E7
+jvm_memory_pool_bytes_committed{pool="Eden Space",} 7.0713344E7
+jvm_memory_pool_bytes_committed{pool="Survivor Space",} 8781824.0
+jvm_memory_pool_bytes_committed{pool="Compressed Class Space",} 1.0223616E7
+jvm_memory_pool_bytes_committed{pool="CodeHeap 'non-profiled nmethods'",} 2.523136E7
+# HELP jvm_memory_pool_bytes_max Max bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_bytes_max gauge
+jvm_memory_pool_bytes_max{pool="CodeHeap 'non-nmethods'",} 5828608.0
+jvm_memory_pool_bytes_max{pool="Metaspace",} -1.0
+jvm_memory_pool_bytes_max{pool="Tenured Gen",} 2.803236864E9
+jvm_memory_pool_bytes_max{pool="CodeHeap 'profiled nmethods'",} 1.22912768E8
+jvm_memory_pool_bytes_max{pool="Eden Space",} 1.12132096E9
+jvm_memory_pool_bytes_max{pool="Survivor Space",} 1.40115968E8
+jvm_memory_pool_bytes_max{pool="Compressed Class Space",} 1.073741824E9
+jvm_memory_pool_bytes_max{pool="CodeHeap 'non-profiled nmethods'",} 1.22916864E8
+# HELP jvm_memory_pool_bytes_init Initial bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_bytes_init gauge
+jvm_memory_pool_bytes_init{pool="CodeHeap 'non-nmethods'",} 2555904.0
+jvm_memory_pool_bytes_init{pool="Metaspace",} 0.0
+jvm_memory_pool_bytes_init{pool="Tenured Gen",} 1.76160768E8
+jvm_memory_pool_bytes_init{pool="CodeHeap 'profiled nmethods'",} 2555904.0
+jvm_memory_pool_bytes_init{pool="Eden Space",} 7.0516736E7
+jvm_memory_pool_bytes_init{pool="Survivor Space",} 8781824.0
+jvm_memory_pool_bytes_init{pool="Compressed Class Space",} 0.0
+jvm_memory_pool_bytes_init{pool="CodeHeap 'non-profiled nmethods'",} 2555904.0
+# HELP jvm_memory_pool_collection_used_bytes Used bytes after last collection of a given JVM memory pool.
+# TYPE jvm_memory_pool_collection_used_bytes gauge
+jvm_memory_pool_collection_used_bytes{pool="Tenured Gen",} 1.00182296E8
+jvm_memory_pool_collection_used_bytes{pool="Eden Space",} 0.0
+jvm_memory_pool_collection_used_bytes{pool="Survivor Space",} 4056584.0
+# HELP jvm_memory_pool_collection_committed_bytes Committed after last collection bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_collection_committed_bytes gauge
+jvm_memory_pool_collection_committed_bytes{pool="Tenured Gen",} 1.76357376E8
+jvm_memory_pool_collection_committed_bytes{pool="Eden Space",} 7.0713344E7
+jvm_memory_pool_collection_committed_bytes{pool="Survivor Space",} 8781824.0
+# HELP jvm_memory_pool_collection_max_bytes Max bytes after last collection of a given JVM memory pool.
+# TYPE jvm_memory_pool_collection_max_bytes gauge
+jvm_memory_pool_collection_max_bytes{pool="Tenured Gen",} 2.803236864E9
+jvm_memory_pool_collection_max_bytes{pool="Eden Space",} 1.12132096E9
+jvm_memory_pool_collection_max_bytes{pool="Survivor Space",} 1.40115968E8
+# HELP jvm_memory_pool_collection_init_bytes Initial after last collection bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_collection_init_bytes gauge
+jvm_memory_pool_collection_init_bytes{pool="Tenured Gen",} 1.76160768E8
+jvm_memory_pool_collection_init_bytes{pool="Eden Space",} 7.0516736E7
+jvm_memory_pool_collection_init_bytes{pool="Survivor Space",} 8781824.0
+# HELP jvm_memory_pool_allocated_bytes_total Total bytes allocated in a given JVM memory pool. Only updated after GC, not continuously.
+# TYPE jvm_memory_pool_allocated_bytes_total counter
+jvm_memory_pool_allocated_bytes_total{pool="Eden Space",} 7.81369756592E11
+jvm_memory_pool_allocated_bytes_total{pool="CodeHeap 'profiled nmethods'",} 5.1833472E7
+jvm_memory_pool_allocated_bytes_total{pool="CodeHeap 'non-profiled nmethods'",} 3.6370048E7
+jvm_memory_pool_allocated_bytes_total{pool="Compressed Class Space",} 8986056.0
+jvm_memory_pool_allocated_bytes_total{pool="Metaspace",} 8.6383264E7
+jvm_memory_pool_allocated_bytes_total{pool="Tenured Gen",} 3.84983864E8
+jvm_memory_pool_allocated_bytes_total{pool="Survivor Space",} 1.506631592E9
+jvm_memory_pool_allocated_bytes_total{pool="CodeHeap 'non-nmethods'",} 1439232.0
+# HELP jvm_gc_collection_seconds Time spent in a given JVM garbage collector in seconds.
+# TYPE jvm_gc_collection_seconds summary
+jvm_gc_collection_seconds_count{gc="Copy",} 11053.0
+jvm_gc_collection_seconds_sum{gc="Copy",} 90.383
+jvm_gc_collection_seconds_count{gc="MarkSweepCompact",} 6.0
+jvm_gc_collection_seconds_sum{gc="MarkSweepCompact",} 1.44
+# HELP jvm_threads_current Current thread count of a JVM
+# TYPE jvm_threads_current gauge
+jvm_threads_current 31.0
+# HELP jvm_threads_daemon Daemon thread count of a JVM
+# TYPE jvm_threads_daemon gauge
+jvm_threads_daemon 16.0
+# HELP jvm_threads_peak Peak thread count of a JVM
+# TYPE jvm_threads_peak gauge
+jvm_threads_peak 80.0
+# HELP jvm_threads_started_total Started thread count of a JVM
+# TYPE jvm_threads_started_total counter
+jvm_threads_started_total 320599.0
+# HELP jvm_threads_deadlocked Cycles of JVM-threads that are in deadlock waiting to acquire object monitors or ownable synchronizers
+# TYPE jvm_threads_deadlocked gauge
+jvm_threads_deadlocked 0.0
+# HELP jvm_threads_deadlocked_monitor Cycles of JVM-threads that are in deadlock waiting to acquire object monitors
+# TYPE jvm_threads_deadlocked_monitor gauge
+jvm_threads_deadlocked_monitor 0.0
+# HELP jvm_threads_state Current count of threads by state
+# TYPE jvm_threads_state gauge
+jvm_threads_state{state="BLOCKED",} 0.0
+jvm_threads_state{state="WAITING",} 13.0
+jvm_threads_state{state="RUNNABLE",} 7.0
+jvm_threads_state{state="TIMED_WAITING",} 11.0
+jvm_threads_state{state="TERMINATED",} 0.0
+jvm_threads_state{state="NEW",} 0.0
+# HELP jvm_buffer_pool_used_bytes Used bytes of a given JVM buffer pool.
+# TYPE jvm_buffer_pool_used_bytes gauge
+jvm_buffer_pool_used_bytes{pool="mapped",} 0.0
+jvm_buffer_pool_used_bytes{pool="direct",} 3.385029E7
+# HELP jvm_buffer_pool_capacity_bytes Bytes capacity of a given JVM buffer pool.
+# TYPE jvm_buffer_pool_capacity_bytes gauge
+jvm_buffer_pool_capacity_bytes{pool="mapped",} 0.0
+jvm_buffer_pool_capacity_bytes{pool="direct",} 3.3850289E7
+# HELP jvm_buffer_pool_used_buffers Used buffers of a given JVM buffer pool.
+# TYPE jvm_buffer_pool_used_buffers gauge
+jvm_buffer_pool_used_buffers{pool="mapped",} 0.0
+jvm_buffer_pool_used_buffers{pool="direct",} 16.0
+# HELP jvm_classes_loaded The number of classes that are currently loaded in the JVM
+# TYPE jvm_classes_loaded gauge
+jvm_classes_loaded 11238.0
+# HELP jvm_classes_loaded_total The total number of classes that have been loaded since the JVM has started execution
+# TYPE jvm_classes_loaded_total counter
+jvm_classes_loaded_total 12239.0
+# HELP jvm_classes_unloaded_total The total number of classes that have been unloaded since the JVM has started execution
+# TYPE jvm_classes_unloaded_total counter
+jvm_classes_unloaded_total 1001.0
+# HELP process_cpu_seconds_total Total user and system CPU time spent in seconds.
+# TYPE process_cpu_seconds_total counter
+process_cpu_seconds_total 9308.0
+# HELP process_start_time_seconds Start time of the process since unix epoch in seconds.
+# TYPE process_start_time_seconds gauge
+process_start_time_seconds 1.633111663176E9
+# HELP process_open_fds Number of open file descriptors.
+# TYPE process_open_fds gauge
+process_open_fds 354.0
+# HELP process_max_fds Maximum number of open file descriptors.
+# TYPE process_max_fds gauge
+process_max_fds 1048576.0
+# HELP process_virtual_memory_bytes Virtual memory size in bytes.
+# TYPE process_virtual_memory_bytes gauge
+process_virtual_memory_bytes 5.933633536E9
+# HELP process_resident_memory_bytes Resident memory size in bytes.
+# TYPE process_resident_memory_bytes gauge
+process_resident_memory_bytes 5.2523008E8
+# HELP jvm_memory_pool_allocated_bytes_created Total bytes allocated in a given JVM memory pool. Only updated after GC, not continuously.
+# TYPE jvm_memory_pool_allocated_bytes_created gauge
+jvm_memory_pool_allocated_bytes_created{pool="Eden Space",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="CodeHeap 'profiled nmethods'",} 1.633111669942E9
+jvm_memory_pool_allocated_bytes_created{pool="CodeHeap 'non-profiled nmethods'",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="Compressed Class Space",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="Metaspace",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="Tenured Gen",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="Survivor Space",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="CodeHeap 'non-nmethods'",} 1.633111669968E9
diff --git a/docs/development/devtools/apex-s3p-results/apex_metrics_before_72h.txt b/docs/development/devtools/apex-s3p-results/apex_metrics_before_72h.txt
new file mode 100644
index 00000000..551bd824
--- /dev/null
+++ b/docs/development/devtools/apex-s3p-results/apex_metrics_before_72h.txt
@@ -0,0 +1,173 @@
+# HELP jvm_info VM version info
+# TYPE jvm_info gauge
+jvm_info{runtime="OpenJDK Runtime Environment",vendor="Alpine",version="11.0.9+11-alpine-r1",} 1.0
+# HELP jvm_memory_objects_pending_finalization The number of objects waiting in the finalizer queue.
+# TYPE jvm_memory_objects_pending_finalization gauge
+jvm_memory_objects_pending_finalization 0.0
+# HELP jvm_memory_bytes_used Used bytes of a given JVM memory area.
+# TYPE jvm_memory_bytes_used gauge
+jvm_memory_bytes_used{area="heap",} 4.7930528E7
+jvm_memory_bytes_used{area="nonheap",} 5.3899512E7
+# HELP jvm_memory_bytes_committed Committed (bytes) of a given JVM memory area.
+# TYPE jvm_memory_bytes_committed gauge
+jvm_memory_bytes_committed{area="heap",} 2.555904E8
+jvm_memory_bytes_committed{area="nonheap",} 5.8589184E7
+# HELP jvm_memory_bytes_max Max (bytes) of a given JVM memory area.
+# TYPE jvm_memory_bytes_max gauge
+jvm_memory_bytes_max{area="heap",} 4.064673792E9
+jvm_memory_bytes_max{area="nonheap",} -1.0
+# HELP jvm_memory_bytes_init Initial bytes of a given JVM memory area.
+# TYPE jvm_memory_bytes_init gauge
+jvm_memory_bytes_init{area="heap",} 2.64241152E8
+jvm_memory_bytes_init{area="nonheap",} 7667712.0
+# HELP jvm_memory_pool_bytes_used Used bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_bytes_used gauge
+jvm_memory_pool_bytes_used{pool="CodeHeap 'non-nmethods'",} 1271680.0
+jvm_memory_pool_bytes_used{pool="Metaspace",} 4.0208712E7
+jvm_memory_pool_bytes_used{pool="Tenured Gen",} 2.8176296E7
+jvm_memory_pool_bytes_used{pool="CodeHeap 'profiled nmethods'",} 5948416.0
+jvm_memory_pool_bytes_used{pool="Eden Space",} 1.780424E7
+jvm_memory_pool_bytes_used{pool="Survivor Space",} 1949992.0
+jvm_memory_pool_bytes_used{pool="Compressed Class Space",} 4855216.0
+jvm_memory_pool_bytes_used{pool="CodeHeap 'non-profiled nmethods'",} 1615488.0
+# HELP jvm_memory_pool_bytes_committed Committed bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_bytes_committed gauge
+jvm_memory_pool_bytes_committed{pool="CodeHeap 'non-nmethods'",} 2555904.0
+jvm_memory_pool_bytes_committed{pool="Metaspace",} 4.194304E7
+jvm_memory_pool_bytes_committed{pool="Tenured Gen",} 1.76160768E8
+jvm_memory_pool_bytes_committed{pool="CodeHeap 'profiled nmethods'",} 6029312.0
+jvm_memory_pool_bytes_committed{pool="Eden Space",} 7.0647808E7
+jvm_memory_pool_bytes_committed{pool="Survivor Space",} 8781824.0
+jvm_memory_pool_bytes_committed{pool="Compressed Class Space",} 5505024.0
+jvm_memory_pool_bytes_committed{pool="CodeHeap 'non-profiled nmethods'",} 2555904.0
+# HELP jvm_memory_pool_bytes_max Max bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_bytes_max gauge
+jvm_memory_pool_bytes_max{pool="CodeHeap 'non-nmethods'",} 5828608.0
+jvm_memory_pool_bytes_max{pool="Metaspace",} -1.0
+jvm_memory_pool_bytes_max{pool="Tenured Gen",} 2.803236864E9
+jvm_memory_pool_bytes_max{pool="CodeHeap 'profiled nmethods'",} 1.22912768E8
+jvm_memory_pool_bytes_max{pool="Eden Space",} 1.12132096E9
+jvm_memory_pool_bytes_max{pool="Survivor Space",} 1.40115968E8
+jvm_memory_pool_bytes_max{pool="Compressed Class Space",} 1.073741824E9
+jvm_memory_pool_bytes_max{pool="CodeHeap 'non-profiled nmethods'",} 1.22916864E8
+# HELP jvm_memory_pool_bytes_init Initial bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_bytes_init gauge
+jvm_memory_pool_bytes_init{pool="CodeHeap 'non-nmethods'",} 2555904.0
+jvm_memory_pool_bytes_init{pool="Metaspace",} 0.0
+jvm_memory_pool_bytes_init{pool="Tenured Gen",} 1.76160768E8
+jvm_memory_pool_bytes_init{pool="CodeHeap 'profiled nmethods'",} 2555904.0
+jvm_memory_pool_bytes_init{pool="Eden Space",} 7.0516736E7
+jvm_memory_pool_bytes_init{pool="Survivor Space",} 8781824.0
+jvm_memory_pool_bytes_init{pool="Compressed Class Space",} 0.0
+jvm_memory_pool_bytes_init{pool="CodeHeap 'non-profiled nmethods'",} 2555904.0
+# HELP jvm_memory_pool_collection_used_bytes Used bytes after last collection of a given JVM memory pool.
+# TYPE jvm_memory_pool_collection_used_bytes gauge
+jvm_memory_pool_collection_used_bytes{pool="Tenured Gen",} 2.8176296E7
+jvm_memory_pool_collection_used_bytes{pool="Eden Space",} 0.0
+jvm_memory_pool_collection_used_bytes{pool="Survivor Space",} 1949992.0
+# HELP jvm_memory_pool_collection_committed_bytes Committed after last collection bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_collection_committed_bytes gauge
+jvm_memory_pool_collection_committed_bytes{pool="Tenured Gen",} 1.76160768E8
+jvm_memory_pool_collection_committed_bytes{pool="Eden Space",} 7.0647808E7
+jvm_memory_pool_collection_committed_bytes{pool="Survivor Space",} 8781824.0
+# HELP jvm_memory_pool_collection_max_bytes Max bytes after last collection of a given JVM memory pool.
+# TYPE jvm_memory_pool_collection_max_bytes gauge
+jvm_memory_pool_collection_max_bytes{pool="Tenured Gen",} 2.803236864E9
+jvm_memory_pool_collection_max_bytes{pool="Eden Space",} 1.12132096E9
+jvm_memory_pool_collection_max_bytes{pool="Survivor Space",} 1.40115968E8
+# HELP jvm_memory_pool_collection_init_bytes Initial after last collection bytes of a given JVM memory pool.
+# TYPE jvm_memory_pool_collection_init_bytes gauge
+jvm_memory_pool_collection_init_bytes{pool="Tenured Gen",} 1.76160768E8
+jvm_memory_pool_collection_init_bytes{pool="Eden Space",} 7.0516736E7
+jvm_memory_pool_collection_init_bytes{pool="Survivor Space",} 8781824.0
+# HELP jvm_memory_pool_allocated_bytes_total Total bytes allocated in a given JVM memory pool. Only updated after GC, not continuously.
+# TYPE jvm_memory_pool_allocated_bytes_total counter
+jvm_memory_pool_allocated_bytes_total{pool="Eden Space",} 1.62790696E8
+jvm_memory_pool_allocated_bytes_total{pool="CodeHeap 'profiled nmethods'",} 5150080.0
+jvm_memory_pool_allocated_bytes_total{pool="CodeHeap 'non-profiled nmethods'",} 1428864.0
+jvm_memory_pool_allocated_bytes_total{pool="Compressed Class Space",} 4785752.0
+jvm_memory_pool_allocated_bytes_total{pool="Metaspace",} 3.9463568E7
+jvm_memory_pool_allocated_bytes_total{pool="Tenured Gen",} 2.8176296E7
+jvm_memory_pool_allocated_bytes_total{pool="Survivor Space",} 5422680.0
+jvm_memory_pool_allocated_bytes_total{pool="CodeHeap 'non-nmethods'",} 1271680.0
+# HELP jvm_gc_collection_seconds Time spent in a given JVM garbage collector in seconds.
+# TYPE jvm_gc_collection_seconds summary
+jvm_gc_collection_seconds_count{gc="Copy",} 3.0
+jvm_gc_collection_seconds_sum{gc="Copy",} 0.076
+jvm_gc_collection_seconds_count{gc="MarkSweepCompact",} 2.0
+jvm_gc_collection_seconds_sum{gc="MarkSweepCompact",} 0.126
+# HELP jvm_threads_current Current thread count of a JVM
+# TYPE jvm_threads_current gauge
+jvm_threads_current 31.0
+# HELP jvm_threads_daemon Daemon thread count of a JVM
+# TYPE jvm_threads_daemon gauge
+jvm_threads_daemon 16.0
+# HELP jvm_threads_peak Peak thread count of a JVM
+# TYPE jvm_threads_peak gauge
+jvm_threads_peak 31.0
+# HELP jvm_threads_started_total Started thread count of a JVM
+# TYPE jvm_threads_started_total counter
+jvm_threads_started_total 32.0
+# HELP jvm_threads_deadlocked Cycles of JVM-threads that are in deadlock waiting to acquire object monitors or ownable synchronizers
+# TYPE jvm_threads_deadlocked gauge
+jvm_threads_deadlocked 0.0
+# HELP jvm_threads_deadlocked_monitor Cycles of JVM-threads that are in deadlock waiting to acquire object monitors
+# TYPE jvm_threads_deadlocked_monitor gauge
+jvm_threads_deadlocked_monitor 0.0
+# HELP jvm_threads_state Current count of threads by state
+# TYPE jvm_threads_state gauge
+jvm_threads_state{state="BLOCKED",} 0.0
+jvm_threads_state{state="WAITING",} 13.0
+jvm_threads_state{state="RUNNABLE",} 7.0
+jvm_threads_state{state="TIMED_WAITING",} 11.0
+jvm_threads_state{state="TERMINATED",} 0.0
+jvm_threads_state{state="NEW",} 0.0
+# HELP jvm_buffer_pool_used_bytes Used bytes of a given JVM buffer pool.
+# TYPE jvm_buffer_pool_used_bytes gauge
+jvm_buffer_pool_used_bytes{pool="mapped",} 0.0
+jvm_buffer_pool_used_bytes{pool="direct",} 187392.0
+# HELP jvm_buffer_pool_capacity_bytes Bytes capacity of a given JVM buffer pool.
+# TYPE jvm_buffer_pool_capacity_bytes gauge
+jvm_buffer_pool_capacity_bytes{pool="mapped",} 0.0
+jvm_buffer_pool_capacity_bytes{pool="direct",} 187392.0
+# HELP jvm_buffer_pool_used_buffers Used buffers of a given JVM buffer pool.
+# TYPE jvm_buffer_pool_used_buffers gauge
+jvm_buffer_pool_used_buffers{pool="mapped",} 0.0
+jvm_buffer_pool_used_buffers{pool="direct",} 8.0
+# HELP jvm_classes_loaded The number of classes that are currently loaded in the JVM
+# TYPE jvm_classes_loaded gauge
+jvm_classes_loaded 7287.0
+# HELP jvm_classes_loaded_total The total number of classes that have been loaded since the JVM has started execution
+# TYPE jvm_classes_loaded_total counter
+jvm_classes_loaded_total 7287.0
+# HELP jvm_classes_unloaded_total The total number of classes that have been unloaded since the JVM has started execution
+# TYPE jvm_classes_unloaded_total counter
+jvm_classes_unloaded_total 0.0
+# HELP process_cpu_seconds_total Total user and system CPU time spent in seconds.
+# TYPE process_cpu_seconds_total counter
+process_cpu_seconds_total 19.23
+# HELP process_start_time_seconds Start time of the process since unix epoch in seconds.
+# TYPE process_start_time_seconds gauge
+process_start_time_seconds 1.633111663176E9
+# HELP process_open_fds Number of open file descriptors.
+# TYPE process_open_fds gauge
+process_open_fds 350.0
+# HELP process_max_fds Maximum number of open file descriptors.
+# TYPE process_max_fds gauge
+process_max_fds 1048576.0
+# HELP process_virtual_memory_bytes Virtual memory size in bytes.
+# TYPE process_virtual_memory_bytes gauge
+process_virtual_memory_bytes 5.825941504E9
+# HELP process_resident_memory_bytes Resident memory size in bytes.
+# TYPE process_resident_memory_bytes gauge
+process_resident_memory_bytes 2.13909504E8
+# HELP jvm_memory_pool_allocated_bytes_created Total bytes allocated in a given JVM memory pool. Only updated after GC, not continuously.
+# TYPE jvm_memory_pool_allocated_bytes_created gauge
+jvm_memory_pool_allocated_bytes_created{pool="Eden Space",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="CodeHeap 'profiled nmethods'",} 1.633111669942E9
+jvm_memory_pool_allocated_bytes_created{pool="CodeHeap 'non-profiled nmethods'",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="Compressed Class Space",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="Metaspace",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="Tenured Gen",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="Survivor Space",} 1.633111669968E9
+jvm_memory_pool_allocated_bytes_created{pool="CodeHeap 'non-nmethods'",} 1.633111669968E9
diff --git a/docs/development/devtools/apex-s3p-results/apex_perf_jmeter_results.JPG b/docs/development/devtools/apex-s3p-results/apex_perf_jmeter_results.JPG
new file mode 100644
index 00000000..89447874
--- /dev/null
+++ b/docs/development/devtools/apex-s3p-results/apex_perf_jmeter_results.JPG
Binary files differ
diff --git a/docs/development/devtools/apex-s3p-results/apex_s3p_logs.zip b/docs/development/devtools/apex-s3p-results/apex_s3p_logs.zip
deleted file mode 100644
index cdeca387..00000000
--- a/docs/development/devtools/apex-s3p-results/apex_s3p_logs.zip
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/apex-s3p-results/apex_s3p_results.zip b/docs/development/devtools/apex-s3p-results/apex_s3p_results.zip
deleted file mode 100644
index a4f268e5..00000000
--- a/docs/development/devtools/apex-s3p-results/apex_s3p_results.zip
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/apex-s3p-results/apex_stability_jmeter_results.JPG b/docs/development/devtools/apex-s3p-results/apex_stability_jmeter_results.JPG
new file mode 100644
index 00000000..1cdabb7e
--- /dev/null
+++ b/docs/development/devtools/apex-s3p-results/apex_stability_jmeter_results.JPG
Binary files differ
diff --git a/docs/development/devtools/apex-s3p-results/apex_top_after_72h.JPG b/docs/development/devtools/apex-s3p-results/apex_top_after_72h.JPG
new file mode 100644
index 00000000..e51b942a
--- /dev/null
+++ b/docs/development/devtools/apex-s3p-results/apex_top_after_72h.JPG
Binary files differ
diff --git a/docs/development/devtools/apex-s3p-results/apex_top_before_72h.JPG b/docs/development/devtools/apex-s3p-results/apex_top_before_72h.JPG
new file mode 100644
index 00000000..cc2da54f
--- /dev/null
+++ b/docs/development/devtools/apex-s3p-results/apex_top_before_72h.JPG
Binary files differ
diff --git a/docs/development/devtools/apex-s3p.rst b/docs/development/devtools/apex-s3p.rst
index f5c0e7cf..bfed24e0 100644
--- a/docs/development/devtools/apex-s3p.rst
+++ b/docs/development/devtools/apex-s3p.rst
@@ -10,143 +10,200 @@
Policy APEX PDP component
~~~~~~~~~~~~~~~~~~~~~~~~~
-Setting up Stability Tests in APEX
-++++++++++++++++++++++++++++++++++
+Both the Stability and the Performance tests were executed in a full ONAP OOM deployment in Nordix lab.
-Introduction
-------------
-The 72 hour Stability Test for apex-pdp has the goal of introducing a steady flow of transactions initiated from a test client server running JMeter. The pdp is configured to start a rest server inside it and take input from rest clients (JMeter) and send back output to the rest clients (JMeter).
+Setup Details
++++++++++++++
-The input events will be submitted through rest interface of apex-pdp and the results are verified using the rest responses coming out from apex-pdp.
+- APEX-PDP along with all policy components deployed as part of a full ONAP OOM deployment.
+- Policy-models-simulator is deployed to use CDS and DMaaP simulators during policy execution.
+ Simulator configurations used are available in apex-pdp repository:
+ testsuites/apex-pdp-stability/src/main/resources/simulatorConfig/
+- Two APEX policies are executed in the APEX-PDP engine, and are triggered by multiple threads during the tests.
+- Both tests were run via jMeter.
-The test will be performed in a multi-threaded environment where 5 threads running in JMeter will keep sending events to apex-pdp for the duration of 72 hours.
+ Stability test script is available in apex-pdp repository:
+ testsuites/apex-pdp-stability/src/main/resources/apexPdpStabilityTestPlan.jmx
-Setup details
--------------
+ Performance test script is available in apex-pdp repository:
+ testsuites/performance/performance-benchmark-test/src/main/resources/apexPdpPerformanceTestPlan.jmx
-Stability test is performed on VM's running in OpenStack cloud environment. APEX-PDP along with other components in Policy Framework is deployed in an OOM ONAP installation. JMeter runs on a separate VM to simulate a steady flow of transactions.
+.. Note::
+ Policy executions are validated in a more strict fashion during the tests.
+   There are test cases where up to 80 events are expected on the DMaaP topic.
+ DMaaP simulator is used to keep it simple and avoid any message pickup timing related issues.
+Stability Test of APEX-PDP
+++++++++++++++++++++++++++
Test Plan
---------
-The 72 hours stability test will run the following steps in a 5 threaded loop.
+The 72 hours stability test ran the following steps.
-Setup stage (these calls run only once - at the beginning)
-- **Create Policy** - creates a policy using the policy/api component
-- **Deploy Policy** - deploys the policy in the existing PdpGroup
-Test stage (these calls run over and over again)
-- **Check Health** - checks the health status of apex
-- **Send Input Event** - triggers 'unauthenticated.DCAE_CL_OUTPUT' event of DMaaP.
-- **Get Output Event Response** - checks for the triggered output event.
-Teardown stage (this calls run only once - at the end)
-- **Undeploy Policy** - undeploys the policy from PdpGroup
-- **Delete Policy** - deletes the policy using the policy/api component
+Setup Phase
+"""""""""""
-The following steps can be used to configure the parameters of the test plan.
+Policies are created and deployed to APEX-PDP during this phase. Only one thread is in action and this step is done only once.
-- **HTTP Header Manager** - used to store headers which will be used for making HTTP requests.
-- **HTTP Request Defaults** - used to store HTTP request details like Server Name or IP, Port, Protocol etc.
-- **User Defined Variables** - used to store the following user defined parameters:
+- **Create Policy onap.policies.apex.Simplecontrolloop** - creates the first APEX policy using policy/api component.
+ This is a sample policy used for PNF testing.
+- **Create Policy onap.policies.apex.Example** - creates the second APEX policy using policy/api component.
+ This is a sample policy used for VNF testing.
+- **Deploy Policies** - Deploy both the policies created to APEX-PDP using policy/pap component
-================== ============================================================================ ============================
-**Name** **Description** **Default Value**
-================== ============================================================================ ============================
-wait Wait time after each request (in milliseconds) 10000
-threads Number of threads to run test cases in parallel. 5
-threadsTimeOutInMs Synchronization timer for threads running in parallel (in milliseconds). 5000
-PAP_PORT Port number of PAP for making REST API calls
-API_PORT Port number of API for making REST API calls
-APEX_PORT Port number of APEX for making REST API calls
-DMAAP_PORT Port number of DMAAP for making REST API calls
-HOSTNAME Server IP address for making REST API calls
-================== ============================================================================ ============================
+Main Phase
+""""""""""
+Once the policies are created and deployed to APEX-PDP by the setup thread, five threads execute the below tests for 72 hours.
-Download and update the jmx file presented in the apex-pdp git repository - `jmx file path <https://gerrit.onap.org/r/gitweb?p=policy/apex-pdp.git;a=tree;f=testsuites/apex-pdp-stability/src/main/resources;h=99d373033a190a690d4e05012bc3a656cae7bc3f;hb=refs/heads/master>`_.
+- **Healthcheck** - checks the health status of APEX-PDP
+- **Prometheus Metrics** - checks that APEX-PDP is exposing prometheus metrics
+- **Test Simplecontrolloop policy success case** - Send a trigger event to *unauthenticated.DCAE_CL_OUTPUT* DMaaP topic.
+ If the policy execution is successful, 3 different notification events are sent to *APEX-CL-MGT* topic by each one of the 5 threads.
+ So, it is checked if 15 notification messages are received in total on *APEX-CL-MGT* topic with the relevant messages.
+- **Test Simplecontrolloop policy failure case** - Send a trigger event with invalid pnfName to *unauthenticated.DCAE_CL_OUTPUT* DMaaP topic.
+ The policy execution is expected to fail due to AAI failure response. 2 notification events are expected on *APEX-CL-MGT* topic by a thread in this case.
+ It is checked if 10 notification messages are received in total on *APEX-CL-MGT* topic with the relevant messages.
+- **Test Example policy success case** - Send a trigger event to *unauthenticated.DCAE_POLICY_EXAMPLE_OUTPUT* DMaaP topic.
+ If the policy execution is successful, 4 different notification events are sent to *APEX-CL-MGT* topic by each one of the 5 threads.
+ So, it is checked if 20 notification messages are received in total on *APEX-CL-MGT* topic with the relevant messages.
+- **Test Example policy failure case** - Send a trigger event with invalid vnfName to *unauthenticated.DCAE_POLICY_EXAMPLE_OUTPUT* DMaaP topic.
+ The policy execution is expected to fail due to AAI failure response. 2 notification events are expected on *APEX-CL-MGT* topic by a thread in this case.
+ So, it is checked if 10 notification messages are received in total on *APEX-CL-MGT* topic with the relevant messages.
+- **Clean up DMaaP notification topic** - DMaaP notification topic which is *APEX-CL-MGT* is cleaned up after each test to make sure that one failure doesn't lead to cascading errors.
-- ThreadGroup.duration - Set the duration to 259200 seconds (72 hours)
-Use the CLI mode to start the test
+Teardown Phase
+""""""""""""""
-.. code-block:: bash
+Policies are undeployed from APEX-PDP and deleted during this phase.
+Only one thread is in action and this step is done only once after the Main phase is complete.
- nohup ./jmeter.sh -n -t ~/apexPdpStabilityTestPlan.jmx -Jusers=1 -l ~/stability.log
+- **Undeploy Policies** - Undeploy both the policies from APEX-PDP using policy/pap component
+- **Delete Policy onap.policies.apex.Simplecontrolloop** - delete the first APEX policy using policy/api component.
+- **Delete Policy onap.policies.apex.Example** - delete the second APEX policy also using policy/api component.
-Stability Test Results
-----------------------
+The following steps can be used to configure the parameters of the test plan.
-The stability test plan was triggered for 72 hours, injecting input events to apex-pdp pod from 5 client threads running in JMeter.
+- **HTTP Authorization Manager** - used to store user/password authentication details.
+- **HTTP Header Manager** - used to store headers which will be used for making HTTP requests.
+- **User Defined Variables** - used to store following user defined parameters.
+
+=================== ===============================================================================
+ **Name** **Description**
+=================== ===============================================================================
+ HOSTNAME IP Address or host name to access the components
+ PAP_PORT Port number of PAP for making REST API calls such as deploy/undeploy of policy
+ API_PORT Port number of API for making REST API calls such as create/ delete of policy
+ APEX_PORT Port number of APEX for making REST API calls such as healthcheck/metrics
+ wait Wait time if required after a request (in milliseconds)
+ threads Number of threads to run test cases in parallel
+ threadsTimeOutInMs Synchronization timer for threads running in parallel (in milliseconds)
+=================== ===============================================================================
-The stability tests were executed as part of a full ONAP OOM deployment in Nordix lab.
+Run Test
+--------
-Once the tests complete, we can generate an HTML test report via the command:
+The test was run in the background via "nohup", to prevent it from being interrupted:
.. code-block:: bash
- ~/jMeter/apache-jmeter-5.2.1/bin/jmeter -g stability.log -o ./result/
+ nohup ./apache-jmeter-5.4.1/bin/jmeter.sh -n -t apexPdpStabilityTestPlan.jmx -l stabilityTestResults.jtl
+
+Test Results
+------------
+
+**Summary**
-============================================== ================================ ============= ============ ============================
-**Number of Client Threads running in JMeter** **Total number of input events** **Success %** **Error %** **Average Time per Request**
-============================================== ================================ ============= ============ ============================
-5 129326 100% 0% 6716.12
-============================================== ================================ ============= ============ ============================
+The stability test plan was triggered for 72 hours. There were no failures during the 72-hour test.
+**Test Statistics**
+
+======================= ================= ================== ==================================
+**Total # of requests** **Success %** **Error %** **Average time taken per request**
+======================= ================= ================== ==================================
+428661 100 % 0.00 % 162 ms
+======================= ================= ================== ==================================
+
+.. Note::
+
+ There were no failures during the 72-hour test.
+
**JMeter Screenshot**
-.. image:: images/apex_s3p_jm-1.png
-.. image:: images/apex_s3p_jm-2.png
+.. image:: apex-s3p-results/apex_stability_jmeter_results.JPG
-:download:`result.zip <apex-s3p-results/apex_s3p_results.zip>`
+**Memory and CPU usage**
-Setting up Performance Tests in APEX
-++++++++++++++++++++++++++++++++++++
+The memory and CPU usage can be monitored by running "top" command in the APEX-PDP pod.
+A snapshot is taken before and after test execution to monitor the changes in resource utilization.
+Prometheus metrics is also collected before and after the test execution.
-The Performance test is performed on a similar setup to the Stability test. JMeter will send a large number of REST requests and will then retrieve those requests.
+Memory and CPU usage before test execution:
-Performance test plan will be the same as the stability test plan except for some differences listed below:
+.. image:: apex-s3p-results/apex_top_before_72h.JPG
-- Increase the number of threads from 5 to 20.
-- Reduce test time to ninety minutes. (ThreadGroup.duration - Set the duration to 5400 seconds)
+:download:`Prometheus metrics before 72h test <apex-s3p-results/apex_metrics_before_72h.txt>`
-Download and update the jmx file presented in the apex-pdp git repository - `jmx file path <https://gerrit.onap.org/r/gitweb?p=policy/apex-pdp.git;a=tree;f=testsuites/performance/performance-benchmark-test/src/main/resources;h=b0ed1058b11f82b42fb5be1a07009114e1e8b593;hb=refs/heads/master>`_.
+Memory and CPU usage after test execution:
+.. image:: apex-s3p-results/apex_top_after_72h.JPG
-Run Test
---------
+:download:`Prometheus metrics after 72h test <apex-s3p-results/apex_metrics_after_72h.txt>`
-Running the performance test will be the same as the stability test. That is, launch JMeter pointing to corresponding *.jmx* test plan. The *API_HOST* , *API_PORT* , *PAP_HOST* , *PAP_PORT* are already set up in *.jmx*.
+Performance Test of APEX-PDP
+++++++++++++++++++++++++++++
+
+Introduction
+------------
+
+Performance test of APEX-PDP is done similarly to the stability test, but in a more extreme manner using a higher thread count.
+
+Setup Details
+-------------
+
+The performance test is performed on a similar setup to the stability test.
-.. code-block:: bash
- nohup ./jmeter.sh -n -t ~/performance.jmx -Jusers=1 -l ~/perf.log
+Test Plan
+---------
+
+Performance test plan is the same as the stability test plan above except for the few differences listed below.
-Once the tests have completed, run the following the gather results.
+- Increase the number of threads used in the Main Phase from 5 to 20.
+- Reduce the test time to 2 hours.
+
+Run Test
+--------
.. code-block:: bash
- ~/jMeter/apache-jmeter-5.2.1/bin/jmeter -g perf.log -o ./performance_result/
+ nohup ./apache-jmeter-5.4.1/bin/jmeter.sh -n -t apexPdpPerformanceTestPlan.jmx -l perftestresults.jtl
-Performance Test Result
------------------------
-**Summary**
+Test Results
+------------
-Performance test was triggered for 90 minutes. The results are shown below.
+Test results are shown below.
**Test Statistics**
-============================ =========== ========= ==================================
-**Total Number of Requests** **Success** **Error** **Average Time Taken per Request**
-============================ =========== ========= ==================================
-32304 99,99 % 0.01 % 8746.50 ms
-============================ =========== ========= ==================================
+======================= ================= ================== ==================================
+**Total # of requests** **Success %** **Error %** **Average time taken per request**
+======================= ================= ================== ==================================
+46946 100 % 0.00 % 198 ms
+======================= ================= ================== ==================================
**JMeter Screenshot**
-.. image:: images/apex_perf_jm_1.PNG
+.. image:: apex-s3p-results/apex_perf_jmeter_results.JPG
+
+Summary
++++++++
-.. image:: images/apex_perf_jm_2.PNG
+Multiple policies were executed in a multi threaded fashion for both stability and performance tests.
+Both tests ran smoothly without any issues.
diff --git a/docs/development/devtools/images/apex-s3p-vvm-sample.jpg b/docs/development/devtools/images/apex-s3p-vvm-sample.jpg
deleted file mode 100644
index 20fac3cc..00000000
--- a/docs/development/devtools/images/apex-s3p-vvm-sample.jpg
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/apex_perf_jm_1.PNG b/docs/development/devtools/images/apex_perf_jm_1.PNG
deleted file mode 100644
index a1852be6..00000000
--- a/docs/development/devtools/images/apex_perf_jm_1.PNG
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/apex_perf_jm_2.PNG b/docs/development/devtools/images/apex_perf_jm_2.PNG
deleted file mode 100644
index d91ec4a3..00000000
--- a/docs/development/devtools/images/apex_perf_jm_2.PNG
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/apex_s3p_jm-1.png b/docs/development/devtools/images/apex_s3p_jm-1.png
deleted file mode 100644
index 92ca5765..00000000
--- a/docs/development/devtools/images/apex_s3p_jm-1.png
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/apex_s3p_jm-2.png b/docs/development/devtools/images/apex_s3p_jm-2.png
deleted file mode 100644
index 8cd24c89..00000000
--- a/docs/development/devtools/images/apex_s3p_jm-2.png
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/frankfurt/apex_s3p_jm-1.png b/docs/development/devtools/images/frankfurt/apex_s3p_jm-1.png
deleted file mode 100644
index 07b28590..00000000
--- a/docs/development/devtools/images/frankfurt/apex_s3p_jm-1.png
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/frankfurt/apex_s3p_jm-2.png b/docs/development/devtools/images/frankfurt/apex_s3p_jm-2.png
deleted file mode 100644
index cb68c897..00000000
--- a/docs/development/devtools/images/frankfurt/apex_s3p_jm-2.png
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/frankfurt/apex_s3p_vm-1.png b/docs/development/devtools/images/frankfurt/apex_s3p_vm-1.png
deleted file mode 100644
index 7ecbbea9..00000000
--- a/docs/development/devtools/images/frankfurt/apex_s3p_vm-1.png
+++ /dev/null
Binary files differ
diff --git a/docs/development/devtools/images/frankfurt/apex_s3p_vm-2.png b/docs/development/devtools/images/frankfurt/apex_s3p_vm-2.png
deleted file mode 100644
index 548f2b72..00000000
--- a/docs/development/devtools/images/frankfurt/apex_s3p_vm-2.png
+++ /dev/null
Binary files differ