| Field | Value | Timestamp |
|---|---|---|
| author | da490c <dave.adams@amdocs.com> | 2018-03-22 00:32:52 -0400 |
| committer | da490c <dave.adams@amdocs.com> | 2018-03-22 09:34:25 -0400 |
| commit | ba31685194c77ef140411531299696ae701385d4 (patch) | |
| tree | 912f7d5b3378901ccabb8df52b26866d74572f10 /sparkybe-onap-service | |
| parent | ef7f255958e541ffaec0fd2a977440dd7b6fd6b8 (diff) | |
Convert Sparky to Spring-Boot
Issue-ID: AAI-599
Change-Id: If474dd02794f442fdddcd90f62fb75e0d6b907e7
Signed-off-by: da490c <dave.adams@amdocs.com>
Diffstat (limited to 'sparkybe-onap-service')
426 files changed, 54619 insertions, 0 deletions
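The change moves Sparky's backend onto Spring Boot 1.5.x with Camel servlet/restlet routes (see the `spring-boot-starter-parent` parent and the `camel-spring-boot-dependencies` BOM in `sparkybe-onap-service/pom.xml` below). For orientation, a minimal sketch of the kind of entry point such a conversion introduces; the class and package names here are illustrative assumptions, not code taken from this commit:

```java
package org.onap.aai.sparky; // hypothetical package, for illustration only

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

// Minimal Spring Boot entry point of the sort a Spring-Boot conversion adds.
// The actual class in this commit may differ in name, package, and configuration.
@SpringBootApplication
public class SparkyApplication {

    public static void main(String[] args) {
        // Starts the embedded servlet container and component scan,
        // replacing deployment into an externally managed container.
        SpringApplication.run(SparkyApplication.class, args);
    }
}
```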
diff --git a/sparkybe-onap-service/LICENSE b/sparkybe-onap-service/LICENSE
new file mode 100644
index 0000000..c8636af
--- /dev/null
+++ b/sparkybe-onap-service/LICENSE
@@ -0,0 +1,18 @@
+============LICENSE_START=======================================================
+org.onap.aai
+================================================================================
+Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+Copyright © 2017-2018 Amdocs
+================================================================================
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+============LICENSE_END=========================================================
\ No newline at end of file diff --git a/sparkybe-onap-service/eclipse-config/eclipse-java-google-style.xml b/sparkybe-onap-service/eclipse-config/eclipse-java-google-style.xml new file mode 100644 index 0000000..03c2420 --- /dev/null +++ b/sparkybe-onap-service/eclipse-config/eclipse-java-google-style.xml @@ -0,0 +1,295 @@ +<?xml version="1.0" encoding="UTF-8" standalone="no"?> +<profiles version="12"> +<profile kind="CodeFormatterProfile" name="GoogleStyle" version="12"> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="4"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.disabling_tag" value="@formatter:off"/> +<setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="2"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="0"/> +<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="0"/> +<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_binary_operator" value="insert"/> +<setting 
id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="16"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="2"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column" value="false"/> +<setting id="org.eclipse.jdt.core.compiler.problem.enumIdentifier" value="error"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="end_of_line"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/> +<setting 
id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="100"/> +<setting id="org.eclipse.jdt.core.formatter.use_on_off_tags" value="false"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments" value="false"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch" value="16"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_binary_expression" value="16"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="16"/> +<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/> +<setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="end_of_line"/> +<setting id="org.eclipse.jdt.core.formatter.brace_position_for_lambda_body" value="end_of_line"/> +<setting 
id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="false"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="16"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/> +<setting id="org.eclipse.jdt.core.compiler.problem.assertIdentifier" value="error"/> +<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="false"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="false"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_binary_operator" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/> +<setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="3"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="16"/> +<setting 
id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/> +<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="80"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="end_of_line"/> +<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="false"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="16"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/> +<setting id="org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode" value="enabled"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="0"/> +<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_resources_in_try" value="80"/> +<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" 
value="false"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="16"/> +<setting id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column" value="false"/> +<setting id="org.eclipse.jdt.core.compiler.source" value="1.8"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="2"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="2"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="16"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/> +<setting id="org.eclipse.jdt.core.compiler.codegen.targetPlatform" value="1.8"/> +<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="end_of_line"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_type_annotation" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/> +<setting 
id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/> +<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="16"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/> +<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="0"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/> +<setting 
id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_lambda_arrow" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="16"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/> +<setting id="org.eclipse.jdt.core.formatter.wrap_before_binary_operator" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/> +<setting id="org.eclipse.jdt.core.compiler.compliance" value="1.8"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer" value="do not insert"/> +<setting 
id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/> +<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/> +<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="0"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_after_lambda_arrow" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="true"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/> +<setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="false"/> +<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="space"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="1"/> +<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="100"/> 
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/> +<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/> +</profile> +</profiles> diff --git a/sparkybe-onap-service/pom.xml b/sparkybe-onap-service/pom.xml new file mode 100644 index 0000000..37c464f --- /dev/null +++ b/sparkybe-onap-service/pom.xml @@ -0,0 +1,555 @@ +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + + <parent> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-parent</artifactId> + <version>1.5.10.RELEASE</version> + </parent> + + <groupId>org.onap.aai.sparky-be</groupId> + <artifactId>sparkybe-onap-service</artifactId> + <version>1.2.0-SNAPSHOT</version> + <packaging>jar</packaging> + + <properties> + <java.version>1.8</java.version> + <version.jacoco.maven.plugin>0.7.9</version.jacoco.maven.plugin> + <frontEndGroupdId>org.onap.aai</frontEndGroupdId> + <frontEndArtifactId>sparky-fe</frontEndArtifactId> + <frontEndVersion>1.1.0-SNAPSHOT</frontEndVersion> + <serverPort>9517</serverPort> + <sslport>8000</sslport> + <nexusproxy>https://nexus.onap.org</nexusproxy> + <camel-spring-boot.version>2.20.0</camel-spring-boot.version> + <config-home>${basedir}/</config-home> + </properties> + + + + + <dependencyManagement> + <dependencies> + + <!-- Camel BOM --> + <dependency> + <groupId>org.apache.camel</groupId> + <artifactId>camel-spring-boot-dependencies</artifactId> + <version>${camel-spring-boot.version}</version> + <type>pom</type> + <scope>import</scope> + </dependency> + </dependencies> + </dependencyManagement> + +<!-- + +some of the depedencies should probably have a scope of provided so they don't automatically become part of the final jar + + --> + + <dependencies> + + <!-- Spring dependencies --> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-web</artifactId> + </dependency> + + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-actuator</artifactId> + </dependency> + + <dependency> + <groupId>org.apache.camel</groupId> + <artifactId>camel-restlet</artifactId> + <version>${camel-spring-boot.version}</version> + <exclusions> + <exclusion> + <groupId>commons-logging</groupId> + <artifactId>commons-logging</artifactId> + </exclusion> + </exclusions> + </dependency> + + <dependency> + <groupId>org.apache.camel</groupId> + <artifactId>camel-servlet</artifactId> + <version>${camel-spring-boot.version}</version> + <exclusions> + <exclusion> + <groupId>commons-logging</groupId> + <artifactId>commons-logging</artifactId> + </exclusion> + </exclusions> + </dependency> + + <!-- Camel BOM --> + <dependency> + <groupId>org.apache.camel</groupId> + <artifactId>camel-spring-boot-dependencies</artifactId> + <version>${camel-spring-boot.version}</version> + <type>pom</type> + <scope>import</scope> + </dependency> + + <!-- https://mvnrepository.com/artifact/com.sun.xml.bind/jaxb-impl --> + <!-- <dependency> <groupId>com.sun.xml.bind</groupId> <artifactId>jaxb-impl</artifactId> + </dependency> <dependency> <groupId>com.sun.xml.bind</groupId> <artifactId>jaxb-core</artifactId> + </dependency> --> + + <!-- Camel --> + <dependency> + <groupId>org.apache.camel</groupId> + 
<artifactId>camel-spring-boot-starter</artifactId> + </dependency> + + <!-- https://mvnrepository.com/artifact/org.apache.camel/camel-restlet --> + <dependency> + <groupId>org.apache.camel</groupId> + <artifactId>camel-restlet</artifactId> + </dependency> + + <dependency> + <groupId>commons-io</groupId> + <artifactId>commons-io</artifactId> + <version>2.4</version> + </dependency> + + <!-- https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-util --> + <dependency> + <groupId>org.eclipse.jetty</groupId> + <artifactId>jetty-util</artifactId> + <scope>provided</scope> + </dependency> + + <dependency> + <groupId>org.apache.camel</groupId> + <artifactId>camel-servlet-starter</artifactId> + </dependency> + + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-actuator</artifactId> + </dependency> + + <!-- https://mvnrepository.com/artifact/commons-cli/commons-cli --> + <dependency> + <groupId>commons-cli</groupId> + <artifactId>commons-cli</artifactId> + <version>1.2</version> + </dependency> + + <!-- <dependency> <groupId>org.apache.tomcat.embed</groupId> <artifactId>tomcat-embed-jasper</artifactId> + </dependency> --> + + + <!-- https://mvnrepository.com/artifact/org.apache.camel/camel-http-common --> + <!-- <dependency> <groupId>org.apache.camel</groupId> <artifactId>camel-http</artifactId> + <version>2.15.5</version> </dependency> <dependency> <groupId>abc.def</groupId> + <artifactId>att-camel-dme2-servlet</artifactId> <version>2.15.5</version> + <scope>system</scope> <systemPath>x:/222/att-camel-dme2-servlet-2.15.5.jar</systemPath> + </dependency> <dependency> <groupId>abc.def</groupId> <artifactId>att-camel-static-content</artifactId> + <version>2.11.2.1</version> <scope>system</scope> <systemPath>x:/222/att-camel-static-content-2.11.2.1.jar</systemPath> + </dependency> --> + + <!-- Utility dependencies --> + + <dependency> + <groupId>com.google.guava</groupId> + <artifactId>guava</artifactId> + <version>19.0</version> + </dependency> + + + <dependency> + <groupId>org.onap.aai.logging-service</groupId> + <artifactId>common-logging</artifactId> + <version>1.2.0</version> + </dependency> + + + <dependency> + <groupId>dom4j</groupId> + <artifactId>dom4j</artifactId> + <scope>provided</scope> + </dependency> + + <dependency> + <groupId>org.eclipse.persistence</groupId> + <artifactId>eclipselink</artifactId> + <version>2.6.2</version> + </dependency> + + <dependency> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-api</artifactId> + <scope>provided</scope> + </dependency> + + <dependency> + <groupId>com.fasterxml.jackson.core</groupId> + <artifactId>jackson-core</artifactId> + <scope>provided</scope> + </dependency> + + <dependency> + <groupId>org.onap.aai.aai-common</groupId> + <artifactId>aai-schema</artifactId> + <version>1.2.0</version> + </dependency> + + <dependency> + <groupId>org.onap.aai</groupId> + <artifactId>rest-client</artifactId> + <version>1.2.0</version> + </dependency> + + + <dependency> + <groupId>commons-io</groupId> + <artifactId>commons-io</artifactId> + <version>2.4</version> + </dependency> + + <!-- https://mvnrepository.com/artifact/org.restlet.jee/org.restlet.ext.servlet --> + <dependency> + <groupId>org.restlet.jee</groupId> + <artifactId>org.restlet.ext.servlet</artifactId> + <version>2.1.1</version> + </dependency> + + + <dependency> + <groupId>com.openpojo</groupId> + <artifactId>openpojo</artifactId> + <version>0.8.6</version> + </dependency> + + <dependency> + <groupId>com.google.code.gson</groupId> + 
<artifactId>gson</artifactId> + <scope>provided</scope> + </dependency> + + + <dependency> + <groupId>org.json</groupId> + <artifactId>json</artifactId> + <scope>provided</scope> + </dependency> + + <dependency> + <groupId>com.fasterxml.jackson.core</groupId> + <artifactId>jackson-databind</artifactId> + <scope>provided</scope> + </dependency> + + <dependency> + <groupId>org.onap.portal.sdk</groupId> + <artifactId>epsdk-fw</artifactId> + <version>1.3.0</version> + <exclusions> + <exclusion> + <groupId>commons-logging</groupId> + <artifactId>commons-logging</artifactId> + </exclusion> + <exclusion> + <groupId>log4j</groupId> + <artifactId>log4j</artifactId> + </exclusion> + <exclusion> + <groupId>log4j</groupId> + <artifactId>apache-log4j-extras</artifactId> + </exclusion> + <exclusion> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-log4j12</artifactId> + </exclusion> + </exclusions> + </dependency> + + <!-- Test dependencies --> + + <dependency> + <groupId>org.mockito</groupId> + <artifactId>mockito-all</artifactId> + <version>1.10.19</version> + <scope>test</scope> + </dependency> + + <dependency> + <groupId>org.powermock</groupId> + <artifactId>powermock-module-junit4</artifactId> + <version>1.6.2</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.powermock</groupId> + <artifactId>powermock-api-mockito</artifactId> + <version>1.6.2</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.powermock</groupId> + <artifactId>powermock-module-javaagent</artifactId> + <version>1.6.2</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.powermock</groupId> + <artifactId>powermock-module-junit4-rule-agent</artifactId> + <version>1.6.2</version> + <scope>test</scope> + </dependency> + + <dependency> + <groupId>org.hamcrest</groupId> + <artifactId>hamcrest-library</artifactId> + <scope>test</scope> + </dependency> + + </dependencies> + + <build> + + <pluginManagement> + <plugins> + + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-compiler-plugin</artifactId> + <configuration> + <source>1.8</source> + <target>1.8</target> + </configuration> + </plugin> + +<!-- <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-resources-plugin</artifactId> + <version>2.7</version> + <executions> + <execution> + <id>copy-docker-file</id> + <phase>package</phase> + <goals> + <goal>copy-resources</goal> + </goals> + <configuration> + <outputDirectory>target</outputDirectory> + <overwrite>true</overwrite> + <resources> + <resource> + <directory>${basedir}/src/main/docker</directory> + <filtering>true</filtering> + <includes> + <include>**/*</include> + </includes> + </resource> + <resource> + <directory>${basedir}/src/main/scripts/</directory> + </resource> + </resources> + </configuration> + </execution> + </executions> + </plugin> --> + + </plugins> + + </pluginManagement> + + <plugins> +<!-- <plugin> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-maven-plugin</artifactId> + </plugin> + --> + + <!-- <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-resources-plugin</artifactId> + <version>2.7</version> + <executions> + <execution> + <id>copy-docker-file</id> + <phase>package</phase> + <goals> + <goal>copy-resources</goal> + </goals> + <configuration> + <outputDirectory>target</outputDirectory> + <overwrite>true</overwrite> + <resources> + <resource> + <directory>${basedir}/src/main/docker</directory> + <filtering>true</filtering> + 
<includes> + <include>**/*</include> + </includes> + </resource> + <resource> + <directory>${basedir}/src/main/bin/</directory> + </resource> + </resources> + </configuration> + </execution> + </executions> + </plugin> --> + +<!-- <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-dependency-plugin</artifactId> + <version>3.0.0</version> + <executions> + <execution> + <id>copy-installed</id> + <phase>install</phase> + <goals> + <goal>copy</goal> + </goals> + <configuration> + <artifactItems> + <artifactItem> + <groupId>${frontEndGroupdId}</groupId> + <artifactId>${frontEndArtifactId}</artifactId> + <version>${frontEndVersion}</version> + <type>war</type> + <outputDirectory>${basedir}/target/</outputDirectory> + <destFileName>aai.war</destFileName> + </artifactItem> + </artifactItems> + </configuration> + </execution> + </executions> + </plugin> + --> + <plugin> + <groupId>org.jacoco</groupId> + <artifactId>jacoco-maven-plugin</artifactId> + <version>${version.jacoco.maven.plugin}</version> + <executions> + <execution> + <id>prepare-agent</id> + <goals> + <goal>prepare-agent</goal> + </goals> + </execution> + <execution> + <id>report</id> + <phase>package</phase> + <goals> + <goal>report</goal> + </goals> + </execution> + </executions> + </plugin> + +<!-- <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>cobertura-maven-plugin</artifactId> + <executions> + <execution> + <phase /> + </execution> + </executions> + </plugin> + --> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-site-plugin</artifactId> + <version>3.3</version> + <configuration> + <reportPlugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-checkstyle-plugin</artifactId> + <version>2.17</version> + <reportSets> + <reportSet> + <reports> + <report>checkstyle</report> + </reports> + </reportSet> + </reportSets> + </plugin> + </reportPlugins> + </configuration> + </plugin> + + <!-- <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-resources-plugin</artifactId> + <version>2.7</version> + <executions> + <execution> + <id>copy-docker-file</id> + <phase>package</phase> + <goals> + <goal>copy-resources</goal> + </goals> + <configuration> + <outputDirectory>target</outputDirectory> + <overwrite>true</overwrite> + <resources> + <resource> + <directory>${basedir}/src/main/docker</directory> + <filtering>true</filtering> + <includes> + <include>**/*</include> + </includes> + </resource> + <resource> + <directory>${basedir}/src/main/scripts/</directory> + </resource> + </resources> + </configuration> + </execution> + </executions> + </plugin> --> + +<!-- <plugin> + <groupId>com.spotify</groupId> + <artifactId>docker-maven-plugin</artifactId> + <version>0.4.11</version> + <configuration> + <verbose>true</verbose> + <serverId>docker-hub</serverId> + <imageName>${docker.push.registry}/onap/${project.artifactId}</imageName> + <dockerDirectory>${docker.location}</dockerDirectory> + <imageTags> + <imageTag>latest</imageTag> + </imageTags> + <forceTags>true</forceTags> + </configuration> + </plugin> --> + <!-- license plugin --> + <plugin> + <groupId>com.mycila</groupId> + <artifactId>license-maven-plugin</artifactId> + <version>3.0</version> + <configuration> + <header>LICENSE</header> + <includes> + <include>src/main/java/**</include> + </includes> + </configuration> + <executions> + <execution> + <goals> + <goal>format</goal> + </goals> + <phase>process-sources</phase> + </execution> + </executions> + </plugin> + + 
    </plugins>
+  </build>
+</project>
+
+
+
diff --git a/sparkybe-onap-service/project-configs/code-tools/sonar-secret.txt b/sparkybe-onap-service/project-configs/code-tools/sonar-secret.txt
new file mode 100644
index 0000000..9036e07
--- /dev/null
+++ b/sparkybe-onap-service/project-configs/code-tools/sonar-secret.txt
@@ -0,0 +1 @@
+7TP5jKdtMb+0EtW4Trbbnw==
\ No newline at end of file diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateSummaryProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateSummaryProcessor.java new file mode 100644 index 0000000..be29889 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateSummaryProcessor.java @@ -0,0 +1,210 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.aggregatevnf.search; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import javax.json.JsonObject; +import javax.servlet.http.HttpServletRequest; + +import org.apache.camel.Exchange; +import org.json.JSONArray; +import org.json.JSONObject; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.logging.util.ServletUtils; +import org.onap.aai.sparky.search.filters.FilterQueryBuilder; +import org.onap.aai.sparky.search.filters.config.FiltersConfig; +import org.onap.aai.sparky.search.filters.entity.SearchFilter; +import org.onap.aai.sparky.viewandinspect.config.SparkyConstants; + +public class AggregateSummaryProcessor { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(AggregateSummaryProcessor.class); + + private static final String KEY_FILTERS = "filters"; + + private ElasticSearchAdapter elasticSearchAdapter = null; + + private String vnfAggregationIndexName; + private FiltersConfig filtersConfig; + + public AggregateSummaryProcessor(ElasticSearchAdapter elasticSearchAdapter, FiltersConfig filtersConfig) { + this.elasticSearchAdapter = elasticSearchAdapter; + this.filtersConfig = filtersConfig; + } + + public void setVnfAggregationIndexName(String vnfAggregationIndexName) { + this.vnfAggregationIndexName = vnfAggregationIndexName; + } + + public void getFilteredAggregation(Exchange exchange) { + + HttpServletRequest request = exchange.getIn().getBody(HttpServletRequest.class); + ServletUtils.setUpMdcContext(exchange, request); + + + try { + String payload = exchange.getIn().getBody(String.class); + + if (payload == null || payload.isEmpty()) { + + LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, "Request Payload is empty"); + + /* + * Don't throw back an error, 
just return an empty set + */ + + } else { + + JSONObject parameters = new JSONObject(payload); + + JSONArray requestFilters = null; + if (parameters.has(KEY_FILTERS)) { + requestFilters = parameters.getJSONArray(KEY_FILTERS); + } else { + + JSONObject zeroResponsePayload = new JSONObject(); + zeroResponsePayload.put("count", 0); + //response.setStatus(Status.SUCCESS_OK); + //response.setEntity(zeroResponsePayload.toString(), MediaType.APPLICATION_JSON); + exchange.getOut().setBody(zeroResponsePayload.toString()); + + LOG.error(AaiUiMsgs.ERROR_FILTERS_NOT_FOUND); + return; + } + + if (requestFilters != null && requestFilters.length() > 0) { + List<JSONObject> filtersToQuery = new ArrayList<JSONObject>(); + for(int i = 0; i < requestFilters.length(); i++) { + JSONObject filterEntry = requestFilters.getJSONObject(i); + filtersToQuery.add(filterEntry); + } + + String jsonResponsePayload = getVnfFilterAggregations(filtersToQuery); + exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, 200); + exchange.getOut().setHeader(Exchange.CONTENT_TYPE, "application/json"); + exchange.getOut().setBody(jsonResponsePayload); + + } else { + String emptyResponse = getEmptyAggResponse(); + exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, 200); + exchange.getOut().setHeader(Exchange.CONTENT_TYPE, "application/json"); + exchange.getOut().setBody(emptyResponse); + LOG.error(AaiUiMsgs.ERROR_FILTERS_NOT_FOUND); + } + } + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, "FilterProcessor failed to get filter list due to error = " + exc.getMessage()); + } + } + + private String getEmptyAggResponse() { + JSONObject aggPayload = new JSONObject(); + aggPayload.put("totalChartHits", 0); + aggPayload.put("buckets", new JSONArray()); + JSONObject payload = new JSONObject(); + payload.append("groupby_aggregation", aggPayload); + + return payload.toString(); + } + + private static final String FILTER_ID_KEY = "filterId"; + private static final String FILTER_VALUE_KEY = "filterValue"; + private static final int DEFAULT_SHOULD_MATCH_SCORE = 1; + private static final String VNF_FILTER_AGGREGATION = "vnfFilterAggregation"; + + + private String getVnfFilterAggregations(List<JSONObject> filtersToQuery) throws IOException { + + List<SearchFilter> searchFilters = new ArrayList<SearchFilter>(); + for(JSONObject filterEntry : filtersToQuery) { + + String filterId = filterEntry.getString(FILTER_ID_KEY); + if(filterId != null) { + SearchFilter filter = new SearchFilter(); + filter.setFilterId(filterId); + + if(filterEntry.has(FILTER_VALUE_KEY)) { + String filterValue = filterEntry.getString(FILTER_VALUE_KEY); + filter.addValue(filterValue); + } + + searchFilters.add(filter); + } + } + + // Create query for summary by entity type + JsonObject vnfSearch = FilterQueryBuilder.createCombinedBoolAndAggQuery(filtersConfig, searchFilters, DEFAULT_SHOULD_MATCH_SCORE); + + // Parse response for summary by entity type query + OperationResult opResult = elasticSearchAdapter.doPost( + elasticSearchAdapter.buildElasticSearchUrlForApi(vnfAggregationIndexName, + SparkyConstants.ES_SEARCH_API), + vnfSearch.toString(), javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE); + + if ( opResult.wasSuccessful()) { + return buildAggregateVnfResponseJson(opResult.getResult()); + } else { + return buildEmptyAggregateVnfResponseJson(); + } + } + + private String buildEmptyAggregateVnfResponseJson() { + JSONObject finalOutputToFe = new JSONObject(); + finalOutputToFe.put("total", 0); + return finalOutputToFe.toString(); + } + + private String 
buildAggregateVnfResponseJson(String responseJsonStr) { + + JSONObject finalOutputToFe = new JSONObject(); + JSONObject responseJson = new JSONObject(responseJsonStr); + + + JSONObject hits = responseJson.getJSONObject("hits"); + int totalHits = hits.getInt("total"); + finalOutputToFe.put("total", totalHits); + + JSONObject aggregations = responseJson.getJSONObject("aggregations"); + String[] aggKeys = JSONObject.getNames(aggregations); + JSONObject aggregationsList = new JSONObject(); + + for(String aggName : aggKeys) { + JSONObject aggregation = aggregations.getJSONObject(aggName); + JSONArray buckets = aggregation.getJSONArray("buckets"); + aggregationsList.put(aggName, buckets); + } + + finalOutputToFe.put("aggregations", aggregationsList); + + return finalOutputToFe.toString(); + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateVnfSearchProvider.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateVnfSearchProvider.java new file mode 100644 index 0000000..6e7b456 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/AggregateVnfSearchProvider.java @@ -0,0 +1,129 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.aggregatevnf.search; + +import java.util.ArrayList; +import java.util.List; + +import javax.json.JsonObject; +import javax.ws.rs.core.MediaType; + +import org.json.JSONArray; +import org.json.JSONObject; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.common.search.CommonSearchSuggestion; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.search.api.SearchProvider; +import org.onap.aai.sparky.search.entity.QuerySearchEntity; +import org.onap.aai.sparky.search.entity.SearchSuggestion; +import org.onap.aai.sparky.search.filters.entity.UiFilterValueEntity; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.viewandinspect.config.SparkyConstants; + +import com.fasterxml.jackson.databind.ObjectMapper; + +public class AggregateVnfSearchProvider implements SearchProvider { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(AggregateVnfSearchProvider.class); + + private ObjectMapper mapper; + private ElasticSearchAdapter elasticSearchAdapter = null; + private String autoSuggestIndexName; + private String vnfSearchSuggestionRoute; + + public AggregateVnfSearchProvider(ElasticSearchAdapter elasticSearchAdapter, + String autoSuggestIndexName, String vnfSearchSuggestionRoute) { + mapper = new ObjectMapper(); + this.elasticSearchAdapter = elasticSearchAdapter; + this.autoSuggestIndexName = autoSuggestIndexName; + this.vnfSearchSuggestionRoute = vnfSearchSuggestionRoute; + } + + public void setAutoSuggestIndexName(String autoSuggestIndexName) { + this.autoSuggestIndexName = autoSuggestIndexName; + } + + @Override + public List<SearchSuggestion> search(QuerySearchEntity queryRequest) { + + List<SearchSuggestion> returnList = new ArrayList<SearchSuggestion>(); + + try { + + /* Create suggestions query */ + JsonObject vnfSearch = VnfSearchQueryBuilder.createSuggestionsQuery(String.valueOf(queryRequest.getMaxResults()), queryRequest.getQueryStr()); + + /* Parse suggestions response */ + OperationResult opResult = elasticSearchAdapter.doPost( + elasticSearchAdapter.buildElasticSearchUrlForApi(autoSuggestIndexName, + SparkyConstants.ES_SUGGEST_API), + vnfSearch.toString(), MediaType.APPLICATION_JSON_TYPE); + + String result = opResult.getResult(); + + if (!opResult.wasSuccessful()) { + LOG.error(AaiUiMsgs.ERROR_PARSING_JSON_PAYLOAD_VERBOSE, result); + return returnList; + } + + JSONObject responseJson = new JSONObject(result); + String suggestionsKey = "vnfs"; + JSONArray suggestionsArray = new JSONArray(); + JSONArray suggestions = responseJson.getJSONArray(suggestionsKey); + if (suggestions.length() > 0) { + suggestionsArray = suggestions.getJSONObject(0).getJSONArray("options"); + for (int i = 0; i < suggestionsArray.length(); i++) { + JSONObject querySuggestion = suggestionsArray.getJSONObject(i); + if (querySuggestion != null) { + CommonSearchSuggestion responseSuggestion = new CommonSearchSuggestion(); + responseSuggestion.setText(querySuggestion.getString("text")); + responseSuggestion.setRoute(vnfSearchSuggestionRoute); + responseSuggestion.setHashId(NodeUtils.generateUniqueShaDigest(querySuggestion.getString("text"))); + + // Extract filter list from JSON and add to response suggestion + JSONObject payload = querySuggestion.getJSONObject("payload"); + if (payload.length() > 0) { + JSONArray filterList = 
payload.getJSONArray("filterList"); + for (int filter = 0; filter < filterList.length(); filter++) { + String filterValueString = filterList.getJSONObject(filter).toString(); + UiFilterValueEntity filterValue = mapper.readValue(filterValueString, UiFilterValueEntity.class); + responseSuggestion.getFilterValues().add(filterValue); + } + } + returnList.add(responseSuggestion); + } + } + } + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, "Search failed due to error = " + exc.getMessage()); + } + + return returnList; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/VnfSearchQueryBuilder.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/VnfSearchQueryBuilder.java new file mode 100644 index 0000000..2645433 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregatevnf/search/VnfSearchQueryBuilder.java @@ -0,0 +1,176 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.aggregatevnf.search; + +import java.util.Map; + +import javax.json.Json; +import javax.json.JsonArray; +import javax.json.JsonArrayBuilder; +import javax.json.JsonObject; +import javax.json.JsonObjectBuilder; + + +/** + * Build a JSON payload to send to elastic search to get vnf search data. + */ + +public class VnfSearchQueryBuilder { + + + /** + * Creates the suggestions query. 
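   *
   * <p>A rough sketch of the round trip, for illustration only: the query string,
   * suggestion text and filter entry below are invented, and the response fragment
   * assumes the completion-suggest shape that AggregateVnfSearchProvider.search parses.
   *
   * <pre>{@code
   * JsonObject query = VnfSearchQueryBuilder.createSuggestionsQuery("5", "vnf-example");
   * // query.toString() ->
   * //   {"vnfs":{"text":"vnf-example","completion":{"field":"entity_suggest","size":"5"}}}
   * //
   * // Suggest response fragment later consumed by the search provider:
   * //   {"vnfs":[{"options":[{"text":"vnf-example-1","payload":
   * //       {"filterList":[{"filterId":"1","filterValue":"example-value"}]}}]}]}
   * }</pre>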
+ * + * @param maxResults maximum number of suggestions to fetch + * @param queryStr query string + * @return the json object + */ + + /* + * { "vnfs" : { "text" : "VNFs", "completion" : { "field" : "entity_suggest", "size": 1 } } } + */ + public static JsonObject createSuggestionsQuery(String maxResults, String queryStr) { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + + JsonObjectBuilder completionBlob = Json.createObjectBuilder(); + completionBlob.add("field", "entity_suggest"); + completionBlob.add("size", maxResults); + + JsonObjectBuilder jsonAllBuilder = Json.createObjectBuilder(); + jsonAllBuilder.add("text", queryStr); + jsonAllBuilder.add("completion", completionBlob); + + jsonBuilder.add("vnfs", jsonAllBuilder.build()); + return jsonBuilder.build(); + } + + public static JsonObject getTermBlob(String key, String value) { + JsonObjectBuilder termBlobBuilder = Json.createObjectBuilder(); + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder().add(key, value); + return termBlobBuilder.add("term", jsonBuilder.build()).build(); + } + + public static void getSummaryAggsBlob(JsonObjectBuilder aggsBlobBuilder, String aggsKey, + int resultSize) { + JsonObjectBuilder fieldBuilder = + Json.createObjectBuilder().add("field", aggsKey).add("size", resultSize); + JsonObject aggsFieldBlob = fieldBuilder.build(); + JsonObjectBuilder defaultBlobBuilder = Json.createObjectBuilder().add("terms", aggsFieldBlob); + JsonObject defaultBlob = defaultBlobBuilder.build(); + aggsBlobBuilder.add("default", defaultBlob); + } + + public static void buildSingleTermCountQuery(JsonObjectBuilder jsonBuilder, String key, + String value) { + jsonBuilder.add("query", getTermBlob(key, value)); + } + + public static void buildSingleTermSummaryQuery(JsonObjectBuilder jsonBuilder, String key, + String value, String groupByKey) { + JsonObjectBuilder queryBlobBuilder = Json.createObjectBuilder(); + JsonObjectBuilder aggsBlobBuilder = Json.createObjectBuilder(); + + queryBlobBuilder.add("constant_score", + Json.createObjectBuilder().add("filter", getTermBlob(key, value))); + + getSummaryAggsBlob(aggsBlobBuilder, groupByKey, 0); + + jsonBuilder.add("query", queryBlobBuilder.build()); + jsonBuilder.add("aggs", aggsBlobBuilder.build()); + } + + public static void buildMultiTermSummaryQuery(JsonObjectBuilder jsonBuilder, + Map<String, String> attributes, String groupByKey) { + JsonObjectBuilder queryBlobBuilder = Json.createObjectBuilder(); + JsonObjectBuilder aggsBlobBuilder = Json.createObjectBuilder(); + JsonArrayBuilder mustBlobBuilder = Json.createArrayBuilder(); + for (String key : attributes.keySet()) { + mustBlobBuilder.add(getTermBlob(key, attributes.get(key))); + } + JsonArray mustBlob = mustBlobBuilder.build(); + + queryBlobBuilder.add("constant_score", Json.createObjectBuilder().add("filter", + Json.createObjectBuilder().add("bool", Json.createObjectBuilder().add("must", mustBlob)))); + + getSummaryAggsBlob(aggsBlobBuilder, groupByKey, 0); + + jsonBuilder.add("query", queryBlobBuilder.build()); + jsonBuilder.add("aggs", aggsBlobBuilder.build()); + } + + public static void buildZeroTermSummaryQuery(JsonObjectBuilder jsonBuilder, String groupByKey) { + JsonObjectBuilder aggsBlobBuilder = Json.createObjectBuilder(); + + getSummaryAggsBlob(aggsBlobBuilder, groupByKey, 0); + + jsonBuilder.add("aggs", aggsBlobBuilder.build()); + } + + public static void buildMultiTermCountQuery(JsonObjectBuilder jsonBuilder, + Map<String, String> attributes) { + JsonArrayBuilder mustBlobBuilder = 
Json.createArrayBuilder(); + for (String key : attributes.keySet()) { + mustBlobBuilder.add(getTermBlob(key, attributes.get(key))); + } + jsonBuilder.add("query", Json.createObjectBuilder().add("bool", + Json.createObjectBuilder().add("must", mustBlobBuilder))); + } + + + + public static JsonObject createSummaryByEntityTypeQuery(Map<String, String> attributes, + String groupByKey) { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + jsonBuilder.add("size", "0"); // avoid source data + if (attributes.size() == 0) { + buildZeroTermSummaryQuery(jsonBuilder, groupByKey); + } else if (attributes.size() == 1) { + Map.Entry<String, String> entry = attributes.entrySet().iterator().next(); + buildSingleTermSummaryQuery(jsonBuilder, entry.getKey(), entry.getValue(), groupByKey); + } else { + buildMultiTermSummaryQuery(jsonBuilder, attributes, groupByKey); + } + return jsonBuilder.build(); + } + + public static JsonObject createEntityCountsQuery(Map<String, String> attributes) { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + if (attributes.size() == 1) { + Map.Entry<String, String> entry = attributes.entrySet().iterator().next(); + buildSingleTermCountQuery(jsonBuilder, entry.getKey(), entry.getValue()); + } else { + buildMultiTermCountQuery(jsonBuilder, attributes); + } + return jsonBuilder.build(); + } + + public static JsonArray getSortCriteria(String sortFieldName, String sortOrder) { + JsonArrayBuilder jsonBuilder = Json.createArrayBuilder(); + jsonBuilder.add(Json.createObjectBuilder().add(sortFieldName, + Json.createObjectBuilder().add("order", sortOrder))); + + return jsonBuilder.build(); + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSyncControllerFactory.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSyncControllerFactory.java new file mode 100644 index 0000000..8681853 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSyncControllerFactory.java @@ -0,0 +1,241 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.aggregation.sync; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.SuggestionEntityDescriptor; +import org.onap.aai.sparky.config.oxm.SuggestionEntityLookup; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.ElasticSearchIndexCleaner; +import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory; +import org.onap.aai.sparky.sync.IndexCleaner; +import org.onap.aai.sparky.sync.IndexIntegrityValidator; +import org.onap.aai.sparky.sync.SyncController; +import org.onap.aai.sparky.sync.SyncControllerImpl; +import org.onap.aai.sparky.sync.SyncControllerRegistrar; +import org.onap.aai.sparky.sync.SyncControllerRegistry; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.config.SyncControllerConfig; + +public class AggregationSyncControllerFactory implements SyncControllerRegistrar { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(AggregationSyncControllerFactory.class); + + private ActiveInventoryAdapter aaiAdapter; + private ElasticSearchAdapter esAdapter; + private SuggestionEntityLookup suggestionEntityLookup; + + private Map<String, String> aggregationEntityToIndexMap; + private Map<String, ElasticSearchSchemaConfig> indexNameToSchemaConfigMap; + + private ElasticSearchEndpointConfig elasticSearchEndpointConfig; + private SyncControllerConfig syncControllerConfig; + private SyncControllerRegistry syncControllerRegistry; + private NetworkStatisticsConfig aaiStatConfig; + private NetworkStatisticsConfig esStatConfig; + private OxmEntityLookup oxmEntityLookup; + private ElasticSearchSchemaFactory elasticSearchSchemaFactory; + + private List<SyncController> syncControllers; + + public AggregationSyncControllerFactory(ElasticSearchEndpointConfig esEndpointConfig, + SyncControllerConfig syncControllerConfig, SyncControllerRegistry syncControllerRegistry, + SuggestionEntityLookup suggestionEntityLookup, + OxmEntityLookup oxmEntityLookup, + ElasticSearchSchemaFactory elasticSearchSchemaFactory) { + this.elasticSearchSchemaFactory = elasticSearchSchemaFactory; + this.syncControllers = new ArrayList<SyncController>(); + this.elasticSearchEndpointConfig = esEndpointConfig; + this.syncControllerConfig = syncControllerConfig; + this.syncControllerRegistry = syncControllerRegistry; + this.suggestionEntityLookup = suggestionEntityLookup; + this.oxmEntityLookup = oxmEntityLookup; + } + + public NetworkStatisticsConfig getAaiStatConfig() { + return aaiStatConfig; + } + + public void setAaiStatConfig(NetworkStatisticsConfig aaiStatConfig) { + this.aaiStatConfig = aaiStatConfig; + } + + public NetworkStatisticsConfig getEsStatConfig() { + return esStatConfig; + } + + public void setEsStatConfig(NetworkStatisticsConfig esStatConfig) { + this.esStatConfig = esStatConfig; + } + + public Map<String, ElasticSearchSchemaConfig> getIndexNameToSchemaConfigMap() { + return indexNameToSchemaConfigMap; + } + + public void setIndexNameToSchemaConfigMap( + Map<String, ElasticSearchSchemaConfig> indexNameToSchemaConfigMap) { + 
this.indexNameToSchemaConfigMap = indexNameToSchemaConfigMap; + } + + public ElasticSearchEndpointConfig getElasticSearchEndpointConfig() { + return elasticSearchEndpointConfig; + } + + public void setElasticSearchEndpointConfig( + ElasticSearchEndpointConfig elasticSearchEndpointConfig) { + this.elasticSearchEndpointConfig = elasticSearchEndpointConfig; + } + + public SyncControllerConfig getSyncControllerConfig() { + return syncControllerConfig; + } + + public void setSyncControllerConfig(SyncControllerConfig syncControllerConfig) { + this.syncControllerConfig = syncControllerConfig; + } + + public ActiveInventoryAdapter getAaiAdapter() { + return aaiAdapter; + } + + public void setAaiAdapter(ActiveInventoryAdapter aaiAdapter) { + this.aaiAdapter = aaiAdapter; + } + + public ElasticSearchAdapter getEsAdapter() { + return esAdapter; + } + + public void setEsAdapter(ElasticSearchAdapter esAdapter) { + this.esAdapter = esAdapter; + } + + public SuggestionEntityLookup getSuggestionEntityLookup() { + return suggestionEntityLookup; + } + + public void setSuggestionEntityLookup(SuggestionEntityLookup suggestionEntityLookup) { + this.suggestionEntityLookup = suggestionEntityLookup; + } + + public Map<String, String> getAggregationEntityToIndexMap() { + return aggregationEntityToIndexMap; + } + + public void setAggregationEntityToIndexMap(Map<String, String> aggregationEntityToIndexMap) { + this.aggregationEntityToIndexMap = aggregationEntityToIndexMap; + } + + public void buildControllers() { + + if (syncControllerConfig.isEnabled()) { + + Map<String, SuggestionEntityDescriptor> suggestionEntitites = + suggestionEntityLookup.getSuggestionSearchEntityDescriptors(); + SyncControllerImpl aggregationSyncController = null; + + for (String entityType : suggestionEntitites.keySet()) { + + String indexName = aggregationEntityToIndexMap.get(entityType); + + if (indexName == null) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "Could not determine aggregation index name" + " for entity type: " + entityType); + continue; + } + + try { + + aggregationSyncController = new SyncControllerImpl(syncControllerConfig, entityType); + + ElasticSearchSchemaConfig schemaConfig = indexNameToSchemaConfigMap.get(indexName); + + if (schemaConfig == null) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "Could not determine elastic search schema config for index name: " + indexName); + continue; + } + + IndexIntegrityValidator aggregationIndexValidator = new IndexIntegrityValidator(esAdapter, + schemaConfig, elasticSearchEndpointConfig, elasticSearchSchemaFactory.getIndexSchema(schemaConfig)); + + aggregationSyncController.registerIndexValidator(aggregationIndexValidator); + + AggregationSynchronizer aggSynchronizer = new AggregationSynchronizer(entityType, + schemaConfig, syncControllerConfig.getNumInternalSyncWorkers(), + syncControllerConfig.getNumSyncActiveInventoryWorkers(), + syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig, + oxmEntityLookup); + + aggSynchronizer.setAaiAdapter(aaiAdapter); + aggSynchronizer.setElasticSearchAdapter(esAdapter); + + aggregationSyncController.registerEntitySynchronizer(aggSynchronizer); + + IndexCleaner entityDataIndexCleaner = + new ElasticSearchIndexCleaner(esAdapter, elasticSearchEndpointConfig, schemaConfig); + + aggregationSyncController.registerIndexCleaner(entityDataIndexCleaner); + + syncControllers.add(aggregationSyncController); + } catch (Exception exc) { + + exc.printStackTrace(); + + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "Failed to build aggregation sync 
controller. Error : " + exc.getMessage()); + } + + } + } else { + LOG.info(AaiUiMsgs.INFO_GENERIC, "Sync controller with name = " + + syncControllerConfig.getControllerName() + " is disabled"); + } + } + + @Override + public void registerController() { + + buildControllers(); + + if ( syncControllerRegistry != null ) { + for ( SyncController controller : syncControllers ) { + syncControllerRegistry.registerSyncController(controller); + } + } + + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSynchronizer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSynchronizer.java new file mode 100644 index 0000000..a438215 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/AggregationSynchronizer.java @@ -0,0 +1,782 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.aggregation.sync; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Deque; +import java.util.EnumSet; +import java.util.Iterator; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.dal.rest.HttpMethod; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.AbstractEntitySynchronizer; +import org.onap.aai.sparky.sync.IndexSynchronizer; +import org.onap.aai.sparky.sync.SynchronizerConstants; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.entity.AggregationEntity; +import org.onap.aai.sparky.sync.entity.MergableEntity; +import org.onap.aai.sparky.sync.entity.SelfLinkDescriptor; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.onap.aai.sparky.sync.task.PerformActiveInventoryRetrieval; +import org.onap.aai.sparky.sync.task.PerformElasticSearchPut; +import org.onap.aai.sparky.sync.task.PerformElasticSearchRetrieval; +import org.onap.aai.sparky.sync.task.PerformElasticSearchUpdate; +import org.onap.aai.sparky.util.NodeUtils; +import org.slf4j.MDC; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectReader; +import com.fasterxml.jackson.databind.node.ArrayNode; + +/** + * The Class AutosuggestionSynchronizer. + */ +public class AggregationSynchronizer extends AbstractEntitySynchronizer + implements IndexSynchronizer { + + /** + * The Class RetryAggregationEntitySyncContainer. + */ + private class RetryAggregationEntitySyncContainer { + NetworkTransaction txn; + AggregationEntity ae; + + /** + * Instantiates a new retry aggregation entity sync container. 
+ * + * @param txn the txn + * @param ae the se + */ + public RetryAggregationEntitySyncContainer(NetworkTransaction txn, AggregationEntity ae) { + this.txn = txn; + this.ae = ae; + } + + public NetworkTransaction getNetworkTransaction() { + return txn; + } + + public AggregationEntity getAggregationEntity() { + return ae; + } + } + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(AggregationSynchronizer.class); + private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ"; + + private boolean allWorkEnumerated; + private Deque<SelfLinkDescriptor> selflinks; + private Deque<RetryAggregationEntitySyncContainer> retryQueue; + private Map<String, Integer> retryLimitTracker; + protected ExecutorService esPutExecutor; + private ConcurrentHashMap<String, AtomicInteger> entityCounters; + private boolean syncInProgress; + private Map<String, String> contextMap; + private String entityType; + private ElasticSearchSchemaConfig schemaConfig; + private OxmEntityLookup oxmEntityLookup; + + /** + * Instantiates a new entity aggregation synchronizer. + * + * @param indexName the index name + * @throws Exception the exception + */ + public AggregationSynchronizer(String entityType, ElasticSearchSchemaConfig schemaConfig, + int numSyncWorkers, int numActiveInventoryWorkers, int numElasticWorkers, + NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig, + OxmEntityLookup oxmEntityLookup) throws Exception { + + super(LOG, "AGGES-" + schemaConfig.getIndexName().toUpperCase(), numSyncWorkers, + numActiveInventoryWorkers, numElasticWorkers, schemaConfig.getIndexName(),aaiStatConfig, esStatConfig); + + this.oxmEntityLookup = oxmEntityLookup; + + this.schemaConfig = schemaConfig; + this.entityType = entityType; + this.allWorkEnumerated = false; + this.entityCounters = new ConcurrentHashMap<String, AtomicInteger>(); + this.synchronizerName = "Entity Aggregation Synchronizer"; + this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS); + this.syncInProgress = false; + this.allWorkEnumerated = false; + this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>(); + this.retryQueue = new ConcurrentLinkedDeque<RetryAggregationEntitySyncContainer>(); + this.retryLimitTracker = new ConcurrentHashMap<String, Integer>(); + + this.esPutExecutor = NodeUtils.createNamedExecutor("AGGES-ES-PUT", 1, LOG); + + this.aaiEntityStats.intializeEntityCounters(entityType); + this.esEntityStats.intializeEntityCounters(entityType); + + this.contextMap = MDC.getCopyOfContextMap(); + } + + /** + * Collect all the work. + * + * @return the operation state + */ + private OperationState collectAllTheWork() { + final Map<String, String> contextMap = MDC.getCopyOfContextMap(); + final String entity = this.getEntityType(); + try { + + aaiWorkOnHand.set(1); + + supplyAsync(new Supplier<Void>() { + + @Override + public Void get() { + MDC.setContextMap(contextMap); + OperationResult typeLinksResult = null; + try { + typeLinksResult = aaiAdapter.getSelfLinksByEntityType(entity); + aaiWorkOnHand.decrementAndGet(); + processEntityTypeSelfLinks(typeLinksResult); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, "Processing execption while building working set. Error:" + + exc.getMessage()); + } + + return null; + } + + }, aaiExecutor).whenComplete((result, error) -> { + + if (error != null) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "An error occurred getting data from AAI. 
Error = " + error.getMessage()); + } + }); + + while (aaiWorkOnHand.get() != 0) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); + } + + Thread.sleep(1000); + } + + aaiWorkOnHand.set(selflinks.size()); + allWorkEnumerated = true; + syncEntityTypes(); + + while (!isSyncDone()) { + performRetrySync(); + Thread.sleep(1000); + } + + /* + * Make sure we don't hang on to retries that failed which could cause issues during future + * syncs + */ + retryLimitTracker.clear(); + + } catch (Exception exc) { + // TODO -> LOG, waht should be logged here? + } + + return OperationState.OK; + } + + + /** + * Perform retry sync. + */ + private void performRetrySync() { + while (retryQueue.peek() != null) { + + RetryAggregationEntitySyncContainer rsc = retryQueue.poll(); + if (rsc != null) { + + AggregationEntity ae = rsc.getAggregationEntity(); + NetworkTransaction txn = rsc.getNetworkTransaction(); + + String link = null; + try { + /* + * In this retry flow the se object has already derived its fields + */ + link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), ae.getId()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction retryTransaction = new NetworkTransaction(); + retryTransaction.setLink(link); + retryTransaction.setEntityType(txn.getEntityType()); + retryTransaction.setDescriptor(txn.getDescriptor()); + retryTransaction.setOperationType(HttpMethod.GET); + + /* + * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already + * called incrementAndGet when queuing the failed PUT! + */ + + supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, elasticSearchAdapter), + esExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, ae); + } + }); + } + + } + } + } + + /** + * Perform document upsert. + * + * @param esGetTxn the es get txn + * @param ae the ae + */ + protected void performDocumentUpsert(NetworkTransaction esGetTxn, AggregationEntity ae) { + /** + * <p> + * <ul> + * As part of the response processing we need to do the following: + * <li>1. Extract the version (if present), it will be the ETAG when we use the + * Search-Abstraction-Service + * <li>2. 
Spawn next task which is to do the PUT operation into elastic with or with the version + * tag + * <li>a) if version is null or RC=404, then standard put, no _update with version tag + * <li>b) if version != null, do PUT with _update?version= versionNumber in the URI to elastic + * </ul> + * </p> + */ + String link = null; + try { + link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), ae.getId()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); + return; + } + + String versionNumber = null; + boolean wasEntryDiscovered = false; + if (esGetTxn.getOperationResult().getResultCode() == 404) { + LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, ae.getEntityPrimaryKeyValue()); + } else if (esGetTxn.getOperationResult().getResultCode() == 200) { + wasEntryDiscovered = true; + try { + versionNumber = NodeUtils.extractFieldValueFromObject( + NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()), + "_version"); + } catch (IOException exc) { + String message = + "Error extracting version number from response, aborting aggregation entity sync of " + + ae.getEntityPrimaryKeyValue() + ". Error - " + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); + return; + } + } else { + /* + * Not being a 200 does not mean a failure. eg 201 is returned for created. TODO -> Should we + * return. + */ + LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE, + String.valueOf(esGetTxn.getOperationResult().getResultCode())); + return; + } + + try { + String jsonPayload = null; + if (wasEntryDiscovered) { + try { + ArrayList<JsonNode> sourceObject = new ArrayList<JsonNode>(); + NodeUtils.extractObjectsByKey( + NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()), + "_source", sourceObject); + + if (!sourceObject.isEmpty()) { + String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false); + MergableEntity me = mapper.readValue(responseSource, MergableEntity.class); + ObjectReader updater = mapper.readerForUpdating(me); + MergableEntity merged = updater.readValue(ae.getAsJson()); + jsonPayload = mapper.writeValueAsString(merged); + } + } catch (IOException exc) { + String message = + "Error extracting source value from response, aborting aggregation entity sync of " + + ae.getEntityPrimaryKeyValue() + ". 
Error - " + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); + return; + } + } else { + jsonPayload = ae.getAsJson(); + } + + if (wasEntryDiscovered) { + if (versionNumber != null && jsonPayload != null) { + + String requestPayload = + elasticSearchAdapter.buildBulkImportOperationRequest(schemaConfig.getIndexName(), + schemaConfig.getIndexDocType(), ae.getId(), versionNumber, jsonPayload); + + NetworkTransaction transactionTracker = new NetworkTransaction(); + transactionTracker.setEntityType(esGetTxn.getEntityType()); + transactionTracker.setDescriptor(esGetTxn.getDescriptor()); + transactionTracker.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync(new PerformElasticSearchUpdate(elasticSearchAdapter.getBulkUrl(), + requestPayload, elasticSearchAdapter, transactionTracker), esPutExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = "Aggregation entity sync UPDATE PUT error - " + + error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetTxn, ae); + } + }); + } + + } else { + if (link != null && jsonPayload != null) { + + NetworkTransaction updateElasticTxn = new NetworkTransaction(); + updateElasticTxn.setLink(link); + updateElasticTxn.setEntityType(esGetTxn.getEntityType()); + updateElasticTxn.setDescriptor(esGetTxn.getDescriptor()); + updateElasticTxn.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, elasticSearchAdapter), + esPutExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = + "Aggregation entity sync UPDATE PUT error - " + error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetTxn, ae); + } + }); + } + } + } catch (Exception exc) { + String message = "Exception caught during aggregation entity sync PUT operation. Message - " + + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } + } + + /** + * Should allow retry. + * + * @param id the id + * @return true, if successful + */ + private boolean shouldAllowRetry(String id) { + boolean isRetryAllowed = true; + if (retryLimitTracker.get(id) != null) { + Integer currentCount = retryLimitTracker.get(id); + if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { + isRetryAllowed = false; + String message = "Aggregation entity re-sync limit reached for " + id + + ", re-sync will no longer be attempted for this entity"; + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } else { + Integer newCount = new Integer(currentCount.intValue() + 1); + retryLimitTracker.put(id, newCount); + } + } else { + Integer firstRetryCount = new Integer(1); + retryLimitTracker.put(id, firstRetryCount); + } + + return isRetryAllowed; + } + + /** + * Process store document result. 
+ * + * @param esPutResult the es put result + * @param esGetResult the es get result + * @param ae the ae + */ + private void processStoreDocumentResult(NetworkTransaction esPutResult, + NetworkTransaction esGetResult, AggregationEntity ae) { + + OperationResult or = esPutResult.getOperationResult(); + + if (!or.wasSuccessful()) { + if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { + + if (shouldAllowRetry(ae.getId())) { + esWorkOnHand.incrementAndGet(); + + RetryAggregationEntitySyncContainer rsc = + new RetryAggregationEntitySyncContainer(esGetResult, ae); + retryQueue.push(rsc); + + String message = "Store document failed during aggregation entity synchronization" + + " due to version conflict. Entity will be re-synced."; + LOG.warn(AaiUiMsgs.ERROR_GENERIC, message); + } + } else { + String message = + "Store document failed during aggregation entity synchronization with result code " + + or.getResultCode() + " and result message " + or.getResult(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } + } + } + + /** + * Sync entity types. + */ + private void syncEntityTypes() { + + while (selflinks.peek() != null) { + + SelfLinkDescriptor linkDescriptor = selflinks.poll(); + aaiWorkOnHand.decrementAndGet(); + + OxmEntityDescriptor descriptor = null; + + if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { + + descriptor = oxmEntityLookup.getEntityDescriptors().get(linkDescriptor.getEntityType()); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); + // go to next element in iterator + continue; + } + + NetworkTransaction txn = new NetworkTransaction(); + txn.setDescriptor(descriptor); + txn.setLink(linkDescriptor.getSelfLink()); + txn.setOperationType(HttpMethod.GET); + txn.setEntityType(linkDescriptor.getEntityType()); + + aaiWorkOnHand.incrementAndGet(); + + supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiAdapter), aaiExecutor) + .whenComplete((result, error) -> { + + aaiWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage()); + } else { + if (result == null) { + LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK, + linkDescriptor.getSelfLink()); + } else { + updateActiveInventoryCounters(result); + fetchDocumentForUpsert(result); + } + } + }); + } + + } + + } + + /** + * Fetch document for upsert. + * + * @param txn the txn + */ + private void fetchDocumentForUpsert(NetworkTransaction txn) { + // modified + if (!txn.getOperationResult().wasSuccessful()) { + String message = "Self link failure. 
Result - " + txn.getOperationResult().getResult(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + return; + } + + try { + final String jsonResult = txn.getOperationResult().getResult(); + if (jsonResult != null && jsonResult.length() > 0) { + + AggregationEntity ae = new AggregationEntity(); + ae.setLink(ActiveInventoryAdapter.extractResourcePath(txn.getLink())); + populateAggregationEntityDocument(ae, jsonResult, txn.getDescriptor()); + ae.deriveFields(); + + String link = null; + try { + link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), ae.getId()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction n2 = new NetworkTransaction(); + n2.setLink(link); + n2.setEntityType(txn.getEntityType()); + n2.setDescriptor(txn.getDescriptor()); + n2.setOperationType(HttpMethod.GET); + + esWorkOnHand.incrementAndGet(); + + supplyAsync(new PerformElasticSearchRetrieval(n2, elasticSearchAdapter), esExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, ae); + } + }); + } + } + + } catch (JsonProcessingException exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "There was a JSON processing error fetching the elastic document for upsert. Error: " + + exc.getMessage()); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "There was an IO error fetching the elastic document for upsert. Error: " + exc.getMessage()); + } + } + + + /** + * Populate aggregation entity document. + * + * @param doc the doc + * @param result the result + * @param resultDescriptor the result descriptor + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. + */ + protected void populateAggregationEntityDocument(AggregationEntity doc, String result, + OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException { + doc.setEntityType(resultDescriptor.getEntityName()); + JsonNode entityNode = mapper.readTree(result); + Map<String, Object> map = mapper.convertValue(entityNode, Map.class); + doc.copyAttributeKeyValuePair(map); + } + + /** + * Process entity type self links. + * + * @param operationResult the operation result + */ + private void processEntityTypeSelfLinks(OperationResult operationResult) { + + JsonNode rootNode = null; + + if ( operationResult == null ) { + return; + } + + final String jsonResult = operationResult.getResult(); + + if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { + + try { + rootNode = mapper.readTree(jsonResult); + } catch (IOException exc) { + String message = + "Could not deserialize JSON (representing operation result) as node tree. " + + "Operation result = " + jsonResult + ". 
" + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message); + } + + JsonNode resultData = rootNode.get("result-data"); + ArrayNode resultDataArrayNode = null; + + if (resultData.isArray()) { + resultDataArrayNode = (ArrayNode) resultData; + + Iterator<JsonNode> elementIterator = resultDataArrayNode.elements(); + JsonNode element = null; + + while (elementIterator.hasNext()) { + element = elementIterator.next(); + + final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); + final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); + + OxmEntityDescriptor descriptor = null; + + if (resourceType != null && resourceLink != null) { + + descriptor = oxmEntityLookup.getEntityDescriptors().get(resourceType); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); + // go to next element in iterator + continue; + } + + selflinks.add(new SelfLinkDescriptor(resourceLink, SynchronizerConstants.NODES_ONLY_MODIFIER, resourceType)); + + + } + } + } + } + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() + */ + @Override + public OperationState doSync() { + this.syncDurationInMs = -1; + syncStartedTimeStampInMs = System.currentTimeMillis(); + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "AggregationSynchronizer", "", "Sync", ""); + + return collectAllTheWork(); + } + + @Override + public SynchronizerState getState() { + + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) + */ + @Override + public String getStatReport(boolean showFinalReport) { + syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; + return getStatReport(syncDurationInMs, showFinalReport); + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() + */ + @Override + public void shutdown() { + this.shutdownExecutors(); + } + + @Override + protected boolean isSyncDone() { + + int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + ", isSyncDone(), totalWorkOnHand = " + + totalWorkOnHand + " all work enumerated = " + allWorkEnumerated); + } + + if (totalWorkOnHand > 0 || !allWorkEnumerated) { + return false; + } + + this.syncInProgress = false; + + return true; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.AbstractEntitySynchronizer#clearCache() + */ + @Override + public void clearCache() { + + if (syncInProgress) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Autosuggestion Entity Summarizer in progress, request to clear cache ignored"); + return; + } + + super.clearCache(); + this.resetCounters(); + if (entityCounters != null) { + entityCounters.clear(); + } + + allWorkEnumerated = false; + + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySummarizer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySummarizer.java new file mode 100644 index 0000000..9063e92 --- /dev/null +++ 
b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySummarizer.java @@ -0,0 +1,384 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.aggregation.sync; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.io.IOException; +import java.sql.Timestamp; +import java.text.SimpleDateFormat; +import java.util.Collection; +import java.util.EnumSet; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; + +import javax.json.Json; +import javax.ws.rs.core.MediaType; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.SearchableEntityLookup; +import org.onap.aai.sparky.config.oxm.SearchableOxmEntityDescriptor; +import org.onap.aai.sparky.dal.rest.HttpMethod; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.AbstractEntitySynchronizer; +import org.onap.aai.sparky.sync.IndexSynchronizer; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.onap.aai.sparky.util.NodeUtils; +import org.slf4j.MDC; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; + +/** + * The Class HistoricalEntitySummarizer. + */ +public class HistoricalEntitySummarizer extends AbstractEntitySynchronizer + implements IndexSynchronizer { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(HistoricalEntitySummarizer.class); + private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ"; + + private boolean allWorkEnumerated; + private ConcurrentHashMap<String, AtomicInteger> entityCounters; + private boolean syncInProgress; + private Map<String, String> contextMap; + private ElasticSearchSchemaConfig schemaConfig; + private SearchableEntityLookup searchableEntityLookup; + + /** + * Instantiates a new historical entity summarizer. 
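   *
   * <p>Each sync pass posts one small counter document per entity type to the
   * historical index; the entity type and count below are made-up examples, and the
   * timestamp follows INSERTION_DATE_TIME_FORMAT ("yyyyMMdd'T'HHmmssZ"):
   *
   * <pre>{@code
   * {"count":42,"entityType":"generic-vnf","timestamp":"20180322T093425-0400"}
   * }</pre>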
+ * + * @param indexName the index name + * @throws Exception the exception + */ + public HistoricalEntitySummarizer(ElasticSearchSchemaConfig schemaConfig, int internalSyncWorkers, + int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig, + NetworkStatisticsConfig esStatConfig, SearchableEntityLookup searchableEntityLookup) + throws Exception { + super(LOG, "HES", internalSyncWorkers, aaiWorkers, esWorkers, schemaConfig.getIndexName(), aaiStatConfig, esStatConfig); + + this.schemaConfig = schemaConfig; + this.allWorkEnumerated = false; + this.entityCounters = new ConcurrentHashMap<String, AtomicInteger>(); + this.synchronizerName = "Historical Entity Summarizer"; + this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS); + this.syncInProgress = false; + this.contextMap = MDC.getCopyOfContextMap(); + this.syncDurationInMs = -1; + this.searchableEntityLookup = searchableEntityLookup; + } + + /** + * Collect all the work. + * + * @return the operation state + */ + private OperationState collectAllTheWork() { + + Map<String, SearchableOxmEntityDescriptor> descriptorMap = + searchableEntityLookup.getSearchableEntityDescriptors(); + + if (descriptorMap.isEmpty()) { + LOG.error(AaiUiMsgs.OXM_FAILED_RETRIEVAL, "historical entities"); + + return OperationState.ERROR; + } + + Collection<String> entityTypes = descriptorMap.keySet(); + + AtomicInteger asyncWoH = new AtomicInteger(0); + + asyncWoH.set(entityTypes.size()); + + try { + for (String entityType : entityTypes) { + + supplyAsync(new Supplier<Void>() { + + @Override + public Void get() { + MDC.setContextMap(contextMap); + try { + OperationResult typeLinksResult = + aaiAdapter.getSelfLinksByEntityType(entityType); + updateActiveInventoryCounters(HttpMethod.GET, entityType, typeLinksResult); + processEntityTypeSelfLinks(entityType, typeLinksResult); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, exc.getMessage()); + + } + + return null; + } + + }, aaiExecutor).whenComplete((result, error) -> { + + asyncWoH.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.HISTORICAL_COLLECT_ERROR, error.getMessage()); + } + + }); + + } + + + while (asyncWoH.get() > 0) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + " summarizer waiting for all the links to be processed."); + } + + Thread.sleep(250); + } + + esWorkOnHand.set(entityCounters.size()); + + // start doing the real work + allWorkEnumerated = true; + + insertEntityTypeCounters(); + + if (LOG.isDebugEnabled()) { + + StringBuilder sb = new StringBuilder(128); + + sb.append("\n\nHistorical Entity Counters:"); + + for (Entry<String, AtomicInteger> entry : entityCounters.entrySet()) { + sb.append("\n").append(entry.getKey()).append(" = ").append(entry.getValue().get()); + } + + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, sb.toString()); + + } + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.HISTORICAL_COLLECT_ERROR, exc.getMessage()); + + + esWorkOnHand.set(0); + allWorkEnumerated = true; + + return OperationState.ERROR; + } + + return OperationState.OK; + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() + */ + @Override + public OperationState doSync() { + this.syncDurationInMs = -1; + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "HistoricalEntitySynchronizer", "", "Sync", ""); + + if (syncInProgress) { + LOG.info(AaiUiMsgs.HISTORICAL_SYNC_PENDING); + return OperationState.PENDING; + } + + clearCache(); + + 
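    // Cache and counters were just cleared above; mark this pass as in-flight and
    // time-stamp it before enumerating the searchable entity types.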
syncInProgress = true; + this.syncStartedTimeStampInMs = System.currentTimeMillis(); + allWorkEnumerated = false; + + return collectAllTheWork(); + } + + /** + * Process entity type self links. + * + * @param entityType the entity type + * @param operationResult the operation result + */ + private void processEntityTypeSelfLinks(String entityType, OperationResult operationResult) { + + JsonNode rootNode = null; + + final String jsonResult = operationResult.getResult(); + + if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { + + try { + rootNode = mapper.readTree(jsonResult); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc.getMessage()); + return; + } + + JsonNode resultData = rootNode.get("result-data"); + ArrayNode resultDataArrayNode = null; + + if (resultData != null && resultData.isArray()) { + resultDataArrayNode = (ArrayNode) resultData; + entityCounters.put(entityType, new AtomicInteger(resultDataArrayNode.size())); + } + } + + } + + /** + * Insert entity type counters. + */ + private void insertEntityTypeCounters() { + + if (esWorkOnHand.get() <= 0) { + return; + } + + SimpleDateFormat dateFormat = new SimpleDateFormat(INSERTION_DATE_TIME_FORMAT); + Timestamp timestamp = new Timestamp(System.currentTimeMillis()); + String currentFormattedTimeStamp = dateFormat.format(timestamp); + + Set<Entry<String, AtomicInteger>> entityCounterEntries = entityCounters.entrySet(); + + for (Entry<String, AtomicInteger> entityCounterEntry : entityCounterEntries) { + + supplyAsync(new Supplier<Void>() { + + @Override + public Void get() { + MDC.setContextMap(contextMap); + String jsonString = Json.createObjectBuilder().add( + "count", entityCounterEntry.getValue().get()) + .add("entityType", entityCounterEntry.getKey()) + .add("timestamp", currentFormattedTimeStamp).build().toString(); + + String link = null; + try { + link = elasticSearchAdapter.buildElasticSearchPostUrl(indexName); + OperationResult or = elasticSearchAdapter.doPost(link, jsonString, MediaType.APPLICATION_JSON_TYPE); + updateElasticSearchCounters(HttpMethod.POST, entityCounterEntry.getKey(), or); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_STORE_FAILURE, exc.getMessage() ); + } + + return null; + } + + }, esExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + }); + + } + + while (esWorkOnHand.get() > 0) { + + try { + Thread.sleep(500); + } catch (InterruptedException exc) { + LOG.error(AaiUiMsgs.INTERRUPTED, "historical Entities", exc.getMessage()); + } + } + + } + + @Override + public SynchronizerState getState() { + + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) + */ + @Override + public String getStatReport(boolean showFinalReport) { + syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; + return this.getStatReport(syncDurationInMs, showFinalReport); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() + */ + @Override + public void shutdown() { + this.shutdownExecutors(); + } + + @Override + protected boolean isSyncDone() { + + int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC,indexName + ", isSyncDone(), totalWorkOnHand = " + totalWorkOnHand + + " all work enumerated = " + allWorkEnumerated); + } + + 
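    // Outstanding AAI/ES work (or incomplete enumeration) means the summarizer is
    // still busy; otherwise drop the in-progress flag and report the sync as done.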
if (totalWorkOnHand > 0 || !allWorkEnumerated) { + return false; + } + + this.syncInProgress = false; + + return true; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.AbstractEntitySynchronizer#clearCache() + */ + @Override + public void clearCache() { + + if (syncInProgress) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "Historical Entity Summarizer in progress, request to clear cache ignored"); + return; + } + + super.clearCache(); + this.resetCounters(); + if (entityCounters != null) { + entityCounters.clear(); + } + + allWorkEnumerated = false; + + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySyncController.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySyncController.java new file mode 100644 index 0000000..eb42489 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/aggregation/sync/HistoricalEntitySyncController.java @@ -0,0 +1,94 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.aggregation.sync; + +import org.onap.aai.sparky.config.oxm.SearchableEntityLookup; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory; +import org.onap.aai.sparky.sync.IndexIntegrityValidator; +import org.onap.aai.sparky.sync.SyncControllerImpl; +import org.onap.aai.sparky.sync.SyncControllerRegistrar; +import org.onap.aai.sparky.sync.SyncControllerRegistry; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.config.SyncControllerConfig; + +public class HistoricalEntitySyncController extends SyncControllerImpl + implements SyncControllerRegistrar { + + private SyncControllerRegistry syncControllerRegistry; + + public HistoricalEntitySyncController(SyncControllerConfig syncControllerConfig, + ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter, + ElasticSearchSchemaConfig schemaConfig, ElasticSearchEndpointConfig endpointConfig, + int syncFrequencyInMinutes, NetworkStatisticsConfig aaiStatConfig, + NetworkStatisticsConfig esStatConfig, SearchableEntityLookup searchableEntityLookup, + ElasticSearchSchemaFactory elasticSearchSchemaFactory) throws Exception { + super(syncControllerConfig); + + // final String controllerName = "Historical Entity Count Synchronizer"; + + long taskFrequencyInMs = syncFrequencyInMinutes * 60 * 1000; + + setDelayInMs(taskFrequencyInMs); + setSyncFrequencyInMs(taskFrequencyInMs); + + IndexIntegrityValidator entityCounterHistoryValidator = new IndexIntegrityValidator(esAdapter, + schemaConfig, endpointConfig, elasticSearchSchemaFactory.getIndexSchema(schemaConfig)); + + registerIndexValidator(entityCounterHistoryValidator); + + HistoricalEntitySummarizer historicalSummarizer = new HistoricalEntitySummarizer(schemaConfig, + syncControllerConfig.getNumInternalSyncWorkers(), + syncControllerConfig.getNumSyncActiveInventoryWorkers(), + syncControllerConfig.getNumSyncElasticWorkers(),aaiStatConfig, esStatConfig,searchableEntityLookup); + + historicalSummarizer.setAaiAdapter(aaiAdapter); + historicalSummarizer.setElasticSearchAdapter(esAdapter); + + registerEntitySynchronizer(historicalSummarizer); + + } + + public SyncControllerRegistry getSyncControllerRegistry() { + return syncControllerRegistry; + } + + public void setSyncControllerRegistry(SyncControllerRegistry syncControllerRegistry) { + this.syncControllerRegistry = syncControllerRegistry; + } + + @Override + public void registerController() { + if ( syncControllerRegistry != null ) { + if ( syncControllerConfig.isEnabled()) { + syncControllerRegistry.registerSyncController(this); + } + } + + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/AbstractStatistics.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/AbstractStatistics.java new file mode 100644 index 0000000..8197398 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/AbstractStatistics.java @@ -0,0 +1,178 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.analytics; + +import java.util.HashMap; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * The Class AbstractStatistics. + */ +public class AbstractStatistics implements ComponentStatistics { + + private HashMap<String, AtomicInteger> namedCounters; + private HashMap<String, HistogramSampler> namedHistograms; + + /** + * Instantiates a new abstract statistics. + */ + protected AbstractStatistics() { + namedCounters = new HashMap<String, AtomicInteger>(); + namedHistograms = new HashMap<String, HistogramSampler>(); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.analytics.ComponentStatistics#addCounter(java.lang.String) + */ + /* + * sync-lock the creation of counters during initialization, but run time should not use lock + * synchronization, only thread safe types + * + */ + @Override + public synchronized void addCounter(String key) { + + AtomicInteger counter = namedCounters.get(key); + + if (counter == null) { + counter = new AtomicInteger(0); + namedCounters.put(key, counter); + } + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.analytics.ComponentStatistics#pegCounter(java.lang.String) + */ + @Override + public void pegCounter(String key) { + + AtomicInteger counter = namedCounters.get(key); + + if (counter != null) { + counter.incrementAndGet(); + } + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.analytics.ComponentStatistics#incrementCounter(java.lang.String, int) + */ + @Override + public void incrementCounter(String key, int value) { + + AtomicInteger counter = namedCounters.get(key); + + if (counter != null) { + counter.addAndGet(value); + } + + } + + + /* (non-Javadoc) + * @see org.openecomp.sparky.analytics.ComponentStatistics#addHistogram(java.lang.String, java.lang.String, long, int, int) + */ + @Override + public synchronized void addHistogram(String key, String histName, long maxYValue, int numBins, + int numDecimalPoints) { + HistogramSampler histSampler = namedHistograms.get(key); + + if (histSampler == null) { + histSampler = new HistogramSampler(histName, maxYValue, numBins, numDecimalPoints); + namedHistograms.put(key, histSampler); + } + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.analytics.ComponentStatistics#updateHistogram(java.lang.String, long) + */ + @Override + public void updateHistogram(String key, long value) { + HistogramSampler histSampler = namedHistograms.get(key); + + if (histSampler != null) { + histSampler.track(value); + } + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.analytics.ComponentStatistics#reset() + */ + @Override + public void reset() { + + for (HistogramSampler h : namedHistograms.values()) { + h.clear(); + } + + for (AtomicInteger c : 
namedCounters.values()) { + c.set(0); + } + + } + + /** + * Gets the counter value. + * + * @param key the key + * @return the counter value + */ + protected int getCounterValue(String key) { + + AtomicInteger counter = namedCounters.get(key); + + if (counter == null) { + return -1; + } + + return counter.get(); + + } + + /** + * Gets the histogram stats. + * + * @param key the key + * @param verboseEnabled the verbose enabled + * @param indentPadding the indent padding + * @return the histogram stats + */ + protected String getHistogramStats(String key, boolean verboseEnabled, String indentPadding) { + + HistogramSampler histSampler = namedHistograms.get(key); + + if (histSampler == null) { + return null; + } + + return histSampler.getStats(verboseEnabled, indentPadding); + + } + + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/AveragingRingBuffer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/AveragingRingBuffer.java new file mode 100644 index 0000000..fd5f277 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/AveragingRingBuffer.java @@ -0,0 +1,121 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.analytics; + +import java.util.concurrent.atomic.AtomicInteger; + +/** + * TODO: Fill in description. + * + * @author davea + */ +public class AveragingRingBuffer { + + private int numElements; + + private long[] data; + + private AtomicInteger index; + + private long average; + + private boolean initialAverageCalculated; + + /** + * Instantiates a new averaging ring buffer. + * + * @param size the size + */ + public AveragingRingBuffer(int size) { + + if (size == 0) { + throw new IllegalArgumentException("Size must be greater than zero"); + } + + this.initialAverageCalculated = false; + this.numElements = size; + this.data = new long[this.numElements]; + this.index = new AtomicInteger(-1); + } + + /** + * Calculate average. 
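+ * Computes the arithmetic mean of the samples stored in data[0..maxArrayIndex] (inclusive)
+ * and caches the result in the average field.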
+ * + * @param maxArrayIndex the max array index + */ + private void calculateAverage(int maxArrayIndex) { + + long sum = 0; + + for (int i = 0; i <= maxArrayIndex; i++) { + sum += data[i]; + } + + average = (sum / (maxArrayIndex + 1)); + + } + + public long getAvg() { + + if (!initialAverageCalculated) { + /* + * until the index rolls once we will calculate the average from the data that has been added + * to the array, not including the zero elements + */ + if (index.get() < 0) { + calculateAverage(0); + } else { + calculateAverage(index.get()); + } + + } + + return average; + } + + /** + * Adds the sample. + * + * @param value the value + */ + public synchronized void addSample(long value) { + + index.incrementAndGet(); + + data[index.get()] = value; + + if (index.get() == (numElements - 1)) { + calculateAverage(numElements - 1); + + if (!initialAverageCalculated) { + initialAverageCalculated = true; + } + + index.set(-1); + } + + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/ComponentStatistics.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/ComponentStatistics.java new file mode 100644 index 0000000..ef78f9e --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/ComponentStatistics.java @@ -0,0 +1,80 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.analytics; + + +/** + * The Interface ComponentStatistics. + */ +public interface ComponentStatistics { + + /** + * Adds the counter. + * + * @param key the key + */ + public void addCounter(String key); + + /** + * Peg counter. + * + * @param key the key + */ + public void pegCounter(String key); + + /** + * Increment counter. + * + * @param key the key + * @param value the value + */ + public void incrementCounter(String key, int value); + + /** + * Adds the histogram. + * + * @param key the key + * @param name the name + * @param maxYValue the max Y value + * @param numBins the num bins + * @param numDecimalPoints the num decimal points + */ + public void addHistogram(String key, String name, long maxYValue, int numBins, + int numDecimalPoints); + + /** + * Update histogram. + * + * @param key the key + * @param value the value + */ + public void updateHistogram(String key, long value); + + /** + * Reset. 
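+ * Resets every registered counter and histogram back to its initial state.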
+ */ + public void reset(); + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/HistogramSampler.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/HistogramSampler.java new file mode 100644 index 0000000..55fb9d8 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/HistogramSampler.java @@ -0,0 +1,286 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.analytics; + +/** + * A class that models a histogram for reporting and tracking long values with variable steps, bins, + * and floating point accuracy. + * + * @author davea. + */ +public final class HistogramSampler { + + private String label; + + private long binMaxValue; + + private int numBins; + + private double stepSize; + + private long sampleValueTotal; + + private long minValue = -1; + + private long maxValue = 0; + + private long numSamples = 0; + + private long decimalPointAccuracy = 0; + + private static String FORMAT_FLOAT_TEMPLATE = "%%.%df"; + + private String floatFormatStr; + + private long[] histogramBins; + + /** + * Instantiates a new histogram sampler. + * + * @param label the label + * @param maxValue the max value + * @param numBins the num bins + * @param decimalPointAccuracy the decimal point accuracy + */ + public HistogramSampler(String label, long maxValue, int numBins, int decimalPointAccuracy) { + this.label = label; + this.binMaxValue = maxValue; + this.numBins = numBins; + this.stepSize = ((double) binMaxValue / (double) numBins); + this.decimalPointAccuracy = decimalPointAccuracy; + this.floatFormatStr = String.format(FORMAT_FLOAT_TEMPLATE, this.decimalPointAccuracy); + + /* + * [numBins + 1] => last bin is catch-all for outliers + */ + + initializeHistogramBins(numBins + 1); + + } + + /** + * Initialize histogram bins. + * + * @param numBins the num bins + */ + private void initializeHistogramBins(int numBins) { + + histogramBins = new long[numBins]; + int counter = 0; + while (counter < numBins) { + histogramBins[counter] = 0; + counter++; + } + + } + + /* + * Is it really necessary to synchronize the collection, or should we simply switch the underlying + * data type to an AtomicLong + */ + + /** + * Track. 
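+ * Records one sample: negative values are ignored; otherwise the running min, max and total
+ * are updated and the bin containing the value is incremented (values at or beyond the
+ * configured maximum are counted in an outlier bin rather than dropped).
+ *
+ * <p>Illustrative usage only; the label and values below are examples, not configuration:
+ * <pre>{@code
+ * HistogramSampler latencyHist = new HistogramSampler("aai-get-latency-ms", 5000, 10, 2);
+ * latencyHist.track(137);
+ * String report = latencyHist.getStats(true, "  ");
+ * }</pre>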
+ * + * @param value the value + */ + public synchronized void track(long value) { + + if (value < 0) { + return; + } + + sampleValueTotal += value; + numSamples++; + + if (minValue == -1) { + minValue = value; + } + + if (value < minValue) { + minValue = value; + } + + if (value > maxValue) { + maxValue = value; + } + + /* + * One step bin determination + */ + + if (value < (numBins * stepSize)) { + + int index = (int) (value / stepSize); + histogramBins[index]++; + + } else { + // peg the metric in the outlier bin + histogramBins[numBins - 1]++; + } + + } + + /** + * Clear. + */ + public void clear() { + + int counter = 0; + while (counter < numBins) { + histogramBins[counter] = 0; + counter++; + } + + minValue = -1; + maxValue = 0; + numSamples = 0; + sampleValueTotal = 0; + + } + + /** + * Re initialize bins. + * + * @param label the label + * @param numBins the num bins + * @param maxValue the max value + * @param decimalPointAccuracy the decimal point accuracy + */ + public void reInitializeBins(String label, int numBins, long maxValue, int decimalPointAccuracy) { + this.label = label; + this.decimalPointAccuracy = decimalPointAccuracy; + this.floatFormatStr = String.format(FORMAT_FLOAT_TEMPLATE, this.decimalPointAccuracy); + this.numBins = numBins; + this.minValue = -1; + this.maxValue = 0; + initializeHistogramBins(numBins); + this.stepSize = (maxValue / numBins); + clear(); + } + + public long getNumberOfSamples() { + return numSamples; + } + + public long getTotalValueSum() { + return sampleValueTotal; + } + + /** + * Gets the stats. + * + * @param formatted the formatted + * @param indentPadding the indent padding + * @return the stats + */ + public String getStats(boolean formatted, String indentPadding) { + + StringBuilder sb = new StringBuilder(128); + + + if (!formatted) { + // generate CSV in the following format + + /* + * label,minValue,maxValue,avgValue,numSamples,stepSize,numSteps,stepCounters + */ + sb.append(indentPadding); + sb.append(label).append(","); + sb.append(minValue).append(","); + sb.append(maxValue).append(","); + if (numSamples == 0) { + sb.append(0).append(","); + } else { + sb.append((sampleValueTotal / numSamples)).append(","); + } + sb.append(numSamples).append(","); + sb.append(numBins).append(","); + sb.append(String.format(floatFormatStr, stepSize)); + + int counter = 0; + while (counter < numBins) { + + if (counter != (numBins)) { + sb.append(","); + } + + sb.append(histogramBins[counter]); + + counter++; + + } + + return sb.toString(); + + } + + sb.append("\n"); + sb.append(indentPadding).append("Label = ").append(label).append("\n"); + sb.append(indentPadding).append("Min = ").append(minValue).append("\n"); + sb.append(indentPadding).append("Max = ").append(maxValue).append("\n"); + sb.append(indentPadding).append("numSamples = ").append(numSamples).append("\n"); + + if (numSamples == 0) { + sb.append(indentPadding).append("Avg = ").append(0).append("\n"); + } else { + sb.append(indentPadding).append("Avg = ").append((sampleValueTotal / numSamples)) + .append("\n"); + } + + sb.append(indentPadding).append("StepSize = ").append(String.format(floatFormatStr, stepSize)) + .append("\n"); + + sb.append(indentPadding).append("Sample Histogram:").append("\n"); + + int counter = 0; + while (counter < numBins) { + + if (counter == (numBins - 1)) { + // outlier bin + double leftBound = (stepSize * counter); + sb.append(indentPadding).append("\t") + .append(" x >= " + String.format(floatFormatStr, leftBound) + " : " + + histogramBins[counter]) + 
.append("\n"); + + } else { + double leftBound = (stepSize * counter); + double rightBound = ((stepSize) * (counter + 1)); + sb.append(indentPadding).append("\t") + .append((String.format(floatFormatStr, leftBound) + " < x < " + + String.format(floatFormatStr, rightBound) + " : " + histogramBins[counter])) + .append("\n"); + } + + counter++; + + } + + return sb.toString(); + + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/HistoricalCounter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/HistoricalCounter.java new file mode 100644 index 0000000..1a534e3 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/analytics/HistoricalCounter.java @@ -0,0 +1,177 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.analytics; + +/** + * A simple class to model a historical counter. A set of values will be tracked and basic + * statistics will be calculated in real time (n, min, max, avg). + * + * @author davea + */ +public class HistoricalCounter { + + private double min; + + private double max; + + private double totalOfSamples; + + private long numSamples; + + private double value; + + private boolean maintainSingleValue; + + /** + * Instantiates a new historical counter. + * + * @param trackSingleValue the track single value + */ + public HistoricalCounter(boolean trackSingleValue) { + min = -1; + max = 0; + totalOfSamples = 0; + value = 0.0; + numSamples = 0; + this.maintainSingleValue = trackSingleValue; + } + + public boolean isSingleValue() { + return maintainSingleValue; + } + + /** + * Update. 
+ * + * @param value the value + */ + public synchronized void update(double value) { + + if (value < 0) { + return; + } + + if (maintainSingleValue) { + + this.value = value; + + } else { + + if (min == -1) { + min = value; + } + + if (value < min) { + min = value; + } + + if (value > max) { + max = value; + } + + totalOfSamples += value; + numSamples++; + } + } + + public double getValue() { + return value; + } + + public double getMin() { + return min; + } + + public double getMax() { + return max; + } + + public long getNumSamples() { + return numSamples; + } + + public double getAvg() { + if (numSamples == 0) { + return 0; + } + + return (totalOfSamples / numSamples); + } + public void setMin(double min) { + this.min = min; + } + + public void setMax(double max) { + this.max = max; + } + + public double getTotalOfSamples() { + return totalOfSamples; + } + + public void setTotalOfSamples(double totalOfSamples) { + this.totalOfSamples = totalOfSamples; + } + + public void setNumSamples(long numSamples) { + this.numSamples = numSamples; + } + + public void setMaintainSingleValue(boolean maintainSingleValue) { + this.maintainSingleValue = maintainSingleValue; + } + + + /** + * Reset. + */ + public synchronized void reset() { + min = -1; + max = 0; + numSamples = 0; + totalOfSamples = 0; + value = 0.0; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + StringBuilder sb = new StringBuilder(32); + + if (maintainSingleValue) { + sb.append("[ Val=").append(value).append(" ]"); + } else { + sb.append("[ NumSamples=").append(numSamples).append(","); + sb.append(" Min=").append(min).append(","); + sb.append(" Max=").append(max).append(","); + sb.append(" Avg=").append(getAvg()).append(" ]"); + } + + return sb.toString(); + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutoSuggestionSyncController.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutoSuggestionSyncController.java new file mode 100644 index 0000000..05ce775 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutoSuggestionSyncController.java @@ -0,0 +1,105 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.autosuggestion.sync; + +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.SuggestionEntityLookup; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.search.filters.config.FiltersConfig; +import org.onap.aai.sparky.sync.ElasticSearchIndexCleaner; +import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory; +import org.onap.aai.sparky.sync.IndexCleaner; +import org.onap.aai.sparky.sync.IndexIntegrityValidator; +import org.onap.aai.sparky.sync.SyncControllerImpl; +import org.onap.aai.sparky.sync.SyncControllerRegistrar; +import org.onap.aai.sparky.sync.SyncControllerRegistry; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.config.SyncControllerConfig; +import org.springframework.beans.factory.annotation.Autowired; + +public class AutoSuggestionSyncController extends SyncControllerImpl implements SyncControllerRegistrar { + + private SyncControllerRegistry syncControllerRegistry; + + public AutoSuggestionSyncController(SyncControllerConfig syncControllerConfig, + ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter, + ElasticSearchSchemaConfig schemaConfig, ElasticSearchEndpointConfig endpointConfig, + NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig, + OxmEntityLookup oxmEntityLookup, SuggestionEntityLookup suggestionEntityLookup, + FiltersConfig filtersConfig, + ElasticSearchSchemaFactory elasticSearchSchemaFactory) throws Exception { + super(syncControllerConfig); + + // final String controllerName = "Auto Suggestion Synchronizer"; + + IndexIntegrityValidator autoSuggestionIndexValidator = new IndexIntegrityValidator(esAdapter, + schemaConfig, endpointConfig, elasticSearchSchemaFactory.getIndexSchema(schemaConfig)); + + registerIndexValidator(autoSuggestionIndexValidator); + + AutosuggestionSynchronizer suggestionSynchronizer = new AutosuggestionSynchronizer(schemaConfig, + syncControllerConfig.getNumInternalSyncWorkers(), + syncControllerConfig.getNumSyncActiveInventoryWorkers(), + syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig, + oxmEntityLookup, suggestionEntityLookup, filtersConfig); + + suggestionSynchronizer.setAaiAdapter(aaiAdapter); + suggestionSynchronizer.setElasticSearchAdapter(esAdapter); + + registerEntitySynchronizer(suggestionSynchronizer); + + IndexCleaner autosuggestIndexCleaner = + new ElasticSearchIndexCleaner(esAdapter, endpointConfig, schemaConfig); + + registerIndexCleaner(autosuggestIndexCleaner); + + } + + public SyncControllerRegistry getSyncControllerRegistry() { + return syncControllerRegistry; + } + + + + public void setSyncControllerRegistry(SyncControllerRegistry syncControllerRegistry) { + this.syncControllerRegistry = syncControllerRegistry; + } + + + + @Override + public void registerController() { + + if ( syncControllerRegistry != null ) { + if ( syncControllerConfig.isEnabled()) { + syncControllerRegistry.registerSyncController(this); + } + } + + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutosuggestionSynchronizer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutosuggestionSynchronizer.java new file mode 100644 index 0000000..baffa54 --- /dev/null +++ 
b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/AutosuggestionSynchronizer.java @@ -0,0 +1,776 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.autosuggestion.sync; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Deque; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.SuggestionEntityDescriptor; +import org.onap.aai.sparky.config.oxm.SuggestionEntityLookup; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.dal.rest.HttpMethod; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.search.filters.config.FiltersConfig; +import org.onap.aai.sparky.sync.AbstractEntitySynchronizer; +import org.onap.aai.sparky.sync.IndexSynchronizer; +import org.onap.aai.sparky.sync.SynchronizerConstants; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.entity.SelfLinkDescriptor; +import org.onap.aai.sparky.sync.entity.SuggestionSearchEntity; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.onap.aai.sparky.sync.task.PerformActiveInventoryRetrieval; +import org.onap.aai.sparky.sync.task.PerformElasticSearchPut; +import org.onap.aai.sparky.sync.task.PerformElasticSearchRetrieval; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.util.SuggestionsPermutation; +import org.slf4j.MDC; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; 
+import com.fasterxml.jackson.databind.node.ArrayNode; + +/** + * The Class AutosuggestionSynchronizer. + */ +public class AutosuggestionSynchronizer extends AbstractEntitySynchronizer + implements IndexSynchronizer { + + private class RetrySuggestionEntitySyncContainer { + NetworkTransaction txn; + SuggestionSearchEntity ssec; + + /** + * Instantiates a new RetrySuggestionEntitySyncContainer. + * + * @param txn the txn + * @param icer the icer + */ + public RetrySuggestionEntitySyncContainer(NetworkTransaction txn, SuggestionSearchEntity icer) { + this.txn = txn; + this.ssec = icer; + } + + public NetworkTransaction getNetworkTransaction() { + return txn; + } + + public SuggestionSearchEntity getSuggestionSearchEntity() { + return ssec; + } + } + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(AutosuggestionSynchronizer.class); + private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ"; + + private boolean allWorkEnumerated; + private Deque<SelfLinkDescriptor> selflinks; + private ConcurrentHashMap<String, AtomicInteger> entityCounters; + private boolean syncInProgress; + private Map<String, String> contextMap; + protected ExecutorService esPutExecutor; + private Deque<RetrySuggestionEntitySyncContainer> retryQueue; + private Map<String, Integer> retryLimitTracker; + private OxmEntityLookup oxmEntityLookup; + private SuggestionEntityLookup suggestionEntityLookup; + private FiltersConfig filtersConfig; + + /** + * Instantiates a new historical entity summarizer. + * + * @param indexName the index name + * @throws Exception the exception + */ + public AutosuggestionSynchronizer(ElasticSearchSchemaConfig schemaConfig, int internalSyncWorkers, + int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig, + NetworkStatisticsConfig esStatConfig, OxmEntityLookup oxmEntityLookup, + SuggestionEntityLookup suggestionEntityLookup, FiltersConfig filtersConfig) throws Exception { + + super(LOG, "ASES-" + schemaConfig.getIndexName().toUpperCase(), internalSyncWorkers, aaiWorkers, + esWorkers, schemaConfig.getIndexName(), aaiStatConfig, esStatConfig); + + this.oxmEntityLookup = oxmEntityLookup; + this.suggestionEntityLookup = suggestionEntityLookup; + this.allWorkEnumerated = false; + this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>(); + this.entityCounters = new ConcurrentHashMap<String, AtomicInteger>(); + this.synchronizerName = "Autosuggestion Entity Synchronizer"; + this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS); + this.syncInProgress = false; + this.contextMap = MDC.getCopyOfContextMap(); + this.esPutExecutor = NodeUtils.createNamedExecutor("SUES-ES-PUT", 5, LOG); + this.retryQueue = new ConcurrentLinkedDeque<RetrySuggestionEntitySyncContainer>(); + this.retryLimitTracker = new ConcurrentHashMap<String, Integer>(); + this.syncDurationInMs = -1; + this.filtersConfig = filtersConfig; + } + + /** + * Collect all the work. 
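+ * Fans out one asynchronous task per suggestible entity type to enumerate its self-links,
+ * waits for that enumeration to drain, then synchronizes the collected links and replays the
+ * retry queue until no AAI or Elasticsearch work remains outstanding.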
+ * + * @return the operation state + */ + private OperationState collectAllTheWork() { + final Map<String, String> contextMap = MDC.getCopyOfContextMap(); + Map<String, SuggestionEntityDescriptor> descriptorMap = + suggestionEntityLookup.getSuggestionSearchEntityDescriptors(); + + if (descriptorMap.isEmpty()) { + LOG.error(AaiUiMsgs.ERROR_LOADING_OXM_SUGGESTIBLE_ENTITIES); + LOG.info(AaiUiMsgs.ERROR_LOADING_OXM_SUGGESTIBLE_ENTITIES); + return OperationState.ERROR; + } + + Collection<String> syncTypes = descriptorMap.keySet(); + + try { + + /* + * launch a parallel async thread to process the documents for each entity-type (to max the of + * the configured executor anyway) + */ + + aaiWorkOnHand.set(syncTypes.size()); + + for (String key : syncTypes) { + + supplyAsync(new Supplier<Void>() { + + @Override + public Void get() { + MDC.setContextMap(contextMap); + OperationResult typeLinksResult = null; + try { + typeLinksResult = aaiAdapter.getSelfLinksByEntityType(key); + aaiWorkOnHand.decrementAndGet(); + processEntityTypeSelfLinks(typeLinksResult); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "An error occurred while processing entity self-links. Error: " + + exc.getMessage()); + } + + return null; + } + + }, aaiExecutor).whenComplete((result, error) -> { + + if (error != null) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "An error occurred getting data from AAI. Error = " + error.getMessage()); + } + }); + + } + + while (aaiWorkOnHand.get() != 0) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); + } + + Thread.sleep(1000); + } + + aaiWorkOnHand.set(selflinks.size()); + allWorkEnumerated = true; + syncEntityTypes(); + + while (!isSyncDone()) { + performRetrySync(); + Thread.sleep(1000); + } + + /* + * Make sure we don't hang on to retries that failed which could cause issues during future + * syncs + */ + retryLimitTracker.clear(); + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "An error occurred while performing the sync. Error: " + exc.getMessage()); + } + + return OperationState.OK; + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() + */ + @Override + public OperationState doSync() { + this.syncDurationInMs = -1; + syncStartedTimeStampInMs = System.currentTimeMillis(); + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "AutosuggestionSynchronizer", "", "Sync", ""); + + return collectAllTheWork(); + } + + /** + * Process entity type self links. + * + * @param operationResult the operation result + */ + private void processEntityTypeSelfLinks(OperationResult operationResult) { + + JsonNode rootNode = null; + + if ( operationResult == null ) { + return; + } + + final String jsonResult = operationResult.getResult(); + + if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { + + try { + rootNode = mapper.readTree(jsonResult); + } catch (IOException exc) { + String message = "Could not deserialize JSON (representing operation result) as node tree. " + + "Operation result = " + jsonResult + ". 
" + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message); + } + + JsonNode resultData = rootNode.get("result-data"); + ArrayNode resultDataArrayNode = null; + + if (resultData.isArray()) { + resultDataArrayNode = (ArrayNode) resultData; + + Iterator<JsonNode> elementIterator = resultDataArrayNode.elements(); + JsonNode element = null; + + while (elementIterator.hasNext()) { + element = elementIterator.next(); + + final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); + final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); + + OxmEntityDescriptor descriptor = null; + + if (resourceType != null && resourceLink != null) { + + descriptor = oxmEntityLookup.getEntityDescriptors().get(resourceType); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); + // go to next element in iterator + continue; + } + selflinks.add(new SelfLinkDescriptor(resourceLink, + SynchronizerConstants.NODES_ONLY_MODIFIER, resourceType)); + + + } + } + } + } + } + + /** + * Sync entity types. + */ + private void syncEntityTypes() { + + while (selflinks.peek() != null) { + + SelfLinkDescriptor linkDescriptor = selflinks.poll(); + aaiWorkOnHand.decrementAndGet(); + + OxmEntityDescriptor descriptor = null; + + if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { + + descriptor = oxmEntityLookup.getEntityDescriptors().get(linkDescriptor.getEntityType()); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); + // go to next element in iterator + continue; + } + + NetworkTransaction txn = new NetworkTransaction(); + txn.setDescriptor(descriptor); + txn.setLink(linkDescriptor.getSelfLink()); + txn.setOperationType(HttpMethod.GET); + txn.setEntityType(linkDescriptor.getEntityType()); + + aaiWorkOnHand.incrementAndGet(); + + supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiAdapter), aaiExecutor) + .whenComplete((result, error) -> { + + aaiWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage()); + } else { + if (result == null) { + LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK, + linkDescriptor.getSelfLink()); + } else { + updateActiveInventoryCounters(result); + fetchDocumentForUpsert(result); + } + } + }); + } + + } + + } + + /* + * Return a set of valid suggestion attributes for the provided entityName that are present in the + * JSON + * + * @param node JSON node in which the attributes should be found + * + * @param entityName Name of the entity + * + * @return List of all valid suggestion attributes(key's) + */ + public List<String> getSuggestableAttrNamesFromReponse(JsonNode node, String entityName) { + List<String> suggestableAttr = new ArrayList<String>(); + + HashMap<String, String> desc = + suggestionEntityLookup.getSuggestionSearchEntityOxmModel().get(entityName); + + if (desc != null) { + + String attr = desc.get("suggestibleAttributes"); + + if (attr != null) { + suggestableAttr = Arrays.asList(attr.split(",")); + List<String> suggestableValue = new ArrayList<String>(); + for (String attribute : suggestableAttr) { + if (node.get(attribute) != null && node.get(attribute).asText().length() > 0) { + suggestableValue.add(attribute); + } + } + return suggestableValue; + } + } + + return new ArrayList<String>(); + } + + /** + * Fetch all the documents for upsert. 
Based on the number of permutations that are available the + * number of documents will be different + * + * @param txn the txn + */ + private void fetchDocumentForUpsert(NetworkTransaction txn) { + if (!txn.getOperationResult().wasSuccessful()) { + String message = "Self link failure. Result - " + txn.getOperationResult().getResult(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + return; + } + try { + final String jsonResult = txn.getOperationResult().getResult(); + + if (jsonResult != null && jsonResult.length() > 0) { + + // Step 1: Calculate the number of possible permutations of attributes + String entityName = txn.getDescriptor().getEntityName(); + JsonNode entityNode = mapper.readTree(jsonResult); + + List<String> availableSuggestableAttrName = + getSuggestableAttrNamesFromReponse(entityNode, entityName); + + ArrayList<ArrayList<String>> uniqueLists = + SuggestionsPermutation.getNonEmptyUniqueLists(availableSuggestableAttrName); + // Now we have a list of all possible permutations for the status that are + // defined for this entity type. Try inserting a document for every combination. + for (ArrayList<String> uniqueList : uniqueLists) { + + SuggestionSearchEntity sse = new SuggestionSearchEntity(filtersConfig, suggestionEntityLookup); + sse.setSuggestableAttr(uniqueList); + sse.setFilterBasedPayloadFromResponse(entityNode, entityName, uniqueList); + sse.setLink(ActiveInventoryAdapter.extractResourcePath(txn.getLink())); + populateSuggestionSearchEntityDocument(sse, jsonResult, txn); + // The unique id for the document will be created at derive fields + sse.deriveFields(); + // Insert the document only if it has valid statuses + if (sse.isSuggestableDoc()) { + String link = null; + try { + link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), sse.getId()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction n2 = new NetworkTransaction(); + n2.setLink(link); + n2.setEntityType(txn.getEntityType()); + n2.setDescriptor(txn.getDescriptor()); + n2.setOperationType(HttpMethod.GET); + + esWorkOnHand.incrementAndGet(); + + supplyAsync(new PerformElasticSearchRetrieval(n2, elasticSearchAdapter), esExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, sse); + } + }); + } + } + } + } + } catch (JsonProcessingException exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, "There was a json processing error while processing the result from elasticsearch. Error: " + exc.getMessage()); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, "There was a io processing error while processing the result from elasticsearch. 
Error: " + exc.getMessage()); + } + } + + protected void populateSuggestionSearchEntityDocument(SuggestionSearchEntity sse, String result, + NetworkTransaction txn) throws JsonProcessingException, IOException { + + OxmEntityDescriptor resultDescriptor = txn.getDescriptor(); + + sse.setEntityType(resultDescriptor.getEntityName()); + + JsonNode entityNode = mapper.readTree(result); + + List<String> primaryKeyValues = new ArrayList<String>(); + String pkeyValue = null; + + for (String keyName : resultDescriptor.getPrimaryKeyAttributeNames()) { + pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName); + if (pkeyValue != null) { + primaryKeyValues.add(pkeyValue); + } else { + String message = "populateSuggestionSearchEntityDocument()," + + " pKeyValue is null for entityType = " + resultDescriptor.getEntityName(); + LOG.warn(AaiUiMsgs.WARN_GENERIC, message); + } + } + + final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/"); + sse.setEntityPrimaryKeyValue(primaryCompositeKeyValue); + sse.generateSuggestionInputPermutations(); + } + + protected void performDocumentUpsert(NetworkTransaction esGetTxn, SuggestionSearchEntity sse) { + /** + * <p> + * <ul> + * As part of the response processing we need to do the following: + * <li>1. Extract the version (if present), it will be the ETAG when we use the + * Search-Abstraction-Service + * <li>2. Spawn next task which is to do the PUT operation into elastic with or with the version + * tag + * <li>a) if version is null or RC=404, then standard put, no _update with version tag + * <li>b) if version != null, do PUT with _update?version= versionNumber in the URI to elastic + * </ul> + * </p> + */ + String link = null; + try { + link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), sse.getId()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); + return; + } + + boolean wasEntryDiscovered = false; + if (esGetTxn.getOperationResult().getResultCode() == 404) { + LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, sse.getEntityPrimaryKeyValue()); + } else if (esGetTxn.getOperationResult().getResultCode() == 200) { + wasEntryDiscovered = true; + } else { + /* + * Not being a 200 does not mean a failure. eg 201 is returned for created. and 500 for es not + * found TODO -> Should we return. + */ + LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE, + String.valueOf(esGetTxn.getOperationResult().getResultCode())); + return; + } + // Insert a new document only if the paylod is different. 
+ // This is determined by hashing the payload and using it as a id for the document + // + if (!wasEntryDiscovered) { + try { + String jsonPayload = null; + + jsonPayload = sse.getAsJson(); + if (link != null && jsonPayload != null) { + + NetworkTransaction updateElasticTxn = new NetworkTransaction(); + updateElasticTxn.setLink(link); + updateElasticTxn.setEntityType(esGetTxn.getEntityType()); + updateElasticTxn.setDescriptor(esGetTxn.getDescriptor()); + updateElasticTxn.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, elasticSearchAdapter), + esPutExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = "Suggestion search entity sync UPDATE PUT error - " + + error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetTxn, sse); + } + }); + } + } catch (Exception exc) { + String message = + "Exception caught during suggestion search entity sync PUT operation. Message - " + + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message); + } + } + } + + private void processStoreDocumentResult(NetworkTransaction esPutResult, + NetworkTransaction esGetResult, SuggestionSearchEntity sse) { + + OperationResult or = esPutResult.getOperationResult(); + + if (!or.wasSuccessful()) { + if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { + + if (shouldAllowRetry(sse.getId())) { + esWorkOnHand.incrementAndGet(); + + RetrySuggestionEntitySyncContainer rssec = + new RetrySuggestionEntitySyncContainer(esGetResult, sse); + retryQueue.push(rssec); + + String message = "Store document failed during suggestion search entity synchronization" + + " due to version conflict. Entity will be re-synced."; + LOG.warn(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message); + } + } else { + String message = + "Store document failed during suggestion search entity synchronization with result code " + + or.getResultCode() + " and result message " + or.getResult(); + LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message); + } + } + } + + /** + * Perform retry sync. + */ + private void performRetrySync() { + while (retryQueue.peek() != null) { + + RetrySuggestionEntitySyncContainer susc = retryQueue.poll(); + if (susc != null) { + + SuggestionSearchEntity sus = susc.getSuggestionSearchEntity(); + NetworkTransaction txn = susc.getNetworkTransaction(); + + String link = null; + try { + /* + * In this retry flow the se object has already derived its fields + */ + link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), sus.getId()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction retryTransaction = new NetworkTransaction(); + retryTransaction.setLink(link); + retryTransaction.setEntityType(txn.getEntityType()); + retryTransaction.setDescriptor(txn.getDescriptor()); + retryTransaction.setOperationType(HttpMethod.GET); + + /* + * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already + * called incrementAndGet when queuing the failed PUT! 
+ */ + + supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, elasticSearchAdapter), + esExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, sus); + } + }); + } + + } + } + } + + /** + * Should allow retry. + * + * @param id the id + * @return true, if successful + */ + private boolean shouldAllowRetry(String id) { + boolean isRetryAllowed = true; + if (retryLimitTracker.get(id) != null) { + Integer currentCount = retryLimitTracker.get(id); + if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { + isRetryAllowed = false; + String message = "Searchable entity re-sync limit reached for " + id + + ", re-sync will no longer be attempted for this entity"; + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } else { + Integer newCount = new Integer(currentCount.intValue() + 1); + retryLimitTracker.put(id, newCount); + } + } else { + Integer firstRetryCount = new Integer(1); + retryLimitTracker.put(id, firstRetryCount); + } + + return isRetryAllowed; + } + + + + @Override + public SynchronizerState getState() { + + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) + */ + @Override + public String getStatReport(boolean showFinalReport) { + syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; + return getStatReport(syncDurationInMs, showFinalReport); + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() + */ + @Override + public void shutdown() { + this.shutdownExecutors(); + } + + @Override + protected boolean isSyncDone() { + + int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + ", isSyncDone(), totalWorkOnHand = " + + totalWorkOnHand + " all work enumerated = " + allWorkEnumerated); + } + + if (totalWorkOnHand > 0 || !allWorkEnumerated) { + return false; + } + + this.syncInProgress = false; + + return true; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.AbstractEntitySynchronizer#clearCache() + */ + @Override + public void clearCache() { + + if (syncInProgress) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Autosuggestion Entity Summarizer in progress, request to clear cache ignored"); + return; + } + + super.clearCache(); + this.resetCounters(); + if (entityCounters != null) { + entityCounters.clear(); + } + + allWorkEnumerated = false; + + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSuggestionSynchronizer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSuggestionSynchronizer.java new file mode 100644 index 0000000..7226c27 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSuggestionSynchronizer.java @@ -0,0 +1,197 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.autosuggestion.sync; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.util.Map; +import java.util.concurrent.ExecutorService; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.dal.rest.HttpMethod; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.search.filters.config.FiltersConfig; +import org.onap.aai.sparky.sync.AbstractEntitySynchronizer; +import org.onap.aai.sparky.sync.IndexSynchronizer; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.entity.AggregationSuggestionEntity; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.onap.aai.sparky.sync.task.PerformElasticSearchPut; +import org.onap.aai.sparky.util.NodeUtils; +import org.slf4j.MDC; + + +public class VnfAliasSuggestionSynchronizer extends AbstractEntitySynchronizer + implements IndexSynchronizer { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(VnfAliasSuggestionSynchronizer.class); + + private boolean isSyncInProgress; + private boolean shouldPerformRetry; + private Map<String, String> contextMap; + protected ExecutorService esPutExecutor; + private FiltersConfig filtersConfig; + + public VnfAliasSuggestionSynchronizer(ElasticSearchSchemaConfig schemaConfig, + int internalSyncWorkers, int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig, + NetworkStatisticsConfig esStatConfig, FiltersConfig filtersConfig) throws Exception { + super(LOG, "VASS-" + schemaConfig.getIndexName().toUpperCase(), internalSyncWorkers, aaiWorkers, + esWorkers, schemaConfig.getIndexName(), aaiStatConfig, esStatConfig); + + this.isSyncInProgress = false; + this.shouldPerformRetry = false; + this.synchronizerName = "VNFs Alias Suggestion Synchronizer"; + this.contextMap = MDC.getCopyOfContextMap(); + this.esPutExecutor = NodeUtils.createNamedExecutor("ASS-ES-PUT", 2, LOG); + this.filtersConfig = filtersConfig; + } + + @Override + protected boolean isSyncDone() { + int totalWorkOnHand = esWorkOnHand.get(); + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + indexName + ", isSyncDone(), totalWorkOnHand = " + totalWorkOnHand); + } + + if (totalWorkOnHand > 0 || !isSyncInProgress) { + return false; + } + + return true; + } + + @Override + public OperationState doSync() { + isSyncInProgress = true; + 
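+ // clear the previously reported duration; it is recomputed when getStatReport() is called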
this.syncDurationInMs = -1; + syncStartedTimeStampInMs = System.currentTimeMillis(); + + syncEntity(); + + while (!isSyncDone()) { + try { + if (shouldPerformRetry) { + syncEntity(); + } + Thread.sleep(1000); + } catch (Exception exc) { + // We don't care about this exception + } + } + + return OperationState.OK; + } + + private void syncEntity() { + String txnId = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnId, synchronizerName, "", "Sync", ""); + + AggregationSuggestionEntity syncEntity = new AggregationSuggestionEntity(filtersConfig); + syncEntity.deriveFields(); + syncEntity.initializeFilters(); + + String link = null; + try { + link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), syncEntity.getId()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); + } + + try { + String jsonPayload = null; + jsonPayload = syncEntity.getAsJson(); + if (link != null && jsonPayload != null) { + + NetworkTransaction elasticPutTxn = new NetworkTransaction(); + elasticPutTxn.setLink(link); + elasticPutTxn.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + final Map<String, String> contextMap = MDC.getCopyOfContextMap(); + supplyAsync(new PerformElasticSearchPut(jsonPayload, elasticPutTxn, + elasticSearchAdapter, contextMap), esPutExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = "Aggregation suggestion entity sync UPDATE PUT error - " + + error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_AGGREGATION_SUGGESTION_ENTITY_SYNC_ERROR, message); + } else { + updateElasticSearchCounters(result); + wasEsOperationSuccessful(result); + } + }); + } + } catch (Exception exc) { + String message = + "Exception caught during aggregation suggestion entity sync PUT operation. Message - " + + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_AGGREGATION_SUGGESTION_ENTITY_SYNC_ERROR, message); + } + } + + private void wasEsOperationSuccessful(NetworkTransaction result) { + if (result != null) { + OperationResult opResult = result.getOperationResult(); + + if (!opResult.wasSuccessful()) { + shouldPerformRetry = true; + } else { + isSyncInProgress = false; + shouldPerformRetry = false; + } + } + } + + @Override + public SynchronizerState getState() { + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + } + + @Override + public String getStatReport(boolean shouldDisplayFinalReport) { + syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; + return getStatReport(syncDurationInMs, shouldDisplayFinalReport); + } + + @Override + public void shutdown() { + this.shutdownExecutors(); + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSyncController.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSyncController.java new file mode 100644 index 0000000..f6504ad --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/autosuggestion/sync/VnfAliasSyncController.java @@ -0,0 +1,99 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.autosuggestion.sync; + +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.search.filters.config.FiltersConfig; +import org.onap.aai.sparky.sync.ElasticSearchIndexCleaner; +import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory; +import org.onap.aai.sparky.sync.IndexCleaner; +import org.onap.aai.sparky.sync.IndexIntegrityValidator; +import org.onap.aai.sparky.sync.SyncControllerImpl; +import org.onap.aai.sparky.sync.SyncControllerRegistrar; +import org.onap.aai.sparky.sync.SyncControllerRegistry; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.config.SyncControllerConfig; + +public class VnfAliasSyncController extends SyncControllerImpl implements SyncControllerRegistrar { + + private SyncControllerRegistry syncControllerRegistry; + + public VnfAliasSyncController(SyncControllerConfig syncControllerConfig, + ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter, + ElasticSearchSchemaConfig schemaConfig, ElasticSearchEndpointConfig endpointConfig, + NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig, + FiltersConfig filtersConfig, + ElasticSearchSchemaFactory elasticSearchSchemaFactory) throws Exception { + super(syncControllerConfig); + + // final String controllerName = "VNFs Alias Suggestion Synchronizer"; + + IndexIntegrityValidator indexValidator = new IndexIntegrityValidator(esAdapter, schemaConfig, + endpointConfig, elasticSearchSchemaFactory.getIndexSchema(schemaConfig)); + + registerIndexValidator(indexValidator); + + VnfAliasSuggestionSynchronizer synchronizer = new VnfAliasSuggestionSynchronizer(schemaConfig, + syncControllerConfig.getNumInternalSyncWorkers(), + syncControllerConfig.getNumSyncActiveInventoryWorkers(), + syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig, filtersConfig); + + synchronizer.setAaiAdapter(aaiAdapter); + synchronizer.setElasticSearchAdapter(esAdapter); + + registerEntitySynchronizer(synchronizer); + + + IndexCleaner indexCleaner = + new ElasticSearchIndexCleaner(esAdapter, endpointConfig, schemaConfig); + + registerIndexCleaner(indexCleaner); + + } + + public SyncControllerRegistry getSyncControllerRegistry() { + return syncControllerRegistry; + } + + public void setSyncControllerRegistry(SyncControllerRegistry syncControllerRegistry) { + this.syncControllerRegistry = syncControllerRegistry; + } + + @Override + public void registerController() { + + if ( syncControllerRegistry != 
null ) { + if ( syncControllerConfig.isEnabled()) { + syncControllerRegistry.registerSyncController(this); + } + } + + } + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/common/search/CommonSearchSuggestion.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/common/search/CommonSearchSuggestion.java new file mode 100644 index 0000000..624573f --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/common/search/CommonSearchSuggestion.java @@ -0,0 +1,90 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.common.search; + +import java.util.ArrayList; +import java.util.List; + +import org.onap.aai.sparky.search.entity.SearchSuggestion; +import org.onap.aai.sparky.search.filters.entity.UiFilterValueEntity; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; + +@JsonInclude(Include.NON_NULL) +public class CommonSearchSuggestion implements SearchSuggestion { + protected String hashId; + protected String route; + protected String text; + protected List<UiFilterValueEntity> filterValues = new ArrayList<>(); + + public CommonSearchSuggestion() {} + + public CommonSearchSuggestion(String hashId, String route, String text, String perspective, + List<UiFilterValueEntity> filterValues) { + this.hashId = hashId; + this.route = route; + this.text = text; + this.filterValues = filterValues; + } + + public List<UiFilterValueEntity> getFilterValues() { + return filterValues; + } + + public String getHashId() { + return hashId; + } + + public String getRoute() { + return route; + } + + public String getText() { + return text; + } + + public void setHashId(String hashId) { + this.hashId = hashId; + } + + public void setRoute(String route) { + this.route = route; + } + + public void setText(String text) { + this.text = text; + } + + @Override + public String toString() { + return "CommonSearchSuggestion [" + (hashId != null ? "hashId=" + hashId + ", " : "") + + (route != null ? "route=" + route + ", " : "") + + (text != null ? "text=" + text + ", " : "") + + (filterValues != null ? 
"filterValues=" + filterValues : "") + "]"; + } + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/SparkyResourceLoader.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/SparkyResourceLoader.java new file mode 100644 index 0000000..286b445 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/SparkyResourceLoader.java @@ -0,0 +1,125 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; + +import org.springframework.context.ResourceLoaderAware; +import org.springframework.core.io.Resource; +import org.springframework.core.io.ResourceLoader; + +public class SparkyResourceLoader implements ResourceLoaderAware { + + + private static final String FILE_URI = "file:"; + private ResourceLoader resourceLoader; + private String configHomeEnvVar; + + // private static Logger LOG = LoggerFactory.getInstance().getLogger(SparkyResourceLoader.class); + + @Override + public void setResourceLoader(ResourceLoader resourceLoader) { + this.resourceLoader = resourceLoader; + } + + public String getFullFileUri(String uriFilePath) { + return FILE_URI + System.getProperty(configHomeEnvVar) + uriFilePath; + } + + public String getAbsolutePath(String uriFilePath) { + return System.getProperty(configHomeEnvVar) + uriFilePath; + } + + protected Resource getResource(String uriFilePath, boolean isRelative) { + + String fileUri = uriFilePath; + + if (!uriFilePath.startsWith("file:")) { + fileUri = "file:" + uriFilePath; + } + + if (isRelative) { + return resourceLoader.getResource(getFullFileUri(fileUri)); + } else { + return resourceLoader.getResource(fileUri); + } + + } + + public File getResourceAsFile(String uriFilePath, boolean isRelativePath) throws IOException { + + Resource resource = getResource(uriFilePath, isRelativePath); + + if (resource.exists()) { + return resource.getFile(); + } + + return null; + + } + + public byte[] getResourceAsBytes(String uriFilePath, boolean isRelativePath) throws IOException { + + Resource resource = getResource(uriFilePath, isRelativePath); + + if (resource.exists()) { + return getResourceAsBytes(resource); + } + + return null; + } + + public byte[] getResourceAsBytes(Resource resource) throws IOException { + + if ( resource != null && resource.exists()) { + return 
Files.readAllBytes(Paths.get(resource.getFile().getAbsolutePath())); + } + + return null; + } + + public String getResourceAsString(String uriFilePath, boolean isRelativePath) throws IOException { + + Resource resource = getResource(uriFilePath, isRelativePath); + + if (resource.exists()) { + return new String(getResourceAsBytes(resource)); + } + + return null; + } + + public String getConfigHomeEnvVar() { + return configHomeEnvVar; + } + + public void setConfigHomeEnvVar(String configHomeEnvVar) { + this.configHomeEnvVar = configHomeEnvVar; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReference.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReference.java new file mode 100644 index 0000000..d632c5a --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReference.java @@ -0,0 +1,78 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +import java.util.ArrayList; +import java.util.List; + +/** + * The Class CrossEntityReference. + */ +public class CrossEntityReference { + private String targetEntityType; + private List<String> referenceAttributes; + + /** + * Instantiates a new cross entity reference. + */ + public CrossEntityReference() { + targetEntityType = null; + referenceAttributes = new ArrayList<String>(); + } + + public String getTargetEntityType() { + return targetEntityType; + } + + public void setTargetEntityType(String targetEntityType) { + this.targetEntityType = targetEntityType; + } + + public List<String> getReferenceAttributes() { + return referenceAttributes; + } + + public void setReferenceAttributes(List<String> referenceAttributes) { + this.referenceAttributes = referenceAttributes; + } + + /** + * Adds the reference attribute. 
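+ * The attribute is appended to the ordered list of reference attributes carried by this cross-entity reference.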
+ * + * @param additionalAttribute the additional attribute + */ + public void addReferenceAttribute(String additionalAttribute) { + referenceAttributes.add(additionalAttribute); + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "CrossEntityReference [targetEntityType=" + targetEntityType + ", referenceAttributes=" + + referenceAttributes + "]"; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceDescriptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceDescriptor.java new file mode 100644 index 0000000..c44b1f4 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceDescriptor.java @@ -0,0 +1,67 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +public class CrossEntityReferenceDescriptor extends OxmEntityDescriptor { + protected CrossEntityReference crossEntityReference; + + public CrossEntityReference getCrossEntityReference() { + return crossEntityReference; + } + + public void setCrossEntityReference(CrossEntityReference crossEntityReference) { + this.crossEntityReference = crossEntityReference; + } + + /** + * Checks for cross entity references. + * + * @return true, if successful + */ + public boolean hasCrossEntityReferences() { + if (this.crossEntityReference == null) { + return false; + } + if (!this.crossEntityReference.getReferenceAttributes().isEmpty()) { + return true; + } + return false; + } + + + @Override + public String toString() { + return "CrossEntityReferenceDescriptor [" + + (crossEntityReference != null ? "crossEntityReference=" + crossEntityReference + ", " + : "") + + (entityName != null ? "entityName=" + entityName + ", " : "") + + (primaryKeyAttributeNames != null ? 
"primaryKeyAttributeNames=" + primaryKeyAttributeNames + : "") + + "]"; + } + + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceLookup.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceLookup.java new file mode 100644 index 0000000..603b93d --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceLookup.java @@ -0,0 +1,136 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.eclipse.persistence.dynamic.DynamicType; +import org.eclipse.persistence.internal.oxm.mappings.Descriptor; +import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext; + +public class CrossEntityReferenceLookup implements OxmModelProcessor { + + private Map<String, HashMap<String, String>> crossReferenceEntityOxmModel; + private Map<String, CrossEntityReferenceDescriptor> crossReferenceEntityDescriptors; + + + public CrossEntityReferenceLookup() { + crossReferenceEntityOxmModel = new LinkedHashMap<String, HashMap<String, String>>(); + crossReferenceEntityDescriptors = new HashMap<String, CrossEntityReferenceDescriptor>(); + } + + @Override + public void processOxmModel(DynamicJAXBContext jaxbContext) { + + @SuppressWarnings("rawtypes") + List<Descriptor> descriptorsList = jaxbContext.getXMLContext().getDescriptors(); + + for (@SuppressWarnings("rawtypes") + Descriptor desc : descriptorsList) { + + DynamicType entity = jaxbContext.getDynamicType(desc.getAlias()); + + LinkedHashMap<String, String> oxmProperties = new LinkedHashMap<String, String>(); + + // Not all fields have key attributes + if (desc.getPrimaryKeyFields() != null) { + oxmProperties.put("primaryKeyAttributeNames", desc.getPrimaryKeyFields().toString() + .replaceAll("/text\\(\\)", "").replaceAll("\\[", "").replaceAll("\\]", "")); + } + + String entityName = desc.getDefaultRootElement(); + + // add entityName + oxmProperties.put("entityName", entityName); + + Map<String, String> properties = entity.getDescriptor().getProperties(); + if (properties != null) { + for (Map.Entry<String, String> entry : properties.entrySet()) { + + if (entry.getKey().equalsIgnoreCase("crossEntityReference")) { + oxmProperties.put("crossEntityReference", 
entry.getValue()); + } + } + } + + if (oxmProperties.containsKey("crossEntityReference")) { + crossReferenceEntityOxmModel.put(entityName, oxmProperties); + } + + } + + for (Entry<String, HashMap<String, String>> crossRefModel : crossReferenceEntityOxmModel + .entrySet()) { + HashMap<String, String> attribute = crossRefModel.getValue(); + CrossEntityReferenceDescriptor entity = new CrossEntityReferenceDescriptor(); + entity.setEntityName(attribute.get("entityName")); + entity.setPrimaryKeyAttributeNames( + Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); + + List<String> crossEntityRefTokens = + Arrays.asList(attribute.get("crossEntityReference").split(",")); + + if (crossEntityRefTokens.size() >= 2) { + CrossEntityReference entityRef = new CrossEntityReference(); + entityRef.setTargetEntityType(crossEntityRefTokens.get(0)); + + for (int i = 1; i < crossEntityRefTokens.size(); i++) { + entityRef.addReferenceAttribute(crossEntityRefTokens.get(i)); + } + + entity.setCrossEntityReference(entityRef); + } + crossReferenceEntityDescriptors.put(attribute.get("entityName"), entity); + } + + } + + public Map<String, HashMap<String, String>> getCrossReferenceEntityOxmModel() { + return crossReferenceEntityOxmModel; + } + + public void setCrossReferenceEntityOxmModel( + Map<String, HashMap<String, String>> crossReferenceEntityOxmModel) { + this.crossReferenceEntityOxmModel = crossReferenceEntityOxmModel; + } + + public Map<String, CrossEntityReferenceDescriptor> getCrossReferenceEntityDescriptors() { + return crossReferenceEntityDescriptors; + } + + public void setCrossReferenceEntityDescriptors( + Map<String, CrossEntityReferenceDescriptor> crossReferenceEntityDescriptors) { + this.crossReferenceEntityDescriptors = crossReferenceEntityDescriptors; + } + + + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityDescriptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityDescriptor.java new file mode 100644 index 0000000..4e995a5 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityDescriptor.java @@ -0,0 +1,61 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.config.oxm; + +public class GeoEntityDescriptor extends OxmEntityDescriptor { + + protected String geoLatName; + + protected String geoLongName; + + public String getGeoLatName() { + return geoLatName; + } + + public void setGeoLatName(String geoLatName) { + this.geoLatName = geoLatName; + } + + public String getGeoLongName() { + return geoLongName; + } + + public void setGeoLongName(String geoLongName) { + this.geoLongName = geoLongName; + } + + @Override + public String toString() { + return "GeoEntityDescriptor [" + (geoLatName != null ? "geoLatName=" + geoLatName + ", " : "") + + (geoLongName != null ? "geoLongName=" + geoLongName + ", " : "") + + (entityName != null ? "entityName=" + entityName + ", " : "") + + (primaryKeyAttributeNames != null ? "primaryKeyAttributeNames=" + primaryKeyAttributeNames + : "") + + "]"; + } + + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityLookup.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityLookup.java new file mode 100644 index 0000000..1e61345 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoEntityLookup.java @@ -0,0 +1,137 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.config.oxm; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.eclipse.persistence.dynamic.DynamicType; +import org.eclipse.persistence.internal.oxm.mappings.Descriptor; +import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext; + +public class GeoEntityLookup implements OxmModelProcessor { + + private Map<String, HashMap<String, String>> geoEntityOxmModel; + + private Map<String, GeoOxmEntityDescriptor> geoEntityDescriptors; + + public GeoEntityLookup() { + geoEntityOxmModel = new LinkedHashMap<String, HashMap<String, String>>(); + geoEntityDescriptors = new HashMap<String, GeoOxmEntityDescriptor>(); + } + + public Map<String, HashMap<String, String>> getGeoEntityOxmModel() { + return geoEntityOxmModel; + } + + public void setGeoEntityOxmModel(Map<String, HashMap<String, String>> geoEntityOxmModel) { + this.geoEntityOxmModel = geoEntityOxmModel; + } + + public Map<String, GeoOxmEntityDescriptor> getGeoEntityDescriptors() { + return geoEntityDescriptors; + } + + public void setGeoEntityDescriptors(Map<String, GeoOxmEntityDescriptor> geoEntityDescriptors) { + this.geoEntityDescriptors = geoEntityDescriptors; + } + + @Override + public void processOxmModel(DynamicJAXBContext jaxbContext) { + + @SuppressWarnings("rawtypes") + List<Descriptor> descriptorsList = jaxbContext.getXMLContext().getDescriptors(); + + for (@SuppressWarnings("rawtypes") + Descriptor desc : descriptorsList) { + + DynamicType entity = jaxbContext.getDynamicType(desc.getAlias()); + + LinkedHashMap<String, String> oxmProperties = new LinkedHashMap<String, String>(); + + // Not all fields have key attributes + if (desc.getPrimaryKeyFields() != null) { + oxmProperties.put("primaryKeyAttributeNames", desc.getPrimaryKeyFields().toString() + .replaceAll("/text\\(\\)", "").replaceAll("\\[", "").replaceAll("\\]", "")); + } + + String entityName = desc.getDefaultRootElement(); + + // add entityName + oxmProperties.put("entityName", entityName); + + Map<String, String> properties = entity.getDescriptor().getProperties(); + + if (properties != null) { + for (Map.Entry<String, String> entry : properties.entrySet()) { + + if (entry.getKey().equalsIgnoreCase("geoLat")) { + if (entry.getValue().length() > 0) { + oxmProperties.put("geoLat", entry.getValue()); + } + } else if (entry.getKey().equalsIgnoreCase("geoLong")) { + if (entry.getValue().length() > 0) { + oxmProperties.put("geoLong", entry.getValue()); + } + } + } + } + + if (oxmProperties.containsKey("geoLat") && oxmProperties.containsKey("geoLong")) { + geoEntityOxmModel.put(entityName, oxmProperties); + } + + } + + for (Entry<String, HashMap<String, String>> entityModel : geoEntityOxmModel.entrySet()) { + + HashMap<String, String> attribute = entityModel.getValue(); + + GeoOxmEntityDescriptor entity = new GeoOxmEntityDescriptor(); + + entity.setEntityName(attribute.get("entityName")); + + if (attribute.containsKey("primaryKeyAttributeNames")) { + + entity.setPrimaryKeyAttributeNames( + Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); + + if (attribute.containsKey("geoLat") || attribute.containsKey("geoLong")) { + entity.setGeoLatName(attribute.get("geoLat")); + entity.setGeoLongName(attribute.get("geoLong")); + } + + geoEntityDescriptors.put(attribute.get("entityName"), entity); + } + } + + } + + +} diff --git 
a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoOxmEntityDescriptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoOxmEntityDescriptor.java new file mode 100644 index 0000000..03fb9d6 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/GeoOxmEntityDescriptor.java @@ -0,0 +1,71 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +public class GeoOxmEntityDescriptor extends OxmEntityDescriptor { + + private String geoLatName; + + private String geoLongName; + + public String getGeoLatName() { + return geoLatName; + } + + public void setGeoLatName(String geoLatName) { + this.geoLatName = geoLatName; + } + + public String getGeoLongName() { + return geoLongName; + } + + public void setGeoLongName(String geoLongName) { + this.geoLongName = geoLongName; + } + + /** + * Checks for geo entity. + * + * @return true, if successful + */ + public boolean hasGeoEntity() { + return (this.geoLongName != null && this.geoLatName != null); + } + + @Override + public String toString() { + return "GeoOxmEntityDescriptor [" + + (geoLatName != null ? "geoLatName=" + geoLatName + ", " : "") + + (geoLongName != null ? "geoLongName=" + geoLongName + ", " : "") + + (entityName != null ? "entityName=" + entityName + ", " : "") + + (primaryKeyAttributeNames != null ? "primaryKeyAttributeNames=" + primaryKeyAttributeNames + : "") + + "]"; + } + + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityDescriptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityDescriptor.java new file mode 100644 index 0000000..fd071d1 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityDescriptor.java @@ -0,0 +1,68 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +import java.util.ArrayList; +import java.util.List; + +public class OxmEntityDescriptor { + + protected String entityName; + + protected List<String> primaryKeyAttributeNames; + + public OxmEntityDescriptor() { + primaryKeyAttributeNames = new ArrayList<String>(); + } + + public String getEntityName() { + return entityName; + } + + public void setEntityName(String entityName) { + this.entityName = entityName; + } + + public List<String> getPrimaryKeyAttributeNames() { + return primaryKeyAttributeNames; + } + + public void setPrimaryKeyAttributeNames(List<String> primaryKeyAttributeNames) { + this.primaryKeyAttributeNames = primaryKeyAttributeNames; + } + + public void addPrimaryKeyName(String name) { + primaryKeyAttributeNames.add(name); + } + + @Override + public String toString() { + return "OxmEntityDescriptor [" + (entityName != null ? "entityName=" + entityName + ", " : "") + + (primaryKeyAttributeNames != null ? "primaryKeyAttributeNames=" + primaryKeyAttributeNames + : "") + + "]"; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityLookup.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityLookup.java new file mode 100644 index 0000000..09326a8 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmEntityLookup.java @@ -0,0 +1,132 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.config.oxm; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.eclipse.persistence.dynamic.DynamicType; +import org.eclipse.persistence.internal.oxm.mappings.Descriptor; +import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext; + +public class OxmEntityLookup implements OxmModelProcessor { + + private Map<String, HashMap<String, String>> oxmModel; + + private Map<String, DynamicType> entityTypeLookup; + + private Map<String, OxmEntityDescriptor> entityDescriptors; + + + public OxmEntityLookup() { + oxmModel = new LinkedHashMap<String, HashMap<String, String>>(); + entityTypeLookup = new LinkedHashMap<String, DynamicType>(); + entityDescriptors = new HashMap<String, OxmEntityDescriptor>(); + } + + @Override + public void processOxmModel(DynamicJAXBContext jaxbContext) { + + @SuppressWarnings("rawtypes") + List<Descriptor> descriptorsList = jaxbContext.getXMLContext().getDescriptors(); + + for (@SuppressWarnings("rawtypes") + Descriptor desc : descriptorsList) { + + DynamicType entity = jaxbContext.getDynamicType(desc.getAlias()); + + LinkedHashMap<String, String> oxmProperties = new LinkedHashMap<String, String>(); + + // Not all fields have key attributes + if (desc.getPrimaryKeyFields() != null) { + oxmProperties.put("primaryKeyAttributeNames", desc.getPrimaryKeyFields().toString() + .replaceAll("/text\\(\\)", "").replaceAll("\\[", "").replaceAll("\\]", "")); + } + + String entityName = desc.getDefaultRootElement(); + + entityTypeLookup.put(entityName, entity); + + // add entityName + oxmProperties.put("entityName", entityName); + + Map<String, String> properties = entity.getDescriptor().getProperties(); + + oxmModel.put(entityName, oxmProperties); + + } + + for (Entry<String, HashMap<String, String>> entityModel : oxmModel.entrySet()) { + HashMap<String, String> attribute = entityModel.getValue(); + OxmEntityDescriptor entity = new OxmEntityDescriptor(); + + entity.setEntityName(attribute.get("entityName")); + + if (attribute.containsKey("primaryKeyAttributeNames")) { + + entity.setPrimaryKeyAttributeNames( + Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); + + entityDescriptors.put(attribute.get("entityName"), entity); + } + } + + } + + public Map<String, HashMap<String, String>> getOxmModel() { + return oxmModel; + } + + public void setOxmModel(Map<String, HashMap<String, String>> oxmModel) { + this.oxmModel = oxmModel; + } + + public Map<String, DynamicType> getEntityTypeLookup() { + return entityTypeLookup; + } + + public void setEntityTypeLookup(Map<String, DynamicType> entityTypeLookup) { + this.entityTypeLookup = entityTypeLookup; + } + + public Map<String, OxmEntityDescriptor> getEntityDescriptors() { + return entityDescriptors; + } + + public void setEntityDescriptors(Map<String, OxmEntityDescriptor> entityDescriptors) { + this.entityDescriptors = entityDescriptors; + } + + public void addEntityDescriptor(String type, OxmEntityDescriptor descriptor) { + if ( this.entityDescriptors != null ) { + this.entityDescriptors.put(type, descriptor); + } + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelLoader.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelLoader.java new file mode 100644 index 0000000..475fe8f --- /dev/null +++ 
b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelLoader.java @@ -0,0 +1,195 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +import java.io.IOException; +import java.io.InputStream; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.eclipse.persistence.jaxb.JAXBContextProperties; +import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext; +import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContextFactory; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.springframework.core.io.Resource; +import org.springframework.core.io.support.PathMatchingResourcePatternResolver; +import org.springframework.core.io.support.ResourcePatternResolver; + +public class OxmModelLoader { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(OxmModelLoader.class); + + /* + * The intent of this parameter is to be able to programmatically over-ride the latest AAI schema + * version discovered from the aai-schema jar file. This property is optional, but if set on the + * bean or by another class in the system, then it will override the spec version that is loaded. + * + * If the latestVersionOverride is greater than 0 then it will set the latest version to the + * specified version, and that stream will be returned if available. 
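+ * Otherwise the loader selects the highest-numbered aai_oxm_v&lt;N&gt;.xml stream discovered on the + * classpath.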
+ */ + + protected int oxmApiVersionOverride; + protected Set<OxmModelProcessor> processors; + private int latestVersionNum = 0; + + private final static Pattern p = Pattern.compile("aai_oxm_(v)(.*).xml"); + + public OxmModelLoader() { + this(-1, new HashSet<OxmModelProcessor>()); + } + + public OxmModelLoader(int apiVersionOverride,Set<OxmModelProcessor> oxmModelProcessors) { + this.oxmApiVersionOverride = apiVersionOverride; + this.processors = oxmModelProcessors; + } + + protected synchronized Map<Integer, InputStream> getStreamHandlesForOxmFromResource() { + Map<Integer, InputStream> listOfOxmFiles = new HashMap<Integer, InputStream>(); + ClassLoader oxmClassLoader = OxmModelLoader.class.getClassLoader(); + ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver(oxmClassLoader); + Resource[] resources = null; + try { + resources = resolver.getResources("classpath*:/oxm/aai_oxm*.xml"); + } catch (IOException ex) { + LOG.error(AaiUiMsgs.OXM_LOADING_ERROR, ex.getMessage()); + } + + if (resources == null) { + LOG.error(AaiUiMsgs.OXM_LOADING_ERROR, "No OXM schema files found on classpath"); + } + + for (Resource resource : resources) { + Matcher m = p.matcher(resource.getFilename()); + + if (m.matches()) { + try { + listOfOxmFiles.put(new Integer(m.group(2)), resource.getInputStream()); + } catch (Exception e) { + LOG.error(AaiUiMsgs.OXM_LOADING_ERROR, + resource.getFilename(), e.getMessage()); + } + } + } + return listOfOxmFiles; + } + + /** + * Load an oxm model. + * @param inputStream file handle for oxm + */ + protected void loadModel(InputStream inputStream) { + Map<String, Object> properties = new HashMap<String, Object>(); + properties.put(JAXBContextProperties.OXM_METADATA_SOURCE, inputStream); + try { + final DynamicJAXBContext oxmContext = DynamicJAXBContextFactory + .createContextFromOXM(Thread.currentThread().getContextClassLoader(), properties); + + parseOxmContext(oxmContext); + // populateSearchableOxmModel(); + LOG.info(AaiUiMsgs.OXM_LOAD_SUCCESS, String.valueOf(latestVersionNum)); + } catch (Exception exc) { + LOG.info(AaiUiMsgs.OXM_PARSE_ERROR_NONVERBOSE); + LOG.error(AaiUiMsgs.OXM_PARSE_ERROR_VERBOSE, "OXM v" + latestVersionNum, exc.getMessage()); + } + } + + /** + * Load the latest oxm model. + */ + public synchronized void loadLatestOxmModel() { + + LOG.info(AaiUiMsgs.INITIALIZE_OXM_MODEL_LOADER); + + // find handles for available oxm models + final Map<Integer, InputStream> listOfOxmStreams = getStreamHandlesForOxmFromResource(); + if (listOfOxmStreams.isEmpty()) { + LOG.error(AaiUiMsgs.OXM_FILE_NOT_FOUND); + return; + } + + InputStream stream = null; + + if (oxmApiVersionOverride > 0) { + latestVersionNum = oxmApiVersionOverride; + LOG.warn(AaiUiMsgs.WARN_GENERIC, "Overriding AAI Schema with version = " + latestVersionNum); + stream = listOfOxmStreams.get(latestVersionNum); + } else { + + for (Integer key : listOfOxmStreams.keySet()) { + if (key.intValue() > latestVersionNum) { + latestVersionNum = key.intValue(); + stream = listOfOxmStreams.get(key); + } + } + } + + // load the latest oxm file + loadModel(stream); + + } + + public int getLatestVersionNum() { + return latestVersionNum; + } + + public void setLatestVersionNum(int latestVersionNum) { + this.latestVersionNum = latestVersionNum; + } + + /** + * Parses the oxm context. 
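+ * Each registered OxmModelProcessor is handed the dynamic JAXB context in turn; a failure in one + * processor is logged as a warning and does not stop the remaining processors.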
+ * + * @param oxmContext the oxm context + */ + private void parseOxmContext(DynamicJAXBContext oxmContext) { + + if (processors != null && processors.size() > 0) { + + for (OxmModelProcessor processor : processors) { + + try { + + processor.processOxmModel(oxmContext); + + } catch (Exception exc) { + + LOG.warn(AaiUiMsgs.WARN_GENERIC, + "OxmModelProcessor experienced an error. Error: " + exc.getMessage()); + + } + + } + + } + + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelProcessor.java new file mode 100644 index 0000000..9e250b7 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/OxmModelProcessor.java @@ -0,0 +1,33 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext; + +public interface OxmModelProcessor { + + public void processOxmModel(DynamicJAXBContext jaxbContext); + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SearchableEntityLookup.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SearchableEntityLookup.java new file mode 100644 index 0000000..7833ee0 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SearchableEntityLookup.java @@ -0,0 +1,119 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.eclipse.persistence.dynamic.DynamicType; +import org.eclipse.persistence.internal.oxm.mappings.Descriptor; +import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext; + +public class SearchableEntityLookup implements OxmModelProcessor { + + private Map<String, HashMap<String, String>> searchableOxmModel; + private Map<String, SearchableOxmEntityDescriptor> searchableEntityDescriptors; + + public SearchableEntityLookup() { + searchableOxmModel = new LinkedHashMap<String, HashMap<String, String>>(); + searchableEntityDescriptors = new HashMap<String, SearchableOxmEntityDescriptor>(); + } + + @Override + public void processOxmModel(DynamicJAXBContext jaxbContext) { + + @SuppressWarnings("rawtypes") + List<Descriptor> descriptorsList = jaxbContext.getXMLContext().getDescriptors(); + + for (@SuppressWarnings("rawtypes") + Descriptor desc : descriptorsList) { + + DynamicType entity = jaxbContext.getDynamicType(desc.getAlias()); + + LinkedHashMap<String, String> oxmProperties = new LinkedHashMap<String, String>(); + + // Not all fields have key attributes + if (desc.getPrimaryKeyFields() != null) { + oxmProperties.put("primaryKeyAttributeNames", desc.getPrimaryKeyFields().toString() + .replaceAll("/text\\(\\)", "").replaceAll("\\[", "").replaceAll("\\]", "")); + } + + String entityName = desc.getDefaultRootElement(); + + // add entityName + oxmProperties.put("entityName", entityName); + + Map<String, String> properties = entity.getDescriptor().getProperties(); + if (properties != null) { + for (Map.Entry<String, String> entry : properties.entrySet()) { + + if (entry.getKey().equalsIgnoreCase("searchable")) { + oxmProperties.put("searchableAttributes", entry.getValue()); + } + } + } + + // Add all searchable entity types for reserve lookup + if (oxmProperties.containsKey("searchableAttributes")) { + searchableOxmModel.put(entityName, oxmProperties); + } + + } + + for (Entry<String, HashMap<String, String>> searchableModel : searchableOxmModel.entrySet()) { + HashMap<String, String> attribute = searchableModel.getValue(); + SearchableOxmEntityDescriptor entity = new SearchableOxmEntityDescriptor(); + entity.setEntityName(attribute.get("entityName")); + entity.setPrimaryKeyAttributeNames( + Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); + entity + .setSearchableAttributes(Arrays.asList(attribute.get("searchableAttributes").split(","))); + searchableEntityDescriptors.put(attribute.get("entityName"), entity); + } + + } + + public Map<String, HashMap<String, String>> getSearchableOxmModel() { + return searchableOxmModel; + } + + public void setSearchableOxmModel(Map<String, HashMap<String, String>> searchableOxmModel) { + this.searchableOxmModel = searchableOxmModel; + } + + public Map<String, SearchableOxmEntityDescriptor> getSearchableEntityDescriptors() { + return searchableEntityDescriptors; + } + + public void setSearchableEntityDescriptors( + Map<String, SearchableOxmEntityDescriptor> searchableEntityDescriptors) { + this.searchableEntityDescriptors = searchableEntityDescriptors; + } + +} diff --git 
a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SearchableOxmEntityDescriptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SearchableOxmEntityDescriptor.java new file mode 100644 index 0000000..9f2809f --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SearchableOxmEntityDescriptor.java @@ -0,0 +1,75 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +import java.util.List; + +public class SearchableOxmEntityDescriptor extends OxmEntityDescriptor { + + protected List<String> searchableAttributes; + + public List<String> getSearchableAttributes() { + return searchableAttributes; + } + + public void setSearchableAttributes(List<String> searchableAttributes) { + this.searchableAttributes = searchableAttributes; + } + + public void addSearchableAttribute(String attributeName) { + searchableAttributes.add(attributeName); + } + + /** + * Checks for searchable attributes. + * + * @return true, if successful + */ + public boolean hasSearchableAttributes() { + + if (this.searchableAttributes == null) { + return false; + } + + if (this.searchableAttributes.size() > 0) { + return true; + } + + return false; + + } + + @Override + public String toString() { + return "SearchableOxmEntityDescriptor [" + + (searchableAttributes != null ? "searchableAttributes=" + searchableAttributes + ", " + : "") + + (entityName != null ? "entityName=" + entityName + ", " : "") + + (primaryKeyAttributeNames != null ? "primaryKeyAttributeNames=" + primaryKeyAttributeNames + : "") + + "]"; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityDescriptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityDescriptor.java new file mode 100644 index 0000000..774f6b0 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityDescriptor.java @@ -0,0 +1,54 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.config.oxm; + +import org.onap.aai.sparky.sync.entity.SuggestionSearchEntity; + +public class SuggestionEntityDescriptor extends OxmEntityDescriptor { + + protected SuggestionSearchEntity suggestionSearchEntity; + + public SuggestionSearchEntity getSuggestionSearchEntity() { + return suggestionSearchEntity; + } + + public void setSuggestionSearchEntity(SuggestionSearchEntity suggestionSearchEntity) { + this.suggestionSearchEntity = suggestionSearchEntity; + } + + @Override + public String toString() { + return "SuggestionEntityDescriptor [" + + (suggestionSearchEntity != null + ? "suggestionSearchEntity=" + suggestionSearchEntity + ", " : "") + + (entityName != null ? "entityName=" + entityName + ", " : "") + + (primaryKeyAttributeNames != null ? "primaryKeyAttributeNames=" + primaryKeyAttributeNames + : "") + + "]"; + } + + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityLookup.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityLookup.java new file mode 100644 index 0000000..fde1b6a --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/config/oxm/SuggestionEntityLookup.java @@ -0,0 +1,181 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.config.oxm; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Vector; + +import org.eclipse.persistence.dynamic.DynamicType; +import org.eclipse.persistence.internal.oxm.mappings.Descriptor; +import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext; +import org.eclipse.persistence.mappings.DatabaseMapping; +import org.onap.aai.sparky.search.filters.config.FiltersConfig; +import org.onap.aai.sparky.sync.entity.SuggestionSearchEntity; + +public class SuggestionEntityLookup implements OxmModelProcessor { + + private Map<String, HashMap<String, String>> suggestionSearchEntityOxmModel; + private Map<String, SuggestionEntityDescriptor> suggestionSearchEntityDescriptors; + private FiltersConfig filtersConfig; + + public SuggestionEntityLookup(FiltersConfig filtersConfig) { + suggestionSearchEntityOxmModel = new LinkedHashMap<String, HashMap<String, String>>(); + suggestionSearchEntityDescriptors = new HashMap<String, SuggestionEntityDescriptor>(); + this.filtersConfig = filtersConfig; + } + + @Override + public void processOxmModel(DynamicJAXBContext jaxbContext) { + + @SuppressWarnings("rawtypes") + List<Descriptor> descriptorsList = jaxbContext.getXMLContext().getDescriptors(); + + for (@SuppressWarnings("rawtypes") + Descriptor desc : descriptorsList) { + + DynamicType entity = jaxbContext.getDynamicType(desc.getAlias()); + + LinkedHashMap<String, String> oxmProperties = new LinkedHashMap<String, String>(); + + // Not all fields have key attributes + if (desc.getPrimaryKeyFields() != null) { + oxmProperties.put("primaryKeyAttributeNames", desc.getPrimaryKeyFields().toString() + .replaceAll("/text\\(\\)", "").replaceAll("\\[", "").replaceAll("\\]", "")); + } + + String entityName = desc.getDefaultRootElement(); + + // add entityName + oxmProperties.put("entityName", entityName); + + Map<String, String> properties = entity.getDescriptor().getProperties(); + if (properties != null) { + for (Map.Entry<String, String> entry : properties.entrySet()) { + + + if (entry.getKey().equalsIgnoreCase("containsSuggestibleProps")) { + + oxmProperties.put("containsSuggestibleProps", "true"); + + Vector<DatabaseMapping> descriptorMaps = entity.getDescriptor().getMappings(); + List<String> listOfSuggestableAttributes = new ArrayList<String>(); + + for (DatabaseMapping descMap : descriptorMaps) { + if (descMap.isAbstractDirectMapping()) { + + if (descMap.getProperties().get("suggestibleOnSearch") != null) { + String suggestableOnSearchString = + String.valueOf(descMap.getProperties().get("suggestibleOnSearch")); + + boolean isSuggestibleOnSearch = Boolean.valueOf(suggestableOnSearchString); + + if (isSuggestibleOnSearch) { + /* Grab attribute types for suggestion */ + String attributeName = + descMap.getField().getName().replaceAll("/text\\(\\)", ""); + listOfSuggestableAttributes.add(attributeName); + + if (descMap.getProperties().get("suggestionVerbs") != null) { + String suggestionVerbsString = + String.valueOf(descMap.getProperties().get("suggestionVerbs")); + + oxmProperties.put("suggestionVerbs", suggestionVerbsString); + } + } + } + } + } + + if (!listOfSuggestableAttributes.isEmpty()) { + oxmProperties.put("suggestibleAttributes", + String.join(",", listOfSuggestableAttributes)); + } + } else if (entry.getKey().equalsIgnoreCase("suggestionAliases")) { + oxmProperties.put("suggestionAliases", 
entry.getValue()); + } + } + } + + if (oxmProperties.containsKey("containsSuggestibleProps")) { + suggestionSearchEntityOxmModel.put(entityName, oxmProperties); + } + } + + for (Entry<String, HashMap<String, String>> suggestionEntityModel : suggestionSearchEntityOxmModel + .entrySet()) { + HashMap<String, String> attribute = suggestionEntityModel.getValue(); + + String entityName = attribute.get("entityName"); + SuggestionSearchEntity suggestionSearchEntity = new SuggestionSearchEntity(filtersConfig, this); + suggestionSearchEntity.setEntityType(entityName); + + if (attribute.get("suggestionAliases") != null) { + suggestionSearchEntity + .setSuggestionAliases(Arrays.asList(attribute.get("suggestionAliases").split(","))); + } + + if (attribute.get("suggestibleAttributes") != null) { + suggestionSearchEntity.setSuggestionPropertyTypes( + Arrays.asList(attribute.get("suggestibleAttributes").split(","))); + } + + SuggestionEntityDescriptor entity = new SuggestionEntityDescriptor(); + entity.setSuggestionSearchEntity(suggestionSearchEntity); + entity.setEntityName(entityName); + + if (attribute.get("primaryKeyAttributeNames") != null) { + entity.setPrimaryKeyAttributeNames( + Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); + } + + suggestionSearchEntityDescriptors.put(entityName, entity); + } + } + + public Map<String, HashMap<String, String>> getSuggestionSearchEntityOxmModel() { + return suggestionSearchEntityOxmModel; + } + + public void setSuggestionSearchEntityOxmModel( + Map<String, HashMap<String, String>> suggestionSearchEntityOxmModel) { + this.suggestionSearchEntityOxmModel = suggestionSearchEntityOxmModel; + } + + public Map<String, SuggestionEntityDescriptor> getSuggestionSearchEntityDescriptors() { + return suggestionSearchEntityDescriptors; + } + + public void setSuggestionSearchEntityDescriptors( + Map<String, SuggestionEntityDescriptor> suggestionSearchEntityDescriptors) { + this.suggestionSearchEntityDescriptors = suggestionSearchEntityDescriptors; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/crossentityreference/sync/CrossEntityReferenceSynchronizer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/crossentityreference/sync/CrossEntityReferenceSynchronizer.java new file mode 100644 index 0000000..604c74c --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/crossentityreference/sync/CrossEntityReferenceSynchronizer.java @@ -0,0 +1,937 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.crossentityreference.sync; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Deque; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.ExecutorService; +import java.util.function.Supplier; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.CrossEntityReference; +import org.onap.aai.sparky.config.oxm.CrossEntityReferenceDescriptor; +import org.onap.aai.sparky.config.oxm.CrossEntityReferenceLookup; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.SearchableEntityLookup; +import org.onap.aai.sparky.config.oxm.SearchableOxmEntityDescriptor; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.dal.rest.HttpMethod; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.AbstractEntitySynchronizer; +import org.onap.aai.sparky.sync.IndexSynchronizer; +import org.onap.aai.sparky.sync.SynchronizerConstants; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.entity.IndexableCrossEntityReference; +import org.onap.aai.sparky.sync.entity.MergableEntity; +import org.onap.aai.sparky.sync.entity.SelfLinkDescriptor; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.onap.aai.sparky.sync.task.PerformActiveInventoryRetrieval; +import org.onap.aai.sparky.sync.task.PerformElasticSearchPut; +import org.onap.aai.sparky.sync.task.PerformElasticSearchRetrieval; +import org.onap.aai.sparky.sync.task.PerformElasticSearchUpdate; +import org.onap.aai.sparky.util.NodeUtils; +import org.slf4j.MDC; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectReader; +import com.fasterxml.jackson.databind.node.ArrayNode; + +/** + * The Class CrossEntityReferenceSynchronizer. + */ +public class CrossEntityReferenceSynchronizer extends AbstractEntitySynchronizer + implements IndexSynchronizer { + + /** + * The Class RetryCrossEntitySyncContainer. + */ + private class RetryCrossEntitySyncContainer { + NetworkTransaction txn; + IndexableCrossEntityReference icer; + + /** + * Instantiates a new retry cross entity sync container. 
+ * + * @param txn the txn + * @param icer the icer + */ + public RetryCrossEntitySyncContainer(NetworkTransaction txn, + IndexableCrossEntityReference icer) { + this.txn = txn; + this.icer = icer; + } + + public NetworkTransaction getNetworkTransaction() { + return txn; + } + + public IndexableCrossEntityReference getIndexableCrossEntityReference() { + return icer; + } + } + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(CrossEntityReferenceSynchronizer.class); + + private static final String SERVICE_INSTANCE = "service-instance"; + + private Deque<SelfLinkDescriptor> selflinks; + private Deque<RetryCrossEntitySyncContainer> retryQueue; + private Map<String, Integer> retryLimitTracker; + private boolean isAllWorkEnumerated; + protected ExecutorService esPutExecutor; + private CrossEntityReferenceLookup crossEntityReferenceLookup; + private OxmEntityLookup oxmEntityLookup; + private SearchableEntityLookup searchableEntityLookup; + + + /** + * Instantiates a new cross entity reference synchronizer. + * + * @param indexName the index name + * @throws Exception the exception + */ + public CrossEntityReferenceSynchronizer(ElasticSearchSchemaConfig schemaConfig, + int internalSyncWorkers, int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig, + NetworkStatisticsConfig esStatConfig, CrossEntityReferenceLookup crossEntityReferenceLookup, + OxmEntityLookup oxmEntityLookup, SearchableEntityLookup searchableEntityLookup) throws Exception { + super(LOG, "CERS", internalSyncWorkers, aaiWorkers, esWorkers, schemaConfig.getIndexName(), + aaiStatConfig, esStatConfig); + this.crossEntityReferenceLookup = crossEntityReferenceLookup; + this.oxmEntityLookup = oxmEntityLookup; + this.searchableEntityLookup = searchableEntityLookup; + this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>(); + this.retryQueue = new ConcurrentLinkedDeque<RetryCrossEntitySyncContainer>(); + this.retryLimitTracker = new ConcurrentHashMap<String, Integer>(); + this.synchronizerName = "Cross Reference Entity Synchronizer"; + this.isAllWorkEnumerated = false; + this.esPutExecutor = NodeUtils.createNamedExecutor("CERS-ES-PUT", 5, LOG); + this.aaiEntityStats.intializeEntityCounters( + crossEntityReferenceLookup.getCrossReferenceEntityDescriptors().keySet()); + + this.esEntityStats.intializeEntityCounters( + crossEntityReferenceLookup.getCrossReferenceEntityDescriptors().keySet()); + this.syncDurationInMs = -1; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() + */ + @Override + public OperationState doSync() { + this.syncDurationInMs = -1; + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "CrossEntitySynchronizer", "", "Sync", ""); + + resetCounters(); + syncStartedTimeStampInMs = System.currentTimeMillis(); + launchSyncFlow(); + return OperationState.OK; + } + + @Override + public SynchronizerState getState() { + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) + */ + @Override + public String getStatReport(boolean showFinalReport) { + syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; + return getStatReport(syncDurationInMs, showFinalReport); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() + */ + @Override + public void shutdown() { + this.shutdownExecutors(); + } + + 
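+  /**
+   * Indicates whether this synchronizer has finished its work: all self-links have been
+   * enumerated and no AAI or ElasticSearch work items remain outstanding. (Descriptive
+   * comment added; the method below is otherwise undocumented.)
+   */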
@Override + protected boolean isSyncDone() { + int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); + + if (totalWorkOnHand > 0 || !isAllWorkEnumerated) { + return false; + } + + return true; + } + + /** + * Launch sync flow. + * + * @return the operation state + */ + private OperationState launchSyncFlow() { + final Map<String,String> contextMap = MDC.getCopyOfContextMap(); + Map<String, CrossEntityReferenceDescriptor> descriptorMap = + crossEntityReferenceLookup.getCrossReferenceEntityDescriptors(); + + if (descriptorMap.isEmpty()) { + LOG.error(AaiUiMsgs.ERROR_LOADING_OXM); + + return OperationState.ERROR; + } + + Collection<String> syncTypes = descriptorMap.keySet(); + + try { + + /* + * launch a parallel async thread to process the documents for each entity-type (to max the of + * the configured executor anyway) + */ + + aaiWorkOnHand.set(syncTypes.size()); + + for (String key : syncTypes) { + + supplyAsync(new Supplier<Void>() { + + @Override + public Void get() { + MDC.setContextMap(contextMap); + OperationResult typeLinksResult = null; + try { + typeLinksResult = aaiAdapter.getSelfLinksByEntityType(key); + aaiWorkOnHand.decrementAndGet(); + processEntityTypeSelfLinks(typeLinksResult); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "An error occurred processing entity selflinks. Error: " + exc.getMessage()); + } + + return null; + } + + }, aaiExecutor).whenComplete((result, error) -> { + if (error != null) { + LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, error.getMessage()); + } + }); + } + + while (aaiWorkOnHand.get() != 0) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); + } + + Thread.sleep(1000); + } + + aaiWorkOnHand.set(selflinks.size()); + isAllWorkEnumerated = true; + performSync(); + + while (!isSyncDone()) { + performRetrySync(); + Thread.sleep(1000); + } + + /* + * Make sure we don't hang on to retries that failed which could cause issues during future + * syncs + */ + retryLimitTracker.clear(); + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "An error occurred during entity synchronization. Error: " + exc.getMessage()); + + } + + return OperationState.OK; + } + + /** + * Perform sync. 
+ */ + private void performSync() { + while (selflinks.peek() != null) { + + SelfLinkDescriptor linkDescriptor = selflinks.poll(); + aaiWorkOnHand.decrementAndGet(); + + CrossEntityReferenceDescriptor descriptor = null; + + if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { + + descriptor = crossEntityReferenceLookup.getCrossReferenceEntityDescriptors() + .get(linkDescriptor.getEntityType()); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); + // go to next element in iterator + continue; + } + + if (descriptor.hasCrossEntityReferences()) { + + NetworkTransaction txn = new NetworkTransaction(); + txn.setDescriptor(descriptor); + txn.setLink(linkDescriptor.getSelfLink()); + txn.setQueryParameters(linkDescriptor.getDepthModifier()); + txn.setOperationType(HttpMethod.GET); + txn.setEntityType(linkDescriptor.getEntityType()); + + aaiWorkOnHand.incrementAndGet(); + + supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiAdapter), aaiExecutor) + .whenComplete((result, error) -> { + + aaiWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.SELF_LINK_GET, error.getLocalizedMessage()); + } else { + if (result == null) { + LOG.error(AaiUiMsgs.SELF_LINK_CROSS_REF_SYNC); + } else { + updateActiveInventoryCounters(result); + fetchDocumentForUpsert(result); + } + } + }); + } + } + } + } + + /** + * Process entity type self links. + * + * @param operationResult the operation result + */ + private void processEntityTypeSelfLinks(OperationResult operationResult) { + + JsonNode rootNode = null; + + final String jsonResult = operationResult.getResult(); + + if (jsonResult != null && jsonResult.length() > 0) { + + try { + rootNode = mapper.readTree(jsonResult); + } catch (IOException exc) { + // TODO // TODO -> LOG, waht should be logged here? 
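+        // Suggestion for the TODO above: log the parse failure so malformed AAI responses are
+        // visible, e.g. LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc.getLocalizedMessage());
+        // Note that when readTree() fails, rootNode remains null and the subsequent
+        // rootNode.get("result-data") call will throw a NullPointerException.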
+ } + + JsonNode resultData = rootNode.get("result-data"); + ArrayNode resultDataArrayNode = null; + + if (resultData.isArray()) { + resultDataArrayNode = (ArrayNode) resultData; + + Iterator<JsonNode> elementIterator = resultDataArrayNode.elements(); + JsonNode element = null; + + while (elementIterator.hasNext()) { + element = elementIterator.next(); + + final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); + final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); + + CrossEntityReferenceDescriptor descriptor = null; + + if (resourceType != null && resourceLink != null) { + descriptor = crossEntityReferenceLookup.getCrossReferenceEntityDescriptors().get(resourceType); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); + // go to next element in iterator + continue; + } + if (descriptor.hasCrossEntityReferences()) { + selflinks.add(new SelfLinkDescriptor( + resourceLink,SynchronizerConstants.DEPTH_ALL_MODIFIER, resourceType)); + } + } + } + } + } + } + + + + /** + * By providing the entity type and a json node for the entity, determine the + * primary key name(s) + primary key value(s) sufficient to build an entity query string + * of the following format: + * + * <entityType>.<primaryKeyNames>:<primaryKeyValues> + * + * @return - a composite string in the above format or null + */ + private String determineEntityQueryString(String entityType, JsonNode entityJsonNode) { + + OxmEntityDescriptor entityDescriptor = + oxmEntityLookup.getEntityDescriptors().get(entityType); + + String queryString = null; + + if ( entityDescriptor != null ) { + + final List<String> primaryKeyNames = entityDescriptor.getPrimaryKeyAttributeNames(); + final List<String> keyValues = new ArrayList<String>(); + NodeUtils.extractFieldValuesFromObject(entityJsonNode, primaryKeyNames, keyValues); + + queryString = entityType + "." + NodeUtils.concatArray(primaryKeyNames,"/") + ":" + NodeUtils.concatArray(keyValues); + + } + + return queryString; + + + } + + /** + * Fetch document for upsert. + * + * @param txn the txn + */ + private void fetchDocumentForUpsert(NetworkTransaction txn) { + + if (!txn.getOperationResult().wasSuccessful()) { + LOG.error(AaiUiMsgs.SELF_LINK_GET, txn.getOperationResult().getResult()); + return; + } + + CrossEntityReferenceDescriptor cerDescriptor = crossEntityReferenceLookup + .getCrossReferenceEntityDescriptors().get(txn.getDescriptor().getEntityName()); + + if (cerDescriptor != null && cerDescriptor.hasCrossEntityReferences()) { + + final String jsonResult = txn.getOperationResult().getResult(); + + if (jsonResult != null && jsonResult.length() > 0) { + + /** + * Here's what we are going to do: + * + * <li>Extract primary key name and value from the parent type. + * <li>Extract the primary key and value from the nested child instance. + * <li>Build a generic query to discover the self-link for the nested-child-instance using + * parent and child. + * <li>Set the self-link on the child. + * <li>Generate the id that will allow the elastic-search upsert to work. + * <li>Rinse and repeat. 
+ */ + + CrossEntityReference cerDefinition = cerDescriptor.getCrossEntityReference(); + + if (cerDefinition != null) { + JsonNode convertedNode = null; + try { + convertedNode = NodeUtils.convertJsonStrToJsonNode(txn.getOperationResult().getResult()); + + final String parentEntityQueryString = determineEntityQueryString(txn.getEntityType(), convertedNode); + + List<String> extractedParentEntityAttributeValues = new ArrayList<String>(); + + NodeUtils.extractFieldValuesFromObject(convertedNode, + cerDefinition.getReferenceAttributes(), + extractedParentEntityAttributeValues); + + List<JsonNode> nestedTargetEntityInstances = new ArrayList<JsonNode>(); + NodeUtils.extractObjectsByKey(convertedNode, cerDefinition.getTargetEntityType(), + nestedTargetEntityInstances); + + for (JsonNode targetEntityInstance : nestedTargetEntityInstances) { + + if (cerDescriptor != null) { + + String childEntityType = cerDefinition.getTargetEntityType(); + OxmEntityDescriptor childDesciptor = oxmEntityLookup.getEntityDescriptors().get(childEntityType); + + List<String> childPrimaryKeyNames = null; + + if (childDesciptor != null) { + childPrimaryKeyNames = childDesciptor.getPrimaryKeyAttributeNames(); + } else { + childPrimaryKeyNames = new ArrayList<String>(); + } + + List<String> childKeyValues = new ArrayList<String>(); + NodeUtils.extractFieldValuesFromObject(targetEntityInstance, childPrimaryKeyNames, childKeyValues); + + String childEntityQueryKeyString = childEntityType + "." + NodeUtils.concatArray(childPrimaryKeyNames,"/") + ":" + NodeUtils.concatArray(childKeyValues); + + /** + * Build generic-query to query child instance self-link from AAI + */ + List<String> orderedQueryKeyParams = new ArrayList<String>(); + + /** + * At present, there is an issue with resolving the self-link using the + * generic-query with nothing more than the service-instance identifier and the + * service-subscription. There is another level of detail we don't have access to + * unless we parse it out of the service-subscription self-link, which is a + * coupling I would like to avoid. Fortunately, there is a workaround, but only + * for service-instances, which is presently our only use-case for the + * cross-entity-reference in R1707. Going forwards hopefully there will be other + * ways to resolve a child self-link using parental embedded meta data that we + * don't currently have. + * + * The work-around with the service-instance entity-type is that it's possible to + * request the self-link using only the service-instance-id because of a + * historical AAI functional query requirement that it be possible to query a + * service-instance only by it's service-instance-id. This entity type is the only + * one in the system that can be queried this way which makes it a very limited + * workaround, but good enough for the current release. 
+ */ + + if (SERVICE_INSTANCE.equals(childEntityType)) { + orderedQueryKeyParams.clear(); + orderedQueryKeyParams.add(childEntityQueryKeyString); + } else { + orderedQueryKeyParams.add(parentEntityQueryString); + orderedQueryKeyParams.add(childEntityQueryKeyString); + } + + String genericQueryStr = null; + try { + genericQueryStr = aaiAdapter.getGenericQueryForSelfLink(childEntityType, orderedQueryKeyParams); + + if (genericQueryStr != null) { + aaiWorkOnHand.incrementAndGet(); + + OperationResult aaiQueryResult = aaiAdapter.queryActiveInventoryWithRetries( + genericQueryStr, "application/json", + aaiAdapter.getEndpointConfig().getNumRequestRetries()); + + aaiWorkOnHand.decrementAndGet(); + + if (aaiQueryResult!= null && aaiQueryResult.wasSuccessful()) { + + Collection<JsonNode> entityLinks = new ArrayList<JsonNode>(); + JsonNode genericQueryResult = null; + try { + genericQueryResult = NodeUtils.convertJsonStrToJsonNode(aaiQueryResult.getResult()); + + if ( genericQueryResult != null ) { + + NodeUtils.extractObjectsByKey(genericQueryResult, "resource-link", entityLinks); + + String selfLink = null; + + if (entityLinks.size() != 1) { + /** + * an ambiguity exists where we can't reliably determine the self + * link, this should be a permanent error + */ + LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_SELFLINK_AMBIGUITY, String.valueOf(entityLinks.size())); + } else { + selfLink = ((JsonNode) entityLinks.toArray()[0]).asText(); + + + IndexableCrossEntityReference icer = + getPopulatedDocument(targetEntityInstance, cerDescriptor); + + for (String parentCrossEntityReferenceAttributeValue : extractedParentEntityAttributeValues) { + icer.addCrossEntityReferenceValue( + parentCrossEntityReferenceAttributeValue); + } + + icer.setLink(ActiveInventoryAdapter.extractResourcePath(selfLink)); + + icer.deriveFields(); + + String link = null; + try { + link = elasticSearchAdapter + .buildElasticSearchGetDocUrl(getIndexName(), icer.getId()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, + exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction n2 = new NetworkTransaction(); + n2.setLink(link); + n2.setEntityType(txn.getEntityType()); + n2.setDescriptor(txn.getDescriptor()); + n2.setOperationType(HttpMethod.GET); + + esWorkOnHand.incrementAndGet(); + + supplyAsync( + new PerformElasticSearchRetrieval(n2, elasticSearchAdapter), + esExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, + error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, icer); + } + }); + } + + } + } else { + LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_DURING_AAI_RESPONSE_CONVERSION); + } + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.JSON_CONVERSION_ERROR, JsonNode.class.toString(), exc.getLocalizedMessage()); + } + + } else { + String message = "Entity sync failed because AAI query failed with error " + aaiQueryResult.getResult(); + LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_QUERY_ERROR, message); + } + + } else { + String message = "Entity Sync failed because generic query str could not be determined."; + LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_QUERY_ERROR, message); + } + } catch (Exception exc) { + String message = "Failed to sync entity because generation of generic query failed with error = " + exc.getMessage(); + LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_QUERY_ERROR, message); + } + + } + } + + } catch (IOException ioe) { + 
LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, ioe.getMessage()); + } + } + + } + + } else { + LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_DESCRIPTOR_NOT_FOUND, txn.getEntityType()); + } + } + + /** + * Perform document upsert. + * + * @param esGetResult the es get result + * @param icer the icer + */ + protected void performDocumentUpsert(NetworkTransaction esGetResult, + IndexableCrossEntityReference icer) { + /** + * <p> + * <ul> + * As part of the response processing we need to do the following: + * <li>1. Extract the version (if present), it will be the ETAG when we use the + * Search-Abstraction-Service + * <li>2. Spawn next task which is to do the PUT operation into elastic with or with the version + * tag + * <li>a) if version is null or RC=404, then standard put, no _update with version tag + * <li>b) if version != null, do PUT with _update?version= (versionNumber) in the URI to elastic + * </ul> + * </p> + */ + String link = null; + try { + link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), icer.getId()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); + return; + } + + boolean wasEntryDiscovered = false; + String versionNumber = null; + if (esGetResult.getOperationResult().getResultCode() == 404) { + LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, icer.getEntityPrimaryKeyValue()); + } else if (esGetResult.getOperationResult().getResultCode() == 200) { + wasEntryDiscovered = true; + try { + versionNumber = NodeUtils.extractFieldValueFromObject( + NodeUtils.convertJsonStrToJsonNode(esGetResult.getOperationResult().getResult()), + "_version"); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.ES_ABORT_CROSS_ENTITY_REF_SYNC, "version Number", + icer.getEntityPrimaryKeyValue(), exc.getLocalizedMessage()); + return; + } + } else { + /* + * Not being a 200 does not mean a failure. eg 201 is returned for created. TODO -> Should we + * return. 
+ */ + LOG.info(AaiUiMsgs.ES_OPERATION_RETURN_CODE, + String.valueOf(esGetResult.getOperationResult().getResultCode())); + return; + } + + try { + String jsonPayload = null; + if (wasEntryDiscovered) { + try { + ArrayList<JsonNode> sourceObject = new ArrayList<JsonNode>(); + NodeUtils.extractObjectsByKey( + NodeUtils.convertJsonStrToJsonNode(esGetResult.getOperationResult().getResult()), + "_source", sourceObject); + + if (!sourceObject.isEmpty()) { + String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false); + MergableEntity me = mapper.readValue(responseSource, MergableEntity.class); + ObjectReader updater = mapper.readerForUpdating(me); + MergableEntity merged = updater.readValue(icer.getAsJson()); + jsonPayload = mapper.writeValueAsString(merged); + } + } catch (IOException exc) { + LOG.error(AaiUiMsgs.ES_ABORT_CROSS_ENTITY_REF_SYNC, "source value", + icer.getEntityPrimaryKeyValue(), exc.getLocalizedMessage()); + return; + } + } else { + jsonPayload = icer.getAsJson(); + } + + if (wasEntryDiscovered) { + if (versionNumber != null && jsonPayload != null) { + + String requestPayload = elasticSearchAdapter.buildBulkImportOperationRequest(getIndexName(), + "default", icer.getId(), versionNumber, jsonPayload); + + NetworkTransaction transactionTracker = new NetworkTransaction(); + transactionTracker.setEntityType(esGetResult.getEntityType()); + transactionTracker.setDescriptor(esGetResult.getDescriptor()); + transactionTracker.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync(new PerformElasticSearchUpdate(elasticSearchAdapter.getBulkUrl(), + requestPayload, elasticSearchAdapter, transactionTracker), esPutExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_REF_PUT, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetResult, icer); + } + }); + } + + } else { + if (link != null && jsonPayload != null) { + + NetworkTransaction updateElasticTxn = new NetworkTransaction(); + updateElasticTxn.setLink(link); + updateElasticTxn.setEntityType(esGetResult.getEntityType()); + updateElasticTxn.setDescriptor(esGetResult.getDescriptor()); + updateElasticTxn.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, elasticSearchAdapter), + esPutExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_REF_PUT, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetResult, icer); + } + }); + } + } + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_REF_PUT, exc.getLocalizedMessage()); + } + } + + /** + * Process store document result. 
+ * + * @param esPutResult the es put result + * @param esGetResult the es get result + * @param icer the icer + */ + private void processStoreDocumentResult(NetworkTransaction esPutResult, + NetworkTransaction esGetResult, IndexableCrossEntityReference icer) { + + OperationResult or = esPutResult.getOperationResult(); + + if (!or.wasSuccessful()) { + if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { + + if (shouldAllowRetry(icer.getId())) { + + esWorkOnHand.incrementAndGet(); + + RetryCrossEntitySyncContainer rsc = new RetryCrossEntitySyncContainer(esGetResult, icer); + retryQueue.push(rsc); + + LOG.warn(AaiUiMsgs.ES_CROSS_REF_SYNC_VERSION_CONFLICT); + } + } else { + LOG.error(AaiUiMsgs.ES_CROSS_REF_SYNC_FAILURE, String.valueOf(or.getResultCode()), + or.getResult()); + } + } + } + + /** + * Perform retry sync. + */ + private void performRetrySync() { + while (retryQueue.peek() != null) { + + RetryCrossEntitySyncContainer rsc = retryQueue.poll(); + if (rsc != null) { + + IndexableCrossEntityReference icer = rsc.getIndexableCrossEntityReference(); + NetworkTransaction txn = rsc.getNetworkTransaction(); + + String link = null; + try { + // In this retry flow the icer object has already + // derived its fields + link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), icer.getId()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction retryTransaction = new NetworkTransaction(); + retryTransaction.setLink(link); + retryTransaction.setEntityType(txn.getEntityType()); + retryTransaction.setDescriptor(txn.getDescriptor()); + retryTransaction.setOperationType(HttpMethod.GET); + + /* + * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow and we did + * that for this request already when queuing the failed PUT! + */ + + supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, elasticSearchAdapter), + esExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, icer); + } + }); + } + + } + } + } + + /** + * Should allow retry. + * + * @param id the id + * @return true, if successful + */ + private boolean shouldAllowRetry(String id) { + boolean isRetryAllowed = true; + if (retryLimitTracker.get(id) != null) { + Integer currentCount = retryLimitTracker.get(id); + if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { + isRetryAllowed = false; + LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_RESYNC_LIMIT, id); + } else { + Integer newCount = new Integer(currentCount.intValue() + 1); + retryLimitTracker.put(id, newCount); + } + + } else { + Integer firstRetryCount = new Integer(1); + retryLimitTracker.put(id, firstRetryCount); + } + + return isRetryAllowed; + } + + /** + * Gets the populated document. + * + * @param entityNode the entity node + * @param resultDescriptor the result descriptor + * @return the populated document + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + protected IndexableCrossEntityReference getPopulatedDocument(JsonNode entityNode, + OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException { + + IndexableCrossEntityReference icer = new IndexableCrossEntityReference(); + + icer.setEntityType(resultDescriptor.getEntityName()); + + List<String> primaryKeyValues = new ArrayList<String>(); + String pkeyValue = null; + + for (String keyName : resultDescriptor.getPrimaryKeyAttributeNames()) { + pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName); + if (pkeyValue != null) { + primaryKeyValues.add(pkeyValue); + } else { + LOG.warn(AaiUiMsgs.ES_PKEYVALUE_NULL, resultDescriptor.getEntityName()); + } + } + + final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/"); + icer.setEntityPrimaryKeyValue(primaryCompositeKeyValue); + + return icer; + + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/ActiveInventoryAdapter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/ActiveInventoryAdapter.java new file mode 100644 index 0000000..dded79f --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/ActiveInventoryAdapter.java @@ -0,0 +1,404 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.dal; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URLEncoder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.NoSuchElementException; + +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.UriBuilder; + +import org.apache.http.client.utils.URIBuilder; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.restclient.client.RestClient; +import org.onap.aai.restclient.enums.RestAuthenticationMode; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.sparky.dal.exception.ElasticSearchOperationException; +import org.onap.aai.sparky.dal.rest.RestClientConstructionException; +import org.onap.aai.sparky.dal.rest.RestClientFactory; +import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.util.NodeUtils; + +/** + * The Class ActiveInventoryAdapter. + */ + +public class ActiveInventoryAdapter { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(ActiveInventoryAdapter.class); + + private static final String HEADER_TRANS_ID = "X-TransactionId"; + private static final String HEADER_FROM_APP_ID = "X-FromAppId"; + private static final String HEADER_AUTHORIZATION = "Authorization"; + + private static final String HTTP_SCHEME = "http"; + private static final String HTTPS_SCHEME = "https"; + + private static final String TRANSACTION_ID_PREFIX = "txnId-"; + private static final String UI_APP_NAME = "AAI-UI"; + + private OxmModelLoader oxmModelLoader; + private OxmEntityLookup oxmEntityLookup; + private RestEndpointConfig endpointConfig; + + private RestClient restClient; + + /** + * Instantiates a new active inventory adapter. 
+ * @throws RestClientConstructionException + * + */ + + public ActiveInventoryAdapter(OxmModelLoader oxmModelLoader, OxmEntityLookup oxmEntityLookup, + RestEndpointConfig endpointConfig) + throws ElasticSearchOperationException, IOException, RestClientConstructionException { + + this.oxmModelLoader = oxmModelLoader; + this.oxmEntityLookup = oxmEntityLookup; + this.endpointConfig = endpointConfig; + this.restClient = RestClientFactory.buildClient(endpointConfig); + + } + + protected Map<String, List<String>> getMessageHeaders() { + + Map<String, List<String>> headers = new HashMap<String, List<String>>(); + + headers.putIfAbsent(HEADER_FROM_APP_ID, new ArrayList<String>()); + headers.get(HEADER_FROM_APP_ID).add(UI_APP_NAME); + + headers.putIfAbsent(HEADER_TRANS_ID, new ArrayList<String>()); + headers.get(HEADER_TRANS_ID).add(TRANSACTION_ID_PREFIX + NodeUtils.getRandomTxnId()); + + if (endpointConfig.getRestAuthenticationMode() == RestAuthenticationMode.SSL_BASIC) { + + headers.putIfAbsent(HEADER_AUTHORIZATION, new ArrayList<String>()); + headers.get(HEADER_AUTHORIZATION).add(getBasicAuthenticationCredentials()); + + } + + return headers; + } + + protected String getBasicAuthenticationCredentials() { + String usernameAndPassword = String.join(":", endpointConfig.getBasicAuthUserName(), + endpointConfig.getBasicAuthPassword()); + return "Basic " + java.util.Base64.getEncoder().encodeToString(usernameAndPassword.getBytes()); + } + + public OxmEntityLookup getOxmEntityLookup() { + return oxmEntityLookup; + } + + public void setOxmEntityLookup(OxmEntityLookup oxmEntityLookup) { + this.oxmEntityLookup = oxmEntityLookup; + } + + protected String getResourceBasePath() { + + String versionStr = null; + if (oxmModelLoader != null) { + versionStr = String.valueOf(oxmModelLoader.getLatestVersionNum()); + } + + return "/aai/v" + versionStr; + + } + + public static String extractResourcePath(String selflink) { + try { + return new URI(selflink).getRawPath(); + } catch (URISyntaxException uriSyntaxException) { + LOG.error(AaiUiMsgs.ERROR_EXTRACTING_RESOURCE_PATH_FROM_LINK, + uriSyntaxException.getMessage()); + return selflink; + } + } + + + /** + * Gets the full url. + * + * @param resourceUrl the resource url + * @return the full url + * @throws Exception the exception + */ + private String getFullUrl(String resourceUrl) throws Exception { + final String basePath = getResourceBasePath(); + return String.format("https://%s:%s%s%s", endpointConfig.getEndpointIpAddress(), + endpointConfig.getEndpointServerPort(), basePath, resourceUrl); + } + + public String getGenericQueryForSelfLink(String startNodeType, List<String> queryParams) + throws Exception { + + URIBuilder urlBuilder = new URIBuilder(getFullUrl("/search/generic-query")); + + for (String queryParam : queryParams) { + urlBuilder.addParameter("key", queryParam); + } + + urlBuilder.addParameter("start-node-type", startNodeType); + urlBuilder.addParameter("include", startNodeType); + + final String constructedLink = urlBuilder.toString(); + + return constructedLink; + + } + + + public OperationResult getSelfLinksByEntityType(String entityType) throws Exception { + + /* + * For this one, I want to dynamically construct the nodes-query for self-link discovery as a + * utility method that will use the OXM model entity data to drive the query as well. 
+ */ + + if (entityType == null) { + throw new NullPointerException( + "Failed to getSelfLinksByEntityType() because entityType is null"); + } + + OxmEntityDescriptor entityDescriptor = oxmEntityLookup.getEntityDescriptors().get(entityType); + + if (entityDescriptor == null) { + throw new NoSuchElementException("Failed to getSelfLinksByEntityType() because could" + + " not find entity descriptor from OXM with type = " + entityType); + } + + String link = null; + final String primaryKeyStr = + NodeUtils.concatArray(entityDescriptor.getPrimaryKeyAttributeNames(), "/"); + + link = getFullUrl("/search/nodes-query?search-node-type=" + entityType + "&filter=" + + primaryKeyStr + ":EXISTS"); + + + return restClient.get(link, getMessageHeaders(), MediaType.APPLICATION_JSON_TYPE); + + } + + public OperationResult getSelfLinkForEntity(String entityType, String primaryKeyName, + String primaryKeyValue) throws Exception { + + if (entityType == null) { + throw new NullPointerException("Failed to getSelfLinkForEntity() because entityType is null"); + } + + if (primaryKeyName == null) { + throw new NullPointerException( + "Failed to getSelfLinkForEntity() because primaryKeyName is null"); + } + + if (primaryKeyValue == null) { + throw new NullPointerException( + "Failed to getSelfLinkForEntity() because primaryKeyValue is null"); + } + + /* + * Try to protect ourselves from illegal URI formatting exceptions caused by characters that + * aren't natively supported in a URI, but can be escaped to make them legal. + */ + + String encodedEntityType = URLEncoder.encode(entityType, "UTF-8"); + String encodedPrimaryKeyName = URLEncoder.encode(primaryKeyName, "UTF-8"); + String encodedPrimaryKeyValue = URLEncoder.encode(primaryKeyValue, "UTF-8"); + + String link = null; + + if ("service-instance".equals(entityType)) { + + link = getFullUrl("/search/generic-query?key=" + encodedEntityType + "." + + encodedPrimaryKeyName + ":" + encodedPrimaryKeyValue + "&start-node-type=" + + encodedEntityType + "&include=customer&depth=2"); + + } else { + + link = + getFullUrl("/search/generic-query?key=" + encodedEntityType + "." + encodedPrimaryKeyName + + ":" + encodedPrimaryKeyValue + "&start-node-type=" + encodedEntityType); + + } + + return queryActiveInventoryWithRetries(link, "application/json", + endpointConfig.getNumRequestRetries()); + + } + + + /** + * Our retry conditions should be very specific. + * + * @param r the r + * @return true, if successful + */ + private boolean shouldRetryRequest(OperationResult r) { + + if (r == null) { + return true; + } + + int rc = r.getResultCode(); + + if (rc == 200) { + return false; + } + + if (rc == 404) { + return false; + } + + return true; + + } + + /** + * Query active inventory. 
+ * + * @param url the url + * @param acceptContentType the accept content type + * @return the operation result + */ + // package protected for test classes instead of private + OperationResult queryActiveInventory(String url, String acceptContentType) { + + return restClient.get(url, getMessageHeaders(), MediaType.APPLICATION_JSON_TYPE); + + } + + public RestEndpointConfig getEndpointConfig() { + return endpointConfig; + } + + public void setEndpointConfig(RestEndpointConfig endpointConfig) { + this.endpointConfig = endpointConfig; + } + + public OperationResult queryActiveInventoryWithRetries(String url, String responseType, + int numRetries) { + + OperationResult result = null; + + for (int retryCount = 0; retryCount < numRetries; retryCount++) { + + LOG.debug(AaiUiMsgs.QUERY_AAI_RETRY_SEQ, url, String.valueOf(retryCount + 1)); + + result = queryActiveInventory(url, responseType); + + /** + * Record number of times we have attempted the request to later summarize how many times we + * are generally retrying over thousands of messages in a sync. + * + * If the number of retries is surprisingly high, then we need to understand why that is as + * the number of retries is also causing a heavier load on AAI beyond the throttling controls + * we already have in place in term of the transaction rate controller and number of + * parallelized threads per task processor. + */ + + result.setNumRetries(retryCount); + + if (!shouldRetryRequest(result)) { + + result.setFromCache(false); + LOG.debug(AaiUiMsgs.QUERY_AAI_RETRY_DONE_SEQ, url, String.valueOf(retryCount + 1)); + + return result; + } + + try { + /* + * Sleep between re-tries to be nice to the target system. + */ + Thread.sleep(50); + } catch (InterruptedException exc) { + LOG.error(AaiUiMsgs.QUERY_AAI_WAIT_INTERRUPTION, exc.getLocalizedMessage()); + break; + } + LOG.error(AaiUiMsgs.QUERY_AAI_RETRY_FAILURE_WITH_SEQ, url, String.valueOf(retryCount + 1)); + + } + + LOG.info(AaiUiMsgs.QUERY_AAI_RETRY_MAXED_OUT, url); + + return result; + + } + + public String repairSelfLink(String selfLink) { + return repairSelfLink(selfLink, null); + } + + /** + * This method adds a scheme, host and port (if missing) to the passed-in URI. + * If these parts of the URI are already present, they will not be duplicated. + * + * @param selflink The URI to repair + * @param queryParams The query parameters as a single string + * @return The corrected URI (i.e. includes a scheme/host/port) + */ + public String repairSelfLink(String selflink, String queryParams) { + if (selflink == null) { + return selflink; + } + + UriBuilder builder = UriBuilder.fromPath(selflink).host(endpointConfig.getEndpointIpAddress()) + .port(Integer.parseInt(endpointConfig.getEndpointServerPort())); + + switch (endpointConfig.getRestAuthenticationMode()) { + + case SSL_BASIC: + case SSL_CERT: { + builder.scheme(HTTPS_SCHEME); + break; + } + + default: { + builder.scheme(HTTP_SCHEME); + } + } + + boolean includeQueryParams = ( (null != queryParams) && (!"".equals(queryParams)) ); + + /* builder.build().toString() will encode special characters to hexadecimal pairs prefixed with a '%' + so we're adding the query parameters separately, in their UTF-8 representations, so that + characters such as '?', '&', etc. remain intact as needed by the synchronizer */ + return (builder.build().toString() + (includeQueryParams ? 
queryParams : "")); + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/ElasticSearchAdapter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/ElasticSearchAdapter.java new file mode 100644 index 0000000..3f5a273 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/ElasticSearchAdapter.java @@ -0,0 +1,157 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import javax.ws.rs.core.MediaType; + +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.restclient.client.RestClient; +import org.onap.aai.sparky.dal.rest.RestClientConstructionException; +import org.onap.aai.sparky.dal.rest.RestClientFactory; +import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig; + +/** + * The Class ElasticSearchAdapter. + + */ +public class ElasticSearchAdapter { + + private static final String BULK_IMPORT_INDEX_TEMPLATE = + "{\"index\":{\"_index\":\"%s\",\"_type\":\"%s\",\"_id\":\"%s\", \"_version\":\"%s\"}}\n"; + + private static final String BULK_API = "_bulk"; + + private static final String DEFAULT_TYPE = "default"; + + private RestClient restClient; + private RestEndpointConfig endpointConfig; + + /** + * Instantiates a new elastic search adapter. 
+ * @throws RestClientConstructionException + */ + public ElasticSearchAdapter(RestEndpointConfig endpointConfig) throws RestClientConstructionException { + + this.restClient = RestClientFactory.buildClient(endpointConfig); + this.endpointConfig = endpointConfig; + + } + + protected Map<String, List<String>> getMessageHeaders() { + Map<String, List<String>> headers = new HashMap<String, List<String>>(); + // insert mandatory headers if there are any + return headers; + } + + public OperationResult doGet(String url, MediaType acceptContentType) { + return restClient.get(url, getMessageHeaders(), acceptContentType); + } + + public OperationResult doDelete(String url, MediaType acceptContentType) { + return restClient.delete(url, getMessageHeaders(), acceptContentType); + } + + public OperationResult doPost(String url, String jsonPayload, MediaType acceptContentType) { + return restClient.post(url, jsonPayload, getMessageHeaders(), MediaType.APPLICATION_JSON_TYPE, + acceptContentType); + } + + public OperationResult doPut(String url, String jsonPayload, MediaType acceptContentType) { + return restClient.put(url, jsonPayload, getMessageHeaders(), MediaType.APPLICATION_JSON_TYPE, + acceptContentType); + } + + public OperationResult doPatch(String url, String jsonPayload, MediaType acceptContentType) { + + Map<String,List<String>> headers = getMessageHeaders(); + headers.putIfAbsent("X-HTTP-Method-Override", new ArrayList<String>()); + headers.get("X-HTTP-Method-Override").add("PATCH"); + + return restClient.post(url, jsonPayload, headers, MediaType.APPLICATION_JSON_TYPE, acceptContentType); + } + + public OperationResult doHead(String url, MediaType acceptContentType) { + return restClient.head(url, getMessageHeaders(), acceptContentType); + } + + public OperationResult doBulkOperation(String url, String payload) { + return restClient.put(url, payload, getMessageHeaders(), + MediaType.APPLICATION_FORM_URLENCODED_TYPE, MediaType.APPLICATION_JSON_TYPE); + } + + public String buildBulkImportOperationRequest(String index, String type, String id, + String version, String payload) { + + StringBuilder requestPayload = new StringBuilder(128); + + requestPayload.append(String.format(BULK_IMPORT_INDEX_TEMPLATE, index, type, id, version)); + requestPayload.append(payload).append("\n"); + + return requestPayload.toString(); + + } + + public OperationResult retrieveEntityById(String host, String port, String indexName, + String docType, String resourceUrl) { + String esUrl = + String.format("http://%s:%s/%s/%s/%s", host, port, indexName, docType, resourceUrl); + return doGet(esUrl, MediaType.APPLICATION_JSON_TYPE); + } + + public String buildElasticSearchUrlForApi(String indexName, String api) { + return String.format("http://%s:%s/%s/%s", endpointConfig.getEndpointIpAddress(), + endpointConfig.getEndpointServerPort(), indexName, api); + } + + public String buildElasticSearchUrl(String indexName, String docType) { + return String.format("http://%s:%s/%s/%s", endpointConfig.getEndpointIpAddress(), + endpointConfig.getEndpointServerPort(), indexName, docType); + } + + public String buildElasticSearchGetDocUrl(String indexName, String docType, String docId) { + return String.format("http://%s:%s/%s/%s/%s", endpointConfig.getEndpointIpAddress(), + endpointConfig.getEndpointServerPort(), indexName, docType, docId); + } + + public String buildElasticSearchGetDocUrl(String indexName, String docId) { + return buildElasticSearchGetDocUrl(indexName, DEFAULT_TYPE, docId); + } + + public String 
buildElasticSearchPostUrl(String indexName) { + return String.format("http://%s:%s/%s/%s", endpointConfig.getEndpointIpAddress(), + endpointConfig.getEndpointServerPort(), indexName, DEFAULT_TYPE); + } + + public String getBulkUrl() { + return String.format("http://%s:%s/%s", endpointConfig.getEndpointIpAddress(), + endpointConfig.getEndpointServerPort(), BULK_API); + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/GizmoAdapter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/GizmoAdapter.java new file mode 100644 index 0000000..4ceb0d6 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/GizmoAdapter.java @@ -0,0 +1,336 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.UriBuilder; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.restclient.client.RestClient; +import org.onap.aai.restclient.enums.RestAuthenticationMode; +import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.sparky.dal.exception.ElasticSearchOperationException; +import org.onap.aai.sparky.dal.rest.RestClientConstructionException; +import org.onap.aai.sparky.dal.rest.RestClientFactory; +import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.util.NodeUtils; + +/** + * The Class GizmoAdapter. 
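+ *
+ * <p>Illustrative wiring (a sketch only; the oxmModelLoader and gizmoEndpointConfig
+ * instances, the base paths, and the entity type shown are assumptions, not values
+ * defined by this patch):
+ * <pre>{@code
+ * GizmoAdapter gizmoAdapter = new GizmoAdapter(oxmModelLoader, gizmoEndpointConfig);
+ * gizmoAdapter.setInventoryBasePath("/services/inventory/v12");
+ * gizmoAdapter.setRelationshipsBasePath("/services/inventory/relationships/v12");
+ * OperationResult vserverLinks = gizmoAdapter.getSelfLinksByEntityType("vserver");
+ * }</pre>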
+ */ + +public class GizmoAdapter { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(GizmoAdapter.class); + + private static final String HEADER_TRANS_ID = "X-TransactionId"; + private static final String HEADER_FROM_APP_ID = "X-FromAppId"; + private static final String HEADER_AUTHORIZATION = "Authorization"; + + private static final String HTTP_SCHEME = "http"; + private static final String HTTPS_SCHEME = "https"; + + private static final String TRANSACTION_ID_PREFIX = "txnId-"; + private static final String UI_APP_NAME = "AAI-UI"; + + private OxmModelLoader oxmModelLoader; + + private RestEndpointConfig endpointConfig; + + private RestClient restClient; + + private String inventoryBasePath; + private String relationshipsBasePath; + + /** + * Instantiates a new active inventory adapter. + * + * @throws RestClientConstructionException + * + */ + + public GizmoAdapter(OxmModelLoader oxmModelLoader, RestEndpointConfig endpointConfig) + throws ElasticSearchOperationException, IOException, RestClientConstructionException { + + this.oxmModelLoader = oxmModelLoader; + this.endpointConfig = endpointConfig; + this.restClient = RestClientFactory.buildClient(endpointConfig); + + } + + public String getRelationshipsBasePath() { + return relationshipsBasePath; + } + + public void setRelationshipsBasePath(String relationshipsBasePath) { + this.relationshipsBasePath = relationshipsBasePath; + } + + public String getInventoryBasePath() { + return inventoryBasePath; + } + + public void setInventoryBasePath(String inventoryBasePath) { + this.inventoryBasePath = inventoryBasePath; + } + + public String getFullInventoryUrl(String resourceUrl) throws Exception { + final String host = endpointConfig.getEndpointIpAddress(); + final String port = endpointConfig.getEndpointServerPort(); + final String basePath = getInventoryBasePath(); + return String.format("https://%s:%s%s%s", host, port, basePath, resourceUrl); + } + + public String addServerDetailsToUrl(String resourceUrl) throws Exception { + final String host = endpointConfig.getEndpointIpAddress(); + final String port = endpointConfig.getEndpointServerPort(); + return String.format("https://%s:%s/%s", host, port, resourceUrl); + } + + public String getFullRelationshipUrl(String resourceUrl) throws Exception { + final String host = endpointConfig.getEndpointIpAddress(); + final String port = endpointConfig.getEndpointServerPort(); + final String basePath = getRelationshipsBasePath(); + return String.format("https://%s:%s%s%s", host, port, basePath, resourceUrl); + } + + protected Map<String, List<String>> getMessageHeaders() { + + Map<String, List<String>> headers = new HashMap<String, List<String>>(); + + headers.putIfAbsent(HEADER_FROM_APP_ID, new ArrayList<String>()); + headers.get(HEADER_FROM_APP_ID).add(UI_APP_NAME); + + headers.putIfAbsent(HEADER_TRANS_ID, new ArrayList<String>()); + headers.get(HEADER_TRANS_ID).add(TRANSACTION_ID_PREFIX + NodeUtils.getRandomTxnId()); + + if (endpointConfig.getRestAuthenticationMode() == RestAuthenticationMode.SSL_BASIC) { + + headers.putIfAbsent(HEADER_AUTHORIZATION, new ArrayList<String>()); + headers.get(HEADER_AUTHORIZATION).add(getBasicAuthenticationCredentials()); + + } + + return headers; + } + + protected String getBasicAuthenticationCredentials() { + String usernameAndPassword = String.join(":", endpointConfig.getBasicAuthUserName(), + endpointConfig.getBasicAuthPassword()); + return "Basic " + java.util.Base64.getEncoder().encodeToString(usernameAndPassword.getBytes()); + } + + /** 
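+   * Determines whether a failed Gizmo request should be attempted again: a null result, or a
+   * result code other than 200 (found) and 404 (not found), triggers a retry.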
+ * Our retry conditions should be very specific. + * + * @param r + * the r + * @return true, if successful + */ + private boolean shouldRetryRequest(OperationResult r) { + + if (r == null) { + return true; + } + + int rc = r.getResultCode(); + + if (rc == 200) { + return false; + } + + if (rc == 404) { + return false; + } + + return true; + + } + + /** + * Query active inventory. + * + * @param url + * the url + * @param acceptContentType + * the accept content type + * @return the operation result + */ + OperationResult queryGizmo(String url, String acceptContentType) { + + return restClient.get(url, getMessageHeaders(), MediaType.APPLICATION_JSON_TYPE); + + } + + public RestEndpointConfig getEndpointConfig() { + return endpointConfig; + } + + public void setEndpointConfig(RestEndpointConfig endpointConfig) { + this.endpointConfig = endpointConfig; + } + + public OperationResult queryGizmoWithRetries(String url, String responseType, int numRetries) { + + OperationResult result = null; + + for (int retryCount = 0; retryCount < numRetries; retryCount++) { + + LOG.debug(AaiUiMsgs.QUERY_AAI_RETRY_SEQ, url, String.valueOf(retryCount + 1)); + + result = queryGizmo(url, responseType); + + /** + * Record number of times we have attempted the request to later + * summarize how many times we are generally retrying over thousands + * of messages in a sync. + * + * If the number of retries is surprisingly high, then we need to + * understand why that is as the number of retries is also causing a + * heavier load on AAI beyond the throttling controls we already + * have in place in term of the transaction rate controller and + * number of parallelized threads per task processor. + */ + + result.setNumRetries(retryCount); + + if (!shouldRetryRequest(result)) { + + result.setFromCache(false); + LOG.debug(AaiUiMsgs.QUERY_AAI_RETRY_DONE_SEQ, url, String.valueOf(retryCount + 1)); + + return result; + } + + try { + /* + * Sleep between re-tries to be nice to the target system. + */ + Thread.sleep(50); + } catch (InterruptedException exc) { + LOG.error(AaiUiMsgs.QUERY_AAI_WAIT_INTERRUPTION, exc.getLocalizedMessage()); + break; + } + LOG.error(AaiUiMsgs.QUERY_AAI_RETRY_FAILURE_WITH_SEQ, url, String.valueOf(retryCount + 1)); + + } + + LOG.info(AaiUiMsgs.QUERY_AAI_RETRY_MAXED_OUT, url); + + return result; + + } + + /** + * This method adds a scheme, host and port (if missing) to the passed-in + * URI. If these parts of the URI are already present, they will not be + * duplicated. + * + * @param selflink + * The URI to repair + * @param queryParams + * The query parameters as a single string + * @return The corrected URI (i.e. 
includes a scheme/host/port) + */ + + private String repairGizmoSelfLink(String baseUrlPath, String selfLink, String queryParams) { + + if (selfLink == null) { + return selfLink; + } + + if (selfLink.startsWith("http") || selfLink.startsWith("https")) { + return selfLink; + } + + UriBuilder builder = UriBuilder.fromPath(baseUrlPath + "/" + selfLink) + .host(endpointConfig.getEndpointIpAddress()) + .port(Integer.parseInt(endpointConfig.getEndpointServerPort())); + + switch (endpointConfig.getRestAuthenticationMode()) { + + case SSL_BASIC: + case SSL_CERT: { + builder.scheme(HTTPS_SCHEME); + break; + } + + default: { + builder.scheme(HTTP_SCHEME); + } + } + + boolean includeQueryParams = ((null != queryParams) && (!"".equals(queryParams))); + + /* + * builder.build().toString() will encode special characters to hexadecimal pairs prefixed with + * a '%' so we're adding the query parameters separately, in their UTF-8 representations, so + * that characters such as '?', '&', etc. remain intact as needed by the synchronizer + */ + return (builder.build().toString() + (includeQueryParams ? queryParams : "")); + + } + + public String repairRelationshipSelfLink(String selflink, String queryParams) { + return repairGizmoSelfLink(relationshipsBasePath, selflink, queryParams); + } + + public String repairInventorySelfLink(String selflink, String queryParams) { + return repairGizmoSelfLink(inventoryBasePath, selflink, queryParams); + } + + public OperationResult getSelfLinksByEntityType(String entityType) throws Exception { + + if (entityType == null) { + throw new NullPointerException("Failed to getSelfLinksByEntityType() because entityType is null"); + } + + String link = getFullInventoryUrl(entityType); + + return queryGizmoWithRetries(link, "application/json", endpointConfig.getNumRequestRetries()); + + } + + public static String extractResourcePath(String selflink) { + try { + return new URI(selflink).getRawPath(); + } catch (URISyntaxException uriSyntaxException) { + LOG.error(AaiUiMsgs.ERROR_EXTRACTING_RESOURCE_PATH_FROM_LINK, uriSyntaxException.getMessage()); + return selflink; + } + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/NetworkTransaction.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/NetworkTransaction.java new file mode 100644 index 0000000..0fc4a4e --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/NetworkTransaction.java @@ -0,0 +1,159 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal; + +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.dal.rest.HttpMethod; + + +/** + * The Class NetworkTransaction. + */ +public class NetworkTransaction { + + private OperationResult operationResult; + + private String entityType; + + private String link; + + private String queryParameters; + + private HttpMethod operationType; + + private OxmEntityDescriptor descriptor; + + private long createdTimeStampInMs; + + private long opTimeInMs; + + private long taskAgeInMs; + + /** + * Instantiates a new network transaction. + */ + public NetworkTransaction() { + this.createdTimeStampInMs = System.currentTimeMillis(); + this.opTimeInMs = 0L; + } + + /** + * Instantiates a new network transaction. + * + * @param method the method + * @param entityType the entity type + * @param or the or + */ + public NetworkTransaction(HttpMethod method, String entityType, OperationResult or) { + this(); + this.operationType = method; + this.entityType = entityType; + this.operationResult = or; + this.opTimeInMs = 0L; + } + + public HttpMethod getOperationType() { + return operationType; + } + + public long getTaskAgeInMs() { + return taskAgeInMs; + } + + /** + * Sets the task age in ms. + */ + public void setTaskAgeInMs() { + this.taskAgeInMs = (System.currentTimeMillis() - createdTimeStampInMs); + } + + public void setOperationType(HttpMethod operationType) { + this.operationType = operationType; + } + + public OperationResult getOperationResult() { + return operationResult; + } + + public void setOperationResult(OperationResult operationResult) { + this.operationResult = operationResult; + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public String getLink() { + return link; + } + + public void setLink(String link) { + this.link = link; + } + + public String getQueryParameters() { + return queryParameters; + } + + public void setQueryParameters(String queryParameters) { + this.queryParameters = queryParameters; + } + + public long getOpTimeInMs() { + return opTimeInMs; + } + + public void setOpTimeInMs(long opTimeInMs) { + this.opTimeInMs = opTimeInMs; + } + + public OxmEntityDescriptor getDescriptor() { + return descriptor; + } + + public void setDescriptor(OxmEntityDescriptor descriptor) { + this.descriptor = descriptor; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "NetworkTransaction [operationResult=" + operationResult.toString() + ", entityType=" + + entityType + ", link=" + link + ", operationType=" + operationType + ", descriptor=" + + descriptor.toString() + ", createdTimeStampInMs=" + createdTimeStampInMs + + ", taskAgeInMs=" + taskAgeInMs + "]"; + } + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryEntityStatistics.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryEntityStatistics.java new file mode 100644 index 0000000..5ec7318 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryEntityStatistics.java @@ -0,0 +1,285 @@ +/** + * 
============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.aai; + +import java.util.Comparator; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; +import java.util.concurrent.atomic.AtomicInteger; + +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.NetworkTransaction; + + +/** + * The Class ActiveInventoryEntityStatistics. + */ +public class ActiveInventoryEntityStatistics { + + private static final String TOTAL = "Total"; + + private static final String FOUND = "Found"; + + private static final String NO_PAYLOAD = "NoPayload"; + + private static final String NOT_FOUND = "NotFound"; + + private static final String NUM_RETRIES = "NumRetries"; + + private static final String ERROR = "Error"; + + private Map<String, HashMap<String, AtomicInteger>> activeInventoryEntityStatistics; + + /** + * Creates the entity op stats. + * + * @return the hash map + */ + private HashMap<String, AtomicInteger> createEntityOpStats() { + + HashMap<String, AtomicInteger> opStats = new HashMap<String, AtomicInteger>(); + + opStats.put(TOTAL, new AtomicInteger()); + opStats.put(FOUND, new AtomicInteger()); + opStats.put(NO_PAYLOAD, new AtomicInteger()); + opStats.put(NOT_FOUND, new AtomicInteger()); + opStats.put(NUM_RETRIES, new AtomicInteger()); + opStats.put(ERROR, new AtomicInteger()); + + return opStats; + + } + + /** + * Initializecreate active inventory entity statistics. + */ + private void initializecreateActiveInventoryEntityStatistics() { + Set<String> keys = activeInventoryEntityStatistics.keySet(); + + Set<String> opStatKeySet = null; + Map<String, AtomicInteger> opStats = null; + + for (String k : keys) { + + opStats = activeInventoryEntityStatistics.get(k); + + opStatKeySet = opStats.keySet(); + + for (String opStatKey : opStatKeySet) { + opStats.get(opStatKey).set(0); + } + } + } + + /** + * Instantiates a new active inventory entity statistics. + * + * @param loader the loader + */ + public ActiveInventoryEntityStatistics() { + activeInventoryEntityStatistics = new HashMap<String, HashMap<String, AtomicInteger>>(); + reset(); + } + + /** + * Initialize counters from oxm entity descriptors. + * + * @param descriptors the descriptors + */ + public void intializeEntityCounters( + String... 
entityTypes) { + + if (entityTypes != null && entityTypes.length > 0) { + for (String entityType : entityTypes) { + activeInventoryEntityStatistics.put(entityType, createEntityOpStats()); + } + + } + + } + + public void intializeEntityCounters( + Set<String> entityTypes) { + + if (entityTypes != null && entityTypes.size() > 0) { + for (String entityType : entityTypes) { + activeInventoryEntityStatistics.put(entityType, createEntityOpStats()); + } + } + + } + + + + /** + * Reset. + */ + public void reset() { + initializecreateActiveInventoryEntityStatistics(); + } + + /** + * Gets the result code. + * + * @param txn the txn + * @return the result code + */ + private int getResultCode(NetworkTransaction txn) { + + + if (txn == null) { + return -1; + } + + OperationResult or = txn.getOperationResult(); + + if (or == null) { + return -1; + } + + return or.getResultCode(); + + } + + /** + * Update active inventory entity counters. + * + * @param txn the txn + */ + private void updateActiveInventoryEntityCounters(NetworkTransaction txn) { + + if (txn == null) { + return; + } + + Map<String, AtomicInteger> opStats = activeInventoryEntityStatistics.get(txn.getEntityType()); + + int rc = getResultCode(txn); + + switch (txn.getOperationType()) { + + case GET: { + + opStats.get(TOTAL).incrementAndGet(); + + if (200 <= rc && rc <= 299) { + opStats.get(FOUND).incrementAndGet(); + } else if (rc == 404) { + opStats.get(NOT_FOUND).incrementAndGet(); + } else { + opStats.get(ERROR).incrementAndGet(); + } + + break; + } + + default: { + // nothing else for now + } + + } + + OperationResult or = txn.getOperationResult(); + + if (or != null && or.wasSuccessful()) { + + if (or.getResult() == null || or.getResult().length() == 0) { + opStats.get(NO_PAYLOAD).incrementAndGet(); + } + + if (or.getNumRetries() > 0) { + opStats.get(NUM_RETRIES).addAndGet(or.getNumRetries()); + } + + } + + + } + + /** + * Update counters. + * + * @param txn the txn + */ + public void updateCounters(NetworkTransaction txn) { + + updateActiveInventoryEntityCounters(txn); + + } + + public String getStatisticsReport() { + + StringBuilder sb = new StringBuilder(128); + + /* + * sort entities, then sort nested op codes + */ + + TreeMap<String, HashMap<String, AtomicInteger>> activeInventoryEntitySortedTreeMap = + new TreeMap<String, HashMap<String, AtomicInteger>>(new Comparator<String>() { + + @Override + public int compare(String o1, String o2) { + return o1.toLowerCase().compareTo(o2.toLowerCase()); + } + }); + + activeInventoryEntitySortedTreeMap.putAll(activeInventoryEntityStatistics); + + for (String counterEntityKey : activeInventoryEntitySortedTreeMap.keySet()) { + + HashMap<String, AtomicInteger> entityCounters = + activeInventoryEntitySortedTreeMap.get(counterEntityKey); + + AtomicInteger total = entityCounters.get(TOTAL); + AtomicInteger found = entityCounters.get(FOUND); + AtomicInteger noPayload = entityCounters.get(NO_PAYLOAD); + AtomicInteger notFound = entityCounters.get(NOT_FOUND); + AtomicInteger numRetries = entityCounters.get(NUM_RETRIES); + AtomicInteger error = entityCounters.get(ERROR); + + int totalValue = (total == null) ? 0 : total.get(); + int foundValue = (found == null) ? 0 : found.get(); + int noPayloadValue = (noPayload == null) ? 0 : noPayload.get(); + int notFoundValue = (notFound == null) ? 0 : notFound.get(); + int numRetriesValue = (numRetries == null) ? 0 : numRetries.get(); + int errorValue = (error == null) ? 
0 : error.get(); + + sb.append("\n ") + .append(String.format( + "%-30s TOTAL: %-12d FOUND: %-12d NO_PAYLOAD:" + + " %-12d NOT_FOUND: %-12d NUM_RETRIES: %-12d ERROR: %-12d", + counterEntityKey, totalValue, foundValue, noPayloadValue, notFoundValue, + numRetriesValue, errorValue)); + } + + return sb.toString(); + } + + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryProcessingExceptionStatistics.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryProcessingExceptionStatistics.java new file mode 100644 index 0000000..b05b12c --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/aai/ActiveInventoryProcessingExceptionStatistics.java @@ -0,0 +1,139 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.aai; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.analytics.AbstractStatistics; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.logging.AaiUiMsgs; + +/** + * The Class ActiveInventoryProcessingExceptionStatistics. + */ +public class ActiveInventoryProcessingExceptionStatistics extends AbstractStatistics { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(ActiveInventoryAdapter.class); + + private static final String NATIVE_SOCKET_CONNECT_EXCEPTION = "NativeSocketConnectException"; + private static final String NATIVE_SOCKET_CONNECTION_RESET = "NativeSocketConnectionReset"; + private static final String NATIVE_SOCKET_CONNECTION_REFUSED = "NativeSocketConnectionRefused"; + private static final String CLIENT_TIMEOUT_EXCEPTION = "JerseyClientTimoutException"; + private static final String UNKNOWN_EXCEPTION = "UnknownException"; + + /** + * Creates the counters. + */ + private void createCounters() { + addCounter(NATIVE_SOCKET_CONNECT_EXCEPTION); + addCounter(NATIVE_SOCKET_CONNECTION_RESET); + addCounter(NATIVE_SOCKET_CONNECTION_REFUSED); + addCounter(CLIENT_TIMEOUT_EXCEPTION); + addCounter(UNKNOWN_EXCEPTION); + } + + /** + * Instantiates a new active inventory processing exception statistics. + */ + public ActiveInventoryProcessingExceptionStatistics() { + createCounters(); + reset(); + } + + /** + * Update counters. 
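+   * Classifies unsuccessful transactions (other than 404s) by matching well-known
+   * java.net exception signatures in the response body (connect timeout, connection
+   * timed out, connection refused, connection reset) and pegs the matching counter;
+   * any unrecognized failure is counted as an unknown exception.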
+ * + * @param txn the txn + */ + public void updateCounters(NetworkTransaction txn) { + + if (txn == null) { + return; + } + + OperationResult or = txn.getOperationResult(); + + if (or != null && !or.wasSuccessful()) { + + if (or.getResultCode() != 404) { + + String result = or.getResult(); + + if (result != null) { + + /* + * Try to classify exceptions and peg counters + */ + + if (result.contains("java.net.SocketTimeoutException: connect timed out")) { + pegCounter(CLIENT_TIMEOUT_EXCEPTION); + } else if (result.contains("java.net.ConnectException: Connection timed out: connect")) { + pegCounter(NATIVE_SOCKET_CONNECT_EXCEPTION); + } else if (result.contains("java.net.ConnectException: Connection refused: connect")) { + pegCounter(NATIVE_SOCKET_CONNECTION_REFUSED); + } else if (result.contains("java.net.SocketException: Connection reset")) { + pegCounter(NATIVE_SOCKET_CONNECTION_RESET); + } else { + pegCounter(UNKNOWN_EXCEPTION); + LOG.error(AaiUiMsgs.PEGGING_ERROR, result.toString()); + } + + } + } + + } + + } + + public String getStatisticsReport() { + + StringBuilder sb = new StringBuilder(128); + + int nativeConnect = getCounterValue(NATIVE_SOCKET_CONNECT_EXCEPTION); + int nativeCxnReset = getCounterValue(NATIVE_SOCKET_CONNECTION_RESET); + int nativeCxnRefused = getCounterValue(NATIVE_SOCKET_CONNECTION_REFUSED); + int clientTimeout = getCounterValue(CLIENT_TIMEOUT_EXCEPTION); + int unknown = getCounterValue(UNKNOWN_EXCEPTION); + + sb.append("\n ") + .append(String.format("%-40s: %-12d", NATIVE_SOCKET_CONNECT_EXCEPTION, nativeConnect)); + sb.append("\n ") + .append(String.format("%-40s: %-12d", NATIVE_SOCKET_CONNECTION_RESET, nativeCxnReset)); + sb.append("\n ") + .append(String.format("%-40s: %-12d", NATIVE_SOCKET_CONNECTION_REFUSED, nativeCxnRefused)); + sb.append("\n ") + .append(String.format("%-40s: %-12d", CLIENT_TIMEOUT_EXCEPTION, clientTimeout)); + sb.append("\n ").append(String.format("%-40s: %-12d", UNKNOWN_EXCEPTION, unknown)); + + return sb.toString(); + + } + + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchEntityStatistics.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchEntityStatistics.java new file mode 100644 index 0000000..0d46c2a --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/elasticsearch/ElasticSearchEntityStatistics.java @@ -0,0 +1,265 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.elasticsearch; + +import java.util.Comparator; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; +import java.util.concurrent.atomic.AtomicInteger; + +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.dal.rest.HttpMethod; + + +/** + * The Class ElasticSearchEntityStatistics. + */ +public class ElasticSearchEntityStatistics { + + private static final String TOTAL = "Total"; + private static final String CREATED = "Created"; + private static final String MODIFIED = "Modified"; + private static final String OTHERSUCCESS = "OTHERSUCCESS"; + private static final String DELETED = "DELETED"; + private static final String ERROR = "ERROR"; + + private Map<String, HashMap<String, AtomicInteger>> entityStatistics; + + /** + * Creates the entity op stats. + * + * @return the hash map + */ + private HashMap<String, AtomicInteger> createEntityOpStats() { + + HashMap<String, AtomicInteger> opStats = new HashMap<String, AtomicInteger>(); + + opStats.put(TOTAL, new AtomicInteger()); + opStats.put(CREATED, new AtomicInteger()); + opStats.put(MODIFIED, new AtomicInteger()); + opStats.put(OTHERSUCCESS, new AtomicInteger()); + opStats.put(DELETED, new AtomicInteger()); + opStats.put(ERROR, new AtomicInteger()); + + return opStats; + + } + + /** + * Initializecreate active inventory entity statistics. + */ + private void initializecreateActiveInventoryEntityStatistics() { + Set<String> keys = entityStatistics.keySet(); + + Set<String> opStatKeySet = null; + Map<String, AtomicInteger> opStats = null; + + for (String k : keys) { + + opStats = entityStatistics.get(k); + + opStatKeySet = opStats.keySet(); + + for (String opStatKey : opStatKeySet) { + opStats.get(opStatKey).set(0); + } + } + } + + /** + * Instantiates a new elastic search entity statistics. + * + * @param loader the loader + */ + public ElasticSearchEntityStatistics() { + entityStatistics = new HashMap<String, HashMap<String, AtomicInteger>>(); + reset(); + } + + /** + * Initialize counters from oxm entity descriptors. + * + * @param descriptors the descriptors + */ + public void intializeEntityCounters( + String... entityTypes) { + + if (entityTypes != null && entityTypes.length > 0) { + for (String entityType : entityTypes) { + entityStatistics.put(entityType, createEntityOpStats()); + } + + } + + } + + public void intializeEntityCounters( + Set<String> entityTypes) { + + if (entityTypes != null && entityTypes.size() > 0) { + for (String entityType : entityTypes) { + entityStatistics.put(entityType, createEntityOpStats()); + } + } + + } + + /** + * Reset. + */ + public void reset() { + initializecreateActiveInventoryEntityStatistics(); + } + + /** + * Gets the result code. + * + * @param txn the txn + * @return the result code + */ + private int getResultCode(NetworkTransaction txn) { + + + if (txn == null) { + return -1; + } + + OperationResult or = txn.getOperationResult(); + + if (or == null) { + return -1; + } + + return or.getResultCode(); + + } + + /** + * Update elastic search entity counters. 
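+   * For PUT transactions the result code is bucketed as created (201), modified (200),
+   * other 2xx success, or error; for DELETE transactions any 2xx counts as deleted and
+   * anything else as error. Other HTTP methods are not counted.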
+ * + * @param txn the txn + */ + private void updateElasticSearchEntityCounters(NetworkTransaction txn) { + + if (txn == null) { + return; + } + + Map<String, AtomicInteger> entityOpStats = entityStatistics.get(txn.getEntityType()); + + int resultCode = getResultCode(txn); + + if (txn.getOperationType() == HttpMethod.PUT) { + + entityOpStats.get(TOTAL).incrementAndGet(); + + if (resultCode == 201) { + entityOpStats.get(CREATED).incrementAndGet(); + } else if (resultCode == 200) { + entityOpStats.get(MODIFIED).incrementAndGet(); + } else if (202 <= resultCode && resultCode <= 299) { + entityOpStats.get(OTHERSUCCESS).incrementAndGet(); + } else { + entityOpStats.get(ERROR).incrementAndGet(); + } + + } else if (txn.getOperationType() == HttpMethod.DELETE) { + + entityOpStats.get(TOTAL).incrementAndGet(); + + if (200 <= resultCode && resultCode <= 299) { + entityOpStats.get(DELETED).incrementAndGet(); + } else { + entityOpStats.get(ERROR).incrementAndGet(); + } + } + + } + + /** + * Update counters. + * + * @param txn the txn + */ + public void updateCounters(NetworkTransaction txn) { + + updateElasticSearchEntityCounters(txn); + + } + + public String getStatisticsReport() { + + StringBuilder sb = new StringBuilder(128); + + /* + * sort entities, then sort nested op codes + */ + + TreeMap<String, HashMap<String, AtomicInteger>> elasticEntitySortedTreeMap = + new TreeMap<String, HashMap<String, AtomicInteger>>(new Comparator<String>() { + + @Override + public int compare(String o1, String o2) { + return o1.toLowerCase().compareTo(o2.toLowerCase()); + } + }); + + elasticEntitySortedTreeMap.putAll(entityStatistics); + + for (String counterEntityKey : elasticEntitySortedTreeMap.keySet()) { + + HashMap<String, AtomicInteger> entityCounters = + elasticEntitySortedTreeMap.get(counterEntityKey); + + AtomicInteger total = entityCounters.get(TOTAL); + AtomicInteger created = entityCounters.get(CREATED); + AtomicInteger modified = entityCounters.get(MODIFIED); + AtomicInteger otherSuccess = entityCounters.get(OTHERSUCCESS); + AtomicInteger deleted = entityCounters.get(DELETED); + AtomicInteger error = entityCounters.get(ERROR); + + int totalValue = (total == null) ? 0 : total.get(); + int createdValue = (created == null) ? 0 : created.get(); + int modifiedValue = (modified == null) ? 0 : modified.get(); + int otherSuccessValue = (otherSuccess == null) ? 0 : otherSuccess.get(); + int deletedValue = (deleted == null) ? 0 : deleted.get(); + int errorValue = (error == null) ? 0 : error.get(); + + sb.append("\n ") + .append(String.format( + "%-30s TOTAL: %-12d CREATED: %-12d MODIFIED:" + + " %-12d OTHER_2XX: %-12d DELETED: %-12d ERROR: %-12d", + counterEntityKey, totalValue, createdValue, modifiedValue, otherSuccessValue, + deletedValue, errorValue)); + } + return sb.toString(); + } + + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/exception/ElasticSearchOperationException.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/exception/ElasticSearchOperationException.java new file mode 100644 index 0000000..5ad7fd0 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/exception/ElasticSearchOperationException.java @@ -0,0 +1,53 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.exception; + +/** + * The Class ElasticSearchOperationException. + */ +public class ElasticSearchOperationException extends Exception { + + private static final long serialVersionUID = -7689309913743200670L; + + /** + * Instantiates a new elastic search operation exception. + * + * @param message the message + * @param exc the exc + */ + public ElasticSearchOperationException(String message, Exception exc) { + super(message, exc); + } + + /** + * Instantiates a new elastic search operation exception. + * + * @param message the message + */ + public ElasticSearchOperationException(String message) { + super(message); + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/proxy/processor/AaiUiProxyProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/proxy/processor/AaiUiProxyProcessor.java new file mode 100644 index 0000000..bae0784 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/proxy/processor/AaiUiProxyProcessor.java @@ -0,0 +1,207 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.dal.proxy.processor; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import javax.json.Json; +import javax.json.JsonObjectBuilder; +import javax.servlet.http.HttpServletRequest; + +import org.apache.camel.Exchange; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.restclient.client.RestClient; +import org.onap.aai.restclient.rest.HttpUtil; +import org.onap.aai.sparky.dal.rest.RestClientConstructionException; +import org.onap.aai.sparky.dal.rest.RestClientFactory; +import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.viewandinspect.config.SparkyConstants; +import org.slf4j.MDC; + +/** + * The Class AaiUiProxyProcessor. + */ +public class AaiUiProxyProcessor { + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(AaiUiProxyProcessor.class); + private static Logger auditLogger = + LoggerFactory.getInstance().getAuditLogger(AaiUiProxyProcessor.class.getName()); + + private RestClient client; + private String synapseBaseUrl; + + private OperationResult operationResult = null; + + private String xTransactionId; + private String xFromAppId; + + private static final String ROUTER_SERVICE = "routerService"; + + + /** + * Instantiates a new AaiUiProxyProcessor. + * + * @throws RestClientConstructionException + */ + + public AaiUiProxyProcessor(RestEndpointConfig endpointConfig, String apiGatewayEndpoint) + throws RestClientConstructionException { + client = RestClientFactory.buildClient(endpointConfig); + synapseBaseUrl = "https://" + endpointConfig.getEndpointIpAddress() + ":" + + endpointConfig.getEndpointServerPort() + "/" + apiGatewayEndpoint; + } + + + void setUpMdcContext(final Exchange exchange, final HttpServletRequest request) { + + Object xTransactionId = exchange.getIn().getHeader("X-TransactionId"); + if (xTransactionId == null) { + this.xTransactionId = NodeUtils.getRandomTxnId(); + } else { + this.xTransactionId = (String) xTransactionId; + } + + Object partnerName = exchange.getIn().getHeader("X-FromAppId"); + if (partnerName == null) { + xFromAppId = "Browser"; + } else { + xFromAppId = (String) partnerName; + } + + MdcContext.initialize((String) xTransactionId, "AAI-UI", "", xFromAppId, + request.getRequestURI() + ":" + request.getLocalPort()); + } + + private Map<String, List<String>> getHeaders() { + Map<String, List<String>> headers = new HashMap<>(); + headers.put("X-FromAppId", Arrays.asList(SparkyConstants.APP_NAME)); + headers.put("X-TransactionId", Arrays.asList(MDC.get(MdcContext.MDC_REQUEST_ID))); + headers.put("X-FromAppId", Arrays.asList(MDC.get(MdcContext.MDC_PARTNER_NAME))); + return headers; + } + + private String getProxyPayloadAsString(final Exchange exchange) { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + String srcUri = ""; + try { + srcUri = (String) exchange.getIn().getHeader(Exchange.HTTP_URI); + jsonBuilder.add("origin-uri", srcUri); + + String body = exchange.getIn().getBody(String.class); + + if (body != null && body.length() != 0) { + jsonBuilder.add("origin-payload", body); + } + + } catch (Exception e) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "Failed to extract payload for proxying.\n" + "Requestor URL: " + srcUri); + } + + return 
jsonBuilder.build().toString(); + } + + private String getSynapseUrl(String requestUri) { + String url = ""; + int pos = requestUri.indexOf(ROUTER_SERVICE); + if (pos != -1) { + url = synapseBaseUrl + requestUri.substring(pos + ROUTER_SERVICE.length()); + } else { + LOG.error(AaiUiMsgs.DR_REQUEST_URI_FOR_PROXY_UNKNOWN, requestUri); + } + return url; + } + + public void proxyMessage(Exchange exchange) { + HttpServletRequest request = exchange.getIn().getBody(HttpServletRequest.class); + + setUpMdcContext(exchange, request); + + try { + Map<String, List<String>> headers = getHeaders(); + String proxyPayload = getProxyPayloadAsString(exchange); + String fromUrl = (String) exchange.getIn().getHeader(Exchange.HTTP_URI); + String toUrl = getSynapseUrl(fromUrl); + auditLogger.info(AaiUiMsgs.DR_PROXY_FROM_TO, fromUrl, toUrl); + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Proxying request:\n" + proxyPayload + "\n" + "Target URL:\n" + toUrl); + + long startTimeInMs = System.currentTimeMillis(); + + operationResult = client.post(toUrl, proxyPayload, headers, + javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE, + javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE); + + long drOpTime = (System.currentTimeMillis() - startTimeInMs); + int rc = operationResult.getResultCode(); + String result = ""; + + if (HttpUtil.isHttpResponseClassSuccess(rc)) { + result = operationResult.getResult(); + } else { + result = operationResult.getFailureCause(); + LOG.info(AaiUiMsgs.DR_PROCESSING_FAILURE, String.valueOf(rc), proxyPayload); + } + + auditLogger.info(AaiUiMsgs.DR_PROCESSING_TIME, String.valueOf(drOpTime)); + + exchange.getOut().setHeader("X-TransactionId", xTransactionId); + exchange.getOut().setHeader("X-FromAppId", xFromAppId); + exchange.getOut().setHeader("RequestUrl", request.getRequestURI()); + exchange.getOut().setHeader("RequestPort", request.getLocalPort()); + exchange.getOut().setBody(result); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_PROCESSING_REQUEST, exc); + } + } + + public String getSynapseBaseUrl() { + return synapseBaseUrl; + } + + public void setSynapseBaseUrl(String synapseBaseUrl) { + this.synapseBaseUrl = synapseBaseUrl; + } + + public RestClient getClient() { + return client; + } + + public void setClient(RestClient client) { + this.client = client; + } + + protected OperationResult getOperationResult() { + return operationResult; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/HttpMethod.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/HttpMethod.java new file mode 100644 index 0000000..a891d20 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/HttpMethod.java @@ -0,0 +1,33 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.rest; + + +/** + * The Enum HttpMethod. + */ +public enum HttpMethod { + GET, PUT, POST, DELETE, PATCH, HEAD +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestClientConstructionException.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestClientConstructionException.java new file mode 100644 index 0000000..830e624 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestClientConstructionException.java @@ -0,0 +1,38 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.rest; + +public class RestClientConstructionException extends Exception { + + /** + * + */ + private static final long serialVersionUID = 1L; + + public RestClientConstructionException(String message) { + super(message); + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestClientFactory.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestClientFactory.java new file mode 100644 index 0000000..30e48b7 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestClientFactory.java @@ -0,0 +1,97 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.rest; + +import org.onap.aai.restclient.client.RestClient; +import org.onap.aai.sparky.config.SparkyResourceLoader; +import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig; +import org.onap.aai.sparky.util.Encryptor; + +public class RestClientFactory { + + public static RestClient buildClient(RestEndpointConfig restEndpointConfig) + throws RestClientConstructionException { + + if (restEndpointConfig == null) { + throw new RestClientConstructionException( + "Failed to build RestClient because RestEndpointConfig is null."); + } + + if (restEndpointConfig.getRestAuthenticationMode() == null) { + throw new RestClientConstructionException( + "Failed to build RestClient because RestAuthenticationMode is null."); + } + + SparkyResourceLoader resourceLoader = restEndpointConfig.getResourceLoader(); + + switch (restEndpointConfig.getRestAuthenticationMode()) { + + case SSL_CERT: { + + Encryptor enc = new Encryptor(); + String certFileNameFullPath = resourceLoader.getAbsolutePath(restEndpointConfig.getCertFileName()); + String decryptedCertPassword = enc.decryptValue(restEndpointConfig.getCertPassword()); + String truststoreFileNameFullPath = + resourceLoader.getAbsolutePath(restEndpointConfig.getTruststoreFileName()); + + return new RestClient() // + .authenticationMode(restEndpointConfig.getRestAuthenticationMode()) // + .validateServerCertChain(restEndpointConfig.isValidateServerCertChain()) // + .validateServerHostname(restEndpointConfig.isValidateServerHostname()) // + .clientCertFile(certFileNameFullPath) // + .clientCertPassword(decryptedCertPassword) // + .trustStore(truststoreFileNameFullPath) // + .connectTimeoutMs(restEndpointConfig.getConnectTimeoutInMs()) // + .readTimeoutMs(restEndpointConfig.getReadTimeoutInMs()); + } + + case SSL_BASIC: { + + return new RestClient() // + .authenticationMode(restEndpointConfig.getRestAuthenticationMode()) // + .basicAuthUsername(restEndpointConfig.getBasicAuthUserName()) // + .basicAuthPassword(restEndpointConfig.getBasicAuthPassword()) // + .connectTimeoutMs(restEndpointConfig.getConnectTimeoutInMs()) // + .readTimeoutMs(restEndpointConfig.getReadTimeoutInMs()); + + } + + case HTTP_NOAUTH: + case UNKNOWN_MODE: + default: { + + return new RestClient() // + .authenticationMode(restEndpointConfig.getRestAuthenticationMode()) // + .connectTimeoutMs(restEndpointConfig.getConnectTimeoutInMs()) // + .readTimeoutMs(restEndpointConfig.getReadTimeoutInMs()); + + } + + + } + + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestOperationalStatistics.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestOperationalStatistics.java new file mode 100644 index 0000000..dde68b8 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/RestOperationalStatistics.java @@ -0,0 +1,255 @@ +/** + * 
============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.rest; + +import org.onap.aai.sparky.analytics.AbstractStatistics; +import org.onap.aai.sparky.dal.NetworkTransaction; + +/** + * The Class RestOperationalStatistics. + */ +public class RestOperationalStatistics extends AbstractStatistics { + + private static final String GET_1XX = "GET_1XX"; + private static final String GET_2XX = "GET_2XX"; + private static final String GET_3XX = "GET_3XX"; + private static final String GET_4XX = "GET_4XX"; + private static final String GET_5XX = "GET_5XX"; + private static final String GET_6XX = "GET_6XX"; + + private static final String PUT_1XX = "PUT_1XX"; + private static final String PUT_2XX = "PUT_2XX"; + private static final String PUT_3XX = "PUT_3XX"; + private static final String PUT_4XX = "PUT_4XX"; + private static final String PUT_5XX = "PUT_5XX"; + private static final String PUT_6XX = "PUT_6XX"; + + private static final String POST_1XX = "POST_1XX"; + private static final String POST_2XX = "POST_2XX"; + private static final String POST_3XX = "POST_3XX"; + private static final String POST_4XX = "POST_4XX"; + private static final String POST_5XX = "POST_5XX"; + private static final String POST_6XX = "POST_6XX"; + + private static final String DELETE_1XX = "DELETE_1XX"; + private static final String DELETE_2XX = "DELETE_2XX"; + private static final String DELETE_3XX = "DELETE_3XX"; + private static final String DELETE_4XX = "DELETE_4XX"; + private static final String DELETE_5XX = "DELETE_5XX"; + private static final String DELETE_6XX = "DELETE_6XX"; + + /** + * Creates the counters. + */ + private void createCounters() { + + addCounter(GET_1XX); + addCounter(GET_2XX); + addCounter(GET_3XX); + addCounter(GET_4XX); + addCounter(GET_5XX); + addCounter(GET_6XX); + + addCounter(PUT_1XX); + addCounter(PUT_2XX); + addCounter(PUT_3XX); + addCounter(PUT_4XX); + addCounter(PUT_5XX); + addCounter(PUT_6XX); + + addCounter(POST_1XX); + addCounter(POST_2XX); + addCounter(POST_3XX); + addCounter(POST_4XX); + addCounter(POST_5XX); + addCounter(POST_6XX); + + addCounter(DELETE_1XX); + addCounter(DELETE_2XX); + addCounter(DELETE_3XX); + addCounter(DELETE_4XX); + addCounter(DELETE_5XX); + addCounter(DELETE_6XX); + + + } + + /** + * Gets the result code. 
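+   * Returns -1 when the transaction or its operation result is null.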
+ * + * @param txn the txn + * @return the result code + */ + private int getResultCode(NetworkTransaction txn) { + + if (txn == null) { + return -1; + } + + if (txn.getOperationResult() == null) { + return -1; + } + + return txn.getOperationResult().getResultCode(); + + } + + /** + * Update counters. + * + * @param txn the txn + */ + public void updateCounters(NetworkTransaction txn) { + + if (txn == null) { + return; + } + + int rc = getResultCode(txn); + + switch (txn.getOperationType()) { + + case GET: { + + if (100 <= rc && rc <= 199) { + pegCounter(GET_1XX); + } else if (200 <= rc && rc <= 299) { + pegCounter(GET_2XX); + } else if (300 <= rc && rc <= 399) { + pegCounter(GET_3XX); + } else if (400 <= rc && rc <= 499) { + pegCounter(GET_4XX); + } else if (500 <= rc && rc <= 599) { + pegCounter(GET_5XX); + } else if (600 <= rc && rc <= 699) { + pegCounter(GET_6XX); + } + + break; + } + + case PUT: { + + if (100 <= rc && rc <= 199) { + pegCounter(PUT_1XX); + } else if (200 <= rc && rc <= 299) { + pegCounter(PUT_2XX); + } else if (300 <= rc && rc <= 399) { + pegCounter(PUT_3XX); + } else if (400 <= rc && rc <= 499) { + pegCounter(PUT_4XX); + } else if (500 <= rc && rc <= 599) { + pegCounter(PUT_5XX); + } else if (600 <= rc && rc <= 699) { + pegCounter(PUT_6XX); + } + + break; + } + + case POST: { + + if (100 <= rc && rc <= 199) { + pegCounter(POST_1XX); + } else if (200 <= rc && rc <= 299) { + pegCounter(POST_2XX); + } else if (300 <= rc && rc <= 399) { + pegCounter(POST_3XX); + } else if (400 <= rc && rc <= 499) { + pegCounter(POST_4XX); + } else if (500 <= rc && rc <= 599) { + pegCounter(POST_5XX); + } else if (600 <= rc && rc <= 699) { + pegCounter(POST_6XX); + } + + break; + } + + case DELETE: { + + if (100 <= rc && rc <= 199) { + pegCounter(DELETE_1XX); + } else if (200 <= rc && rc <= 299) { + pegCounter(DELETE_2XX); + } else if (300 <= rc && rc <= 399) { + pegCounter(DELETE_3XX); + } else if (400 <= rc && rc <= 499) { + pegCounter(DELETE_4XX); + } else if (500 <= rc && rc <= 599) { + pegCounter(DELETE_5XX); + } else if (600 <= rc && rc <= 699) { + pegCounter(DELETE_6XX); + } + + break; + } + + default: { + // not expecting anything else yet + } + + } + + } + + /** + * Instantiates a new rest operational statistics. 
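+   *
+   * <p>Illustrative usage (a sketch only; the operationResult instance and the entity type
+   * are assumptions, not part of this patch):
+   * <pre>{@code
+   * RestOperationalStatistics restStats = new RestOperationalStatistics();
+   * NetworkTransaction txn = new NetworkTransaction(HttpMethod.GET, "generic-vnf", operationResult);
+   * restStats.updateCounters(txn);
+   * String report = restStats.getStatisticsReport();
+   * }</pre>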
+ */ + public RestOperationalStatistics() { + createCounters(); + } + + public String getStatisticsReport() { + + StringBuilder sb = new StringBuilder(128); + + sb.append("\n ") + .append(String.format( + "%-12s 1XX: %-12d 2XX: %-12d 3XX: %-12d 4XX: %-12d 5XX: %-12d 6XX: %-12d ", + HttpMethod.DELETE, getCounterValue(DELETE_1XX), getCounterValue(DELETE_2XX), + getCounterValue(DELETE_3XX), getCounterValue(DELETE_4XX), getCounterValue(DELETE_5XX), + getCounterValue(DELETE_6XX))); + + sb.append("\n ").append(String.format( + "%-12s 1XX: %-12d 2XX: %-12d 3XX: %-12d 4XX: %-12d 5XX: %-12d 6XX: %-12d ", HttpMethod.PUT, + getCounterValue(PUT_1XX), getCounterValue(PUT_2XX), getCounterValue(PUT_3XX), + getCounterValue(PUT_4XX), getCounterValue(PUT_5XX), getCounterValue(PUT_6XX))); + + sb.append("\n ").append(String.format( + "%-12s 1XX: %-12d 2XX: %-12d 3XX: %-12d 4XX: %-12d 5XX: %-12d 6XX: %-12d ", HttpMethod.POST, + getCounterValue(POST_1XX), getCounterValue(POST_2XX), getCounterValue(POST_3XX), + getCounterValue(POST_4XX), getCounterValue(POST_5XX), getCounterValue(POST_6XX))); + + sb.append("\n ").append(String.format( + "%-12s 1XX: %-12d 2XX: %-12d 3XX: %-12d 4XX: %-12d 5XX: %-12d 6XX: %-12d ", HttpMethod.GET, + getCounterValue(GET_1XX), getCounterValue(GET_2XX), getCounterValue(GET_3XX), + getCounterValue(GET_4XX), getCounterValue(GET_5XX), getCounterValue(GET_6XX))); + + return sb.toString(); + } + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/config/RestEndpointConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/config/RestEndpointConfig.java new file mode 100644 index 0000000..8859f02 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/dal/rest/config/RestEndpointConfig.java @@ -0,0 +1,179 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.dal.rest.config; + +import org.onap.aai.restclient.enums.RestAuthenticationMode; +import org.onap.aai.sparky.config.SparkyResourceLoader; + +public class RestEndpointConfig { + + private String endpointIpAddress; + private String endpointServerPort; + private int numRequestRetries; + private String basicAuthUserName; + private String basicAuthPassword; + private RestAuthenticationMode restAuthenticationMode; + private int connectTimeoutInMs; + private int readTimeoutInMs; + private String certFileName; + private String certPassword; + private String truststoreFileName; + private boolean validateServerCertChain; + private boolean validateServerHostname; + private SparkyResourceLoader resourceLoader; + + public boolean isValidateServerCertChain() { + return validateServerCertChain; + } + + public void setValidateServerCertChain(boolean validateServerCertChain) { + this.validateServerCertChain = validateServerCertChain; + } + + public boolean isValidateServerHostname() { + return validateServerHostname; + } + + public void setValidateServerHostname(boolean validateServerHostname) { + this.validateServerHostname = validateServerHostname; + } + + public String getEndpointIpAddress() { + return endpointIpAddress; + } + + public void setEndpointIpAddress(String endpointIpAddress) { + this.endpointIpAddress = endpointIpAddress; + } + + public String getEndpointServerPort() { + return endpointServerPort; + } + + public void setEndpointServerPort(String endpointServerPort) { + this.endpointServerPort = endpointServerPort; + } + + public int getNumRequestRetries() { + return numRequestRetries; + } + + public void setNumRequestRetries(int numRequestRetries) { + this.numRequestRetries = numRequestRetries; + } + + public String getBasicAuthUserName() { + return basicAuthUserName; + } + + public void setBasicAuthUserName(String basicAuthUserName) { + this.basicAuthUserName = basicAuthUserName; + } + + public String getBasicAuthPassword() { + return basicAuthPassword; + } + + public void setBasicAuthPassword(String basicAuthPassword) { + this.basicAuthPassword = basicAuthPassword; + } + + public RestAuthenticationMode getRestAuthenticationMode() { + return restAuthenticationMode; + } + + public void setRestAuthenticationMode(RestAuthenticationMode restAuthenticationMode) { + this.restAuthenticationMode = restAuthenticationMode; + } + + public int getConnectTimeoutInMs() { + return connectTimeoutInMs; + } + + public void setConnectTimeoutInMs(int connectTimeoutInMs) { + this.connectTimeoutInMs = connectTimeoutInMs; + } + + public int getReadTimeoutInMs() { + return readTimeoutInMs; + } + + public void setReadTimeoutInMs(int readTimeoutInMs) { + this.readTimeoutInMs = readTimeoutInMs; + } + + public String getCertFileName() { + return certFileName; + } + + public void setCertFileName(String certFileName) { + this.certFileName = certFileName; + } + + public String getCertPassword() { + return certPassword; + } + + public void setCertPassword(String certPassword) { + this.certPassword = certPassword; + } + + public String getTruststoreFileName() { + return truststoreFileName; + } + + public void setTruststoreFileName(String truststoreFileName) { + this.truststoreFileName = truststoreFileName; + } + + public SparkyResourceLoader getResourceLoader() { + return resourceLoader; + } + + public void setResourceLoader(SparkyResourceLoader resourceLoader) { + this.resourceLoader = resourceLoader; + } + + @Override + public String toString() { + return "RestEndpointConfig [" + + 
(endpointIpAddress != null ? "endpointIpAddress=" + endpointIpAddress + ", " : "") + + (endpointServerPort != null ? "endpointServerPort=" + endpointServerPort + ", " : "") + + "numRequestRetries=" + numRequestRetries + ", " + + (basicAuthUserName != null ? "basicAuthUserName=" + basicAuthUserName + ", " : "") + + (basicAuthPassword != null ? "basicAuthPassword=" + basicAuthPassword + ", " : "") + + (restAuthenticationMode != null + ? "restAuthenticationMode=" + restAuthenticationMode + ", " : "") + + "connectTimeoutInMs=" + connectTimeoutInMs + ", readTimeoutInMs=" + readTimeoutInMs + ", " + + (certFileName != null ? "certFileName=" + certFileName + ", " : "") + + (certPassword != null ? "certPassword=" + certPassword + ", " : "") + + (truststoreFileName != null ? "truststoreFileName=" + truststoreFileName + ", " : "") + + "validateServerCertChain=" + validateServerCertChain + ", validateServerHostname=" + + validateServerHostname + "]"; + } + + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/AttributeEditProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/AttributeEditProcessor.java new file mode 100644 index 0000000..8b35d7c --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/AttributeEditProcessor.java @@ -0,0 +1,182 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.editattributes; + +import java.io.UnsupportedEncodingException; +import java.util.Map; + +import org.apache.camel.Exchange; +import org.apache.camel.component.restlet.RestletConstants; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.editattributes.entity.EditRequest; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.util.NodeUtils; +import org.restlet.Request; +import org.restlet.Response; +import org.restlet.data.ClientInfo; +import org.restlet.data.Cookie; +import org.restlet.data.MediaType; +import org.restlet.data.Status; +import org.restlet.util.Series; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * The Class AttributeEditProcessor. 
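+ *
+ * <p>Handles attribute-edit requests arriving on an Apache Camel route backed by the Restlet
+ * component: the JSON payload is bound to an {@code EditRequest}, the requesting user is resolved
+ * from the {@code attESHr} cookie, and the update itself is delegated to an
+ * {@code AttributeUpdater}. A hedged wiring sketch (the route URI and bean name are hypothetical,
+ * not taken from this patch):
+ * <pre>{@code
+ * from("restlet:/editAttributes?restletMethod=POST")
+ *     .process(exchange -> attributeEditProcessor.editAttribute(exchange));
+ * }</pre>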
+ */ +public class AttributeEditProcessor { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(AttributeEditProcessor.class); + + private ObjectMapper mapper; + private AttributeUpdater attrUpdater; + + public AttributeEditProcessor(AttributeUpdater attributeUpdater) { + this.attrUpdater = attributeUpdater; + + this.mapper = new ObjectMapper(); + mapper.setSerializationInclusion(Include.NON_EMPTY); + } + + public void editAttribute(Exchange exchange) { + + Object xTransactionId = exchange.getIn().getHeader("X-TransactionId"); + + if (xTransactionId == null) { + xTransactionId = NodeUtils.getRandomTxnId(); + } + + Object partnerName = exchange.getIn().getHeader("X-FromAppId"); + if (partnerName == null) { + partnerName = "Browser"; + } + + Request request = exchange.getIn().getHeader(RestletConstants.RESTLET_REQUEST, Request.class); + + /* + * Disables automatic Apache Camel Restlet component logging which prints out an undesirable log + * entry which includes client (e.g. browser) information + */ + request.setLoggable(false); + + ClientInfo clientInfo = request.getClientInfo(); + MdcContext.initialize((String) xTransactionId, "AAI-UI", "", (String) partnerName, + clientInfo.getAddress() + ":" + clientInfo.getPort()); + + String payload = exchange.getIn().getBody(String.class); + EditRequest editRequest = null; + OperationResult operationResult = new OperationResult(); + + Response response = exchange.getIn().getHeader(RestletConstants.RESTLET_RESPONSE, Response.class); + response.setStatus(Status.SUCCESS_OK); // 200 is assumed unless an actual exception occurs (a failure is still a valid response) + + boolean wasErrorDuringProcessing = false; + String errorMessage = null; + + + try { + + if (payload != null && !payload.isEmpty()) { + editRequest = mapper.readValue(payload, EditRequest.class); + + if (editRequest != null) { + + String attUid = getAttUid(request.getCookies()); + String objectUri = editRequest.getEntityUri(); + Map<String, Object> attributeValues = editRequest.getAttributes(); + + if (attUid != null && !attUid.isEmpty() && objectUri != null && !objectUri.isEmpty() + && attributeValues != null && !attributeValues.isEmpty()) { + + LOG.info(AaiUiMsgs.ATTRIBUTES_HANDLING_EDIT, objectUri, editRequest.toString()); + + operationResult = attrUpdater.updateObjectAttribute(objectUri, attributeValues, attUid); + + boolean wasSuccess = (operationResult.getResultCode() == 200); + String message = String.format("Edit Attributes completed with Result Code : %s (%s).", + operationResult.getResultCode(), wasSuccess ? "success" : "failed"); + + LOG.info(AaiUiMsgs.INFO_GENERIC, message); + } + } + } else { + wasErrorDuringProcessing = true; + errorMessage = "Empty payload provided, need details to complete request"; + } + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ATTRIBUTES_NOT_UPDATED_EXCEPTION, exc.getLocalizedMessage()); + operationResult.setResult(500, "Error encountered while trying to update attributes."); + response.setStatus(Status.SERVER_ERROR_INTERNAL); + } + + if(wasErrorDuringProcessing) { + LOG.error(AaiUiMsgs.ATTRIBUTES_NOT_UPDATED_MESSAGE, errorMessage); + } + + response.setEntity(operationResult.getResult(), MediaType.APPLICATION_JSON); + exchange.getOut().setBody(response); + } + + /** + * Gets the att uid. 
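+   * <p>The identifier is parsed from the pipe-delimited {@code attESHr} cookie (the eighth field,
+   * up to its first comma); an empty string is returned when the cookie series is absent or the
+   * value cannot be parsed.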
+ * + * @param request the request + * @return the att uid + * @throws UnsupportedEncodingException the unsupported encoding exception + */ + public String getAttUid(Series<Cookie> cookies) throws UnsupportedEncodingException { + String attId = ""; + if (cookies == null) { + LOG.error(AaiUiMsgs.COOKIE_NOT_FOUND); + return attId; + } + for (Cookie cookie : cookies) { + if (cookie.getName().equals("attESHr")) { + // This cookie is of the form : + // "FIRSTNAME|LASTNAME|emailname@domain.com|||ab1234||fl6789,RBFMSKQ," + // + "Z9V2298,9762186|YNNNNNNNNNNNNNYNNYYNNNNN|FIRSTNAME|EY6SC9000|" + // we are to extract fl6789 from this which would be the attuid for the user. + String value = cookie.getValue(); + value = java.net.URLDecoder.decode(value, "UTF-8"); + LOG.info(AaiUiMsgs.COOKIE_FOUND, value); + String[] values = value.split("\\|"); + if (values.length > 7) { + attId = (values[7].split(","))[0]; + + String initials = (values[0].substring(0, 1) + values[1].substring(0, 1)).toLowerCase(); + if (attId.startsWith(initials)) { + return attId; + } + } + } + } + return attId; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/AttributeUpdater.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/AttributeUpdater.java new file mode 100644 index 0000000..5d71135 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/AttributeUpdater.java @@ -0,0 +1,362 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.editattributes; + +import java.net.URI; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import javax.ws.rs.core.UriBuilder; + +import org.eclipse.persistence.dynamic.DynamicType; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.editattributes.exception.AttributeUpdateException; +import org.onap.aai.sparky.logging.AaiUiMsgs; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectWriter; +import com.fasterxml.jackson.databind.PropertyNamingStrategy; + +/** + * Class to process attribute updates on AAI objects. + * + * + */ +public class AttributeUpdater { + + /** + * The Class AaiEditObject. + */ + public class AaiEditObject { + String objectType; + String rootElement; + String keyName; + String keyValue; + String schemaVersion; + + /** + * Instantiates a new aai edit object. + */ + public AaiEditObject() { + + } + + /** + * Instantiates a new aai edit object. + * + * @param objectType the object type + * @param idName the id name + * @param schemaVersion the schema version + */ + public AaiEditObject(String objectType, String idName, String schemaVersion) { + super(); + this.objectType = objectType; + this.keyName = idName; + this.schemaVersion = schemaVersion; + } + + public String getObjectType() { + return objectType; + } + + public void setObjectType(String objectType) { + this.objectType = objectType; + } + + public String getKeyName() { + return keyName; + } + + public void setKeyName(String idName) { + this.keyName = idName; + } + + public String getSchemaVersion() { + return schemaVersion; + } + + public void setSchemaVersion(String schemaVersion) { + this.schemaVersion = schemaVersion; + } + + public void setKeyValue(String keyValue) { + this.keyValue = keyValue; + } + + public String getKeyValue() { + return keyValue; + } + + public String getRootElement() { + return rootElement; + } + + public void setRootElement(String rootElement) { + this.rootElement = rootElement; + } + + } + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(AttributeUpdater.class); + private static final String MESSAGE_VERSION_EXTRACTION_REGEX = "\\/(v[0-9]+)"; + private static final String ATTRIBUTES_UPDATED_SUCCESSFULLY = "Attributes updated successfully"; + private static final String ATTRIBUTES_NOT_UPDATED = "Attributes not updated. "; + + private ActiveInventoryAdapter aaiAdapter; + private UserValidator validator; + private OxmModelLoader oxmModelLoader; + private OxmEntityLookup oxmEntityLookup; + + /** + * Instantiates a new attribute updater. 
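+   * <p>Wires the OXM model loader, OXM entity lookup and active-inventory adapter, and constructs
+   * the {@code UserValidator} used for authorization checks; any failure while building the
+   * validator is re-thrown as an {@code AttributeUpdateException}.
+   *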
+ * @throws AttributeUpdateException + */ + public AttributeUpdater(OxmModelLoader oxmModelLoader, OxmEntityLookup oxmEntityLookup, ActiveInventoryAdapter activeInventoryAdapter) throws AttributeUpdateException { + super(); + this.oxmModelLoader = oxmModelLoader; + this.oxmEntityLookup = oxmEntityLookup; + this.aaiAdapter = activeInventoryAdapter; + + try { + this.validator = new UserValidator(); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ATTRIBUTES_ERROR_GETTING_AAI_CONFIG_OR_ADAPTER, exc.getLocalizedMessage()); + throw new AttributeUpdateException(exc); + } + } + + protected String getResourceBasePath() { + + String versionStr = null; + if (oxmModelLoader != null) { + versionStr = String.valueOf(oxmModelLoader.getLatestVersionNum()); + } + + return "/aai/v" + versionStr; + + } + + protected URI getBaseUri() { + return UriBuilder + .fromUri("https://" + aaiAdapter.getEndpointConfig().getEndpointIpAddress() + ":" + + aaiAdapter.getEndpointConfig().getEndpointServerPort() + getResourceBasePath()) + .build(); + } + + /** + * Update object attribute. + * + * @param objectUri - Valid URI of the object as per OXM model. + * @param attributeValues - Map of (attribute-name & attribute-value) for + * any attributes to be updated to the value. + * @param attUid - ATTUID of the user requesting the update. + * @return - OperationResult with success or failure reason. + */ + public OperationResult updateObjectAttribute(String objectUri, Map<String, Object> attributeValues, String attUid) { + OperationResult result = new OperationResult(); + LOG.info(AaiUiMsgs.ATTRIBUTES_UPDATE_METHOD_CALLED, objectUri, attUid, String.valueOf(attributeValues)); + if (!validator.isAuthorizedUser(attUid)) { + result.setResultCode(403); + result.setResult(String.format("User %s is not authorized for Attributes update ", attUid)); + LOG.error(AaiUiMsgs.ATTRIBUTES_USER_NOT_AUTHORIZED_TO_UPDATE, attUid); + return result; + } + + AaiEditObject object = null; + + try { + object = getEditObjectFromUri(objectUri); + } catch (AttributeUpdateException exc) { + result.setResultCode(400); + result.setResult(ATTRIBUTES_NOT_UPDATED); + LOG.error(AaiUiMsgs.ATTRIBUTES_NOT_UPDATED_EXCEPTION, exc.getLocalizedMessage()); + return result; + } + try { + String jsonPayload = convertEditRequestToJson(object, attributeValues); + String patchUri = getBaseUri().toString() + getRelativeUri(objectUri); + + + /* + * FIX ME: Dave Adams, 8-Nov-2017 + */ + + //result = aaiAdapter.doPatch(patchUri, jsonPayload, MediaType.APPLICATION_JSON); + + result = new OperationResult(); + result.setResultCode(404); + + if (result.getResultCode() == 200) { + result.setResult(ATTRIBUTES_UPDATED_SUCCESSFULLY); + String message = result.getResult() + " for " + objectUri; + LOG.info(AaiUiMsgs.INFO_GENERIC, message); + } else { + String message = ATTRIBUTES_NOT_UPDATED + " For: " + objectUri + ". AAI PATCH Status Code : " + + result.getResultCode() + ". Error : " + result.getResult(); + LOG.error(AaiUiMsgs.ATTRIBUTES_NOT_UPDATED_MESSAGE, message); + } + } catch (AttributeUpdateException exc) { + result.setResultCode(500); + result.setResult(ATTRIBUTES_NOT_UPDATED + exc.getLocalizedMessage()); + LOG.error(AaiUiMsgs.ATTRIBUTES_NOT_UPDATED_EXCEPTION, exc.getLocalizedMessage()); + } + return result; + + } + + /** + * Gets the relative uri. 
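+   * <p>Everything up to and including the last {@code /v<N>} version segment is stripped and a
+   * leading slash is guaranteed on the result. Illustrative example (the URI itself is
+   * hypothetical): {@code /aai/v13/network/generic-vnfs/generic-vnf/abc} becomes
+   * {@code /network/generic-vnfs/generic-vnf/abc}.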
+ * + * @param objectUri the object uri + * @return the relative uri + */ + public String getRelativeUri(String objectUri) { + String tempUri = objectUri; + final Pattern pattern = Pattern.compile(MESSAGE_VERSION_EXTRACTION_REGEX, Pattern.DOTALL); + Matcher matcher = pattern.matcher(objectUri); + while (matcher.find()) { + tempUri = objectUri.substring(matcher.end()); + } + if (!tempUri.startsWith("/")) { + tempUri = "/" + tempUri; + } + return tempUri; + } + + /** + * Gets the edits the object from uri. + * + * @param objectUri the object uri + * @return the edits the object from uri + * @throws AttributeUpdateException the attribute update exception + */ + public AaiEditObject getEditObjectFromUri(String objectUri) throws AttributeUpdateException { + + AaiEditObject object = new AaiEditObject(); + String version = getVersionFromUri(objectUri); + + if ( null == version ) { + version = "v" + String.valueOf(oxmModelLoader.getLatestVersionNum()); + } + object.setSchemaVersion(version); + + String[] values = objectUri.split("/"); + if (values.length < 2) { + throw new AttributeUpdateException("Invalid or malformed object URI : " + objectUri); + } + String keyValue = values[values.length - 1]; + String rootElement = values[values.length - 2]; + + object.setKeyValue(keyValue); + object.setRootElement(rootElement); + + String objectJavaType = null; + Map<String, DynamicType> entityTypeLookup = oxmEntityLookup.getEntityTypeLookup(); + DynamicType entity = entityTypeLookup.get(rootElement); + if ( null != entity ) { + objectJavaType = entity.getName(); + String message = "Descriptor: Alias: " + objectJavaType + " : DefaultRootElement: " + + rootElement; + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, message); + } + + + if (objectJavaType == null) { + throw new AttributeUpdateException( + "Object type could not be determined from the URI : " + objectUri); + } + object.setObjectType(objectJavaType); + + // Set key attribute name + final List<String> primaryKeys = entity.getDescriptor().getPrimaryKeyFieldNames(); + + if (primaryKeys.isEmpty()) { + throw new AttributeUpdateException("Object primary key not found in OXM version " + version); + } + + for (int i = 0; i < primaryKeys.size(); i++) { + final String primaryKey = primaryKeys.get(i); + if (primaryKey.indexOf("/text()") != -1) { + primaryKeys.set(i, primaryKey.replace("/text()", "")); + } + } + object.setKeyName(primaryKeys.iterator().next()); + + return object; + } + + /** + * Gets the version from uri. + * + * @param objectUri the object uri + * @return the version from uri + * @throws AttributeUpdateException the attribute update exception + */ + private String getVersionFromUri(String objectUri) throws AttributeUpdateException { + final Pattern pattern = Pattern.compile(MESSAGE_VERSION_EXTRACTION_REGEX, Pattern.DOTALL); + Matcher matcher = pattern.matcher(objectUri); + String messageSchemaVersion = null; + while (matcher.find()) { + messageSchemaVersion = matcher.group(1); + break; + } + return messageSchemaVersion; + } + + /** + * Convert edit request to json. 
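+   * <p>The PATCH body merges the object's key attribute with the requested attribute values (the
+   * mapper is configured with a kebab-case property naming strategy), producing a body along the
+   * lines of <code>{"vnf-id": "example-id", "prov-status": "NVTPROV"}</code>; the attribute names
+   * and values shown here are hypothetical.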
+ * + * @param object the object + * @param attributeValues the attribute values + * @return the string + * @throws AttributeUpdateException the attribute update exception + */ + private static String convertEditRequestToJson(AaiEditObject object, + Map<String, Object> attributeValues) throws AttributeUpdateException { + + ObjectMapper mapper = new ObjectMapper(); + mapper.setPropertyNamingStrategy(new PropertyNamingStrategy.KebabCaseStrategy()); + ObjectWriter ow = mapper.writer(); + + Map<String, Object> patchAttributes = new HashMap<>(); + patchAttributes.put(object.getKeyName(), object.getKeyValue()); + patchAttributes.putAll(attributeValues); + + try { + return ow.writeValueAsString(patchAttributes); + } catch (JsonProcessingException exc) { + throw new AttributeUpdateException("Caught a JPE while creating PATCH request body = ", exc); + } + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/UserAuthorizationReader.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/UserAuthorizationReader.java new file mode 100644 index 0000000..a5c251e --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/UserAuthorizationReader.java @@ -0,0 +1,79 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.editattributes; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * Reads user IDs from a file. Each line in the user authorization file should contain a single user + * ID. For example, + * + * <pre> + * user1 + * user2 + * </pre> + */ +public class UserAuthorizationReader { + + private File userAuthorizationFile; + + /** + * Set the user authorization file. + * + * @param file a user authorization file + */ + public UserAuthorizationReader(File file) { + this.userAuthorizationFile = file; + } + + /** + * Gets user IDs from a file. 
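+   * <p>The authorization file is re-read on every call; each line is trimmed and returned as-is,
+   * so blank lines appear in the list as empty strings.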
+ * + * @return a list of user IDs + * @throws IOException if there is a problem reading the user configuration file + */ + public List<String> getUsers() throws IOException { + List<String> userList = new ArrayList<>(); + try (Stream<String> stream = Files.lines(getUserAuthorizationFile().toPath())) { + userList.addAll(stream.map(String::trim).collect(Collectors.toList())); + } + return userList; + } + + // Getters and setters + public File getUserAuthorizationFile() { + return userAuthorizationFile; + } + + public void setUserAuthorizationFile(File file) { + this.userAuthorizationFile = file; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/UserValidator.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/UserValidator.java new file mode 100644 index 0000000..8999105 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/UserValidator.java @@ -0,0 +1,67 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.editattributes; + +import java.io.File; +import java.io.IOException; +import java.util.List; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.viewandinspect.config.SparkyConstants; + +/** + * Validates users against a user authorization file. + */ +public class UserValidator { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(UserValidator.class); + private static final String USER_AUTH_FILE = + SparkyConstants.AUTHORIZED_USERS_FILE_LOCATION; + + private UserAuthorizationReader userAuthorizationReader = + new UserAuthorizationReader(new File(USER_AUTH_FILE)); + + /** + * Returns true if the user is authorized. 
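+   * <p>The authorization file is consulted on every call; a null or empty user ID, or any problem
+   * reading the file, results in {@code false}.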
+ * + * @param userId a user identifier + * @return true if the user ID is present in the user authorization file + */ + public boolean isAuthorizedUser(String userId) { + if (userId != null && !userId.isEmpty()) { + try { + List<String> users = userAuthorizationReader.getUsers(); + return users.contains(userId); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.USER_AUTHORIZATION_FILE_UNAVAILABLE, userId); + return false; + } + } else { + return false; + } + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/entity/EditRequest.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/entity/EditRequest.java new file mode 100644 index 0000000..0e8ce17 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/entity/EditRequest.java @@ -0,0 +1,69 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.editattributes.entity; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * The Class EditRequest. 
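+ *
+ * <p>Binds the JSON body of an attribute-edit request. An illustrative payload (the values are
+ * hypothetical; the property names come from the annotations below):
+ * <pre>
+ * {
+ *   "entity-uri": "/aai/v13/network/generic-vnfs/generic-vnf/example-id",
+ *   "entity-type": "generic-vnf",
+ *   "attributes": { "prov-status": "NVTPROV" }
+ * }
+ * </pre>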
+ */ +public class EditRequest { + + @JsonProperty("entity-uri") + private String entityUri; + + @JsonProperty("entity-type") + private String entityType; + + @JsonProperty("attributes") + private Map<String, Object> attributes = new HashMap<>(); + + public String getEntityUri() { + return entityUri; + } + + public void setEntityUri(String entityUri) { + this.entityUri = entityUri; + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public Map<String, Object> getAttributes() { + return attributes; + } + + public void setAttributes(Map<String, Object> attributes) { + this.attributes = attributes; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/exception/AttributeUpdateException.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/exception/AttributeUpdateException.java new file mode 100644 index 0000000..119d680 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/editattributes/exception/AttributeUpdateException.java @@ -0,0 +1,62 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.editattributes.exception; + +/** + * The Class AttributeUpdateException. + */ +public class AttributeUpdateException extends Exception { + + private static final long serialVersionUID = 1L; + + /** + * Attribute Edit specific Exception Class. + * + * @param exc the exc + */ + + public AttributeUpdateException(Exception exc) { + super(exc); + } + + /** + * Instantiates a new attribute update exception. + * + * @param message the message + */ + public AttributeUpdateException(String message) { + super(message); + } + + /** + * Instantiates a new attribute update exception. 
+ * + * @param message the message + * @param exc the exc + */ + public AttributeUpdateException(String message, Exception exc) { + super(message, exc); + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/EntityHistoryQueryBuilder.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/EntityHistoryQueryBuilder.java new file mode 100644 index 0000000..a2039b4 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/EntityHistoryQueryBuilder.java @@ -0,0 +1,143 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.inventory; + +import javax.json.Json; +import javax.json.JsonArray; +import javax.json.JsonArrayBuilder; +import javax.json.JsonObject; +import javax.json.JsonObjectBuilder; + +/** + * The Class EntityHistoryQueryBuilder. + */ +public class EntityHistoryQueryBuilder { + + private static final String TABLE = "table"; + private static final String GRAPH = "graph"; + + /** + * Gets the query. + * + * @param type the type + * @return the query + */ + public static JsonObject getQuery(String type) { + if (type.equalsIgnoreCase(TABLE)) { + return createTableQuery(); + } else if (type.equalsIgnoreCase(GRAPH)) { + return createGraphQuery(); + } else { + return null; + } + } + + /** + * Creates the graph query. + * + * @return the json object + */ + public static JsonObject createGraphQuery() { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + + jsonBuilder.add("aggs", + Json.createObjectBuilder().add("group_by_entityType", + Json.createObjectBuilder() + .add("terms", Json.createObjectBuilder().add("field", "entityType").add("size", 0)) + .add("aggs", Json.createObjectBuilder().add("group_by_date", + Json.createObjectBuilder().add("date_histogram", createDateHistogram()) + .add("aggs", Json.createObjectBuilder().add("sort_by_date", + Json.createObjectBuilder().add("top_hits", createTopHitsBlob()))))))); + jsonBuilder.add("size", 0); + + return jsonBuilder.build(); + } + + /** + * Creates the table query. 
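+   * <p>The query groups documents by {@code entityType} via a terms aggregation and, for each
+   * type, keeps only the single most recent document through a {@code top_hits} sub-aggregation
+   * sorted by {@code timestamp} descending; the top-level {@code size} of 0 means only aggregation
+   * results are returned.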
+ * + * @return the json object + */ + public static JsonObject createTableQuery() { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + + jsonBuilder.add("aggs", + Json.createObjectBuilder().add("group_by_entityType", + Json.createObjectBuilder() + .add("terms", Json.createObjectBuilder().add("field", "entityType").add("size", 0)) + .add("aggs", Json.createObjectBuilder().add("sort_by_date", + Json.createObjectBuilder().add("top_hits", createTopHitsBlob()))))); + jsonBuilder.add("size", 0); + + return jsonBuilder.build(); + } + + /** + * Creates the date histogram. + * + * @return the json object + */ + private static JsonObject createDateHistogram() { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + + jsonBuilder.add("field", "timestamp"); + jsonBuilder.add("min_doc_count", 1); + jsonBuilder.add("interval", "day"); + jsonBuilder.add("format", "epoch_millis"); + + return jsonBuilder.build(); + } + + /** + * Creates the top hits blob. + * + * @return the json object + */ + private static JsonObject createTopHitsBlob() { + JsonObjectBuilder builder = Json.createObjectBuilder(); + builder.add("size", 1); + builder.add("sort", getSortCriteria()); + return builder.build(); + } + + public static JsonArray getSortCriteria() { + JsonArrayBuilder jsonBuilder = Json.createArrayBuilder(); + jsonBuilder.add(Json.createObjectBuilder().add("timestamp", + Json.createObjectBuilder().add("order", "desc"))); + + return jsonBuilder.build(); + } + + /** + * The main method. + * + * @param args the arguments + */ + public static void main(String[] args) { + System.out.println("TABLE-QUERY: " + createTableQuery().toString()); + System.out.println("GRAPH_QUERY: " + createGraphQuery().toString()); + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/GeoVisualizationProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/GeoVisualizationProcessor.java new file mode 100644 index 0000000..a0e0630 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/GeoVisualizationProcessor.java @@ -0,0 +1,180 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.inventory; + +import java.io.IOException; + +import org.apache.camel.Exchange; +import org.apache.camel.component.restlet.RestletConstants; +import org.json.JSONArray; +import org.json.JSONObject; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.util.NodeUtils; +import org.restlet.Request; +import org.restlet.Response; +import org.restlet.data.ClientInfo; +import org.restlet.data.Form; +import org.restlet.data.MediaType; +import org.restlet.data.Parameter; +import org.restlet.data.Status; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * The Class GeoVisualizationServlet. + */ +public class GeoVisualizationProcessor { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(GeoVisualizationProcessor.class); + + private ObjectMapper mapper; + private ElasticSearchAdapter elasticSearchAdapter = null; + private String topographicalSearchIndexName; + + private static final String SEARCH_STRING = "_search"; + private static final String SEARCH_PARAMETER = "?filter_path=hits.hits._source&_source=location&size=5000&q=entityType:"; + private static final String PARAMETER_KEY = "entity"; + + /** + * Instantiates a new geo visualization processor + */ + public GeoVisualizationProcessor(ElasticSearchAdapter elasticSearchAdapter, String topographicalSearchIndexName) { + this.mapper = new ObjectMapper(); + this.elasticSearchAdapter = elasticSearchAdapter; + this.topographicalSearchIndexName = topographicalSearchIndexName; + } + + /** + * Gets the geo visualization results. + * + * @param response the response + * @param entityType the entity type + * @return the geo visualization results + * @throws Exception the exception + */ + protected OperationResult getGeoVisualizationResults(Exchange exchange) throws Exception { + OperationResult operationResult = new OperationResult(); + + + Object xTransactionId = exchange.getIn().getHeader("X-TransactionId"); + if (xTransactionId == null) { + xTransactionId = NodeUtils.getRandomTxnId(); + } + + Object partnerName = exchange.getIn().getHeader("X-FromAppId"); + if (partnerName == null) { + partnerName = "Browser"; + } + + Request request = exchange.getIn().getHeader(RestletConstants.RESTLET_REQUEST, Request.class); + + /* Disables automatic Apache Camel Restlet component logging which prints out an undesirable log entry + which includes client (e.g. 
browser) information */ + request.setLoggable(false); + + ClientInfo clientInfo = request.getClientInfo(); + MdcContext.initialize((String) xTransactionId, "AAI-UI", "", (String) partnerName, clientInfo.getAddress() + ":" + clientInfo.getPort()); + + String entityType = ""; + + Form form = request.getResourceRef().getQueryAsForm(); + for (Parameter parameter : form) { + if(PARAMETER_KEY.equals(parameter.getName())) { + entityType = parameter.getName(); + } + } + + String api = SEARCH_STRING + SEARCH_PARAMETER + entityType; + + final String requestUrl = elasticSearchAdapter.buildElasticSearchUrlForApi(topographicalSearchIndexName, api); + + try { + + OperationResult opResult = + elasticSearchAdapter.doGet(requestUrl, javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE); + + JSONObject finalOutputJson = formatOutput(opResult.getResult()); + + Response response = exchange.getIn().getHeader(RestletConstants.RESTLET_RESPONSE, Response.class); + response.setStatus(Status.SUCCESS_OK); + response.setEntity(String.valueOf(finalOutputJson), MediaType.APPLICATION_JSON); + exchange.getOut().setBody(response); + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, "Error processing Geo Visualization request"); + } + + return operationResult; + } + + /** + * Format output. + * + * @param results the results + * @return the JSON object + */ + private JSONObject formatOutput(String results) { + JsonNode resultNode = null; + JSONObject finalResult = new JSONObject(); + JSONArray entitiesArr = new JSONArray(); + + try { + resultNode = mapper.readTree(results); + + final JsonNode hitsNode = resultNode.get("hits").get("hits"); + if (hitsNode.isArray()) { + + for (final JsonNode arrayNode : hitsNode) { + JsonNode sourceNode = arrayNode.get("_source"); + if (sourceNode.get("location") != null) { + JsonNode locationNode = sourceNode.get("location"); + if (NodeUtils.isNumeric(locationNode.get("lon").asText()) + && NodeUtils.isNumeric(locationNode.get("lat").asText())) { + JSONObject location = new JSONObject(); + location.put("longitude", locationNode.get("lon").asText()); + location.put("latitude", locationNode.get("lat").asText()); + + entitiesArr.put(location); + } + + } + } + } + finalResult.put("plotPoints", entitiesArr); + + } catch (IOException exc) { + LOG.warn(AaiUiMsgs.ERROR_BUILDING_SEARCH_RESPONSE, exc.getLocalizedMessage()); + } + + return finalResult; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/entity/GeoIndexDocument.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/entity/GeoIndexDocument.java new file mode 100644 index 0000000..86918ad --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/entity/GeoIndexDocument.java @@ -0,0 +1,289 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.inventory.entity; + +import java.io.Serializable; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.List; + +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.sync.entity.IndexDocument; +import org.onap.aai.sparky.util.NodeUtils; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * The Class GeoIndexDocument. + */ +public class GeoIndexDocument implements Serializable, IndexDocument { + + @JsonIgnore + private static final long serialVersionUID = -5188479658230319058L; + + protected String entityType; + protected String entityPrimaryKeyValue; + protected String entityPrimaryKeyName; + protected String latitude; + protected String longitude; + protected String selfLink; + + @JsonIgnore + protected OxmEntityLookup oxmEntityLookup; + + @JsonIgnore + protected ObjectMapper mapper = new ObjectMapper(); + // generated, SHA-256 digest + @JsonIgnore + protected String id; + + /** + * Convert bytes to hex string. + * + * @param bytesToConvert the bytes to convert + * @return the string + */ + private static String convertBytesToHexString(byte[] bytesToConvert) { + StringBuffer hexString = new StringBuffer(); + for (int i = 0; i < bytesToConvert.length; i++) { + hexString.append(Integer.toHexString(0xFF & bytesToConvert[i])); + } + return hexString.toString(); + } + + + @JsonIgnore + public boolean isValidGeoDocument() { + + boolean isValid = true; + + isValid &= (this.getEntityType() != null); + isValid &= (this.getLatitude() != null); + isValid &= (this.getLongitude() != null); + isValid &= (this.getId() != null); + isValid &= (this.getSelfLink() != null); + + isValid &= NodeUtils.isNumeric(this.getLatitude()); + isValid &= NodeUtils.isNumeric(this.getLongitude()); + + return isValid; + } + + /** + * Concat array. + * + * @param list the list + * @param delimiter the delimiter + * @return the string + */ + private static String concatArray(List<String> list, char delimiter) { + + if (list == null || list.size() == 0) { + return ""; + } + + StringBuilder result = new StringBuilder(64); + + int listSize = list.size(); + boolean firstValue = true; + + for (String item : list) { + + if (firstValue) { + result.append(item); + firstValue = false; + } else { + result.append(delimiter).append(item); + } + + } + + return result.toString(); + + } + + /* + * We'll try and create a unique identity key that we can use for differencing the previously + * imported record sets as we won't have granular control of what is created/removed and when. 
The + * best we can hope for is identification of resources by generated Id until the Identity-Service + * UUID is tagged against all resources, then we can use that instead. + */ + + /** + * Generate unique sha digest. + * + * @param entityType the entity type + * @param fieldName the field name + * @param fieldValue the field value + * @return the string + * @throws NoSuchAlgorithmException the no such algorithm exception + */ + public static String generateUniqueShaDigest(String entityType, String fieldName, + String fieldValue) throws NoSuchAlgorithmException { + + /* + * Basically SHA-256 will result in an identity with a guaranteed uniqueness compared to just a + * java hashcode value. + */ + MessageDigest digest = MessageDigest.getInstance("SHA-256"); + digest.update(String.format("%s.%s.%s", entityType, fieldName, fieldValue).getBytes()); + return convertBytesToHexString(digest.digest()); + } + + /** + * Instantiates a new geo index document. + */ + public GeoIndexDocument() {} + + /* + * (non-Javadoc) + * + */ + + @Override + @JsonIgnore + public String getAsJson() throws JsonProcessingException { + + if (latitude != null && longitude != null) { + + /** + * A valid entry from this class is one that has both lat and long. If one or both is missing + * we shouldn't be indexing anything. + */ + + return NodeUtils.convertObjectToJson(this, true); + + } + + return null; + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields() + */ + @Override + public void deriveFields() { + + /* + * We'll try and create a unique identity key that we can use for differencing the previously + * imported record sets as we won't have granular control of what is created/removed and when. + * The best we can hope for is identification of resources by generated Id until the + * Identity-Service UUID is tagged against all resources, then we can use that instead. 
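+     *
+     * Concretely, the id computed below is a digest (SHA-256, per the field comment above) over
+     * the entity type, the "/"-joined primary key attribute names from the OXM descriptor, and
+     * the entity's primary key value.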
+ */ + + OxmEntityDescriptor descriptor = oxmEntityLookup.getEntityDescriptors().get(entityType); + String entityPrimaryKeyName = NodeUtils.concatArray( + descriptor.getPrimaryKeyAttributeNames(), "/"); + + this.id = + NodeUtils.generateUniqueShaDigest(entityType, entityPrimaryKeyName, entityPrimaryKeyValue); + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "TopographicalEntity [" + ("entityType=" + entityType + ", ") + + ("entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", ") + + ("latitude=" + latitude + ", ") + ("longitude=" + longitude + ", ") + ("ID=" + id + ", ") + + ("selfLink=" + selfLink) + "]"; + } + + @Override + @JsonIgnore + public String getId() { + return this.id; + } + + @JsonProperty("entityType") + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + @JsonProperty("entityPrimaryKeyValue") + public String getEntityPrimaryKeyValue() { + return entityPrimaryKeyValue; + } + + public void setEntityPrimaryKeyValue(String entityPrimaryKeyValue) { + this.entityPrimaryKeyValue = entityPrimaryKeyValue; + } + + @JsonProperty("entityPrimaryKeyName") + public String getEntityPrimaryKeyName() { + return entityPrimaryKeyName; + } + + public void setEntityPrimaryKeyName(String entityPrimaryKeyName) { + this.entityPrimaryKeyName = entityPrimaryKeyName; + } + + @JsonProperty("lat") + public String getLatitude() { + return latitude; + } + + public void setLatitude(String latitude) { + this.latitude = latitude; + } + + @JsonProperty("long") + public String getLongitude() { + return longitude; + } + + public void setLongitude(String longitude) { + this.longitude = longitude; + } + + @JsonProperty("link") + public String getSelfLink() { + return selfLink; + } + + public void setSelfLink(String selfLink) { + this.selfLink = selfLink; + } + + @JsonIgnore + public static long getSerialversionuid() { + return serialVersionUID; + } + + public void setId(String id) { + this.id = id; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/entity/TopographicalEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/entity/TopographicalEntity.java new file mode 100644 index 0000000..ac89c6b --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/inventory/entity/TopographicalEntity.java @@ -0,0 +1,219 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.inventory.entity; + +import java.io.IOException; +import java.io.Serializable; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.List; + +import javax.json.Json; +import javax.json.JsonObject; + +/** + * The Class TopographicalEntity. + */ +public class TopographicalEntity implements Serializable { + + private static final long serialVersionUID = -5188479658230319058L; + + protected String entityType; + protected String entityPrimaryKeyValue; + protected String entityPrimaryKeyName; + protected String latitude; + protected String longitude; + protected String selfLink; + + // generated, SHA-256 digest + protected String id; + + /** + * Convert bytes to hex string. + * + * @param bytesToConvert the bytes to convert + * @return the string + */ + private static String convertBytesToHexString(byte[] bytesToConvert) { + StringBuffer hexString = new StringBuffer(); + for (int i = 0; i < bytesToConvert.length; i++) { + hexString.append(Integer.toHexString(0xFF & bytesToConvert[i])); + } + return hexString.toString(); + } + + /** + * Concat array. + * + * @param list the list + * @param delimiter the delimiter + * @return the string + */ + private static String concatArray(List<String> list, char delimiter) { + + if (list == null || list.size() == 0) { + return ""; + } + + StringBuilder result = new StringBuilder(64); + + int listSize = list.size(); + boolean firstValue = true; + + for (String item : list) { + + if (firstValue) { + result.append(item); + firstValue = false; + } else { + result.append(delimiter).append(item); + } + + } + + return result.toString(); + + } + + /* + * We'll try and create a unique identity key that we can use for differencing the previously + * imported record sets as we won't have granular control of what is created/removed and when. The + * best we can hope for is identification of resources by generated Id until the Identity-Service + * UUID is tagged against all resources, then we can use that instead. + */ + + /** + * Generate unique sha digest. + * + * @param entityType the entity type + * @param fieldName the field name + * @param fieldValue the field value + * @return the string + * @throws NoSuchAlgorithmException the no such algorithm exception + */ + public static String generateUniqueShaDigest(String entityType, String fieldName, + String fieldValue) throws NoSuchAlgorithmException { + + /* + * Basically SHA-256 will result in an identity with a guaranteed uniqueness compared to just a + * java hashcode value. + */ + MessageDigest digest = MessageDigest.getInstance("SHA-256"); + digest.update(String.format("%s.%s.%s", entityType, fieldName, fieldValue).getBytes()); + return convertBytesToHexString(digest.digest()); + } + + /** + * Instantiates a new topographical entity. 
+ */ + public TopographicalEntity() {} + + /* + * (non-Javadoc) + * + */ + public String getAsJson() throws IOException { + + JsonObject obj = + Json.createObjectBuilder().add("entityType", entityType).add("pkey", entityPrimaryKeyValue) + .add("location", Json.createObjectBuilder().add("lat", latitude).add("lon", longitude)) + .add("selfLink", selfLink).build(); + + return obj.toString(); + } + + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "TopographicalEntity [" + ("entityType=" + entityType + ", ") + + ("entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", ") + + ("latitude=" + latitude + ", ") + ("longitude=" + longitude + ", ") + ("ID=" + id + ", ") + + ("selfLink=" + selfLink) + "]"; + } + + public String getId() { + return this.id; + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public String getEntityPrimaryKeyValue() { + return entityPrimaryKeyValue; + } + + public void setEntityPrimaryKeyValue(String entityPrimaryKeyValue) { + this.entityPrimaryKeyValue = entityPrimaryKeyValue; + } + + public String getEntityPrimaryKeyName() { + return entityPrimaryKeyName; + } + + public void setEntityPrimaryKeyName(String entityPrimaryKeyName) { + this.entityPrimaryKeyName = entityPrimaryKeyName; + } + + public String getLatitude() { + return latitude; + } + + public void setLatitude(String latitude) { + this.latitude = latitude; + } + + public String getLongitude() { + return longitude; + } + + public void setLongitude(String longitude) { + this.longitude = longitude; + } + + public String getSelfLink() { + return selfLink; + } + + public void setSelfLink(String selfLink) { + this.selfLink = selfLink; + } + + public static long getSerialversionuid() { + return serialVersionUID; + } + + public void setId(String id) { + this.id = id; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/AaiUiMsgs.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/AaiUiMsgs.java new file mode 100644 index 0000000..7ae73a1 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/AaiUiMsgs.java @@ -0,0 +1,472 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.logging; + +import org.onap.aai.cl.eelf.LogMessageEnum; + +import com.att.eelf.i18n.EELFResourceManager; + +/** + * The Enum AaiUiMsgs. + */ +public enum AaiUiMsgs implements LogMessageEnum { + /** Arguments: {0} = Exception/error. */ + FAILURE_TO_PROCESS_REQUEST, + /** Arguments: {0} = Message and or error body. */ + FAILED_TO_DETERMINE, + /** Arguments: {0} = Exception/error. */ + UNKNOWN_SERVER_ERROR, + /** Arguments: {0} = Message and or error body. */ + FAILED_TO_ANALYZE, + /** Arguments: {0} = Exception/error. */ + FAILED_TO_GET_NODES_QUERY_RESULT, + /** Arguments: {0} = Expected link count, {1} = Actual link count. */ + UNEXPECTED_NUMBER_OF_LINKS, + /** Arguments: {0} = Reason. */ + DANGLING_NODE_WARNING, + /** Arguments: {0} = Node count, {1} = Link count. */ + VISUALIZATION_GRAPH_OUTPUT, + /** Arguments: {0} = JsonNode. */ + ITEM_TYPE_NULL, + /** Arguments: {0} = Filter property. */ + UNEXPECTED_TOKEN_COUNT, + /** Arguments: {0} = Error/exception message. */ + ADD_SEARCH_TARGET_ATTRIBUTES_FAILED, + /** Arguments: {0} = Error/exception message. */ + NODE_INTEGRITY_OVERLAY_ERROR, + /** Arguments: {0} = Node ID. */ + NODE_INTEGRITY_ALREADY_PROCESSED, + /** Arguments: {0} = Node ID. */ + SKIPPING_PROCESS_NODE_INTEGRITY, + /** Arguments: {0} = Error/exception message. */ + FAILED_TO_PROCESS_NODE_INTEGRITY, + /** No argument */ + MAX_EVALUATION_ATTEMPTS_EXCEEDED, + /** Arguments: {0} = Error/exception message. */ + VISUALIZATION_OUTPUT_ERROR, + /** Arguments: {0} = Total resolve time, {1} = Total links retrieved, {2} = Op time. */ + ALL_TRANSACTIONS_RESOLVED, + /** Arguments: {0} = Error/exception message. */ + PROCESSING_LOOP_INTERUPTED, + /** Arguments: {0} = Node ID. */ + IGNORING_SKELETON_NODE, + /** Arguments: {0} = Node count. */ + OUTSTANDING_WORK_PENDING_NODES, + /** Arguments: {0} = Reason. */ + FAILED_TO_ADD_SKELETON_NODE, + /** Arguments: {0} = Reason. */ + FAILED_TO_PROCESS_SKELETON_NODE, + INVALID_RESOLVE_STATE_DURING_INIT, + /** Arguments: {0} = Reason. */ + FAILED_TO_PROCESS_INITIAL_STATE, + /** Arguments: {0} = Relationship. */ + SKIPPING_RELATIONSHIP, + /** Arguments: {0} = Failure reason. */ + FAILED_TO_DETERMINE_NODE_ID, + /** Arguments: {0} = Error/exception message. */ + EXTRACTION_ERROR, + /** Arguments: {0} = Error/exception message. */ + SELF_LINK_NODE_PARSE_ERROR, + /** Arguments: {0} = Node ID. */ + ROOT_NODE_DISCOVERED, + /** Arguments: {0} = Error/exception message. */ + SELF_LINK_PROCESS_NEIGHBORS_ERROR, + /** Arguments: {0} = Error/exception message. */ + SELF_LINK_JSON_PARSE_ERROR, + /** Arguments: {0} = Error/exception message. */ + SELF_LINK_PROCESSING_ERROR, + /** Arguments: {0} = Entity type. */ + UNHANDLED_OBJ_TYPE_FOR_ENTITY_TYPE, + /** Arguments: {0} = Attribute group. */ + ATTRIBUTE_GROUP_FAILURE, + /** Arguments: {0} = Situational description, {1} = Exception message. */ + EXCEPTION_CAUGHT, + /** Arguments: {0} = Operation name, {1} = Operation time. */ + OPERATION_TIME, + /** Arguments: {0} = Error message. */ + SEARCH_SERVLET_ERROR, + /** Arguments: {0} = Exception message. */ + SEARCH_RESPONSE_BUILDING_EXCEPTION, + /** Arguments: {0} = Error message, {1} = Error message. */ + SEARCH_TAG_ANNOTATION_ERROR, + /** Arguments: {0} = App type. */ + QUERY_FAILED_UNHANDLED_APP_TYPE, + /** Arguments: {0} = Entity type. */ + ENTITY_NOT_FOUND_IN_OXM, + /** Arguments: {0} = JSON conversion type, {1} = Error thrown. 
*/ + JSON_CONVERSION_ERROR, + /** Arguments: {0} = Node ID */ + NO_RELATIONSHIP_DISCOVERED, + /** No argument */ + SELF_LINK_NULL_EMPTY_RESPONSE, + /** Arguments: {0} = Error message. */ + SELF_LINK_RELATIONSHIP_LIST_ERROR, + /** Arguments: {0} = AIN id, {1} = old depth, {2} = new depth. */ + ACTIVE_INV_NODE_CHANGE_DEPTH, + /** Arguments: {0} = Node ID, {1} = Current state, {2} = New state {3} = Triggering action */ + ACTIVE_INV_NODE_CHANGE_STATE, + /** Arguments: {0} = Current state, {1} = New state {2} = Triggering action */ + ACTIVE_INV_NODE_CHANGE_STATE_NO_NODE_ID, + /** Arguments: {0} = Count Key {1} = Aggregation Key. */ + AGGREGATION_KEY_ERROR, + /** Arguments: {0} Configuration */ + CONFIGURATION_ERROR, + /** Arguments: {0} = Source. */ + ERROR_PARSING_JSON_PAYLOAD_NONVERBOSE, + /** Arguments: {0} = Payload. */ + ERROR_PARSING_JSON_PAYLOAD_VERBOSE, + /** Arguments: {0} = Key {1} = JSON Blob. */ + ERROR_FETCHING_JSON_VALUE, + /** Arguments: {0} = Error. */ + ERROR_PARSING_PARAMS, + /** No argument */ + INVALID_REQUEST_PARAMS, + /** Arguments: {0} = Key. */ + ERROR_SORTING_VIOLATION_DATA, + /** Arguments: {0} = exception */ + ERROR_SERVLET_PROCESSSING, + /** Arguments: {0} = exception */ + ERROR_BUILDING_RESPONSE_FOR_TABLE_QUERY, + /** Arguments: {0} = exception */ + ERROR_BUILDING_SEARCH_RESPONSE, + /** No argument */ + ERROR_CSP_CONFIG_FILE, + /** Arguments: {0} = exception */ + ERROR_SHUTDOWN_EXECUTORS, + /** No argument */ + ERROR_LOADING_OXM, + /** Arguments: {0} = exception */ + ERROR_GETTING_DATA_FROM_AAI, + /** No argument */ + WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED, + /** Arguments: {0} = Entity Type */ + MISSING_ENTITY_DESCRIPTOR, + /** Arguments: {0} = Error */ + SELF_LINK_GET, + /** Arguments: {0} = Error */ + ES_FAILED_TO_CONSTRUCT_QUERY, + /** Arguments: {0} = Error */ + ES_RETRIEVAL_FAILED, + /** Arguments: {0} = Error */ + ES_LINK_UPSERT, + /** Arguments: {0} = Element */ + ES_SIMPLE_PUT, + /** Arguments: {0} = Value {1} = Element {2} = Error */ + ES_ABORT_CROSS_ENTITY_REF_SYNC, + /** Arguments: {0} Return Code */ + ES_OPERATION_RETURN_CODE, + /** Arguments: {0} = Error */ + ES_CROSS_ENTITY_REF_PUT, + /** No argument */ + ES_CROSS_REF_SYNC_VERSION_CONFLICT, + /** Arguments: {0} Result Code {1} = Error */ + ES_CROSS_REF_SYNC_FAILURE, + /** Arguments: {0} = Error */ + ES_FAILED_TO_CONSTRUCT_URI, + /** No argument */ + ES_RETRIEVAL_FAILED_RESYNC, + /** Arguments: {0} = Entity */ + ES_CROSS_ENTITY_RESYNC_LIMIT, + /** Arguments: {0} Entity Name */ + ES_PKEYVALUE_NULL, + /** Arguments: {0} = Error */ + ES_STORE_FAILURE, + /** Arguments: {0} Index Name {1} = Error */ + ES_PRE_SYNC_FAILURE, + /** Arguments: {0} Index Name */ + ES_SYNC_CLEAN_UP, + /** Arguments: {0} Index Name {1} Size before clean up {2} = Size after clean up */ + ES_SYNC_CLEAN_UP_SIZE, + /** Arguments: {0} Index Name {1} Index Type {2} = Size before delete */ + ES_SYNC_SELECTIVE_DELETE, + /** Arguments: {0} Index Name {1} Number of records */ + ES_BULK_DELETE, + /** Arguments: {0} Index name {1} = Error */ + ES_BULK_DELETE_ERROR, + /** Arguments: {0} Type of retrieval {1} Completion Time */ + COLLECT_TIME_WITH_ERROR, + /** Arguments: {0} Type of retrieval {1} Completion Time */ + COLLECT_TIME_WITH_SUCCESS, + /** Arguments: {0} Type of retrieval {1} Number of records */ + COLLECT_TOTAL, + /** Arguments: {0} Number of required fetches */ + SYNC_NUMBER_REQ_FETCHES, + /** Arguments: {0} Number of total fetches {1} Number of available records*/ + SYNC_NUMBER_TOTAL_FETCHES, + /** Arguments: {0} Completion Time */ 
+ COLLECT_TOTAL_TIME, + /** Arguments: {0} = Error */ + ES_SCROLL_CONTEXT_ERROR, + /** No argument */ + ES_BULK_DELETE_SKIP, + /** Arguments: {0} = Number of docs */ + ES_BULK_DELETE_START, + /** No argument */ + SELF_LINK_CROSS_REF_SYNC, + /** Arguments: {0} = message */ + ERROR_GENERIC, + /** Arguments: {0} = error */ + JSON_PROCESSING_ERROR, + /** Arguments: {0} = exception */ + ERROR_PROCESSING_REQUEST, + /** Arguments: {0} = Self Link */ + SELF_LINK_GET_NO_RESPONSE, + /** Arguments: {0} = error */ + HISTORICAL_COLLECT_ERROR, + /** Arguments: {0} = Time */ + HISTORICAL_ENTITY_COUNT_SUMMARIZER_STARTING, + /** No argument */ + HISTORICAL_ENTITY_COUNT_SUMMARIZER_NOT_STARTED, + /** Arguments: {0} = Controller {1} = Time */ + HISTORICAL_SYNC_DURATION, + /** No argument */ + HISTORICAL_SYNC_PENDING, + /** Arguments: {0} = Time */ + HISTORICAL_SYNC_TO_BEGIN, + /** Arguments: {0} = message */ + DEBUG_GENERIC, + /** Arguments: {0} = message */ + INFO_GENERIC, + /** Arguments: {0} = message */ + WARN_GENERIC, + /** Arguments: {0} = context {1} = Exception*/ + INTERRUPTED, + /** Arguments: {0} = Entity Type {1} Entity */ + GEO_SYNC_IGNORING_ENTITY, + /** Arguments: {0} = reason */ + OXM_LOADING_ERROR, + /** Arguments: {0} = type */ + OXM_FAILED_RETRIEVAL, + OXM_FILE_NOT_FOUND, + /** No argument */ + OXM_READ_ERROR_NONVERBOSE, + /** Arguments: {0} = OXM File name */ + OXM_READ_ERROR_VERBOSE, + /** No argument */ + OXM_PARSE_ERROR_NONVERBOSE, + /** Arguments: {0} = OXM File name {1} = Exception*/ + OXM_PARSE_ERROR_VERBOSE, + /** Arguments: {0} = Numerical value for loaded OXM version */ + OXM_LOAD_SUCCESS, + /** Arguments: {0} = Entity {1} = Found property-value*/ + OXM_PROP_DEF_ERR_CROSS_ENTITY_REF, + /** Arguments: {0} = Sequence Number */ + ETAG_RETRY_SEQ, + /** Arguments: {0} = Reason */ + ETAG_WAIT_INTERRUPTION, + /** Arguments: {0} = URL {1} = Sequence Number */ + QUERY_AAI_RETRY_SEQ, + /** Arguments: {0} = URL {1} = Sequence Number */ + QUERY_AAI_RETRY_DONE_SEQ, + /** Arguments: {0} = Reason */ + QUERY_AAI_WAIT_INTERRUPTION, + /** Arguments: {0} = URL {1} = Sequence Number */ + QUERY_AAI_RETRY_FAILURE_WITH_SEQ, + /** Arguments: {0} = URL */ + QUERY_AAI_RETRY_MAXED_OUT, + /** Arguments: {0} = Reason */ + PEGGING_ERROR, + /** Arguments: {0} = Key */ + DATA_CACHE_SUCCESS, + /** Arguments: {0} = URL {1} = Sequence Number */ + EXECUTOR_SERV_EXCEPTION, + /** Arguments: {0} = Exception */ + DISK_CACHE_READ_IO_ERROR, + /** Arguments: {0} = Exception */ + DISK_CREATE_DIR_IO_ERROR, + /** Arguments: {0} = Exception */ + DISK_DATA_WRITE_IO_ERROR, + /** Arguments: {0} = Data Item {1} = Exception */ + DISK_NAMED_DATA_WRITE_IO_ERROR, + /** Arguments: {0} = Data Item {1} = Exception */ + DISK_NAMED_DATA_READ_IO_ERROR, + /** No argument */ + OFFLINE_STORAGE_PATH_ERROR, + /** Arguments: {0} = URL {1} = Error */ + RESTFULL_OP_ERROR_VERBOSE, + /** Arguments: {0} = Method {1} = Time {2} = URL {3} = Result Code */ + RESTFULL_OP_COMPLETE, + /** No argument */ + INITIALIZE_OXM_MODEL_LOADER, + /** Arguments: {0} = Exception */ + AAI_RETRIEVAL_FAILED_GENERIC, + /** Arguments: {0} = Self Link */ + AAI_RETRIEVAL_FAILED_FOR_SELF_LINK, + /** Arguments: {0} = Exception */ + ATTRIBUTES_NOT_UPDATED_EXCEPTION, + /** Arguments: {0} = Message */ + ATTRIBUTES_NOT_UPDATED_MESSAGE, + /** Arguments: {0} = Exception */ + ATTRIBUTES_ERROR_GETTING_AAI_CONFIG_OR_ADAPTER, + /** Arguments: {0} = Schema File URI */ + ATTRIBUTES_ERROR_LOADING_MODEL_VERSION, + /** Arguments: {0} = Request URI {1} = Edit Request Body */ + 
ATTRIBUTES_HANDLING_EDIT, + /** Arguments: {0} = Object URI {1} = Attribute ID {2} Attribute Values */ + ATTRIBUTES_UPDATE_METHOD_CALLED, + /** Arguments: {0} = Attribute ID */ + ATTRIBUTES_USER_NOT_AUTHORIZED_TO_UPDATE, + /** Arguments: {0} = Cookie */ + COOKIE_FOUND, + /** No argument */ + COOKIE_NOT_FOUND, + /** Arguments: {0} = Message */ + INVALID_REQUEST, + /** Arguments: {0} = User ID */ + USER_AUTHORIZATION_FILE_UNAVAILABLE, + /** Arguments: {0} = URL {1} = Cause */ + INVALID_URL_VERBOSE, + /** Arguments: {0} = Row ID */ + DI_DATA_NOT_FOUND_NONVERBOSE, + /** Arguments: {0} = Row ID {1} Attempt count */ + DI_DATA_NOT_FOUND_VERBOSE, + /** Arguments: {0} = Time in ms {1} Status */ + DI_MS_TIME_FOR_DATA_FETCH, + /** Arguments: {0} = Number of Entity Links */ + ENTITY_SYNC_FAILED_SELFLINK_AMBIGUITY, + /** Arguments: {0} = Message */ + ERROR_EXTRACTING_FROM_RESPONSE, + /** No argument */ + ERROR_LOADING_OXM_SEARCHABLE_ENTITIES, + /** Arguments: {0} = Message */ + ES_SEARCHABLE_ENTITY_SYNC_ERROR, + /** Arguments: {0} = Message */ + FAILED_TO_REGISTER_DUE_TO_NULL, + /** Arguments: {0} = File Path */ + FAILED_TO_RESTORE_TXN_FILE_MISSING, + /** Arguments: {0} = Index Name */ + INDEX_ALREADY_EXISTS, + /** Arguments: {0} = Index Name */ + INDEX_EXISTS, + /** Arguments: {0} = Index Name {1} = Operation Result */ + INDEX_INTEGRITY_CHECK_FAILED, + /** Arguments: {0} = Index Name */ + INDEX_NOT_EXIST, + /** Arguments: {0} = Index Name */ + INDEX_RECREATED, + /** Arguments: {0} = Time */ + SEARCH_ENGINE_SYNC_STARTED, + /** Arguments: {0} = Time */ + SKIP_PERIODIC_SYNC_AS_SYNC_DIDNT_FINISH, + /** Arguments: {0} = Message */ + SYNC_DURATION, + /** Arguments: {0} = Entity Type */ + ENTITY_SYNC_FAILED_DESCRIPTOR_NOT_FOUND, + /** Arguments: {0} = AAI Query Result */ + ENTITY_SYNC_FAILED_DURING_AAI_RESPONSE_CONVERSION, + /** Arguments: {0} = Message */ + ENTITY_SYNC_FAILED_QUERY_ERROR, + /** Arguments: {0} = Self Link Query */ + SELF_LINK_DETERMINATION_FAILED_GENERIC, + /** Arguments: {0} = Number of Entity Links */ + SELF_LINK_DETERMINATION_FAILED_UNEXPECTED_LINKS, + /** Arguments: {1} = Query {2} = Operation Result Code {3} = Operation Result */ + SELF_LINK_RETRIEVAL_FAILED, + /** Arguments: {0} = Controller {1} = Synchronizer Current Internal State {2} = New State {3} = Caused By Action */ + SYNC_INTERNAL_STATE_CHANGED, + /** Arguments: {0} = Message */ + SYNC_INVALID_CONFIG_PARAM, + /** Arguments: {0} = Synchronizer Current Internal State */ + SYNC_NOT_VALID_STATE_DURING_REQUEST, + /** No argument */ + SYNC_SKIPPED_SYNCCONTROLLER_NOT_INITIALIZED, + /** No argument */ + SYNC_START_TIME, + /** Arguments: {0} = Controller {1} = Time */ + SYNC_TO_BEGIN, + /** Arguments: {0} = File Path */ + WILL_RETRIEVE_TXN, + /** Arguments: {0} = Configuration file name {1} = Exception */ + CONFIG_NOT_FOUND_VERBOSE, + /** Arguments: {0} = File name */ + FILE_NOT_FOUND, + /** Arguments: {0} = File name */ + FILE_READ_IN_PROGRESS, + ERROR_LOADING_OXM_SUGGESTIBLE_ENTITIES, + /** Arguments: {0} = Error message */ + ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, + /** Arguments: {0} = Error message */ + ES_AGGREGATION_SUGGESTION_ENTITY_SYNC_ERROR, + /** Arguments: {0} = Error message. 
*/ + ENTITY_SYNC_SEARCH_TAG_ANNOTATION_FAILED, + /** Arguments: {0} = Error message */ + SEARCH_ADAPTER_ERROR, + /** Arguments: {0} = Decoding exception message */ + UNSUPPORTED_URL_ENCODING, + /** Arguments: {0} = Invalid URL */ + INVALID_REDIRECT_URL, + /** Arguments: {0} = Valid login URL */ + VALID_REDIRECT_URL, + /** Arguments: {0} = Query Parameter Self-Link Extraction Error */ + QUERY_PARAM_EXTRACTION_ERROR, + /** Arguments: {0} = Info message */ + LOGIN_FILTER_INFO, + /** Arguments: {0} = Debug message */ + LOGIN_FILTER_DEBUG, + /** Arguments: {0} = URL to extract parameter from */ + ERROR_REMOVING_URL_PARAM, + /** Arguments: {0} = Hash value */ + ERROR_INVALID_HASH, + ERROR_HASH_NOT_FOUND, + ERROR_FILTERS_NOT_FOUND, + ERROR_READING_HTTP_REQ_PARAMS, + /** Arguments: {0} = Exception */ + ERROR_D3_GRAPH_VISUALIZATION, + /** Arguments: {0} = Exception */ + ERROR_AAI_QUERY_WITH_RETRY, + /** Arguments: Error extracting resource path from self-link. Error = {0} */ + ERROR_EXTRACTING_RESOURCE_PATH_FROM_LINK, + /** Arguments: {0} = Schema file location */ + ERROR_READING_JSON_SCHEMA, + /** Arguments: {0} = UI view name */ + VIEW_NAME_NOT_SUPPORTED, + /** Arguments: {0} = response code, {1} = filter name */ + ERROR_FETCHING_FILTER_VALUES, + /** Arguments: {0} = query type, {1} = view name */ + ERROR_PROCESSING_WIDGET_REQUEST, + /** Arguments: {0} = Time in ms */ + DR_PROCESSING_TIME, + /** Arguments: {0} = Response code {1} = payload */ + DR_PROCESSING_FAILURE, + /** Arguments: {0} = request uri */ + DR_REQUEST_URI_FOR_PROXY_UNKNOWN, + /** Arguments: {0} = origin-url {1} = dr-url */ + DR_PROXY_FROM_TO, + /** Arguments: {0} = Exception */ + URI_DECODING_EXCEPTION, + /** Arguments: {0} = Value {1} = Error */ + ENCRYPTION_ERROR, + /** Arguments: {0} = Encrypted value {1} = Error */ + DECRYPTION_ERROR, + /** Arguments: {0} = URI */ + RESOURCE_NOT_FOUND; + + /** + * Static initializer to ensure the resource bundles for this class are loaded... + */ + static { + EELFResourceManager.loadMessageBundle("logging/AAIUIMsgs"); + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/util/LoggingUtils.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/util/LoggingUtils.java new file mode 100644 index 0000000..04ad83a --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/util/LoggingUtils.java @@ -0,0 +1,43 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.logging.util; + +/** + * The Class LoggingUtils. + */ +public class LoggingUtils { + + /** + * Sets the duration. + * + * @param startTime the start time + * @param stopTime the stop time + * @return the string + */ + public static String setDuration(long startTime, long stopTime) { + return String.valueOf(stopTime - startTime); + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/util/ServletUtils.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/util/ServletUtils.java new file mode 100644 index 0000000..44068a1 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/logging/util/ServletUtils.java @@ -0,0 +1,204 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.logging.util; + +import java.io.IOException; +import java.io.PrintWriter; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.camel.Exchange; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.search.SearchServiceAdapter; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.viewandinspect.config.SparkyConstants; +import org.slf4j.MDC; + +/** + * The Class ServletUtils. + */ +public class ServletUtils { + + /** + * Execute get query. + * + * @param logger the logger + * @param search the search + * @param response the response + * @param requestUrl the request url + * @return the operation result + * @throws Exception the exception + */ + public static OperationResult executeGetQuery(Logger logger, SearchServiceAdapter search, + HttpServletResponse response, String requestUrl) throws Exception { + + OperationResult opResult = search.doGet(requestUrl, "application/json"); + + if (opResult.getResultCode() > 300) { + setServletResponse(logger, true, opResult.getResultCode(), response, opResult.getResult()); + } else { + response.setStatus(opResult.getResultCode()); + } + + return opResult; + + } + + /** + * Execute post query. 
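+   * Mirrors executeGetQuery(), but issues an HTTP POST with the supplied JSON payload and applies
+   * the same result-code handling to the search-service response.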
+ * + * @param logger the logger + * @param search the search + * @param response the response + * @param requestUrl the request url + * @param requestJsonPayload the request json payload + * @return the operation result + * @throws Exception the exception + */ + public static OperationResult executePostQuery(Logger logger, SearchServiceAdapter search, + HttpServletResponse response, String requestUrl, String requestJsonPayload) throws Exception { + + OperationResult opResult = search.doPost(requestUrl, requestJsonPayload, "application/json"); + + if (opResult.getResultCode() > 300) { + setServletResponse(logger, true, opResult.getResultCode(), response, opResult.getResult()); + + } else { + response.setStatus(opResult.getResultCode()); + } + + return opResult; + } + + /** + * Handle search servlet errors. + * + * @param logger the logger + * @param errorMsg the error msg + * @param exc the exc + * @param response the response + * @throws IOException Signals that an I/O exception has occurred. + */ + public static void handleSearchServletErrors(Logger logger, String errorMsg, Exception exc, + HttpServletResponse response) throws IOException { + String errorLogMsg = (exc == null ? errorMsg : errorMsg + ". Error:" + + exc.getLocalizedMessage()); + logger.error(AaiUiMsgs.ERROR_GENERIC, errorLogMsg); + response.setContentType("application/json"); + PrintWriter out = response.getWriter(); + out.println(generateJsonErrorResponse(errorMsg)); + out.close(); + } + + /** + * Generate json error response. + * + * @param message the message + * @return the string + */ + public static String generateJsonErrorResponse(String message) { + return String.format("{ \"errorMessage\" : %s }", message); + } + + /** + * Sets the servlet response. + * + * @param logger the logger + * @param isError the is error + * @param responseCode the response code + * @param response the response + * @param postPayload the post payload + * @throws IOException Signals that an I/O exception has occurred. + */ + public static void setServletResponse(Logger logger, boolean isError, int responseCode, + HttpServletResponse response, String postPayload) throws IOException { + + if (isError) { + logger.error(AaiUiMsgs.ERROR_GENERIC, postPayload); + } + + response.setStatus(responseCode); + + if (postPayload != null) { + response.setContentType("application/json"); + PrintWriter out = response.getWriter(); + out.println(postPayload); + out.close(); + } + } + + /** + * Gets the full url. 
+ * + * @param elasticConfig the elastic config + * @param resourceUrl the resource url + * @return the full url + */ + public static String getFullUrl(String eHost,String ePort, String resourceUrl) { + final String host = eHost; + final String port = ePort; + return String.format("http://%s:%s%s", host, port, resourceUrl); + } + + public static void setUpMdcContext(final Exchange exchange, final HttpServletRequest request) { + + String txnId; + + Object xTransactionId = exchange.getIn().getHeader("X-TransactionId"); + if (xTransactionId == null) { + txnId = NodeUtils.getRandomTxnId(); + } else { + txnId = (String) xTransactionId; + } + + String fromAppId; + + Object partnerName = exchange.getIn().getHeader("X-FromAppId"); + if (partnerName == null) { + fromAppId = SparkyConstants.APP_NAME; + } else { + fromAppId = (String) partnerName; + } + + MdcContext.initialize(txnId, "AAI-UI", "", fromAppId, + request.getRequestURI() + ":" + request.getLocalPort()); + } + + public static Map<String, List<String>> getTxnHeaders() { + Map<String, List<String>> headers = new HashMap<String, List<String>>(); + headers.put("X-TransactionId", Arrays.asList(MDC.get(MdcContext.MDC_REQUEST_ID))); + headers.put("X-FromAppId", Arrays.asList(MDC.get(MdcContext.MDC_PARTNER_NAME))); + return headers; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityCountHistoryProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityCountHistoryProcessor.java new file mode 100644 index 0000000..4c393e1 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityCountHistoryProcessor.java @@ -0,0 +1,407 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.search; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.TreeMap; + +import org.apache.camel.Exchange; +import org.apache.camel.Processor; +import org.apache.camel.component.restlet.RestletConstants; +import org.json.JSONArray; +import org.json.JSONObject; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.inventory.EntityHistoryQueryBuilder; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.util.RestletUtils; +import org.restlet.Request; +import org.restlet.Response; +import org.restlet.data.ClientInfo; +import org.restlet.data.MediaType; +import org.restlet.data.Status; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; + +/** + * Receives and processes Entity Count History requests + */ +public class EntityCountHistoryProcessor implements Processor { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(EntityCountHistoryProcessor.class); + + private static final long serialVersionUID = 1L; + + private ElasticSearchAdapter elasticSearchAdapter = null; + private ObjectMapper mapper; + + private static final String SEARCH_PRETTY_STRING = "_search?pretty"; + private static final String TYPE = "type"; + private static final String TABLE = "table"; + private static final String GRAPH = "graph"; + + private List<String> entityTypesToSummarize; + private List<String> vnfEntityTypes; + + private String entityCountHistoryIndexName; + + private boolean summarizeVnfs = false; + + private RestletUtils restletUtils = new RestletUtils(); + + /** + * Instantiates a new Entity Count History + */ + + public EntityCountHistoryProcessor(ElasticSearchAdapter elasticSearchAdapter, + String entityTypesToSummarizeDelimitedList, String vnfEntityTypesDelimitedList, String entityCountHistoryIndexName) { + + this.elasticSearchAdapter = elasticSearchAdapter; + this.entityCountHistoryIndexName = entityCountHistoryIndexName; + + entityTypesToSummarize = + Arrays.asList(entityTypesToSummarizeDelimitedList.toLowerCase().split("[\\s,]+")); + + vnfEntityTypes = + Arrays.asList(vnfEntityTypesDelimitedList.toLowerCase().split("[\\s,]+")); + + summarizeVnfs = vnfEntityTypesDelimitedList.toLowerCase().contains("vnf"); + + this.mapper = new ObjectMapper(); + this.mapper.configure(SerializationFeature.INDENT_OUTPUT, true); + } + + /** + * Processes a entity count history search request + * + * @param exchange The Exchange object generated by Apache Camel for the incoming request + */ + + @Override + public void process(Exchange exchange) throws Exception { + + Request request = exchange.getIn().getHeader(RestletConstants.RESTLET_REQUEST, Request.class); + Response restletResponse = + exchange.getIn().getHeader(RestletConstants.RESTLET_RESPONSE, Response.class); + + Object xTransactionId = exchange.getIn().getHeader("X-TransactionId"); + if (xTransactionId == null) { + xTransactionId = NodeUtils.getRandomTxnId(); + } + + Object partnerName = exchange.getIn().getHeader("X-FromAppId"); + if (partnerName == null) { + partnerName = 
"Browser"; + } + + /* + * Disables automatic Apache Camel Restlet component logging which prints out an undesirable log + * entry which includes client (e.g. browser) information + */ + request.setLoggable(false); + + ClientInfo clientInfo = request.getClientInfo(); + MdcContext.initialize((String) xTransactionId, "AAI-UI", "", (String) partnerName, + clientInfo.getAddress() + ":" + clientInfo.getPort()); + + String typeParameter = getTypeParameter(exchange); + + if (null != typeParameter && !typeParameter.isEmpty()) { + OperationResult operationResult = null; + + try { + operationResult = getResults(restletResponse, typeParameter); + restletResponse.setEntity(operationResult.getResult(), MediaType.APPLICATION_JSON); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.CONFIGURATION_ERROR, exc.getLocalizedMessage()); + } + } else { + LOG.error(AaiUiMsgs.RESOURCE_NOT_FOUND, request.getOriginalRef().toString()); + String errorMessage = + restletUtils.generateJsonErrorResponse("Unsupported request. Resource not found."); + restletResponse.setEntity(errorMessage, MediaType.APPLICATION_JSON); + restletResponse.setStatus(Status.CLIENT_ERROR_NOT_FOUND); + } + + exchange.getOut().setBody(restletResponse); + } + + + /** + * Format line graph output + * + * @param results The results + * @return The JSON object + * @throws JsonProcessingException The JSON processing exception + */ + public JSONObject formatLineGraphOutput(String results) throws JsonProcessingException { + Map<Long, Long> countByDateMap = new HashMap<Long, Long>(); + + JsonNode resultNode = null; + + JSONObject finalResult = new JSONObject(); + JSONArray finalResultArr = new JSONArray(); + + try { + resultNode = mapper.readTree(results); + + final JsonNode bucketsNode = getBucketsNode(resultNode); + + if (bucketsNode.isArray()) { + + for (final JsonNode entityNode : bucketsNode) { + final JsonNode dateBucketNode = entityNode.get("group_by_date").get("buckets"); + if (dateBucketNode.isArray()) { + for (final JsonNode dateBucket : dateBucketNode) { + Long date = dateBucket.get("key").asLong(); + final JsonNode countBucketNode = + dateBucket.get("sort_by_date").get("hits").get("hits"); + + if (countBucketNode.isArray()) { + final JsonNode latestEntityNode = countBucketNode.get(0); + + long currentCount = latestEntityNode.get("_source").get("count").asLong(); + if (countByDateMap.containsKey(date)) { + // add to the value if map already contains this date + currentCount += countByDateMap.get(date); + } + + countByDateMap.put(date, currentCount); + } + } + + } + } + } + + /* + * Sort the map by epoch timestamp + */ + Map<Long, Long> sortedMap = new TreeMap<Long, Long>(countByDateMap); + for (Entry<Long, Long> entry : sortedMap.entrySet()) { + JSONObject dateEntry = new JSONObject(); + dateEntry.put("date", entry.getKey()); + dateEntry.put("count", entry.getValue()); + finalResultArr.put(dateEntry); + } + + } catch (Exception exc) { + LOG.warn(AaiUiMsgs.ERROR_BUILDING_SEARCH_RESPONSE, exc.getLocalizedMessage()); + } + + return finalResult.put("result", finalResultArr); + } + + /** + * Format table output + * + * @param results The results + * @return The JSON object + * @throws JsonProcessingException The JSON processing exception + */ + public JSONObject formatTableOutput(String results) throws JsonProcessingException { + JsonNode resultNode = null; + + JSONObject finalResult = new JSONObject(); + JSONArray entitiesArr = new JSONArray(); + + Map<String, Long> entityCountInTable = initializeEntityMap(); + + long vnfCount = 0; + + try { + 
resultNode = mapper.readTree(results); + + final JsonNode bucketsNode = getBucketsNode(resultNode); + if (bucketsNode.isArray()) { + + for (final JsonNode entityNode : bucketsNode) { + String entityType = entityNode.get("key").asText(); + boolean isAVnf = vnfEntityTypes.contains(entityType); + long countValue = 0; + + if (isAVnf || entityCountInTable.get(entityType) != null) { + final JsonNode hitsBucketNode = entityNode.get("sort_by_date").get("hits").get("hits"); + if (hitsBucketNode.isArray()) { + // the first bucket will be the latest + final JsonNode hitNode = hitsBucketNode.get(0); + + countValue = hitNode.get("_source").get("count").asLong(); + + /* + * Special case: Add all the VNF types together to get aggregate count + */ + if (summarizeVnfs && isAVnf) { + vnfCount += countValue; + countValue = vnfCount; + entityType = "vnf"; + } + + entityCountInTable.replace(entityType, countValue); + } + } + + } + } + for (Entry<String, Long> entry : entityCountInTable.entrySet()) { + JSONObject entityType = new JSONObject(); + entityType.put("key", entry.getKey()); + entityType.put("doc_count", entry.getValue()); + entitiesArr.put(entityType); + } + + finalResult.put("result", entitiesArr); + + } catch (Exception exc) { + LOG.warn(AaiUiMsgs.ERROR_BUILDING_RESPONSE_FOR_TABLE_QUERY, exc.getLocalizedMessage()); + } + + return finalResult; + } + + /** + * Gets the results + * + * @param response The response + * @param type The type + * @return The results + */ + public OperationResult getResults(Response response, String type) { + OperationResult operationResult = new OperationResult(); + + String reqPayload = EntityHistoryQueryBuilder.getQuery(type).toString(); + + try { + final String fullUrlStr = elasticSearchAdapter + .buildElasticSearchUrlForApi(entityCountHistoryIndexName, SEARCH_PRETTY_STRING); + + OperationResult opResult = elasticSearchAdapter.doPost(fullUrlStr, reqPayload, + javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE); + + JSONObject finalOutput = null; + if (type.equalsIgnoreCase(TABLE)) { + finalOutput = formatTableOutput(opResult.getResult()); + } else if (type.equalsIgnoreCase(GRAPH)) { + finalOutput = formatLineGraphOutput(opResult.getResult()); + } + + if (finalOutput != null) { + response.setEntity(finalOutput.toString(), MediaType.APPLICATION_JSON); + operationResult.setResult(finalOutput.toString()); + } + } catch (JsonProcessingException exc) { + restletUtils.handleRestletErrors(LOG, "Unable to map JSONpayload", exc, response); + } + + return operationResult; + } + + /** + * Gets the buckets node + * + * @param node The node + * @return The buckets node + * @throws Exception The exception + */ + public JsonNode getBucketsNode(JsonNode node) throws Exception { + if (node.get("aggregations").get("group_by_entityType").get("buckets") != null) { + return node.get("aggregations").get("group_by_entityType").get("buckets"); + } else { + throw new Exception("Failed to map JSON response"); + } + } + + /** + * Initialize entity map + * + * @return the map + */ + private Map<String, Long> initializeEntityMap() { + Map<String, Long> entityMap = new HashMap<String, Long>(); + for (String entity : entityTypesToSummarize) { + entityMap.put(entity, (long) 0); + } + + return entityMap; + } + + /** + * Extracts the "type" query parameter from the request URI + * + * @param exchange + * @return String containing the value of the "type" query parameter of the request. 
Returns null + * if no "type" parameter found + */ + public String getTypeParameter(Exchange exchange) { + String typeParameter = null; + + String requestUriParameterString = exchange.getIn().getHeader("CamelHttpQuery", String.class); + + if (null != requestUriParameterString) { + String[] requestParameterParts = requestUriParameterString.split("&"); + + String[] parameter = requestParameterParts[0].split("="); + String currentParameterKey = parameter[0]; + + if (null != currentParameterKey && !currentParameterKey.isEmpty()) { + // Check if we're looking at the "type" parameter key + if (currentParameterKey.equals(TYPE)) { + boolean uriIncludesTypeParameterValue = + (parameter.length >= 2) && !parameter[1].isEmpty(); + + if (uriIncludesTypeParameterValue) { + String typeParameterValue = parameter[1]; + + // Is the parameter value one that we return data for? + if (typeParameterValue.equalsIgnoreCase(TABLE) + || typeParameterValue.equalsIgnoreCase(GRAPH)) { + typeParameter = typeParameterValue; + } + } + } + } + } + + return typeParameter; + } + + + public void setRestletUtils(RestletUtils restletUtils) { + this.restletUtils = restletUtils; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityTypeSummary.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityTypeSummary.java new file mode 100644 index 0000000..b36753e --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityTypeSummary.java @@ -0,0 +1,53 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.search; + +import java.util.ArrayList; +import java.util.List; + +public class EntityTypeSummary { + private int totalChartHits; + private List<EntityTypeSummaryBucket> buckets = new ArrayList<>(); + + public int getTotalChartHits() { + return totalChartHits; + } + + public List<EntityTypeSummaryBucket> getBuckets() { + return buckets; + } + + public void setTotalChartHits(int totalChartHits) { + this.totalChartHits = totalChartHits; + } + + public void setBuckets(List<EntityTypeSummaryBucket> buckets) { + this.buckets = buckets; + } + + public void addBucket(EntityTypeSummaryBucket bucket) { + this.buckets.add(bucket); + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityTypeSummaryBucket.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityTypeSummaryBucket.java new file mode 100644 index 0000000..9568232 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/EntityTypeSummaryBucket.java @@ -0,0 +1,46 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search; + +public class EntityTypeSummaryBucket { + private int count; + private String key; + + public int getCount() { + return count; + } + + public String getKey() { + return key; + } + + public void setCount(int count) { + this.count = count; + } + + public void setKey(String key) { + this.key = key; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchEntityProperties.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchEntityProperties.java new file mode 100644 index 0000000..c790bb1 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchEntityProperties.java @@ -0,0 +1,49 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search; + +import java.util.HashMap; +import java.util.Map; + +public class SearchEntityProperties { + private String type; + private Map<String, String> fields = new HashMap<>(); + + public String getType() { + return type; + } + + public Map<String, String> getFields() { + return fields; + } + + public void setType(String type) { + this.type = type; + } + + public void setFields(Map<String, String> field) { + this.fields = field; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchResponse.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchResponse.java new file mode 100644 index 0000000..201c154 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchResponse.java @@ -0,0 +1,102 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search; + +import java.util.ArrayList; +import java.util.List; + +import org.onap.aai.sparky.search.entity.SearchSuggestion; + +/** + * The Class SearchResponse. + */ +public class SearchResponse { + + private long processingTimeInMs; + private int totalFound; + + private List<SearchSuggestion> suggestions; + + /** + * Instantiates a new search response. 
+ */ + public SearchResponse() { + this.suggestions = new ArrayList<SearchSuggestion>(); + this.processingTimeInMs = 0; + this.totalFound = 0; + } + + public long getProcessingTimeInMs() { + return processingTimeInMs; + } + + public void setProcessingTimeInMs(long processingTimeInMs) { + this.processingTimeInMs = processingTimeInMs; + } + + public int getTotalFound() { + return totalFound; + } + + public void setTotalFound(int totalFound) { + this.totalFound = totalFound; + } + + public List<SearchSuggestion> getSuggestions() { + return suggestions; + } + + public void setSuggestions(List<SearchSuggestion> suggestions) { + this.suggestions = suggestions; + } + + /** + * Adds the entity entry. + * + * @param suggestionEntry that will be converted to JSON + */ + public void addSuggestion(SearchSuggestion suggestionEntity){ + suggestions.add(suggestionEntity); + } + + /** + * Increments the total number of hits for this SearchResponse by + * the value passed in. + * + * @param additionalCount - Count to increment the total found + */ + public void addToTotalFound(int additionalCount) { + totalFound += additionalCount; + } + + @Override + public String toString() { + return "SearchResponse [processingTimeInMs=" + processingTimeInMs + ", totalFound=" + totalFound + + ", " + (suggestions != null ? "suggestions=" + suggestions : "") + "]"; + } + + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchServiceAdapter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchServiceAdapter.java new file mode 100644 index 0000000..d37997a --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/SearchServiceAdapter.java @@ -0,0 +1,139 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import javax.ws.rs.core.MediaType; + +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.Headers; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.restclient.client.RestClient; +import org.onap.aai.sparky.dal.rest.RestClientFactory; +import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig; +import org.slf4j.MDC; + + +/** + * The Class SearchServiceAdapter. 
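+ *
+ * Thin REST client wrapper for the AAI search-data-service: it builds the versioned
+ * /services/search-data-service/.../search/indexes/... request URLs and attaches the
+ * X-TransactionId and X-FromAppId headers from the logging MDC to every outgoing call.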
+ */ +public class SearchServiceAdapter { + + private static final String VALUE_QUERY = "query"; + + private RestClient client; + private RestEndpointConfig endpointConfig; + private String serviceApiVersion; + + private Map<String, List<String>> commonHeaders; + + /** + * Instantiates a new search adapter. + * @throws Exception + */ + public SearchServiceAdapter(RestEndpointConfig endpointConfig, String serviceApiVersion) throws Exception { + + client = RestClientFactory.buildClient(endpointConfig); + + commonHeaders = new HashMap<String, List<String>>(); + commonHeaders.put("Accept", Arrays.asList("application/json")); + commonHeaders.put(Headers.FROM_APP_ID, Arrays.asList("AAI-UI")); + + this.serviceApiVersion = serviceApiVersion; + this.endpointConfig = endpointConfig; + } + + public String getServiceApiVersion() { + return serviceApiVersion; + } + + public void setServiceApiVersion(String serviceApiVersion) { + this.serviceApiVersion = serviceApiVersion; + } + + public RestEndpointConfig getEndpointConfig() { + return endpointConfig; + } + + public void setEndpointConfig(RestEndpointConfig endpointConfig) { + this.endpointConfig = endpointConfig; + } + + public OperationResult doPost(String url, String jsonPayload, String acceptContentType) { + OperationResult or = client.post(url, jsonPayload, getTxnHeader(), + MediaType.APPLICATION_JSON_TYPE, MediaType.APPLICATION_JSON_TYPE); + return new OperationResult(or.getResultCode(), or.getResult()); + } + + public OperationResult doGet(String url, String acceptContentType) { + OperationResult or = + client.get(url, getTxnHeader(), MediaType.APPLICATION_JSON_TYPE); + return new OperationResult(or.getResultCode(), or.getResult()); + } + + public OperationResult doPut(String url, String payload, String acceptContentType) { + OperationResult or = client.put(url, payload, getTxnHeader(), + MediaType.APPLICATION_JSON_TYPE, MediaType.APPLICATION_JSON_TYPE); + return new OperationResult(or.getResultCode(), or.getResult()); + } + + public OperationResult doDelete(String url, String acceptContentType) { + + OperationResult or = + client.delete(url, getTxnHeader(), MediaType.APPLICATION_JSON_TYPE); + return new OperationResult(or.getResultCode(), or.getResult()); + } + + public Map<String, List<String>> getTxnHeader() { + HashMap<String, List<String>> headers = new HashMap<String, List<String>>(); + headers.putAll(this.commonHeaders); + headers.put("X-TransactionId", Arrays.asList(MDC.get(MdcContext.MDC_REQUEST_ID))); + headers.put("X-FromAppId", Arrays.asList(MDC.get(MdcContext.MDC_PARTNER_NAME))); + return headers; + } + + /** + * Get Full URL for search + * + * @param api the api + * @param indexName + * @return the full url + */ + public String buildSearchServiceQueryUrl(String indexName) { + return buildSearchServiceUrlForApi(indexName, VALUE_QUERY); + } + + public String buildSearchServiceUrlForApi(String indexName, String api) { + return String.format("https://%s:%s/services/search-data-service/%s/search/indexes/%s/%s", + endpointConfig.getEndpointIpAddress(), endpointConfig.getEndpointServerPort(), + serviceApiVersion, indexName, api); + } + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/UnifiedSearchProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/UnifiedSearchProcessor.java new file mode 100644 index 0000000..dfe9016 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/UnifiedSearchProcessor.java @@ -0,0 +1,188 @@ +/** + * 
============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.TreeMap; + +import javax.servlet.http.HttpServletRequest; + +import org.apache.camel.Exchange; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.logging.util.ServletUtils; +import org.onap.aai.sparky.search.api.SearchProvider; +import org.onap.aai.sparky.search.entity.QuerySearchEntity; +import org.onap.aai.sparky.search.entity.SearchSuggestion; +import org.onap.aai.sparky.search.registry.SearchProviderRegistry; +import org.onap.aai.sparky.util.NodeUtils; + +import com.fasterxml.jackson.databind.ObjectMapper; + +public class UnifiedSearchProcessor { + + protected static final String HASH_ID_KEY = "hashId"; + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(UnifiedSearchProcessor.class); + + protected SearchProviderRegistry searchProviderRegistry; + protected ObjectMapper mapper; + protected boolean useOrderedSearchProviderKeys; + + public UnifiedSearchProcessor() { + mapper = new ObjectMapper(); + this.useOrderedSearchProviderKeys = false; + } + + public boolean isUseOrderedSearchProviderKeys() { + return useOrderedSearchProviderKeys; + } + + public void setUseOrderedSearchProviderKeys(boolean useOrderedSearchProviderKeys) { + this.useOrderedSearchProviderKeys = useOrderedSearchProviderKeys; + } + + public void search(Exchange exchange) { + HttpServletRequest request = exchange.getIn().getBody(HttpServletRequest.class); + ServletUtils.setUpMdcContext(exchange, request); + + SearchResponse searchResponse = new SearchResponse(); + long processTime = System.currentTimeMillis(); + int totalAdded = 0; + + try { + + String payload = exchange.getIn().getBody(String.class); + + if (payload == null || payload.isEmpty()) { + + LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, "Request Payload is empty"); + exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, 404); + return; + + } else { + + QuerySearchEntity searchRequest = mapper.readValue(payload, QuerySearchEntity.class); + int maxResultsPerSearch = Integer.valueOf(searchRequest.getMaxResults()); + + Map<String, List<SearchSuggestion>> searchProviderSuggestions = + new HashMap<String, List<SearchSuggestion>>(); + + int totalSuggestionsFromProviders = 
0; + List<SearchSuggestion> suggestions = null; + for (SearchProvider searchProvider : searchProviderRegistry.getSearchProviders()) { + suggestions = searchProvider.search(searchRequest); + totalSuggestionsFromProviders += suggestions.size(); + searchProviderSuggestions.put(searchProvider.getClass().getCanonicalName(), suggestions); + } + + /* + * Using ordered search provider keys allows us to deterministically calculate how many + * results from each provider should be returned. At the moment, this behavior is primarily + * only beneficial to test classes. As there is a cost to sorted-collections in the call + * processing path, this behavior has been made optional. + */ + + if (useOrderedSearchProviderKeys) { + searchProviderSuggestions = + new TreeMap<String, List<SearchSuggestion>>(searchProviderSuggestions); + } + + if (totalSuggestionsFromProviders > 0) { + + int suggestionIndex = 0; + + Set<Entry<String, List<SearchSuggestion>>> searchProviderResults = + searchProviderSuggestions.entrySet(); + + while (totalAdded < maxResultsPerSearch && (totalAdded < totalSuggestionsFromProviders)) { + + for (Entry<String, List<SearchSuggestion>> searchProviderResultList : searchProviderResults) { + + if ((suggestionIndex <= (searchProviderResultList.getValue().size() - 1))) { + + if (totalAdded < maxResultsPerSearch) { + searchResponse + .addSuggestion(searchProviderResultList.getValue().get(suggestionIndex)); + totalAdded++; + } + } + + } + + suggestionIndex++; + + } + + } + + } + + searchResponse.addToTotalFound(totalAdded); + + processTime = System.currentTimeMillis() - processTime; + searchResponse.setProcessingTimeInMs(processTime); + String searchResponseJson = NodeUtils.convertObjectToJson(searchResponse, true); + exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, 200); + exchange.getOut().setBody(searchResponseJson); + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_PROCESSING_REQUEST, exc); + + exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, 500); + exchange.getOut().setBody( + ServletUtils.generateJsonErrorResponse("Processing error = " + exc.getMessage()), + String.class); + + } finally { + /* + * Restore the txnId + appId from the current thread local via the MdcContext + */ + + ServletUtils.getTxnHeaders().forEach((key, value) -> { + exchange.getOut().setHeader(key, value); + }); + + exchange.getOut().setHeader("RequestUrl", request.getRequestURI()); + exchange.getOut().setHeader("RequestPort", request.getLocalPort()); + + } + } + + public SearchProviderRegistry getSearchProviderRegistry() { + return searchProviderRegistry; + } + + public void setSearchProviderRegistry(SearchProviderRegistry searchProviderRegistry) { + this.searchProviderRegistry = searchProviderRegistry; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/api/SearchProvider.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/api/SearchProvider.java new file mode 100644 index 0000000..923c4d6 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/api/SearchProvider.java @@ -0,0 +1,36 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search.api; + +import java.util.List; + +import org.onap.aai.sparky.search.entity.QuerySearchEntity; +import org.onap.aai.sparky.search.entity.SearchSuggestion; + +public interface SearchProvider { + + List<SearchSuggestion> search(QuerySearchEntity queryRequest); + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/config/SuggestionConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/config/SuggestionConfig.java new file mode 100644 index 0000000..070f305 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/config/SuggestionConfig.java @@ -0,0 +1,76 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.search.config; + +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +public class SuggestionConfig { + + + private Map<String, String> pairingList; + private Collection<String> stopWords; + private String defaultPairingValue; + private String viSuggestionRoute; + + + public SuggestionConfig() {} + + + public Collection<String> getStopWords() { + return stopWords; + } + + public void setStopWords(Collection<String> stopWords) { + this.stopWords = stopWords; + } + + public Map<String, String> getPairingList() { + return pairingList; + } + + public void setPairingList(HashMap<String, String> pairingList) { + this.pairingList = pairingList; + } + + public String getDefaultPairingValue() { + return defaultPairingValue; + } + + public void setDefaultPairingValue(String defaultPairingValue) { + this.defaultPairingValue = defaultPairingValue; + } + + public String getViSuggestionRoute() { + return viSuggestionRoute; + } + + public void setViSuggestionRoute(String viSuggestionRoute) { + this.viSuggestionRoute = viSuggestionRoute; + } + + +}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/entity/QuerySearchEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/entity/QuerySearchEntity.java new file mode 100644 index 0000000..37059fb --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/entity/QuerySearchEntity.java @@ -0,0 +1,73 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search.entity; + +import com.fasterxml.jackson.annotation.JsonIgnore; + +/** + * The Class QuerySearchEntity. + */ +public class QuerySearchEntity { + + private static final String DEFAULT_MAX_RESULTS = "10"; + public String maxResults; + public String queryStr; + + /** + * Instantiates a new query search entity with the default maximum result count.
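+ * The incoming request payload is bound onto this entity with Jackson and is expected to
+ * look roughly like the following (values are illustrative):
+ *
+ * <pre>{@code
+ * { "queryStr": "vserver example-host", "maxResults": "10" }
+ * }</pre>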
+ */ + public QuerySearchEntity() { + maxResults = DEFAULT_MAX_RESULTS; + queryStr = null; + } + + public String getMaxResults() { + return maxResults; + } + + public void setMaxResults(String maxResults) { + this.maxResults = maxResults; + } + + public String getQueryStr() { + return queryStr; + } + + public void setQueryStr(String queryStr) { + this.queryStr = queryStr; + } + + @JsonIgnore + public String[] getSearchTerms() { + + if (queryStr == null) { + return null; + } + + return queryStr.split(" "); + + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/entity/SearchSuggestion.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/entity/SearchSuggestion.java new file mode 100644 index 0000000..4529af5 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/entity/SearchSuggestion.java @@ -0,0 +1,39 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search.entity; + +public interface SearchSuggestion { + public String getHashId(); + + public void setHashId(String hashId); + + public String getRoute(); + + public void setRoute(String route); + + public String getText(); + + public void setText(String searchText); +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterElasticSearchAdapter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterElasticSearchAdapter.java new file mode 100644 index 0000000..3a01e2b --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterElasticSearchAdapter.java @@ -0,0 +1,119 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search.filters; + +import java.util.ArrayList; +import java.util.List; + +import javax.json.JsonObject; +import javax.ws.rs.core.MediaType; + +import org.json.JSONArray; +import org.json.JSONObject; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.search.filters.config.UiFilterDataSourceConfig; +import org.onap.aai.sparky.search.filters.entity.UiFilterEntity; +import org.onap.aai.sparky.viewandinspect.config.SparkyConstants; + + +/** + * Performs all Elasticsearch related queries for filters related to + * the Sparky-FE. + * + * @author RICHARV + */ +public class FilterElasticSearchAdapter { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(FilterElasticSearchAdapter.class); + private static final String AGGS = "aggregations"; + private static final String CONTAINER = "default"; + private static final String BUCKETS = "buckets"; + private static final String FILTER_VALUE_KEY = "key"; + private ElasticSearchAdapter elasticSearchAdapter; + + public FilterElasticSearchAdapter(ElasticSearchAdapter elasticSearchAdapter) { + this.elasticSearchAdapter = elasticSearchAdapter; + } + + /** + * For a given UiFilterEntity, will attempt to contact an Elasticsearch instance + * and fetch all possible values for filter's field name. + * + * @param filter - Filter object against which the search will take place. + * @param sourceData - If present, contains the index name and field value to search against. + * @return - A List of strings if results were found, else empty list. 
+ */ + public List<String> fetchValuesForFilter(UiFilterEntity filter, UiFilterDataSourceConfig dataSourceConfig) { + ArrayList<String> filterValues = new ArrayList<String>(); + + if(dataSourceConfig != null) { + JsonObject filterValueQuery = null; + if(dataSourceConfig.getPathToField() != null) { + filterValueQuery = FilterQueryBuilder.createNestedFilterValueQueryObject(dataSourceConfig.getFieldName(), dataSourceConfig.getPathToField()); + } else { + filterValueQuery = FilterQueryBuilder.createFilterValueQueryObject(dataSourceConfig.getFieldName()); + } + + OperationResult opResult = elasticSearchAdapter.doPost( + elasticSearchAdapter.buildElasticSearchUrlForApi(dataSourceConfig.getIndexName(), + SparkyConstants.ES_SEARCH_API), + filterValueQuery.toString(), MediaType.APPLICATION_JSON_TYPE); + + String result = opResult.getResult(); + if(opResult.wasSuccessful() && result != null) { + JSONObject responseJson = new JSONObject(result); + JSONObject aggJson = responseJson.getJSONObject(AGGS); + + JSONObject containerJson = null; + if(dataSourceConfig.getPathToField() != null) { + JSONObject nestedContainer = aggJson.getJSONObject(dataSourceConfig.getPathToField()); + containerJson = nestedContainer.getJSONObject(dataSourceConfig.getFieldName()); + } else { + containerJson = aggJson.getJSONObject(CONTAINER); + } + + JSONArray buckets = containerJson.getJSONArray(BUCKETS); + + int bucketLength = buckets.length(); + for(int i = 0; i < bucketLength; i++) { + JSONObject filterBucket = buckets.getJSONObject(i); + + String filterValue = filterBucket.getString(FILTER_VALUE_KEY); + if(filterValue != null && !filterValue.isEmpty()) { + filterValues.add(filterValue); + } + } + } else { + LOG.error(AaiUiMsgs.ERROR_FETCHING_FILTER_VALUES, String.valueOf(opResult.getResultCode()), filter.getFilterName()); + } + } + filterValues.sort(String::compareToIgnoreCase); + return filterValues; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterProcessor.java new file mode 100644 index 0000000..9d404a9 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterProcessor.java @@ -0,0 +1,144 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.search.filters; + +import java.util.ArrayList; +import java.util.List; + +import javax.json.JsonObject; +import javax.servlet.http.HttpServletRequest; + +import org.apache.camel.Exchange; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.logging.util.ServletUtils; +import org.onap.aai.sparky.search.filters.entity.UiFilterEntity; +import org.onap.aai.sparky.search.filters.entity.UiFiltersEntity; +import org.onap.aai.sparky.viewandinspect.config.SparkyConstants; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +public class FilterProcessor { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(FilterProcessor.class); + + private ObjectMapper mapper; + private FilteredSearchHelper filteredSearchHelper; + + public FilterProcessor() { + this.mapper = new ObjectMapper(); + } + + public ObjectMapper getMapper() { + return mapper; + } + + public FilteredSearchHelper getFilteredSearchHelper() { + return filteredSearchHelper; + } + + public void setFilteredSearchHelper(FilteredSearchHelper filteredSearchHelper) { + this.filteredSearchHelper = filteredSearchHelper; + } + + public void getFiltersWithValues(Exchange exchange) { + + HttpServletRequest request = exchange.getIn().getBody(HttpServletRequest.class); + ServletUtils.setUpMdcContext(exchange, request); + + UiFiltersEntity viewFiltersList = null; + boolean wasErrorDuringFilterDiscovery = false; + + try { + String payload = exchange.getIn().getBody(String.class); + + if (payload == null || payload.isEmpty()) { + /* Don't throw back an error, just return an empty set */ + LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, "Request Payload is empty"); + wasErrorDuringFilterDiscovery = true; + } else { + String viewName = mapper.readValue(payload, JsonNode.class).get(SparkyConstants.UI_FILTER_VIEW_NAME_PARAMETER).asText(); + + if (viewName == null || viewName.isEmpty()) { + wasErrorDuringFilterDiscovery = true; + } else { + viewFiltersList = filteredSearchHelper.doFilterDiscovery(viewName); + } + } + } catch(Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, "FilterProcessor failed to get filter list due to error = " + exc.getMessage()); + + exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, 500); + + exchange.getOut().setBody( + ServletUtils.generateJsonErrorResponse("FilterProcessor failed to get filter list due to error = " + exc.getMessage()), + String.class); + + return; + } + + boolean wasErrorDuringValueSearch = false; + if(!wasErrorDuringFilterDiscovery) { + try { + if(!viewFiltersList.getFilters().isEmpty()) { + List<String> filterIds = new ArrayList<String>(); + + for(UiFilterEntity filterEntity : viewFiltersList.getFilters()) { + filterIds.add(filterEntity.getFilterId()); + } + + UiFiltersEntity responseFiltersList = filteredSearchHelper.doFilterEnumeration(filterIds); + + JsonObject finalResponse = UiFiltersEntityConverter.convertUiFiltersEntityToUnifiedFilterResponse(responseFiltersList); + exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, 200); + exchange.getOut().setBody(finalResponse.toString()); + } else { + wasErrorDuringValueSearch = true; + } + } catch(Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, "FilterProcessor failed to generate valid unifiedFilterRequest response due to error, " + exc.getMessage()); + + exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, 500); + + 
exchange.getOut().setBody( + ServletUtils.generateJsonErrorResponse("FilterProcessor failed to generate valid unifiedFilterRequest response due to error = " + exc.getMessage()), + String.class); + + return; + } + } + + // In the case of an error we want to respond with a valid empty response + if(wasErrorDuringFilterDiscovery || wasErrorDuringValueSearch) { + //response.setStatus(Status.SUCCESS_OK); + //response.setEntity(UiFiltersEntityConverter.generateEmptyResponse().toString(), MediaType.APPLICATION_JSON); + exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, 404); + exchange.getOut().setBody(UiFiltersEntityConverter.generateEmptyResponse().toString()); + } + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterQueryBuilder.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterQueryBuilder.java new file mode 100644 index 0000000..41a7b91 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilterQueryBuilder.java @@ -0,0 +1,218 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search.filters; + +import java.util.ArrayList; +import java.util.List; + +import javax.json.Json; +import javax.json.JsonArrayBuilder; +import javax.json.JsonObject; +import javax.json.JsonObjectBuilder; + +import org.onap.aai.sparky.search.filters.config.FiltersConfig; +import org.onap.aai.sparky.search.filters.config.UiFilterConfig; +import org.onap.aai.sparky.search.filters.entity.AggregationEntity; +import org.onap.aai.sparky.search.filters.entity.BoolQueryBuilder; +import org.onap.aai.sparky.search.filters.entity.FilteredAggregationQueryBuilder; +import org.onap.aai.sparky.search.filters.entity.MatchFilterCriteriaEntity; +import org.onap.aai.sparky.search.filters.entity.SearchFilter; + +/** + * Used to generate queries against Elasticsearch for filter related queries. 
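+ *
+ * <p>As a concrete illustration, {@code createFilterValueQueryObject("prov-status")} (the
+ * field name here is an arbitrary example) produces a zero-sized terms-aggregation query of
+ * roughly this shape:
+ *
+ * <pre>{@code
+ * { "size": "0", "aggs": { "default": { "terms": { "field": "prov-status", "size": 0 } } } }
+ * }</pre>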
+ */ +public class FilterQueryBuilder { + + private static final int EXISTING_FILTERS_LIMIT = 0; + private static final int SHOULD_BRANCH_LIMIT = 2; + + public static JsonObject createFilteredBoolQueryObject(FiltersConfig filtersConfig, List<SearchFilter> searchFilters, int minShouldMatch, List<String> fields) { + + if (searchFilters == null || searchFilters.size() == 0) { + return null; + } + + int searchFilterValueSize = 0; + + BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); + + for (SearchFilter searchFilter : searchFilters) { + + searchFilterValueSize = searchFilter.getValues().size(); + + /* + * translate the filter-id into the filter-name from the oxm data model/config file + */ + UiFilterConfig filter = filtersConfig.getFilterById(searchFilter.getFilterId()); + + if (filter == null || filter.getFilterName() == null) { + // log error and continue + } else { + + String fieldName = filter.getDataSource().getFieldName(); + if(!fields.contains(fieldName)) { + fields.add(fieldName); + } + + if (searchFilterValueSize >= SHOULD_BRANCH_LIMIT) { + // Add should branches + for (String filterValue : searchFilter.getValues()) { + boolQueryBuilder.addShouldFilter(new MatchFilterCriteriaEntity(fieldName, filterValue)); + } + + } else if (searchFilterValueSize > EXISTING_FILTERS_LIMIT) { + // Add must branch + for (String filterValue : searchFilter.getValues()) { + boolQueryBuilder.addMustFilter(new MatchFilterCriteriaEntity(fieldName, filterValue)); + } + } + } + } + + boolQueryBuilder.setMinShouldMatch(minShouldMatch); + + return boolQueryBuilder.getJsonObject(); + } + + public static JsonObject createAggregationQueryArray(FiltersConfig filtersConfig, List<SearchFilter> searchFilters) { + + if (searchFilters == null || searchFilters.size() == 0) { + // log error + return null; + } + + FilteredAggregationQueryBuilder aggQueryBuilder = new FilteredAggregationQueryBuilder(); + + for (SearchFilter searchFilter : searchFilters) { + + /* + * translate the filter-id into the filter-name from the oxm data model/config file + */ + UiFilterConfig filter = filtersConfig.getFilterById(searchFilter.getFilterId()); + + if (filter == null || filter.getFilterName() == null) { + // log error and continue + } else { + String fieldName = filter.getDataSource().getFieldName(); + aggQueryBuilder.addAggregationEntity(new AggregationEntity(fieldName, fieldName, 0)); + } + + } + + return aggQueryBuilder.getJsonObject(); + } + + public static JsonObject createCombinedBoolAndAggQuery(FiltersConfig filtersConfig, List<SearchFilter> searchFilters, int minShouldMatch) { + JsonObjectBuilder wrappedQueryBuilder = Json.createObjectBuilder(); + if(searchFilters != null) { + List<String> fields = new ArrayList<String>(); + JsonObject boolQuery = createFilteredBoolQueryObject(filtersConfig,searchFilters, minShouldMatch, fields); + JsonObject aggQuery = createAggregationQueryArray(filtersConfig, searchFilters); + + if (boolQuery != null) { + wrappedQueryBuilder.add("size", 0); + + JsonArrayBuilder filedsArrayBuilder = Json.createBuilderFactory(null).createArrayBuilder(); // TODO -> Should we use a class instance factory? 
+ for(String field : fields) { + filedsArrayBuilder.add(field); + } + wrappedQueryBuilder.add("fields", filedsArrayBuilder.build()); + + wrappedQueryBuilder.add("query", boolQuery); + } + + if (aggQuery != null) { + wrappedQueryBuilder.add("aggs", aggQuery); + } + } + return wrappedQueryBuilder.build(); + } + + public static JsonObject createFilterValueQueryObject(String fieldValue) { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + jsonBuilder.add("size", "0"); // avoid source data + buildZeroTermSummaryQuery(jsonBuilder, fieldValue); + + return jsonBuilder.build(); + } + + public static JsonObject createNestedFilterValueQueryObject(String fieldValue, + String pathToField) { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + jsonBuilder.add("size", "0"); // avoid source data + generateNestedAggregations(jsonBuilder, fieldValue, pathToField); + + return jsonBuilder.build(); + } + + public static void buildZeroTermSummaryQuery(JsonObjectBuilder jsonBuilder, String fieldValue) { + JsonObjectBuilder aggsBlobBuilder = Json.createObjectBuilder(); + getSummaryAggsBlob(aggsBlobBuilder, fieldValue, 0); + jsonBuilder.add("aggs", aggsBlobBuilder.build()); + } + + public static void getSummaryAggsBlob(JsonObjectBuilder aggsBlobBuilder, String fieldValue, + int resultSize) { + JsonObjectBuilder fieldBuilder = + Json.createObjectBuilder().add("field", fieldValue).add("size", resultSize); + JsonObject aggsFieldBlob = fieldBuilder.build(); + JsonObjectBuilder defaultBlobBuilder = Json.createObjectBuilder().add("terms", aggsFieldBlob); + JsonObject defaultBlob = defaultBlobBuilder.build(); + aggsBlobBuilder.add("default", defaultBlob); + } + + public static void addNestedSummaryAggsBlob(JsonObjectBuilder nestedAggsBuilder, + String containerValue, String fieldValue, int resultSize) { + JsonObjectBuilder fieldBuilder = Json.createObjectBuilder() + .add("field", containerValue + "." 
+ fieldValue).add("size", resultSize); + JsonObject aggsFieldObject = fieldBuilder.build(); + + JsonObjectBuilder termBuilder = Json.createObjectBuilder().add("terms", aggsFieldObject); + JsonObject termObject = termBuilder.build(); + + JsonObjectBuilder namedAggsBuilder = Json.createObjectBuilder().add(fieldValue, termObject); + JsonObject namedAggsObject = namedAggsBuilder.build(); + + nestedAggsBuilder.add("aggs", namedAggsObject); + } + + public static void generateNestedAggregations(JsonObjectBuilder jsonBuilder, String fieldValue, + String pathToField) { + JsonObjectBuilder nestedAggsBuilder = Json.createObjectBuilder(); + + JsonObjectBuilder pathObjectBuilder = Json.createObjectBuilder().add("path", pathToField); + JsonObject nestedPathObject = pathObjectBuilder.build(); + + JsonObjectBuilder nestedObjectBuilder = + Json.createObjectBuilder().add("nested", nestedPathObject); + + addNestedSummaryAggsBlob(nestedObjectBuilder, pathToField, fieldValue, 0); + + JsonObject nestedObject = nestedObjectBuilder.build(); + nestedAggsBuilder.add(pathToField, nestedObject); + + jsonBuilder.add("aggs", nestedAggsBuilder.build()); + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilteredSearchHelper.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilteredSearchHelper.java new file mode 100644 index 0000000..0e981b4 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/FilteredSearchHelper.java @@ -0,0 +1,158 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.search.filters; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.search.filters.config.FiltersConfig; +import org.onap.aai.sparky.search.filters.config.FiltersDetailsConfig; +import org.onap.aai.sparky.search.filters.config.UiFilterConfig; +import org.onap.aai.sparky.search.filters.config.UiFilterDataSourceConfig; +import org.onap.aai.sparky.search.filters.config.UiFilterListItemConfig; +import org.onap.aai.sparky.search.filters.config.UiViewListItemConfig; +import org.onap.aai.sparky.search.filters.entity.UiFilterEntity; +import org.onap.aai.sparky.search.filters.entity.UiFilterValueEntity; +import org.onap.aai.sparky.search.filters.entity.UiFiltersEntity; + +public class FilteredSearchHelper { + private static final Logger LOG = LoggerFactory.getInstance().getLogger(FilteredSearchHelper.class); + + private FiltersConfig filtersConfig; + private Map<String, UiFilterConfig> filtersMap = null; + private FilterElasticSearchAdapter filterSearchAdapter = null; + + public FilteredSearchHelper(FiltersConfig filterConfig,FilterElasticSearchAdapter filterElasticSearchAdapter) { + this.filtersConfig = filterConfig; + this.filterSearchAdapter = filterElasticSearchAdapter; + + if (filtersMap == null) { + filtersMap = new HashMap<>(); + + final FiltersDetailsConfig uiFiltersConfig = filterConfig.getFiltersConfig(); + + if (uiFiltersConfig != null) { + for (UiFilterConfig filter : uiFiltersConfig.getFilters()) { + filtersMap.put(filter.getFilterId(), filter); + } + } + } + + } + + public FiltersConfig getFiltersConfig() { + return filtersConfig; + } + + public void setFiltersConfig(FiltersConfig filterConfig) { + this.filtersConfig = filterConfig; + } + + public UiFiltersEntity doFilterDiscovery(String viewName) { + List<UiViewListItemConfig> views = filtersConfig.getViewsConfig().getViews(); + List<UiFilterListItemConfig> filters = null; + UiFiltersEntity viewFiltersList = new UiFiltersEntity(); + + if(viewName != null) { + for (UiViewListItemConfig view: views) { + if (viewName.equalsIgnoreCase(view.getViewName())) { + filters = view.getFilters(); + break; + } + } + + if (filters == null) { + LOG.error(AaiUiMsgs.VIEW_NAME_NOT_SUPPORTED, viewName); + } else { + for (UiFilterListItemConfig filter : filters) { + FiltersDetailsConfig filtersDetailsConfig = filtersConfig.getFiltersConfig(); + + for (UiFilterConfig filterConfig: filtersDetailsConfig.getFilters()) { + if (filterConfig.getFilterId().equals(filter.getFilterId())) { + UiFilterEntity filterEntity = new UiFilterEntity(filterConfig); + if(filter.getDefaultValue() != null) { + filterEntity.setDefaultValue(filter.getDefaultValue()); + } + viewFiltersList.addFilter(filterEntity); + } + } + } + } + } + return viewFiltersList; + } + + public UiFiltersEntity doFilterEnumeration(List<String> requestedFilterIds) { + UiFiltersEntity viewFiltersList = new UiFiltersEntity(); + + for (String requestedFilterId : requestedFilterIds) { + if (null == filtersMap.get(requestedFilterId)) { + String errorMessage = "Requested filter ID '" + requestedFilterId + "' does not exist."; + LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, errorMessage); + } else { + UiFilterConfig sourceData = filtersMap.get(requestedFilterId); + UiFilterEntity filterEntity = new UiFilterEntity(sourceData); + this.getFilterEnumeration(filterEntity, sourceData); + 
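// populate the filter's selectable values from the search back end before adding it to the response +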
viewFiltersList.addFilter(filterEntity); + } + } + + return viewFiltersList; + } + + public void getFilterEnumeration(UiFilterEntity filter, UiFilterConfig sourceData) { + List<String> filterValues = filterSearchAdapter.fetchValuesForFilter(filter, sourceData.getDataSource()); + + for(String value : filterValues) { + UiFilterValueEntity valueEntity = new UiFilterValueEntity(); + valueEntity.setDisplayName(value); + valueEntity.setFilterValue(value); + filter.addFilterValue(valueEntity); + } + } + + public Map<String, UiFilterConfig> getFiltersMap() { + return filtersMap; + } + + public void setFiltersMap(Map<String, UiFilterConfig> filtersMap) { + this.filtersMap = filtersMap; + } + + public UiFilterDataSourceConfig getFilterDataSource(String filterId) { + UiFilterConfig filterConfig = filtersMap.get(filterId); + UiFilterDataSourceConfig returnValue = null; + + if(filterConfig != null) { + returnValue = filterConfig.getDataSource(); + } + + return returnValue; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/UiFiltersEntityConverter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/UiFiltersEntityConverter.java new file mode 100644 index 0000000..46f62ac --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/UiFiltersEntityConverter.java @@ -0,0 +1,180 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.search.filters; + +import java.util.List; + +import javax.json.Json; +import javax.json.JsonArray; +import javax.json.JsonArrayBuilder; +import javax.json.JsonObject; +import javax.json.JsonObjectBuilder; + +import org.onap.aai.sparky.search.filters.config.UiFilterOptionsValuesConfig; +import org.onap.aai.sparky.search.filters.entity.UiFilterEntity; +import org.onap.aai.sparky.search.filters.entity.UiFilterValueEntity; +import org.onap.aai.sparky.search.filters.entity.UiFiltersEntity; + +public class UiFiltersEntityConverter { + + private static final String KEY_TYPE = "type"; + private static final String KEY_MULTISELECT = "multiSelect"; + private static final String KEY_WATERMARK = "watermark"; + private static final String KEY_CONTROLS = "controls"; + private static final String KEY_LABEL = "label"; + private static final String KEY_FILTERS = "filters"; + private static final String KEY_DECODE = "decode"; + private static final String KEY_CODE = "code"; + private static final String KEY_DEFAULT_VALUE = "defaultValue"; + + /** + * Converts a UiFiltersEntity into a JSON object to satisfy a new (as of 23 Oct 2017) + * filter library being used in the FE. + * + * @param entityToConvert - The UiFiltersEntity to be converted into a JSON response. + * @return A JsonObject representing the passed in UiFiltersEntity. + */ + public static JsonObject convertUiFiltersEntityToUnifiedFilterResponse(UiFiltersEntity entityToConvert) { + JsonObjectBuilder filterBuilder = Json.createObjectBuilder(); + + if(entityToConvert != null) { + List<UiFilterEntity> filterEntities = entityToConvert.getFilters(); + if(filterEntities != null) { + for(UiFilterEntity entity : filterEntities) { + filterBuilder.add(entity.getFilterId(), generateFilterObject(entity)); + } + } + } + + JsonObjectBuilder finalObject = Json.createObjectBuilder(); + finalObject.add(KEY_FILTERS, filterBuilder.build()); + return finalObject.build(); + } + + /** + * Generates the core body of the a single filter within the JSON body. + * + * @param entity - The filter entity (loaded from config and populated from data store). + * @return A JsonObject representing the core data of a filter. + */ + private static JsonObject generateFilterObject(UiFilterEntity entity) { + JsonObjectBuilder filterBuilder = Json.createObjectBuilder(); + + filterBuilder.add(KEY_LABEL, entity.getDisplayName()); + filterBuilder.add(KEY_CONTROLS, generateControlObject(entity, entity.getFilterValueList())); + + return filterBuilder.build(); + } + + /** + * Generates the "controls" object within the filter JSON. + * + * @param filterEntity - The filter entity on which this filter will be based. + * @param filterValues - The list of values associated with the filter + * from data store queries. + * @return A JsonObject representing the "controls" object of the filter JSON. 
+ */ + private static JsonObject generateControlObject(UiFilterEntity filterEntity, List<UiFilterValueEntity> filterValues) { + JsonObjectBuilder controls = Json.createObjectBuilder(); + JsonObjectBuilder subControl = Json.createObjectBuilder(); + + subControl.add(KEY_TYPE, filterEntity.getDataType()); + subControl.add(KEY_MULTISELECT, filterEntity.getMultiSelect()); + subControl.add(KEY_WATERMARK, filterEntity.getWatermark()); + + if(filterEntity.getDefaultValue() != null && !filterEntity.getDefaultValue().getCode().isEmpty() && !filterEntity.getDefaultValue().getDecode().isEmpty()) { + JsonObjectBuilder defaultValueBuilder = Json.createObjectBuilder(); + + defaultValueBuilder.add(KEY_DECODE, filterEntity.getDefaultValue().getDecode()); + defaultValueBuilder.add(KEY_CODE, filterEntity.getDefaultValue().getCode()); + + subControl.add(KEY_DEFAULT_VALUE, defaultValueBuilder.build()); + } + + if(filterEntity.getOptionsValues() == null || filterEntity.getOptionsValues().isEmpty()) { + subControl.add(filterEntity.getOptionsType(), generateOptionsObject(filterValues)); + } else { + subControl.add(filterEntity.getOptionsType(), generateOptionsValuesObject(filterEntity.getOptionsValues())); + } + + controls.add(filterEntity.getFilterName(), subControl.build()); + + return controls.build(); + } + + /** + * Creates a JsonArray representing the list of options for a filter. + * Similar to function generateOptionsObject, except using different arguments. + * + * @param optionsValues - Values that are loaded from config. + * @return JsonArray of options for a filter. + */ + private static JsonArray generateOptionsValuesObject(List<UiFilterOptionsValuesConfig> optionsValues) { + JsonArrayBuilder optionsBuilder = Json.createArrayBuilder(); + + if(optionsValues != null && !optionsValues.isEmpty()) { + for(UiFilterOptionsValuesConfig optionValue : optionsValues) { + JsonObjectBuilder option = Json.createObjectBuilder(); + + option.add(KEY_DECODE, optionValue.getDecode()); + option.add(KEY_CODE, optionValue.getCode()); + + optionsBuilder.add(option.build()); + } + } + + return optionsBuilder.build(); + } + + /** + * Creates a JsonArray representing the list of options for a filter. + * + * @param filterValues - The list of values associates with a filter (likely from a data store query). + * @return JsonArray of options for a filter. 
+ */ + private static JsonArray generateOptionsObject(List<UiFilterValueEntity> filterValues) { + JsonArrayBuilder optionsBuilder = Json.createArrayBuilder(); + + if(filterValues != null && !filterValues.isEmpty()) { + for(UiFilterValueEntity valueEntity : filterValues) { + JsonObjectBuilder option = Json.createObjectBuilder(); + + option.add(KEY_DECODE, valueEntity.getDisplayName()); + option.add(KEY_CODE, valueEntity.getFilterValue()); + + optionsBuilder.add(option.build()); + } + } + + return optionsBuilder.build(); + } + + public static JsonObject generateEmptyResponse() { + JsonObjectBuilder filterBuilder = Json.createObjectBuilder(); + JsonObjectBuilder finalObject = Json.createObjectBuilder(); + finalObject.add(KEY_FILTERS, filterBuilder.build()); + return finalObject.build(); + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersConfig.java new file mode 100644 index 0000000..3aded85 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersConfig.java @@ -0,0 +1,158 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search.filters.config; + +import java.io.File; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.config.SparkyResourceLoader; +import org.onap.aai.sparky.logging.AaiUiMsgs; + +import com.fasterxml.jackson.databind.ObjectMapper; + +public class FiltersConfig { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(FiltersConfig.class); + + private String filtersFileName; + + private String viewsFileName; + + private FiltersForViewsConfig viewsConfig; + + private FiltersDetailsConfig filtersConfig; + + private SparkyResourceLoader resourceLoader; + + public FiltersConfig() { + //exposed for testing + } + + public FiltersConfig(String filtersFileName, String viewsFileName, SparkyResourceLoader resourceLoader) { + this.filtersFileName = filtersFileName; + this.viewsFileName = viewsFileName; + this.resourceLoader = resourceLoader; + + initializeFilters(); + } + + /** + * Initialize config. 
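+ * Loads the view-to-filter mappings and the filter definitions from their respective JSON
+ * configuration files via the configured {@link SparkyResourceLoader}.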
+ */ + private void initializeFilters() { + viewsConfig = this.readUiViewsConfig(); + filtersConfig = this.readUiFiltersConfig(); + } + + public String getViewsFileName() { + return viewsFileName; + } + + public void setViewsFileName(String viewsFileName) { + this.viewsFileName = viewsFileName; + } + + public String getFiltersFileName() { + return filtersFileName; + } + + public void setFiltersFileName(String filtersFileName) { + this.filtersFileName = filtersFileName; + } + + public FiltersForViewsConfig getViewsConfig() { + return viewsConfig; + } + + public void setViewsConfig(FiltersForViewsConfig filtersMapEntity) { + this.viewsConfig = filtersMapEntity; + } + + public FiltersDetailsConfig getFiltersConfig() { + return filtersConfig; + } + + public UiFilterConfig getFilterById(String filterId) { + for ( UiFilterConfig filter : filtersConfig.getFilters()) { + if ( filter.getFilterId().equals(filterId)) { + return filter; + } + } + + return null; + } + + public void setFiltersConfig(FiltersDetailsConfig filtersConfig) { + this.filtersConfig = filtersConfig; + } + + public FiltersDetailsConfig readUiFiltersConfig() { + ObjectMapper mapper = new ObjectMapper(); + FiltersDetailsConfig filtersConfig = null; + try{ + filtersConfig = mapper.readValue(resourceLoader.getResourceAsFile(this.getFiltersFileName(),true), FiltersDetailsConfig.class); + } catch (Exception e){ + LOG.error(AaiUiMsgs.ERROR_READING_JSON_SCHEMA, this.getFiltersFileName()); + } + + return filtersConfig; + } + + public FiltersForViewsConfig readUiViewsConfig() { + ObjectMapper mapper = new ObjectMapper(); + FiltersForViewsConfig viewsConfig = null; + + try { + viewsConfig = mapper.readValue(resourceLoader.getResourceAsFile(this.getViewsFileName(),true), FiltersForViewsConfig.class); + } catch (Exception e){ + LOG.error(AaiUiMsgs.ERROR_READING_JSON_SCHEMA, this.getViewsFileName()); + } + + return viewsConfig; + } + + public void initializeFiltersDetailsConfig(File filtersFile) { + ObjectMapper mapper = new ObjectMapper(); + try{ + this.filtersConfig = mapper.readValue(filtersFile, FiltersDetailsConfig.class); + } catch (Exception e){ + LOG.error(AaiUiMsgs.ERROR_READING_JSON_SCHEMA, filtersFile.getAbsolutePath()); + } + } + + public void initializeFiltersForViewsConfig(File viewsFile) { + ObjectMapper mapper = new ObjectMapper(); + + try { + this.viewsConfig = mapper.readValue(viewsFile, FiltersForViewsConfig.class); + } catch (Exception e){ + LOG.error(AaiUiMsgs.ERROR_READING_JSON_SCHEMA, viewsFile.getAbsolutePath()); + } + + } + +} + diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersDetailsConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersDetailsConfig.java new file mode 100644 index 0000000..ab5e1d4 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersDetailsConfig.java @@ -0,0 +1,58 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search.filters.config; + +import java.util.ArrayList; +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; + + +public class FiltersDetailsConfig { + @JsonProperty("filters") + private List<UiFilterConfig> filters = new ArrayList<UiFilterConfig>(); + + public FiltersDetailsConfig(){} + + @JsonCreator + public FiltersDetailsConfig(@JsonProperty("filters") final List<UiFilterConfig> filters) { + this.filters = filters; + } + + public List<UiFilterConfig> getFilters() { + return filters; + } + + public void setFilters(List<UiFilterConfig> filters) { + this.filters = filters; + } + + @Override + public String toString() { + return "UiFiltersConfig [filters=" + filters + "]"; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersForViewsConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersForViewsConfig.java new file mode 100644 index 0000000..1d0adfe --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/FiltersForViewsConfig.java @@ -0,0 +1,57 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.search.filters.config; + +import java.util.ArrayList; +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; + +public class FiltersForViewsConfig { + + private List<UiViewListItemConfig> views = new ArrayList<UiViewListItemConfig>(); + + public FiltersForViewsConfig(){} + + @JsonCreator + public FiltersForViewsConfig(@JsonProperty("views") final List<UiViewListItemConfig> views) { + this.views = views; + } + + @JsonProperty("views") + public List<UiViewListItemConfig> getViews() { + return views; + } + + public void setViews(List<UiViewListItemConfig> views) { + this.views = views; + } + + @Override + public String toString() { + return "UiViewToFilterMappingEntity [allUiViews=" + views + "]"; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterConfig.java new file mode 100644 index 0000000..d3ef9ba --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterConfig.java @@ -0,0 +1,188 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.search.filters.config; + +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonProperty; + +@JsonInclude(Include.NON_NULL) +public class UiFilterConfig { + + @JsonProperty("filterId") + private String filterId; + + @JsonProperty("filterName") + private String filterName; + + @JsonProperty("displayName") + private String displayName; + + @JsonProperty("dataType") + private String dataType; + + @JsonProperty("multiSelect") + private String multiSelect; + + @JsonProperty("watermark") + private String watermark; + + @JsonProperty("defaultValue") + private UiFilterOptionsValuesConfig defaultValue; + + @JsonProperty("optionsType") + private String optionsType; + + @JsonProperty("optionsValues") + private List<UiFilterOptionsValuesConfig> optionsValues; + + @JsonProperty("dataSource") + private UiFilterDataSourceConfig dataSource = new UiFilterDataSourceConfig(); + + @JsonCreator + public UiFilterConfig(@JsonProperty("filterId") final String filterId, + @JsonProperty("filterName") final String filterName, + @JsonProperty("displayName") final String displayName, + @JsonProperty("dataType") final String dataType, + @JsonProperty("multiSelect") final String multiSelect, + @JsonProperty("watermark") final String watermark, + @JsonProperty("defaultValue") final UiFilterOptionsValuesConfig defaultValue, + @JsonProperty("optionsType") final String optionsType, + @JsonProperty("optionsValues") final List<UiFilterOptionsValuesConfig> optionsValues, + @JsonProperty("dataSource") final UiFilterDataSourceConfig dataSource + ) { + this.filterId = filterId; + this.filterName = filterName; + this.displayName = displayName; + this.dataType = dataType; + this.multiSelect = multiSelect; + this.watermark = watermark; + this.defaultValue = defaultValue; + this.optionsType = optionsType; + this.optionsValues = optionsValues; + this.dataSource = dataSource; + } + + @JsonProperty("filterId") + public String getFilterId() { + return filterId; + } + + public void setFilterId(String filterId) { + this.filterId = filterId; + } + + @JsonProperty("filterName") + public String getFilterName() { + return filterName; + } + + public void setFilterName(String filterName) { + this.filterName = filterName; + } + + @JsonProperty("displayName") + public String getDisplayName() { + return displayName; + } + + public void setDisplayName(String displayName) { + this.displayName = displayName; + } + + @JsonProperty("dataType") + public String getDataType() { + return dataType; + } + + public void setDataType(String dataType) { + this.dataType = dataType; + } + + @JsonProperty("multiSelect") + public String getMultiSelect() { + return multiSelect; + } + + public void setMultiSelect(String multiSelect) { + this.multiSelect = multiSelect; + } + + @JsonProperty("watermark") + public String getWatermark() { + return watermark; + } + + public void setWatermark(String watermark) { + this.watermark = watermark; + } + + @JsonProperty("defaultValue") + public UiFilterOptionsValuesConfig getDefaultValue() { + return defaultValue; + } + + public void setDefaultValue(UiFilterOptionsValuesConfig defaultValue) { + this.defaultValue = defaultValue; + } + + @JsonProperty("optionsType") + public String getOptionsType() { + return optionsType; + } + + public void setOptionsType(String optionsType) { + this.optionsType = optionsType; + } + 
@JsonProperty("optionsValues") + public List<UiFilterOptionsValuesConfig> getOptionsValues() { + return optionsValues; + } + + public void setOptionsValues(List<UiFilterOptionsValuesConfig> optionsValues) { + this.optionsValues = optionsValues; + } + + @JsonProperty("dataSource") + public UiFilterDataSourceConfig getDataSource() { + return dataSource; + } + + public void setDataSource(UiFilterDataSourceConfig dataSource) { + this.dataSource = dataSource; + } + + @Override + public String toString() { + return "UiFilterConfig [filterId=" + filterId + ", filterName=" + filterName + ", displayName=" + + displayName + ", dataType=" + dataType + ", multiSelect=" + multiSelect + ", watermark=" + + watermark + ", optionsType=" + optionsType + ", optionsValues=" + optionsValues + + ", dataSource=" + dataSource + "]"; + } +} + diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterDataSourceConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterDataSourceConfig.java new file mode 100644 index 0000000..4f9e2da --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterDataSourceConfig.java @@ -0,0 +1,99 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.search.filters.config; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonProperty; + +@JsonInclude(Include.NON_NULL) +public class UiFilterDataSourceConfig { + + @JsonProperty("indexName") + private String indexName; + + @JsonProperty("docType") + private String docType; + + @JsonProperty("fieldName") + private String fieldName; + + @JsonProperty("pathToField") + private String pathToField; + + public UiFilterDataSourceConfig(){} + + @JsonCreator + public UiFilterDataSourceConfig(@JsonProperty("indexName") final String indexName, @JsonProperty("docType") final String docType, @JsonProperty("fieldName") final String fieldName, @JsonProperty("pathToField") final String pathToField) { + this.indexName = indexName; + this.docType = docType; + this.fieldName = fieldName; + this.pathToField = pathToField; + } + + @JsonProperty("indexName") + public String getIndexName() { + return indexName; + } + + public void setIndexName(String indexName) { + this.indexName = indexName; + } + + @JsonProperty("docType") + public String getDocType() { + return docType; + } + + public void setDocType(String docType) { + this.docType = docType; + } + + @JsonProperty("fieldName") + public String getFieldName() { + return fieldName; + } + + public void setFieldName(String fieldName) { + this.fieldName = fieldName; + } + + @JsonProperty("pathToField") + public String getPathToField() { + return pathToField; + } + + public void setPathToField(String pathToField) { + this.pathToField = pathToField; + } + + @Override + public String toString() { + return "UiFilterDataSourceConfig [indexName=" + indexName + ", docType=" + docType + + ", fieldName=" + fieldName + ", pathToField=" + pathToField + "]"; + } +} + diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterListItemConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterListItemConfig.java new file mode 100644 index 0000000..2f955fc --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterListItemConfig.java @@ -0,0 +1,70 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.search.filters.config; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonProperty; + +@JsonInclude(Include.NON_NULL) +public class UiFilterListItemConfig { + @JsonProperty("filterId") + private String filterId; + + @JsonProperty("defaultValue") + private UiFilterOptionsValuesConfig defaultValue; + + @JsonCreator + public UiFilterListItemConfig(@JsonProperty("filterId") final String filterId, + @JsonProperty("defaultValue") final UiFilterOptionsValuesConfig defaultValue) { + this.filterId = filterId; + this.defaultValue = defaultValue; + } + + @JsonProperty("filterId") + public String getFilterId() { + return filterId; + } + + public void setFilterId(String filterId) { + this.filterId = filterId; + } + + @JsonProperty("defaultValue") + public UiFilterOptionsValuesConfig getDefaultValue() { + return defaultValue; + } + + public void setDefaultValue(UiFilterOptionsValuesConfig defaultValue) { + this.defaultValue = defaultValue; + } + + @Override + public String toString() { + return "FilterListItemEntity [filterId=" + filterId + ", defaultValue=" + defaultValue + "]"; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterOptionsValuesConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterOptionsValuesConfig.java new file mode 100644 index 0000000..cf45d21 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiFilterOptionsValuesConfig.java @@ -0,0 +1,68 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.search.filters.config; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonProperty; + +@JsonInclude(Include.NON_NULL) +public class UiFilterOptionsValuesConfig { + @JsonProperty("decode") + private String decode; + + @JsonProperty("code") + private String code; + + @JsonCreator + public UiFilterOptionsValuesConfig(@JsonProperty("decode") final String decode, @JsonProperty("code") final String code) { + this.decode = decode; + this.code = code; + } + + @JsonProperty("decode") + public String getDecode() { + return decode; + } + + public void setDecode(String decode) { + this.decode = decode; + } + + @JsonProperty("code") + public String getCode() { + return code; + } + + public void setCode(String code) { + this.code = code; + } + + @Override + public String toString() { + return "UiFilterOptionsValuesConfig [decode=" + decode + ", code=" + code + "]"; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiViewListItemConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiViewListItemConfig.java new file mode 100644 index 0000000..1d70314 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/config/UiViewListItemConfig.java @@ -0,0 +1,68 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.search.filters.config; + +import java.util.ArrayList; +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; + +public class UiViewListItemConfig { + @JsonProperty("viewName") + private String viewName; + + private List<UiFilterListItemConfig> filters = new ArrayList<UiFilterListItemConfig>(); + + @JsonCreator + public UiViewListItemConfig(@JsonProperty("viewName") final String viewName, @JsonProperty("filters") final List<UiFilterListItemConfig> filters) { + this.viewName = viewName; + this.filters = filters; + } + + @JsonProperty("viewName") + public String getViewName() { + return viewName; + } + + public void setViewName(String viewName) { + this.viewName = viewName; + } + + @JsonProperty("filters") + public List<UiFilterListItemConfig> getFilters() { + return filters; + } + + public void setListOfFilters(List<UiFilterListItemConfig> filters) { + this.filters = filters; + } + + @Override + public String toString() { + return "UiViewEntity [viewName=" + viewName + ", filters=" + filters + "]"; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/AggregationEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/AggregationEntity.java new file mode 100644 index 0000000..3fc61ac --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/AggregationEntity.java @@ -0,0 +1,80 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.search.filters.entity; + +import javax.json.Json; +import javax.json.JsonBuilderFactory; +import javax.json.JsonObject; + +public class AggregationEntity { + + private String aggregationName; + private String aggregationFieldName; + private int size; + + public AggregationEntity(String aggName, String fieldName, int size) { + + this.aggregationName = aggName; + this.aggregationFieldName = fieldName; + this.size = size; + } + + public String getAggregationName() { + return aggregationName; + } + + public void setAggregationName(String aggregationName) { + this.aggregationName = aggregationName; + } + + public String getAggregationFieldName() { + return aggregationFieldName; + } + + public void setAggregationFieldName(String aggregationFieldName) { + this.aggregationFieldName = aggregationFieldName; + } + + public int getSize() { + return size; + } + + public void setSize(int size) { + this.size = size; + } + + public JsonObject getJsonObject() { + + JsonBuilderFactory factory = Json.createBuilderFactory(null); + + return factory.createObjectBuilder() + .add("terms", factory.createObjectBuilder() + .add("field", aggregationFieldName) + .add("size", size)) + .build(); + } + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/BoolQueryBuilder.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/BoolQueryBuilder.java new file mode 100644 index 0000000..750270a --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/BoolQueryBuilder.java @@ -0,0 +1,123 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.search.filters.entity; + +import java.util.ArrayList; +import java.util.List; + +import javax.json.Json; +import javax.json.JsonArray; +import javax.json.JsonArrayBuilder; +import javax.json.JsonBuilderFactory; +import javax.json.JsonObject; +import javax.json.JsonObjectBuilder; + +public class BoolQueryBuilder { + + private List<MatchFilterCriteriaEntity> mustFilters; + private List<MatchFilterCriteriaEntity> shouldFilters; + + private int minShouldMatch; + + public BoolQueryBuilder() { + + mustFilters = new ArrayList<MatchFilterCriteriaEntity>(); + shouldFilters = new ArrayList<MatchFilterCriteriaEntity>(); + minShouldMatch = -1; + + } + + public void addMustFilter(MatchFilterCriteriaEntity filter) { + + if (!mustFilters.contains(filter)) { + mustFilters.add(filter); + } + + } + + public void addShouldFilter(MatchFilterCriteriaEntity filter) { + + if (!shouldFilters.contains(filter)) { + shouldFilters.add(filter); + } + + } + + public void setMinShouldMatch(int minShouldMatch) { + this.minShouldMatch = minShouldMatch; + } + + public boolean isMatchAll() { + return (mustFilters.isEmpty() && shouldFilters.isEmpty()); + } + + public JsonObject getJsonObject() { + /* + * Specify a null config for now, but if we want normalize all the builders, we can do it at one + * location, when we are ready. + */ + JsonBuilderFactory factory = Json.createBuilderFactory(null); + + JsonObjectBuilder boolBuilder = factory.createObjectBuilder(); + + if(!mustFilters.isEmpty()){ + JsonArrayBuilder mustArrayBuilder = factory.createArrayBuilder(); + + for (MatchFilterCriteriaEntity matchCriteria : mustFilters) { + mustArrayBuilder.add(matchCriteria.getJsonObject()); + } + + JsonArray mustArray = mustArrayBuilder.build(); + boolBuilder.add("must", mustArray); + } + + if (!shouldFilters.isEmpty()) { + JsonArray shouldArray = null; + JsonArrayBuilder shouldArrayBuilder = factory.createArrayBuilder(); + + for (MatchFilterCriteriaEntity matchCriteria : shouldFilters) { + shouldArrayBuilder.add(matchCriteria.getJsonObject()); + } + + shouldArray = shouldArrayBuilder.build(); + boolBuilder.add("should", shouldArray).add("min_should_match", minShouldMatch); + } + + JsonObjectBuilder queryObjectBuilder = factory.createObjectBuilder(); + + /* + * If both filter lists are empty then we are doing an aggregation + * based off fields. Just match-all for the query. + */ + if(isMatchAll()) { + JsonObject matchAllObject = factory.createObjectBuilder().build(); + queryObjectBuilder.add("match_all", matchAllObject); + } else { + queryObjectBuilder.add("bool", boolBuilder.build()); + } + + return queryObjectBuilder.build(); + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/FilteredAggregationQueryBuilder.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/FilteredAggregationQueryBuilder.java new file mode 100644 index 0000000..fa9226e --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/FilteredAggregationQueryBuilder.java @@ -0,0 +1,65 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search.filters.entity; + +import java.util.ArrayList; +import java.util.List; + +import javax.json.Json; +import javax.json.JsonBuilderFactory; +import javax.json.JsonObject; +import javax.json.JsonObjectBuilder; + +public class FilteredAggregationQueryBuilder { + + private List<AggregationEntity> aggregationEntities; + + public FilteredAggregationQueryBuilder() { + aggregationEntities = new ArrayList<AggregationEntity>(); + } + + public void addAggregationEntity(AggregationEntity aggregationEntity) { + if (!aggregationEntities.contains(aggregationEntity)) { + aggregationEntities.add(aggregationEntity); + } + } + + public JsonObject getJsonObject() { + + /* + * Specify a null config for now, but if we want normalize all the builders, we can do it at one + * location, when we are ready. + */ + JsonBuilderFactory factory = Json.createBuilderFactory(null); + + JsonObjectBuilder aggsArrayBuilder = factory.createObjectBuilder(); + + for (AggregationEntity aggEntity : aggregationEntities) { + aggsArrayBuilder.add(aggEntity.getAggregationName(), aggEntity.getJsonObject()); + } + + return aggsArrayBuilder.build(); + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/MatchFilterCriteriaEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/MatchFilterCriteriaEntity.java new file mode 100644 index 0000000..68c058b --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/MatchFilterCriteriaEntity.java @@ -0,0 +1,77 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.search.filters.entity; + +import javax.json.Json; +import javax.json.JsonBuilderFactory; +import javax.json.JsonObject; + +public class MatchFilterCriteriaEntity { + + private String criteriaName; + private String criteriaValue; + + public MatchFilterCriteriaEntity(String criteriaName, String criteriaValue) { + super(); + this.criteriaName = criteriaName; + this.criteriaValue = criteriaValue; + } + + public String getCriteriaName() { + return criteriaName; + } + + public void setCriteriaName(String criteriaName) { + this.criteriaName = criteriaName; + } + + public String getCriteriaValue() { + return criteriaValue; + } + + public void setCriteriaValue(String criteriaValue) { + this.criteriaValue = criteriaValue; + } + + public JsonObject getJsonObject() { + + /* + * Specify a null config for now, but if we want normalize all the builders, we can do it at one + * location, when we are ready. + */ + JsonBuilderFactory factory = Json.createBuilderFactory(null); + + return factory.createObjectBuilder() + .add("match", factory.createObjectBuilder().add(criteriaName, criteriaValue)).build(); + + } + + @Override + public String toString() { + return "MatchFilterCriteriaEntity [criteriaName=" + criteriaName + ", criteriaValue=" + + criteriaValue + "]"; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/SearchFilter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/SearchFilter.java new file mode 100644 index 0000000..2a6f05d --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/SearchFilter.java @@ -0,0 +1,88 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search.filters.entity; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +/** + * A base entity to contain the details of the filter id and values from the FE to the BE for the + * purpose of driving DAL calls into ElasticSearch, Search Abstraction Service, or as a utility + * object within the query builders. + * + * The class has unique identifier for the filter id, and then 1 or more filter values. The value + * list has been introduced to help us with a multi-select use case that will need to be supported + * eventually. 
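+ *
+ * Illustrative usage only (the filter id and values below are placeholders, not taken
+ * from any shipped filter configuration):
+ *
+ * <pre>
+ *   SearchFilter filter = new SearchFilter("severityFilterId", "CRITICAL", "MAJOR");
+ *   filter.addValue("MINOR");   // addValue(..) silently ignores duplicate values
+ * </pre>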
+ */ +public class SearchFilter { + + private String filterId; + private List<String> values; + + public SearchFilter() { + values = new ArrayList<String>(); + } + + public SearchFilter(String filterId) { + this(); + this.filterId = filterId; + } + + public SearchFilter(String filterId, String... values) { + this(); + this.filterId = filterId; + this.values.addAll(Arrays.asList(values)); + } + + public String getFilterId() { + return filterId; + } + + public void setFilterId(String filterId) { + this.filterId = filterId; + } + + public List<String> getValues() { + return values; + } + + public void setValues(List<String> values) { + this.values = values; + } + + public void addValue(String v) { + if (!values.contains(v)) { + values.add(v); + } + + } + + @Override + public String toString() { + return "SearchFilter [filterId=" + filterId + ", values=" + values + "]"; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFilterEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFilterEntity.java new file mode 100644 index 0000000..f90403c --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFilterEntity.java @@ -0,0 +1,180 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search.filters.entity; + +import java.util.ArrayList; +import java.util.List; + +import org.onap.aai.sparky.search.filters.config.UiFilterConfig; +import org.onap.aai.sparky.search.filters.config.UiFilterOptionsValuesConfig; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; + +/** + * Stores data for a single filter for a given UI view. + * <p> + * When a UI view wants to know which filters it should display, an object of this class is created for each + * filter discovered and stores data for that filter. Each filter/object of this class is added to a + * ViewFilterList object which is then serialized to JSON and returned to the view in the response body. 
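+ *
+ * A minimal construction sketch (assumes a UiFilterConfig instance has already been loaded
+ * from the filter configuration; the code and display strings are placeholders):
+ *
+ * <pre>
+ *   UiFilterEntity entity = new UiFilterEntity(filterConfig);
+ *   entity.addFilterValue(
+ *       new UiFilterValueEntity(entity.getFilterId(), "someCode", "Some Display Name"));
+ * </pre>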
+ */ +@JsonInclude(Include.NON_NULL) +public class UiFilterEntity { + private String filterId; + private String filterName; + private String displayName; + private String dataType; + + private String multiSelect; + private String watermark; + private UiFilterOptionsValuesConfig defaultValue; + private String optionsType; + + private List<UiFilterOptionsValuesConfig> optionsValues; + + private List<UiFilterValueEntity> filterValueList; + + public UiFilterEntity() {} + + public UiFilterEntity(UiFilterConfig filterConfig) { + if (filterConfig.getFilterId() != null) { + this.setFilterId(filterConfig.getFilterId()); + } + if (filterConfig.getFilterName() != null) { + this.setFilterName(filterConfig.getFilterName()); + } + if (filterConfig.getDisplayName() != null) { + this.setDisplayName(filterConfig.getDisplayName()); + } + if (filterConfig.getDataType() != null) { + this.setDataType(filterConfig.getDataType()); + } + if (filterConfig.getMultiSelect() != null) { + this.setMultiSelect(filterConfig.getMultiSelect()); + } + if (filterConfig.getWatermark() != null) { + this.setWatermark(filterConfig.getWatermark()); + } + if (filterConfig.getDefaultValue() != null) { + this.setDefaultValue(filterConfig.getDefaultValue()); + } + if (filterConfig.getOptionsType() != null) { + this.setOptionsType(filterConfig.getOptionsType()); + } + if(filterConfig.getOptionsValues() != null && !filterConfig.getOptionsValues().isEmpty()) { + this.setOptionsValues(filterConfig.getOptionsValues()); + } else { + this.optionsValues = new ArrayList<UiFilterOptionsValuesConfig>(); + } + } + + public void addFilterValue(UiFilterValueEntity valueEntity) { + if (null == filterValueList) { + filterValueList = new ArrayList<>(); + } + + this.filterValueList.add(valueEntity); + } + + public String getFilterId() { + return filterId; + } + + public String getFilterName() { + return filterName; + } + + public String getDisplayName() { + return displayName; + } + + public String getDataType() { + return dataType; + } + + public UiFilterOptionsValuesConfig getDefaultValue() { + return defaultValue; + } + + public List<UiFilterValueEntity> getFilterValueList() { + return filterValueList; + } + + public void setFilterId(String filterId) { + this.filterId = filterId; + } + + public void setFilterName(String filterName) { + this.filterName = filterName; + } + + public void setDisplayName(String displayName) { + this.displayName = displayName; + } + + public void setDataType(String dataType) { + this.dataType = dataType; + } + + public String getMultiSelect() { + return multiSelect; + } + + public void setMultiSelect(String multiSelect) { + this.multiSelect = multiSelect; + } + + public String getWatermark() { + return watermark; + } + + public void setWatermark(String watermark) { + this.watermark = watermark; + } + + public String getOptionsType() { + return optionsType; + } + + public void setOptionsType(String optionsType) { + this.optionsType = optionsType; + } + + public List<UiFilterOptionsValuesConfig> getOptionsValues() { + return optionsValues; + } + + public void setOptionsValues(List<UiFilterOptionsValuesConfig> optionsValues) { + this.optionsValues = optionsValues; + } + + public void setDefaultValue(UiFilterOptionsValuesConfig defaultValue) { + this.defaultValue = defaultValue; + } + + public void setFilterValueList(List<UiFilterValueEntity> values) { + this.filterValueList = values; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFilterValueEntity.java 
b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFilterValueEntity.java new file mode 100644 index 0000000..73b105b --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFilterValueEntity.java @@ -0,0 +1,80 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search.filters.entity; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; + +/** + * This class represents a single item or value to populate the FE filter component with. + * A drop-down list, for example, may be populated with the values from several instances of this class. + */ +@JsonInclude(Include.NON_NULL) +public class UiFilterValueEntity { + private String filterId; + private String filterValue; + private String displayName; // The string that will be rendered in the view + + public UiFilterValueEntity() {} + + public UiFilterValueEntity(String filterId, String filterValue, String displayName) { + this.filterId = filterId; + this.filterValue = filterValue; + this.displayName = displayName; + } + + public String getFilterId() { + return filterId; + } + + public String getFilterValue() { + return filterValue; + } + + public String getDisplayName() { + return displayName; + } + + public void setFilterId(String filterId) { + this.filterId = filterId; + } + + public void setFilterValue(String filterValue) { + this.filterValue = filterValue; + } + + public void setDisplayName(String displayName) { + this.displayName = displayName; + } + + @Override + public String toString() { + return "UiFilterValueEntity [" + (filterId != null ? "filterId=" + filterId + ", " : "") + + (filterValue != null ? "filterValue=" + filterValue + ", " : "") + + (displayName != null ? 
"displayName=" + displayName : "") + "]"; + } + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFiltersEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFiltersEntity.java new file mode 100644 index 0000000..7780418 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/filters/entity/UiFiltersEntity.java @@ -0,0 +1,53 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search.filters.entity; + +import java.util.ArrayList; +import java.util.List; + +/** + * Represents a list of filters that a given UI view should display. + * <p> + * When a UI view wants to know which filters it should display, an object of this class is created to keep + * track of all the filters that are discovered for that view and is then serialized to JSON and returned to + * the view in the response body. + */ +public class UiFiltersEntity { + private List<UiFilterEntity> filters = new ArrayList<>(); + + + + public void addFilter(UiFilterEntity viewFilter) { + filters.add(viewFilter); + } + + public List<UiFilterEntity> getFilters() { + return filters; + } + + public void setFilters(List<UiFilterEntity> filters) { + this.filters = filters; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/registry/SearchProviderRegistry.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/registry/SearchProviderRegistry.java new file mode 100644 index 0000000..4e785ce --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/search/registry/SearchProviderRegistry.java @@ -0,0 +1,76 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.search.registry; + +import java.util.ArrayList; +import java.util.List; + +import org.onap.aai.sparky.search.api.SearchProvider; + +/** + * Make this a java-scoped singleton to resolve the contextual issue spanning a Spring Context and + * accessing the SPR in other parts of the code that are not directly instantiated by a Spring Bean. + * Eventually the SPR doesn’t have to be a real singleton, it could simply be a Spring bean scoped + * as a singleton and then wired in via dependency injection to the classes that need it. But I’m + * not there yet. This will get a demonstrable extension mechanism in place quickly at practically + * no cost, beyond what’s already in the email plus some testing. + */ + +public class SearchProviderRegistry { + + private List<SearchProvider> searchProviders; + + public SearchProviderRegistry() { + searchProviders = new ArrayList<SearchProvider>(); + } + + public List<SearchProvider> getSearchProviders() { + return searchProviders; + } + + public final void addSearchProvider(SearchProvider searchProvider) { + + if (searchProvider == null) { + return; + } + + if (!searchProviders.contains(searchProvider)) { + searchProviders.add(searchProvider); + } + } + + public final void addSearchProviders(List<SearchProvider> searchProviders) { + + if (searchProviders == null) { + return; + } + + for (SearchProvider searchProvider : searchProviders) { + addSearchProvider(searchProvider); + } + + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/BaseCookieDecryptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/BaseCookieDecryptor.java new file mode 100644 index 0000000..9dfb1bf --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/BaseCookieDecryptor.java @@ -0,0 +1,51 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.security; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.openecomp.portalsdk.core.onboarding.util.CipherUtil; + +public class BaseCookieDecryptor implements CookieDecryptor { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(BaseCookieDecryptor.class); + + + public BaseCookieDecryptor(){} + + public String decryptCookie(String encryptedCookie){ + + String decryptedCookie = ""; + try { + decryptedCookie = CipherUtil.decrypt(encryptedCookie, ""); + } catch (Exception e) { + LOG.error(AaiUiMsgs.LOGIN_FILTER_INFO, "decrypting base cookie failed " + e.getLocalizedMessage()); + } + return decryptedCookie; + + } + +}
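+
+// Illustrative usage sketch only (not part of the class above): a caller holding an
+// encrypted cookie value, here the placeholder encryptedCookieValue, would typically do:
+//
+//   CookieDecryptor decryptor = new BaseCookieDecryptor();
+//   String clearText = decryptor.decryptCookie(encryptedCookieValue);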
\ No newline at end of file diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/CookieDecryptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/CookieDecryptor.java new file mode 100644 index 0000000..6e79b60 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/CookieDecryptor.java @@ -0,0 +1,31 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.security; + +public interface CookieDecryptor { + + String decryptCookie(String encryptedCookie); + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/EcompSso.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/EcompSso.java new file mode 100644 index 0000000..3348b1f --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/EcompSso.java @@ -0,0 +1,155 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.security; + +import javax.servlet.http.Cookie; +import javax.servlet.http.HttpServletRequest; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.security.portal.config.PortalAuthenticationConfig; +import org.openecomp.portalsdk.core.onboarding.util.CipherUtil; +import org.openecomp.portalsdk.core.onboarding.util.PortalApiProperties; + +/** + * Provides authentication services for onboarded ECOMP applications. + */ +public class EcompSso { + + public static final String EP_SERVICE = "EPService"; + public static final String CSP_COOKIE_NAME = "csp_cookie_name"; + public static final String CSP_GATE_KEEPER_PROD_KEY = "csp_gate_keeper_prod_key"; + public static final String ONAP_ENABLED = "ONAP_ENABLED"; + private static final Logger LOG = LoggerFactory.getInstance().getLogger(EcompSso.class); + + /** + * Searches the request for a cookie with the specified name. + * + * @param request + * @param cookieName + * @return Cookie, or null if not found. + */ + public static Cookie getCookie(HttpServletRequest request, String cookieName) { + Cookie[] cookies = request.getCookies(); + if (cookies != null) + for (Cookie cookie : cookies) { + if (cookie.getName().equals(cookieName)) { + return cookie; + } + } + + return null; + } + + /** + * Answers whether the ECOMP Portal service cookie is present in the specified request. + * + * @param request + * @return true if the cookie is found, else false. + */ + private static boolean isEPServiceCookiePresent(HttpServletRequest request) { + Cookie ep = getCookie(request, EP_SERVICE); + return (ep != null); + } + + /** + * Validates whether the ECOMP Portal sign-on process has completed, which relies the AT&T Global + * Log On single-sign on process. Checks for the ECOMP cookie (see {@link #EP_SERVICE}). If found, + * then searches for a CSP cookie; if not found, for a WebJunction header. + * + * @param request + * @return ATT UID if the ECOMP cookie is present and the sign-on process established an ATT UID; + * else null. + */ + public static String validateEcompSso(HttpServletRequest request) { + boolean isOnapEnabled = PortalAuthenticationConfig.getInstance().getIsOnapEnabled(); + if (isOnapEnabled) { + if (isEPServiceCookiePresent(request)) { + /* + * This is a "temporary" fix until proper separation between closed source and open source + * code is reached + */ + return ONAP_ENABLED; + } + return null; + } else { + return getLoginIdFromCookie(request); + } + } + + /** + * Searches the specified request for the CSP cookie, decodes it and gets the ATT UID. + * + * @param request + * @return ATTUID if the cookie is present in the request and can be decoded successfully (expired + * cookies do not decode); else null. + */ + private static String getLoginIdFromCookie(HttpServletRequest request) { + String uid = null; + try { + String[] cspFields = getCspData(request); + if (cspFields != null && cspFields.length > 5) + uid = cspFields[5]; + } catch (Throwable t) { + LOG.info(AaiUiMsgs.LOGIN_FILTER_INFO, + "getLoginIdFromCookie failed " + t.getLocalizedMessage()); + } + return uid; + } + + /** + * Searches the specified request for the CSP cookie, decodes it and parses it to a String array. + * + * @param request + * @return Array of String as parsed from the cookie; null if the cookie is not present; empty + * array if the cookie could not be decoded. 
+ */ + private static String[] getCspData(HttpServletRequest request) { + final String cookieName = PortalApiProperties.getProperty(CSP_COOKIE_NAME); + if (cookieName == null) { + LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG, + "getCspData: Failed to get property " + CSP_COOKIE_NAME); + return null; + } + Cookie csp = getCookie(request, cookieName); + if (csp == null) { + LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG, "getCspData failed to get cookie " + cookieName); + return null; + } + final String cspCookieEncrypted = csp.getValue(); + + String cspCookieDecrypted = null; + try { + cspCookieDecrypted = PortalAuthenticationConfig.getInstance().getCookieDecryptor().decryptCookie(cspCookieEncrypted); + return cspCookieDecrypted.split("\\|"); + + } catch (ClassNotFoundException e) { + LOG.error(AaiUiMsgs.DECRYPTION_ERROR,"Unable to find the Cookie Decryptor Class"); + } + + return null; + } +}
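+
+// Illustrative usage sketch only (not part of the class above): a servlet login filter
+// protecting the UI might drive authentication off validateEcompSso roughly as follows,
+// where request is the incoming HttpServletRequest and the redirect target is a placeholder:
+//
+//   String userId = EcompSso.validateEcompSso(request);
+//   if (userId == null) {
+//     // not authenticated via the portal SSO flow; redirect to the portal login page
+//   }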
\ No newline at end of file diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/SecurityContextFactory.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/SecurityContextFactory.java new file mode 100644 index 0000000..f251c31 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/SecurityContextFactory.java @@ -0,0 +1,78 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.security; + +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.UnrecoverableKeyException; +import java.security.cert.CertificateException; + +import javax.net.ssl.SSLContext; + +/** + * A factory for creating SecurityContext objects. 
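+ *
+ * Illustrative usage sketch (the keystore path and password below are placeholders; the
+ * concrete implementation provided in this commit is SecurityContextFactoryImpl):
+ *
+ * <pre>
+ *   SecurityContextFactory factory = new SecurityContextFactoryImpl();
+ *   factory.setClientCertFileName("/path/to/client-cert.p12");
+ *   factory.setClientCertPassword("placeholder-password");
+ *   SSLContext sslContext = factory.getSecureContext();
+ * </pre>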
+ */ +public interface SecurityContextFactory { + + public String getSslAlgorithm(); + + public void setSslAlgorithm(String sslAlgorithm); + + public String getKeyManagerAlgortihm(); + + public void setKeyManagerAlgortihm(String keyManagerAlgortihm); + + public String getKeyStoreType(); + + public void setKeyStoreType(String keyStoreType); + + public boolean isServerCertificationChainValidationEnabled(); + + public void setServerCertificationChainValidationEnabled( + boolean serverCertificationChainValidationEnabled); + + public String getTrustStoreFileName(); + + public void setTrustStoreFileName(String filename); + + public String getClientCertPassword(); + + public void setClientCertPassword(String password); + + public void setClientCertFileInputStream(FileInputStream fis); + + public void setClientCertFileName(String filename) throws IOException; + + public FileInputStream getClientCertFileInputStream(); + + public SSLContext getSecureContext() + throws KeyManagementException, NoSuchAlgorithmException, FileNotFoundException, + KeyStoreException, CertificateException, IOException, UnrecoverableKeyException; + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/SecurityContextFactoryImpl.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/SecurityContextFactoryImpl.java new file mode 100644 index 0000000..c09fa97 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/SecurityContextFactoryImpl.java @@ -0,0 +1,205 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.security; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.nio.file.Files; +import java.security.KeyManagementException; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.UnrecoverableKeyException; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; + +import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; + +/** + * The Class SecurityContextFactoryImpl. 
+ */ +public class SecurityContextFactoryImpl implements SecurityContextFactory { + + protected String sslAlgorithm; + protected String keyManagerAlgortihm; + protected String keyStoreType; + protected boolean serverCertificationChainValidationEnabled; + protected String trustStoreFileName; + protected String clientCertPassword; + protected FileInputStream clientCertFileInputStream; + protected String clientCertFileName; + protected byte[] clientCertBytes; + + /** + * Instantiates a new security context factory impl. + */ + public SecurityContextFactoryImpl() { + this.sslAlgorithm = "TLS"; + this.keyManagerAlgortihm = "SunX509"; + this.keyStoreType = "PKCS12"; + this.serverCertificationChainValidationEnabled = false; + this.clientCertFileInputStream = null; + this.clientCertFileName = null; + } + + @Override + public String getSslAlgorithm() { + return sslAlgorithm; + } + + @Override + public void setSslAlgorithm(String sslAlgorithm) { + this.sslAlgorithm = sslAlgorithm; + } + + @Override + public String getKeyManagerAlgortihm() { + return keyManagerAlgortihm; + } + + @Override + public void setKeyManagerAlgortihm(String keyManagerAlgortihm) { + this.keyManagerAlgortihm = keyManagerAlgortihm; + } + + @Override + public String getKeyStoreType() { + return keyStoreType; + } + + @Override + public void setKeyStoreType(String keyStoreType) { + this.keyStoreType = keyStoreType; + } + + @Override + public boolean isServerCertificationChainValidationEnabled() { + return serverCertificationChainValidationEnabled; + } + + @Override + public void setServerCertificationChainValidationEnabled( + boolean serverCertificationChainValidationEnabled) { + this.serverCertificationChainValidationEnabled = serverCertificationChainValidationEnabled; + } + + @Override + public void setClientCertFileName(String filename) throws IOException { + this.clientCertFileName = filename; + + if (filename == null) { + this.clientCertBytes = null; + } else { + this.clientCertBytes = Files.readAllBytes(new File(filename).toPath()); + } + } + + @Override + public void setClientCertFileInputStream(FileInputStream fis) { + this.clientCertFileInputStream = fis; + } + + @Override + public FileInputStream getClientCertFileInputStream() { + return this.clientCertFileInputStream; + } + + @Override + public SSLContext getSecureContext() throws KeyManagementException, NoSuchAlgorithmException, + KeyStoreException, CertificateException, IOException, UnrecoverableKeyException { + + TrustManager[] trustAllCerts = null; + + if (serverCertificationChainValidationEnabled) { + + System.setProperty("javax.net.ssl.trustStore", trustStoreFileName); + + } else { + + // Create a trust manager that does not validate certificate chains + trustAllCerts = new TrustManager[] {new X509TrustManager() { + @Override + public X509Certificate[] getAcceptedIssuers() { + return null; + } + + @Override + public void checkClientTrusted(X509Certificate[] certs, String authType) {} + + @Override + public void checkServerTrusted(X509Certificate[] certs, String authType) {} + } }; + } + + KeyManagerFactory kmf = KeyManagerFactory.getInstance(keyManagerAlgortihm); + + KeyStore ks = KeyStore.getInstance(keyStoreType); + + char[] pwd = null; + if (clientCertPassword != null) { + pwd = clientCertPassword.toCharArray(); + } + + if (clientCertBytes != null) { + ks.load(new ByteArrayInputStream(clientCertBytes), pwd); + } else { + ks.load(null, pwd); + } + + kmf.init(ks, pwd); + + SSLContext ctx = SSLContext.getInstance(sslAlgorithm); + ctx.init(kmf.getKeyManagers(), 
trustAllCerts, null); + + return ctx; + + } + + @Override + public String getTrustStoreFileName() { + return this.trustStoreFileName; + } + + @Override + public void setTrustStoreFileName(String filename) { + this.trustStoreFileName = filename; + } + + @Override + public String getClientCertPassword() { + return this.clientCertPassword; + } + + @Override + public void setClientCertPassword(String password) { + this.clientCertPassword = password; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/filter/CspCookieFilter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/filter/CspCookieFilter.java new file mode 100644 index 0000000..795739b --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/filter/CspCookieFilter.java @@ -0,0 +1,267 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.security.filter; + +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.UnsupportedEncodingException; +import java.net.URLDecoder; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Properties; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.Cookie; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.viewandinspect.config.SparkyConstants; + +// import esGateKeeper.esGateKeeper; + +/** + * Redirects to the AT&T global login page if the user is not authenticated.<br> + * Filter properties need to be configured in: csp-cookie-filter.properties + */ +public class CspCookieFilter implements Filter { + + /** Redirect URL for the login page. */ + private String globalLoginUrl; + + /** Application identifier. */ + private String applicationId; + + /** Gatekeeper environment setting (development or production). 
*/ + private String gateKeeperEnvironment; + + private static final String FILTER_PARAMETER_CONFIG = "config"; + private static final String PROPERTY_GLOBAL_LOGIN_URL = "global.login.url"; + private static final String PROPERTY_APPLICATION_ID = "application.id"; + private static final String PROPERTY_GATEKEEPER_ENVIRONMENT = "gatekeeper.environment"; + // valid open redirect domains + private List<String> redirectDomains = new ArrayList<>(); + private static final String PROPERTY_REDIRECT_DOMAINS = "redirect-domain"; + + /** Needed by esGateKeeper, does not accept any other value. */ + private static final String GATEKEEPER_ACCOUNT_NAME = "CSP"; + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(CspCookieFilter.class); + + + /* (non-Javadoc) + * @see javax.servlet.Filter#init(javax.servlet.FilterConfig) + */ + @Override + public void init(FilterConfig filterConfig) throws ServletException { + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "CspCookieFilter", "", "Init", ""); + + try { + setConfigurationProperties(filterConfig); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.ERROR_CSP_CONFIG_FILE); + throw new ServletException(exc); + } + } + + + /* (non-Javadoc) + * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse, javax.servlet.FilterChain) + */ + @Override + public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) + throws IOException, ServletException { + HttpServletRequest request = (HttpServletRequest) req; + HttpServletResponse response = (HttpServletResponse) res; + + Cookie[] cookies = request.getCookies(); + if ((cookies == null) || (cookies.length == 0)) { + doLogin(request, response); + return; + } + + /* + * String attEsSec = getSecurityCookie(cookies); + * + * if (attESSec == null || attESSec.length() == 0) { doLogin(request, response); return; } + * + * String attESSecUnEncrypted = esGateKeeper.esGateKeeper(attESSec, GATEKEEPER_ACCOUNT_NAME, + * gateKeeperEnvironment); if (attESSecUnEncrypted == null) { doLogin(request, response); } else + * { + */ + // LOG.info("User has valid cookie"); + chain.doFilter(request, response); + // } + } + + + /* (non-Javadoc) + * @see javax.servlet.Filter#destroy() + */ + @Override + public void destroy() {} + + /** + * Sets all required properties needed by this filter. + * + * @param filterConfig the filter configuration defined in the application web.xml + * @throws IOException if the properties failed to load. + */ + private void setConfigurationProperties(FilterConfig filterConfig) throws IOException { + InputStream inputStream = new FileInputStream(SparkyConstants.CONFIG_HOME + + filterConfig.getInitParameter(FILTER_PARAMETER_CONFIG)); + Properties cspProperties = new Properties(); + cspProperties.load(inputStream); + globalLoginUrl = cspProperties.getProperty(PROPERTY_GLOBAL_LOGIN_URL); + applicationId = cspProperties.getProperty(PROPERTY_APPLICATION_ID); + gateKeeperEnvironment = cspProperties.getProperty(PROPERTY_GATEKEEPER_ENVIRONMENT); + redirectDomains = Arrays.asList(cspProperties.getProperty(PROPERTY_REDIRECT_DOMAINS).split(",")); + } + + /** + * Returns the attESSec cookie if found in the client. + * + * @param cookies the cookies available in the client + * @return the attESSec authentication cookie generated by the login page. 
+ */ + private String getSecurityCookie(Cookie[] cookies) { + String attEsSec = null; + for (int i = 0; i < cookies.length; i++) { + Cookie thisCookie = cookies[i]; + String cookieName = thisCookie.getName(); + + if ("attESSec".equals(cookieName)) { + attEsSec = thisCookie.getValue(); + break; + } + } + return attEsSec; + } + + /** + * Redirects to the AT&T global login page. If this is an AJAX request it returns an unauthorized + * HTTP error in the response. + * + * @param request the filter request object + * @param response the filter response object + * @throws IOException if there is an error setting the error response + */ + private void doLogin(HttpServletRequest request, HttpServletResponse response) + throws IOException { + if (isAjaxRequest(request)) { + response.sendError(HttpServletResponse.SC_UNAUTHORIZED, + "User is not authorized. Please login to application"); + } else { + // Fix for Safari 7.0.2 onwards to avoid login page cache + response.addHeader("Cache-Control", "no-cache, no-store"); + String redirectURL = createRedirectUrl(request); + if (this.isValidRedirectURL(redirectURL)){ + response.sendRedirect(redirectURL); + LOG.debug(AaiUiMsgs.VALID_REDIRECT_URL, redirectURL); + } else{ + response.sendError(400, "Bad redirect URL"); + LOG.error(AaiUiMsgs.INVALID_REDIRECT_URL, redirectURL); + } + } + } + + /** + * Checks if a redirect url is valid + * @param url URL to validate + * @return true if URL is a valid redirect URL, false otherwise + */ + private boolean isValidRedirectURL (String url){ + String redirectTo = url.substring(url.indexOf("?retURL=")+ "?retURL=".length()); + try { + redirectTo = URLDecoder.decode(redirectTo, StandardCharsets.UTF_8.toString()); + } catch (UnsupportedEncodingException e) { + LOG.error(AaiUiMsgs.UNSUPPORTED_URL_ENCODING, e.getLocalizedMessage()); + return false; + } + for (String domain: this.redirectDomains){ + if (redirectTo.endsWith(domain)) + return true; + } + return false; + } + + + /** + * Returns <code>true</code> if the request is an AJAX request. + * + * @param request the filter request object + * @return <code>true</code> if the request is an AJAX request. + */ + private boolean isAjaxRequest(HttpServletRequest request) { + String headerValue = request.getHeader("X-Requested-With"); + if ("XMLHttpRequest".equals(headerValue)) { + return true; + } + return false; + } + + /** + * Returns the redirection URL to the AT&T Global login page. + * + * @param request the request + * @return the string + * @throws UnsupportedEncodingException the unsupported encoding exception + */ + private String createRedirectUrl(HttpServletRequest request) throws UnsupportedEncodingException { + String returnUrl = getReturnUrl(request); + + return globalLoginUrl + "?retURL=" + returnUrl + "&sysName=" + applicationId; + } + + /** + * Gets the URL encoded return URL. + * + * @param request the HTTP request + * @return an encoded URL to return to following login + * @throws UnsupportedEncodingException the unsupported encoding exception + */ + private String getReturnUrl(HttpServletRequest request) throws UnsupportedEncodingException { + StringBuffer retUrl = request.getRequestURL(); + String urlParams = request.getQueryString(); + if (urlParams != null) { + retUrl.append("?" 
+ urlParams); + } + return URLEncoder.encode(retUrl.toString(), StandardCharsets.UTF_8.toString()); + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/filter/LoginFilter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/filter/LoginFilter.java new file mode 100644 index 0000000..dd90573 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/filter/LoginFilter.java @@ -0,0 +1,236 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.security.filter; + +import java.io.IOException; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.Cookie; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.servlet.http.HttpSession; +import javax.ws.rs.core.HttpHeaders; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.security.EcompSso; +import org.onap.aai.sparky.security.portal.config.PortalAuthenticationConfig; +import org.openecomp.portalsdk.core.onboarding.listener.PortalTimeoutHandler; +import org.openecomp.portalsdk.core.onboarding.util.PortalApiConstants; +import org.openecomp.portalsdk.core.onboarding.util.PortalApiProperties; +import org.openecomp.portalsdk.core.onboarding.util.SSOUtil; + +/** + * This filter checks every request for proper ECOMP Portal single sign on initialization. The + * possible paths and actions: + * <OL> + * <LI>User starts at an app page via a bookmark. No ECOMP portal cookie is set. Redirect there to + * get one; then continue as below. + * <LI>User starts at ECOMP Portal and goes to app. Alternately, the user's session times out and + * the user hits refresh. The ECOMP Portal cookie is set, but there is no valid session. Create one + * and publish info. + * <LI>User has valid ECOMP Portal cookie and session. Reset the max idle in that session. + * </OL> + * <P> + * Notes: + * <UL> + * <LI>Portal Session should be up prior to App Session</LI> + * <LI>If App Session Expires or if EPService cookie is unavailable, we need to redirect to Portal. 
+ * <LI>Method {@link #initiateSessionMgtHandler(HttpServletRequest)} should be called for Session + * management when the initial session is created + * <LI>While redirecting, the cookie "redirectUrl" should also be set so that Portal knows where to + * forward the request to once the Portal Session is created and EPService cookie is set. + * <LI>Method {@link #resetSessionMaxIdleTimeOut(HttpServletRequest)} should be called for every + * request to reset the MaxInactiveInterval to the right value. + * </UL> + * <P> + * This filter incorporates most features of the SDK application's SessionTimeoutInterceptor and + * SingleSignOnController classes + */ +public class LoginFilter implements Filter { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(LoginFilter.class); + + @Override + public void init(FilterConfig filterConfig) throws ServletException { + // Validate that app has provided useful portal properties + if (PortalApiProperties.getProperty(PortalApiConstants.ECOMP_REDIRECT_URL) == null) { + throw new ServletException("Failed to find URL in portal.properties"); + } + + PortalAuthenticationConfig appProperties; + try { + appProperties = PortalAuthenticationConfig.getInstance(); + } catch (Exception ex) { + throw new ServletException("Failed to get properties", ex); + } + + String restUser = appProperties.getUsername(); + String restPassword = appProperties.getPassword(); + if (restUser == null || restPassword == null) { + throw new ServletException("Failed to find user and/or password from properties"); + } + } + + @Override + public void destroy() { + // No resources to release + } + + /* + * (non-Javadoc) + * + * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse, + * javax.servlet.FilterChain) + */ + @Override + public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) + throws ServletException, IOException { + HttpServletRequest request = (HttpServletRequest) req; + HttpServletResponse response = (HttpServletResponse) res; + + // Choose authentication appropriate for the request. + final String restApiURI = request.getContextPath() + PortalApiConstants.API_PREFIX; + if (request.getRequestURI().startsWith(restApiURI)) { + // REST servlet checks credentials + LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG, "doFilter: delegating auth to REST servlet for request " + request.getRequestURI()); + chain.doFilter(request, response); + } else { + // All other requests require ECOMP Portal authentication + if (EcompSso.validateEcompSso(request) == null) { + String redirectURL, logMessage; + if (request.getRequestURI().contains("/editAttributes")) { + // If request is for Edit Attributes UI, redirect straight to the application. + String appPath = request.getRequestURI().substring(request.getContextPath().length() + 1) + + (request.getQueryString() != null ? ("?" 
+ request.getQueryString()) : ""); + redirectURL = SSOUtil.getECOMPSSORedirectURL(request, response, appPath); + logMessage = "Unauthenticated Edit Attributes UI login attempt."; + } else { + // Redirect to Portal UI + redirectURL = PortalApiProperties.getProperty(PortalApiConstants.ECOMP_REDIRECT_URL); + logMessage = "Unauthorized login attempt."; + } + + LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG, + logMessage + + " | Remote IP: " + request.getRemoteAddr() + + " | User agent: " + request.getHeader(HttpHeaders.USER_AGENT) + + " | Request URL: " + request.getRequestURL() + + " | Redirecting to: " + redirectURL); + + response.sendRedirect(redirectURL); + } else { + HttpSession session = request.getSession(false); + if (session == null) { + // New session + session = request.getSession(true); + LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG, "doFilter: created new session " + session.getId()); + initiateSessionMgtHandler(request); + } else { + // Existing session + LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG, "doFilter: resetting idle in existing session " + session.getId()); + resetSessionMaxIdleTimeOut(request); + } + // Pass request back down the filter chain + chain.doFilter(request, response); + } + } + } + + /** + * Publishes information about the session. + * + * @param request + */ + private void initiateSessionMgtHandler(HttpServletRequest request) { + String portalJSessionId = getPortalJSessionId(request); + String jSessionId = getJessionId(request); + storeMaxInactiveTime(request); + PortalTimeoutHandler.sessionCreated(portalJSessionId, jSessionId, request.getSession(false)); + } + + /** + * Gets the ECOMP Portal service cookie value. + * + * @param request + * @return Cookie value, or null if not found. + */ + private String getPortalJSessionId(HttpServletRequest request) { + Cookie ep = EcompSso.getCookie(request, EcompSso.EP_SERVICE); + return ep == null ? null : ep.getValue(); + } + + /** + * Gets the container session ID. + * + * @param request + * @return Session ID, or null if no session. + */ + private String getJessionId(HttpServletRequest request) { + HttpSession session = request.getSession(); + return session == null ? null : session.getId(); + } + + /** + * Sets the global session's max idle time to the session's max inactive interval. + * + * @param request + */ + private void storeMaxInactiveTime(HttpServletRequest request) { + HttpSession session = request.getSession(false); + if (session != null + && session.getAttribute(PortalApiConstants.GLOBAL_SESSION_MAX_IDLE_TIME) == null) { + session.setAttribute(PortalApiConstants.GLOBAL_SESSION_MAX_IDLE_TIME, + session.getMaxInactiveInterval()); + } + } + + /** + * Sets the session's max inactive interval. 
+ * + * @param request + */ + private void resetSessionMaxIdleTimeOut(HttpServletRequest request) { + try { + HttpSession session = request.getSession(false); + if (session != null) { + final Object maxIdleAttribute = session + .getAttribute(PortalApiConstants.GLOBAL_SESSION_MAX_IDLE_TIME); + if (maxIdleAttribute != null) { + session.setMaxInactiveInterval(Integer.parseInt(maxIdleAttribute.toString())); + } + } + } catch (Exception e) { + LOG.info(AaiUiMsgs.LOGIN_FILTER_INFO, "resetSessionMaxIdleTimeOut: failed to set session max inactive interval - " + e.getLocalizedMessage()); + } + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/PortalRestAPIServiceImpl.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/PortalRestAPIServiceImpl.java new file mode 100644 index 0000000..ad64c63 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/PortalRestAPIServiceImpl.java @@ -0,0 +1,213 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.security.portal; + +import java.io.File; +import java.io.IOException; +import java.text.MessageFormat; +import java.util.LinkedHashSet; +import java.util.List; + +import javax.servlet.http.HttpServletRequest; + +import org.onap.aai.sparky.security.EcompSso; +import org.onap.aai.sparky.security.portal.config.PortalAuthenticationConfig; +import org.onap.aai.sparky.viewandinspect.config.SparkyConstants; +import org.openecomp.portalsdk.core.onboarding.crossapi.IPortalRestAPIService; +import org.openecomp.portalsdk.core.onboarding.exception.PortalAPIException; +import org.openecomp.portalsdk.core.restful.domain.EcompRole; +import org.openecomp.portalsdk.core.restful.domain.EcompUser; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Responds to ECOMP Portal's REST queries for user and role information and management. + */ +public class PortalRestAPIServiceImpl implements IPortalRestAPIService { + + private static final Logger LOG = LoggerFactory.getLogger(PortalRestAPIServiceImpl.class); + private static final String ERROR_MESSAGE = "Failed to {0} user [loginId:{1}]"; + + private UserManager userManager; + + /** + * Initialise user manager. 
+ */ + public PortalRestAPIServiceImpl() { + userManager = new UserManager(new File(SparkyConstants.USERS_FILE_LOCATION)); + } + + ///////////////////////////////////////////////////////////////////////////// + // User interface + ///////////////////////////////////////////////////////////////////////////// + + /* + * (non-Javadoc) + * + */ + @Override + public void pushUser(EcompUser user) throws PortalAPIException { + LOG.debug("Push user [loginId:" + user.getLoginId() + "]"); + + if (userManager.getUser(user.getLoginId()).isPresent()) { + String message = getMessage(ERROR_MESSAGE, "push", user.getLoginId()) + + ", user is already stored"; + LOG.error(message); + throw new PortalAPIException(message); + } + + try { + userManager.pushUser(user); + } catch (IOException e) { + String message = getMessage(ERROR_MESSAGE, "push", user.getLoginId()); + LOG.error(message, e); + throw new PortalAPIException(message, e); + } + } + + /* + * (non-Javadoc) + * + */ + @Override + public void editUser(String loginId, EcompUser user) throws PortalAPIException { + LOG.debug("Edit user [loginId:" + loginId + "]"); + + userManager.getUser(loginId).orElseThrow(() -> { + String message = getMessage(ERROR_MESSAGE, "edit", loginId) + ", unknown user"; + LOG.error(message); + return new PortalAPIException(message); + }); + + try { + userManager.editUser(loginId, user); + } catch (IOException e) { + String message = getMessage(ERROR_MESSAGE, "edit", loginId); + LOG.error(message, e); + throw new PortalAPIException(message, e); + } + } + + /* + * (non-Javadoc) + * + */ + @Override + public EcompUser getUser(String loginId) throws PortalAPIException { + LOG.debug("Get user [loginId:" + loginId + "]"); + return userManager.getUser(loginId).orElseThrow(() -> { + String message = getMessage(ERROR_MESSAGE, "get", loginId) + ", unknown user"; + LOG.error(message); + return new PortalAPIException(message); + }); + } + + /* + * (non-Javadoc) + * + */ + @Override + public List<EcompUser> getUsers() throws PortalAPIException { + LOG.debug("Get users"); + return userManager.getUsers(); + } + + @Override + public String getUserId(HttpServletRequest request) throws PortalAPIException { + return EcompSso.validateEcompSso(request); + } + + ///////////////////////////////////////////////////////////////////////////// + // Role interface + ///////////////////////////////////////////////////////////////////////////// + + public List<EcompRole> getAvailableRoles() throws PortalAPIException { + LOG.debug("Get available roles"); + return UserManager.getRoles(); + } + + /* + * (non-Javadoc) + * + * @see + */ + @Override + public List<EcompRole> getUserRoles(String loginId) throws PortalAPIException { + LOG.debug("Get user roles"); + return userManager.getUserRoles(loginId); + } + + /* + * (non-Javadoc) + * + */ + @Override + public void pushUserRole(String loginId, List<EcompRole> roles) throws PortalAPIException { + LOG.debug("Push user role [loginId:" + loginId + "]"); + try { + EcompUser user = getUser(loginId); + if (roles != null) { + user.setRoles(new LinkedHashSet<EcompRole>(roles)); + } else { + user.setRoles(new LinkedHashSet<EcompRole>()); + } + editUser(loginId, user); + } catch (PortalAPIException e) { + String message = getMessage(ERROR_MESSAGE, "push role", loginId); + LOG.error(message); + throw new PortalAPIException(message, e); + } + } + + ///////////////////////////////////////////////////////////////////////////// + // Security interface + 
///////////////////////////////////////////////////////////////////////////// + + /* + * (non-Javadoc) + * + */ + @Override + public boolean isAppAuthenticated(HttpServletRequest request) throws PortalAPIException { + LOG.debug("Authentication request"); + PortalAuthenticationConfig config = PortalAuthenticationConfig.getInstance(); + String restUsername = request.getHeader(PortalAuthenticationConfig.PROP_USERNAME); + String restPassword = request.getHeader(PortalAuthenticationConfig.PROP_PASSWORD); + return restUsername != null && restPassword != null && restUsername.equals(config.getUsername()) + && restPassword.equals(config.getPassword()); + } + + private String getMessage(String message, Object... args) { + MessageFormat formatter = new MessageFormat(""); + formatter.applyPattern(message); + return formatter.format(args); + } + + public List<EcompRole> getAvailableRoles(String requestedLoginId) throws PortalAPIException { + LOG.debug("Get available roles"); + return UserManager.getRoles(); + } + +}
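The credential check in isAppAuthenticated() above simply compares the "username" and "password" request headers against the values held by PortalAuthenticationConfig. A hedged sketch of what a Portal-side (or test) caller would send; the target URL is passed in because no endpoint path is asserted here.

    import java.net.HttpURLConnection;
    import java.net.URL;

    public class PortalApiAuthSketch {

      /** Sends the two headers that isAppAuthenticated() inspects; returns the HTTP status. */
      public static int call(String url, String user, String pass) throws Exception {
        HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
        conn.setRequestProperty("username", user); // PortalAuthenticationConfig.PROP_USERNAME
        conn.setRequestProperty("password", pass); // PortalAuthenticationConfig.PROP_PASSWORD
        return conn.getResponseCode();
      }
    }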
\ No newline at end of file diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/UserManager.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/UserManager.java new file mode 100644 index 0000000..ea9b204 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/UserManager.java @@ -0,0 +1,170 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.security.portal; + +import java.io.File; +import java.io.IOException; +import java.lang.reflect.Type; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.stream.Collectors; + +import org.onap.aai.sparky.security.portal.config.RolesConfig; +import org.openecomp.portalsdk.core.restful.domain.EcompRole; +import org.openecomp.portalsdk.core.restful.domain.EcompUser; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.reflect.TypeToken; + +/** + * Basic file based user storage. + */ +public class UserManager { + + private File usersFile; + + private static final ReadWriteLock LOCK = new ReentrantReadWriteLock(true); + private static final Lock READ_LOCK = LOCK.readLock(); + private static final Lock WRITE_LOCK = LOCK.writeLock(); + + private static final Gson GSON = new GsonBuilder().setPrettyPrinting().create(); + + /** + * + * @param usersFile a file to store the users + */ + public UserManager(File usersFile) { + this.usersFile = usersFile; + } + + /** + * Returns all users stored. + * + * @return a list of users. + */ + public List<EcompUser> getUsers() { + Type collectionType = new TypeToken<List<EcompUser>>() { + }.getType(); + + Optional<String> users = read(usersFile); + if (users.isPresent()) { + return GSON.fromJson(users.get(), collectionType); + } + + return new ArrayList<>(); + } + + /** + * Returns a stored user. + * + * @param loginId the identifier of the user + * @return an optional user. + */ + public Optional<EcompUser> getUser(String loginId) { + if (!getUsers().isEmpty()) { + return getUsers().stream().filter(u -> loginId.equals(u.getLoginId())).findFirst(); + } + return Optional.empty(); + } + + /** + * Stores a user if not already stored. 
+ * + * @param user the user to be stored + * @throws IOException + */ + public void pushUser(EcompUser user) throws IOException { + WRITE_LOCK.lock(); + try { + if (!getUser(user.getLoginId()).isPresent()) { + addUser(getUsers(), user); + } + } finally { + WRITE_LOCK.unlock(); + } + } + + /** + * Replaces an existing user. + * + * @param loginId the id of the user + * @param user the new user details + * @throws IOException + */ + public void editUser(String loginId, EcompUser user) throws IOException { + WRITE_LOCK.lock(); + try { + if (getUser(loginId).isPresent()) { + List<EcompUser> users = getUsers().stream().filter(u -> !u.getLoginId().equals(loginId)) + .collect(Collectors.toList()); + addUser(users, user); + } + } finally { + WRITE_LOCK.unlock(); + } + } + + /** + * Gets the roles assigned to a user. + * + * @param loginId the id of the user + * @return the assigned roles + */ + public List<EcompRole> getUserRoles(String loginId) { + List<EcompRole> roles = new ArrayList<>(); + roles.addAll(getUser(loginId).orElseGet(EcompUser::new).getRoles()); + return roles; + } + + public static List<EcompRole> getRoles() { + return RolesConfig.getInstance().getRoles(); + } + + private void addUser(List<EcompUser> users, EcompUser user) throws IOException { + users.add(user); + write(users); + } + + private void write(List<EcompUser> users) throws IOException { + Files.write(usersFile.toPath(), GSON.toJson(users).getBytes()); + } + + private Optional<String> read(File file) { + READ_LOCK.lock(); + try { + return Optional.of(new String(Files.readAllBytes(file.toPath()))); + } catch (IOException e) { // NOSONAR + return Optional.empty(); + } finally { + READ_LOCK.unlock(); + } + } +}
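A short usage sketch for the file-backed UserManager above; the file path and the EcompUser setter used here are assumptions rather than values taken from this commit.

    import java.io.File;

    import org.onap.aai.sparky.security.portal.UserManager;
    import org.openecomp.portalsdk.core.restful.domain.EcompUser;

    public class UserManagerUsageSketch {

      public static void main(String[] args) throws Exception {
        UserManager users = new UserManager(new File("/tmp/users.config")); // path is illustrative
        EcompUser user = new EcompUser();
        user.setLoginId("demo"); // assumes the portal SDK bean exposes this setter
        users.pushUser(user); // no-op if a user with this loginId is already stored
        System.out.println(users.getUser("demo").isPresent()); // prints: true
      }
    }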
\ No newline at end of file diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/config/PortalAuthenticationConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/config/PortalAuthenticationConfig.java new file mode 100644 index 0000000..e1b7eda --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/config/PortalAuthenticationConfig.java @@ -0,0 +1,124 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.security.portal.config; + + +import java.util.Properties; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.security.CookieDecryptor; +import org.onap.aai.sparky.util.ConfigHelper; +import org.onap.aai.sparky.util.Encryptor; +import org.onap.aai.sparky.viewandinspect.config.SparkyConstants; + + +/** + * Provides Portal authentication configuration. + */ +public class PortalAuthenticationConfig { + + private String username; + private String password; + private boolean isOnapEnabled; + private CookieDecryptor cookieDecryptor; + private String cookieDecryptorClassName; + + public static final String PROP_USERNAME = "username"; + public static final String PROP_PASSWORD = "password"; // NOSONAR + public static final String PROP_IS_ONAP_ENABLED = "onap_enabled"; // NOSONAR + private static final String AUTHENTICATION_CONFIG_FILE = SparkyConstants.PORTAL_AUTHENTICATION_FILE_LOCATION; + public static final String PROP_COOKIEDECRYPTORCLASSNAME = "cookie_decryptor_classname"; + private static final Logger LOG = LoggerFactory.getInstance().getLogger(PortalAuthenticationConfig.class); + + private PortalAuthenticationConfig() { + // Prevent instantiation + } + + private static class PortalAuthenticationConfigHelper { + private static final PortalAuthenticationConfig INSTANCE = new PortalAuthenticationConfig(); + + private PortalAuthenticationConfigHelper() { + // Deliberately empty + } + } + + /** + * Get a singleton instance of the configuration. 
+ * + * @return + */ + public static PortalAuthenticationConfig getInstance() { + PortalAuthenticationConfigHelper.INSTANCE.load(); + return PortalAuthenticationConfigHelper.INSTANCE; + } + + public String getUsername() { + return username; + } + + public String getPassword() { + Encryptor encryptor = new Encryptor(); + return encryptor.decryptValue(password); + } + + public boolean getIsOnapEnabled() { + return isOnapEnabled; + } + public String getcookieDecryptorClassName() { + return cookieDecryptorClassName; + } + + /** + * Reload the Portal authentication properties from the classpath. + */ + public void reload() { + load(); + } + + /** + * Load the Portal authentication properties from the classpath. + */ + private void load() { + Properties props = ConfigHelper.loadConfigFromExplicitPath(AUTHENTICATION_CONFIG_FILE); + username = props.getProperty(PROP_USERNAME); + password = props.getProperty(PROP_PASSWORD); + isOnapEnabled = Boolean.parseBoolean(props.getProperty(PROP_IS_ONAP_ENABLED, "true")); + cookieDecryptorClassName= props.getProperty(PROP_COOKIEDECRYPTORCLASSNAME); + } + + public CookieDecryptor getCookieDecryptor() throws ClassNotFoundException{ + + Class cookieDecrypterClass = Class.forName(cookieDecryptorClassName); + try { + cookieDecryptor = (CookieDecryptor) cookieDecrypterClass.newInstance(); + } catch (InstantiationException | IllegalAccessException e) { + LOG.error(AaiUiMsgs.DECRYPTION_ERROR,"Unable to instantiate Cookie Decryptor Class"); + } + return cookieDecryptor; + } + +}
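A brief sketch of reading the singleton above; it uses only accessors defined in this file. Note that getInstance() re-runs load(), so the properties file is re-read on every call.

    import org.onap.aai.sparky.security.portal.config.PortalAuthenticationConfig;

    public class PortalAuthConfigSketch {

      public static void main(String[] args) {
        // Each call reloads username/password/onap_enabled/cookie_decryptor_classname from disk.
        PortalAuthenticationConfig cfg = PortalAuthenticationConfig.getInstance();
        System.out.println("onap_enabled=" + cfg.getIsOnapEnabled());
        // In the non-ONAP (CSP) path this class name is instantiated via getCookieDecryptor().
        System.out.println("cookie decryptor=" + cfg.getcookieDecryptorClassName());
      }
    }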
\ No newline at end of file diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/config/RolesConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/config/RolesConfig.java new file mode 100644 index 0000000..283834c --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/security/portal/config/RolesConfig.java @@ -0,0 +1,90 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.security.portal.config; + +import java.io.IOException; +import java.lang.reflect.Type; +import java.net.URISyntaxException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Collections; +import java.util.List; + +import org.onap.aai.sparky.viewandinspect.config.SparkyConstants; +import org.openecomp.portalsdk.core.restful.domain.EcompRole; + +import com.google.gson.Gson; +import com.google.gson.JsonSyntaxException; +import com.google.gson.reflect.TypeToken; + +/** + * Provides roles configuration. + */ +public class RolesConfig { + + private List<EcompRole> roles; + + private static final Gson GSON = new Gson(); + private static final String ROLES_CONFIG_FILE = SparkyConstants.ROLES_FILE_LOCATION; + + private RolesConfig() { + // Prevent instantiation + } + + private static class RolesConfigHelper { + private static final RolesConfig INSTANCE = new RolesConfig(); + + private RolesConfigHelper() { + // Deliberately empty + } + } + + /** + * Get a singleton instance of the configuration. + * + * @return + */ + public static RolesConfig getInstance() { + try { + RolesConfigHelper.INSTANCE.load(); + } catch (Exception e) { + throw new ExceptionInInitializerError(e); + } + + return RolesConfigHelper.INSTANCE; + } + + public List<EcompRole> getRoles() { + return roles; + } + + private void load() throws JsonSyntaxException, IOException, URISyntaxException { + Type collectionType = new TypeToken<List<EcompRole>>() { + }.getType(); + + roles = Collections.unmodifiableList(GSON + .fromJson(new String(Files.readAllBytes(Paths.get(ROLES_CONFIG_FILE))), collectionType)); + } +}
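The roles file that load() reads above is plain JSON deserialized by Gson into a List<EcompRole>. A small sketch of that parsing, assuming the conventional id/name fields of the portal SDK's EcompRole bean.

    import java.lang.reflect.Type;
    import java.util.List;

    import org.openecomp.portalsdk.core.restful.domain.EcompRole;

    import com.google.gson.Gson;
    import com.google.gson.reflect.TypeToken;

    public class RolesJsonSketch {

      public static void main(String[] args) {
        // Shape of the roles file content; the id/name fields are an assumption based on EcompRole.
        String json = "[{\"id\":1,\"name\":\"View\"}]";
        Type collectionType = new TypeToken<List<EcompRole>>() {}.getType();
        List<EcompRole> roles = new Gson().fromJson(json, collectionType);
        System.out.println(roles.get(0).getName()); // prints: View
      }
    }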
\ No newline at end of file diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/SubscriptionServiceProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/SubscriptionServiceProcessor.java new file mode 100644 index 0000000..a8dd131 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/SubscriptionServiceProcessor.java @@ -0,0 +1,74 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.subscription; + +import javax.servlet.http.HttpServletRequest; + +import org.apache.camel.Exchange; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.logging.util.ServletUtils; +import org.onap.aai.sparky.subscription.services.SubscriptionService; + +public class SubscriptionServiceProcessor { + + private static final String EMPTY_RESPONSE = "{}"; + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(SubscriptionServiceProcessor.class); + + SubscriptionService subService; + + + public SubscriptionServiceProcessor(SubscriptionService subscriptionService) { + this.subService = subscriptionService; + } + + + public void getSubscription(Exchange exchange) { + + HttpServletRequest request = exchange.getIn().getBody(HttpServletRequest.class); + ServletUtils.setUpMdcContext(exchange, request); + + OperationResult subscriptionResult = null; + + try { + + subscriptionResult = subService.buildSubscriptionPayload(); + + } catch (Exception exc) { + subscriptionResult = new OperationResult(); + subscriptionResult.setResult(EMPTY_RESPONSE); + LOG.error(AaiUiMsgs.FAILURE_TO_PROCESS_REQUEST, + "Exception thrown during subscription processing: " + exc.getLocalizedMessage()); + } + + exchange.getOut().setBody(subscriptionResult.getResult()); + + + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/config/SubscriptionConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/config/SubscriptionConfig.java new file mode 100644 index 0000000..37c6c4d --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/config/SubscriptionConfig.java @@ -0,0 +1,139 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * 
============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.subscription.config; + +import java.util.Collection; + + +/** + * The Class SubscriptionConfig. + */ +public class SubscriptionConfig { + + private static final String EMPTY_TARGET = ""; + private static final String EMPTY_ORIGIN = ""; + private static final String EMPTY_MESSAGE_TYPE = ""; + private static final String EMPTY_TOPIC = ""; + + + private String subscriptionTarget = EMPTY_TARGET; + private String subscriptionOrigin = EMPTY_ORIGIN; + private String subscriptionMessageType = EMPTY_MESSAGE_TYPE; + private String subscriptionTopic = EMPTY_TOPIC; + + private String launchOITarget = EMPTY_TARGET; + private String launchOIOrigin = EMPTY_ORIGIN; + private String launchOIMessageType = EMPTY_MESSAGE_TYPE; + private String launchOITopic = EMPTY_TOPIC; + + private Boolean isLaunchOIEnabled = false; + + private Collection<String> annEntitiyTypes; + + /** + * Instantiates a new Subscription config. 
+ */ + public SubscriptionConfig() {} + + public String getSubscriptionTarget() { + return subscriptionTarget; + } + + public void setSubscriptionTarget(String target) { + this.subscriptionTarget = target; + } + + public String getSubscriptionOrigin() { + return subscriptionOrigin; + } + + public void setSubscriptionOrigin(String origin) { + this.subscriptionOrigin = origin; + } + + public String getSubscriptionMessageType() { + return subscriptionMessageType; + } + + public void setSubscriptionMessageType(String messageType) { + this.subscriptionMessageType = messageType; + } + + public String getSubscriptionTopic() { + return subscriptionTopic; + } + + public void setSubscriptionTopic(String topic) { + this.subscriptionTopic = topic; + } + + public String getLaunchOITarget() { + return launchOITarget; + } + + public void setLaunchOITarget(String launchOITarget) { + this.launchOITarget = launchOITarget; + } + + public String getLaunchOIOrigin() { + return launchOIOrigin; + } + + public void setLaunchOIOrigin(String launchOIOrigin) { + this.launchOIOrigin = launchOIOrigin; + } + + public String getLaunchOIMessageType() { + return launchOIMessageType; + } + + public void setLaunchOIMessageType(String launchOIMessageType) { + this.launchOIMessageType = launchOIMessageType; + } + + public String getLaunchOITopic() { + return launchOITopic; + } + + public void setLaunchOITopic(String launchOITopic) { + this.launchOITopic = launchOITopic; + } + + public Collection<String> getAnnEntitiyTypes() { + return annEntitiyTypes; + } + + public void setAnnEntitiyTypes(Collection<String> annEntitiyTypes) { + this.annEntitiyTypes = annEntitiyTypes; + } + + public Boolean getIsLaunchOIEnabled() { + return isLaunchOIEnabled; + } + + public void setIsLaunchOIEnabled(Boolean isLaunchOIEnabled) { + this.isLaunchOIEnabled = isLaunchOIEnabled; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Message.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Message.java new file mode 100644 index 0000000..89489f3 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Message.java @@ -0,0 +1,60 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.subscription.payload.entity; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; + +@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonPropertyOrder({ "applicationName", "payload" }) +public class Message { + + @JsonProperty("applicationName") + private String applicationName; + @JsonProperty("payload") + private Payload payload; + + @JsonProperty("applicationName") + public String getApplicationName() { + return applicationName; + } + + @JsonProperty("applicationName") + public void setApplicationName(String applicationName) { + this.applicationName = applicationName; + } + + @JsonProperty("payload") + public Payload getPayload() { + return payload; + } + + @JsonProperty("payload") + public void setPayload(Payload payload) { + this.payload = payload; + } + +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/ObjectInspectorPayload.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/ObjectInspectorPayload.java new file mode 100644 index 0000000..eae1df1 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/ObjectInspectorPayload.java @@ -0,0 +1,128 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.subscription.payload.entity; + +import java.io.File; +import java.io.IOException; + +import org.onap.aai.sparky.subscription.config.SubscriptionConfig; +import org.onap.aai.sparky.viewandinspect.config.SparkyConstants; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonPropertyOrder({ "target", "origin", "messageType", "topic", "message" }) +public class ObjectInspectorPayload { + + @JsonProperty("target") + private String target; + @JsonProperty("origin") + private String origin; + @JsonProperty("messageType") + private String messageType; + @JsonProperty("topic") + private String topic; + @JsonProperty("message") + private Message message; + + @JsonProperty("target") + public String getTarget() { + return target; + } + + @JsonProperty("target") + public void setTarget(String target) { + this.target = target; + } + + @JsonProperty("origin") + public String getOrigin() { + return origin; + } + + @JsonProperty("origin") + public void setOrigin(String origin) { + this.origin = origin; + } + + @JsonProperty("messageType") + public String getMessageType() { + return messageType; + } + + @JsonProperty("messageType") + public void setMessageType(String messageType) { + this.messageType = messageType; + } + + @JsonProperty("topic") + public String getTopic() { + return topic; + } + + @JsonProperty("topic") + public void setTopic(String topic) { + this.topic = topic; + } + + @JsonProperty("message") + public Message getMessage() { + return message; + } + + @JsonProperty("message") + public void setMessage(Message message) { + this.message = message; + } + + private static 
ObjectInspectorPayload lic; + public static ObjectInspectorPayload getOIPayload(SubscriptionConfig subscriptionConf) throws JsonParseException, JsonMappingException, IOException{ + if(lic == null){ + ObjectMapper mapper = new ObjectMapper(); + lic = mapper.readValue(new File(SparkyConstants.SUBSCRIPTION_OI_MAPPING), ObjectInspectorPayload.class); + lic.initializeOIPayload(subscriptionConf); + } + + return lic; + } + + private void initializeOIPayload(SubscriptionConfig subscriptionConf) { + try { + lic.setOrigin(subscriptionConf.getLaunchOIOrigin()); + lic.setTarget(subscriptionConf.getLaunchOITarget()); + lic.setTopic(subscriptionConf.getLaunchOITopic()); + lic.setMessageType(subscriptionConf.getLaunchOIMessageType()); + } catch (Exception e) { + // if the subscription config values cannot be applied, keep the payload as loaded from the template + e.printStackTrace(); + } + + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Params.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Params.java new file mode 100644 index 0000000..e3bb4c7 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Params.java @@ -0,0 +1,60 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.subscription.payload.entity; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; + +@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonPropertyOrder({ "contexts", "objectName", "externalClassId"}) +public class Params { + + @JsonProperty("objectName") + private String objectName; + @JsonProperty("externalClassId") + private String externalClassId; + + @JsonProperty("objectName") + public String getObjectName() { + return objectName; + } + + @JsonProperty("objectName") + public void setObjectName(String objectName) { + this.objectName = objectName; + } + + @JsonProperty("externalClassId") + public String getExternalClassId() { + return externalClassId; + } + + @JsonProperty("externalClassId") + public void setExternalClassId(String externalClassId) { + this.externalClassId = externalClassId; + } + +}
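ObjectInspectorPayload.getOIPayload(...) above lazily loads a JSON template from the file named by SparkyConstants.SUBSCRIPTION_OI_MAPPING, overwrites its origin, target, topic and messageType from SubscriptionConfig, and caches the result as a singleton. Below is a minimal caller sketch, illustrative only: it assumes the mapping file exists on disk, and it assumes SubscriptionConfig has a no-arg constructor and setters matching the launchOI* getters used above (in the running service that bean is populated from configuration rather than set by hand):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.onap.aai.sparky.subscription.config.SubscriptionConfig;
    import org.onap.aai.sparky.subscription.payload.entity.ObjectInspectorPayload;

    public class ObjectInspectorPayloadSketch {
      public static void main(String[] args) throws Exception {
        SubscriptionConfig config = new SubscriptionConfig();  // assumed no-arg constructor
        config.setLaunchOIOrigin("example-origin");            // assumed setters; illustrative values
        config.setLaunchOITarget("example-target");
        config.setLaunchOITopic("example-topic");
        config.setLaunchOIMessageType("example-message-type");

        // First call reads and caches the template; subsequent calls return the same instance.
        ObjectInspectorPayload payload = ObjectInspectorPayload.getOIPayload(config);
        System.out.println(new ObjectMapper().writeValueAsString(payload));
      }
    }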
\ No newline at end of file diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Payload.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Payload.java new file mode 100644 index 0000000..71db011 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/payload/entity/Payload.java @@ -0,0 +1,60 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.subscription.payload.entity; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; + +@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonPropertyOrder({ "action", "params" }) +public class Payload { + + @JsonProperty("action") + private String action; + @JsonProperty("params") + private Params params; + + @JsonProperty("action") + public String getAction() { + return action; + } + + @JsonProperty("action") + public void setAction(String action) { + this.action = action; + } + + @JsonProperty("params") + public Params getParams() { + return params; + } + + @JsonProperty("params") + public void setParams(Params params) { + this.params = params; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/services/SubscriptionService.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/services/SubscriptionService.java new file mode 100644 index 0000000..78f1d3f --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/subscription/services/SubscriptionService.java @@ -0,0 +1,65 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.subscription.services; + +import org.json.JSONObject; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.subscription.config.SubscriptionConfig; + + +public class SubscriptionService { + + SubscriptionConfig config; + + public SubscriptionService(SubscriptionConfig subscriptionConfig) { + this.config = subscriptionConfig; + } + + public OperationResult buildSubscriptionPayload() throws Exception { + OperationResult returnValue = new OperationResult(); + returnValue.setResultCode(200); + JSONObject subscriptionResponse = new JSONObject(); + JSONObject subscriptionDetails = new JSONObject(); + + + if (config.getSubscriptionTarget().isEmpty() + && config.getSubscriptionTopic().isEmpty() + && config.getSubscriptionMessageType().isEmpty() + && config.getSubscriptionOrigin().isEmpty()) { + subscriptionResponse.put("subscriptionEnabled", false); + } else { + subscriptionResponse.put("subscriptionEnabled", true); + subscriptionDetails.put("target", config.getSubscriptionTarget()); + subscriptionDetails.put("topic", config.getSubscriptionTopic()); + subscriptionDetails.put("messageType", config.getSubscriptionMessageType()); + subscriptionDetails.put("origin", config.getSubscriptionOrigin()); + } + + subscriptionResponse.put("subscriptionDetails", subscriptionDetails); + returnValue.setResult(subscriptionResponse.toString()); + + return returnValue; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/AbstractEntitySynchronizer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/AbstractEntitySynchronizer.java new file mode 100644 index 0000000..a2b750b --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/AbstractEntitySynchronizer.java @@ -0,0 +1,524 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.sync; + +import java.util.EnumSet; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.atomic.AtomicInteger; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.dal.aai.ActiveInventoryEntityStatistics; +import org.onap.aai.sparky.dal.aai.ActiveInventoryProcessingExceptionStatistics; +import org.onap.aai.sparky.dal.elasticsearch.ElasticSearchEntityStatistics; +import org.onap.aai.sparky.dal.rest.HttpMethod; +import org.onap.aai.sparky.dal.rest.RestOperationalStatistics; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.util.NodeUtils; + +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * The Class AbstractEntitySynchronizer. + * + * @author davea. + */ +public abstract class AbstractEntitySynchronizer { + + protected static final int VERSION_CONFLICT_EXCEPTION_CODE = 409; + protected static final Integer RETRY_COUNT_PER_ENTITY_LIMIT = new Integer(3); + + protected final Logger logger; + protected ObjectMapper mapper; + protected long syncDurationInMs; + + /** + * The Enum StatFlag. + */ + protected enum StatFlag { + AAI_REST_STATS, AAI_ENTITY_STATS, AAI_PROCESSING_EXCEPTION_STATS, + AAI_TASK_PROCESSING_STATS, ES_REST_STATS, ES_ENTITY_STATS, ES_TASK_PROCESSING_STATS + } + + protected EnumSet<StatFlag> enabledStatFlags; + + protected ElasticSearchAdapter elasticSearchAdapter; + protected ActiveInventoryAdapter aaiAdapter; + + protected ExecutorService synchronizerExecutor; + protected ExecutorService aaiExecutor; + protected ExecutorService esExecutor; + + private RestOperationalStatistics esRestStats; + protected ElasticSearchEntityStatistics esEntityStats; + + private RestOperationalStatistics aaiRestStats; + protected ActiveInventoryEntityStatistics aaiEntityStats; + private ActiveInventoryProcessingExceptionStatistics aaiProcessingExceptionStats; + + private TaskProcessingStats aaiTaskProcessingStats; + private TaskProcessingStats esTaskProcessingStats; + + private TransactionRateMonitor aaiTransactionRateController; + private TransactionRateMonitor esTransactionRateController; + + protected AtomicInteger aaiWorkOnHand; + protected AtomicInteger esWorkOnHand; + protected String synchronizerName; + + protected abstract boolean isSyncDone(); + protected boolean shouldSkipSync; + + public String getActiveInventoryStatisticsReport() { + + StringBuilder sb = new StringBuilder(128); + + if (enabledStatFlags.contains(StatFlag.AAI_REST_STATS)) { + sb.append("\n\n ").append("REST Operational Stats:"); + sb.append(aaiRestStats.getStatisticsReport()); + } + + if (enabledStatFlags.contains(StatFlag.AAI_ENTITY_STATS)) { + sb.append("\n\n ").append("Entity Stats:"); + sb.append(aaiEntityStats.getStatisticsReport()); + } + + if (enabledStatFlags.contains(StatFlag.AAI_PROCESSING_EXCEPTION_STATS)) { + sb.append("\n\n ").append("Processing Exception Stats:"); + sb.append(aaiProcessingExceptionStats.getStatisticsReport()); + } + + return sb.toString(); + + } + + public String getElasticSearchStatisticsReport() { + + StringBuilder sb = new StringBuilder(128); + + if (enabledStatFlags.contains(StatFlag.ES_REST_STATS)) { + sb.append("\n\n ").append("REST Operational Stats:"); + 
sb.append(esRestStats.getStatisticsReport()); + } + + if (enabledStatFlags.contains(StatFlag.ES_ENTITY_STATS)) { + sb.append("\n\n ").append("Entity Stats:"); + sb.append(esEntityStats.getStatisticsReport()); + } + + return sb.toString(); + + } + + /** + * Adds the active inventory stat report. + * + * @param sb the sb + */ + private void addActiveInventoryStatReport(StringBuilder sb) { + + if (sb == null) { + return; + } + + sb.append("\n\n AAI"); + sb.append(getActiveInventoryStatisticsReport()); + + double currentTps = 0; + if (enabledStatFlags.contains(StatFlag.AAI_TASK_PROCESSING_STATS)) { + sb.append("\n\n ").append("Task Processor Stats:"); + sb.append(aaiTaskProcessingStats.getStatisticsReport(false, " ")); + + currentTps = aaiTransactionRateController.getCurrentTps(); + + sb.append("\n ").append("Current TPS: ").append(currentTps); + } + + sb.append("\n ").append("Current WOH: ").append(aaiWorkOnHand.get()); + + if (enabledStatFlags.contains(StatFlag.AAI_TASK_PROCESSING_STATS)) { + if (currentTps > 0) { + double numMillisecondsToCompletion = (aaiWorkOnHand.get() / currentTps) * 1000; + sb.append("\n ").append("SyncDurationRemaining=") + .append(NodeUtils.getDurationBreakdown((long) numMillisecondsToCompletion)); + } + } + + } + + /** + * Adds the elastic stat report. + * + * @param sb the sb + */ + private void addElasticStatReport(StringBuilder sb) { + + if (sb == null) { + return; + } + + sb.append("\n\n ELASTIC"); + sb.append(getElasticSearchStatisticsReport()); + + double currentTps = 0; + + if (enabledStatFlags.contains(StatFlag.ES_TASK_PROCESSING_STATS)) { + sb.append("\n\n ").append("Task Processor Stats:"); + sb.append(esTaskProcessingStats.getStatisticsReport(false, " ")); + + currentTps = esTransactionRateController.getCurrentTps(); + + sb.append("\n ").append("Current TPS: ").append(currentTps); + } + + sb.append("\n ").append("Current WOH: ").append(esWorkOnHand.get()); + + if (enabledStatFlags.contains(StatFlag.ES_TASK_PROCESSING_STATS)) { + if (currentTps > 0) { + double numMillisecondsToCompletion = (esWorkOnHand.get() / currentTps) * 1000; + sb.append("\n ").append("SyncDurationRemaining=") + .append(NodeUtils.getDurationBreakdown((long) numMillisecondsToCompletion)); + } + } + + + } + + /** + * Gets the stat report. + * + * @param syncOpTimeInMs the sync op time in ms + * @param showFinalReport the show final report + * @return the stat report + */ + protected String getStatReport(long syncOpTimeInMs, boolean showFinalReport) { + + StringBuilder sb = new StringBuilder(128); + + sb.append("\n").append(synchronizerName + " Statistics: ( Sync Operation Duration = " + + NodeUtils.getDurationBreakdown(syncOpTimeInMs) + " )"); + + addActiveInventoryStatReport(sb); + addElasticStatReport(sb); + + if (showFinalReport) { + sb.append("\n\n ").append("Sync Completed!\n"); + } else { + sb.append("\n\n ").append("Sync in Progress...\n"); + } + + return sb.toString(); + + } + + protected String indexName; + protected long syncStartedTimeStampInMs; + + /** + * Instantiates a new abstract entity synchronizer. 
+ * + * @param logger the logger + * @param syncName the sync name + * @param numSyncWorkers the num sync workers + * @param numActiveInventoryWorkers the num active inventory workers + * @param numElasticsearchWorkers the num elasticsearch workers + * @param indexName the index name + * @throws Exception the exception + */ + protected AbstractEntitySynchronizer(Logger logger, String syncName, int numSyncWorkers, + int numActiveInventoryWorkers, int numElasticsearchWorkers, String indexName, + NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig) + throws Exception { + this.logger = logger; + this.synchronizerExecutor = + NodeUtils.createNamedExecutor(syncName + "-INTERNAL", numSyncWorkers, logger); + this.aaiExecutor = + NodeUtils.createNamedExecutor(syncName + "-AAI", numActiveInventoryWorkers, logger); + this.esExecutor = + NodeUtils.createNamedExecutor(syncName + "-ES", numElasticsearchWorkers, logger); + this.mapper = new ObjectMapper(); + this.indexName = indexName; + this.esRestStats = new RestOperationalStatistics(); + this.esEntityStats = new ElasticSearchEntityStatistics(); + this.aaiRestStats = new RestOperationalStatistics(); + this.aaiEntityStats = new ActiveInventoryEntityStatistics(); + this.aaiProcessingExceptionStats = new ActiveInventoryProcessingExceptionStatistics(); + this.aaiTaskProcessingStats = + new TaskProcessingStats(aaiStatConfig); + this.esTaskProcessingStats = + new TaskProcessingStats(esStatConfig); + + this.aaiTransactionRateController = + new TransactionRateMonitor(numActiveInventoryWorkers, aaiStatConfig); + this.esTransactionRateController = + new TransactionRateMonitor(numElasticsearchWorkers, esStatConfig); + + this.aaiWorkOnHand = new AtomicInteger(0); + this.esWorkOnHand = new AtomicInteger(0); + + enabledStatFlags = EnumSet.allOf(StatFlag.class); + + this.synchronizerName = "Abstact Entity Synchronizer"; + + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "AbstractEntitySynchronizer", "", "Sync", ""); + + this.shouldSkipSync = false; + this.syncStartedTimeStampInMs = System.currentTimeMillis(); + this.syncDurationInMs = -1; + } + + public boolean shouldSkipSync() { + return shouldSkipSync; + } + + public void setShouldSkipSync(boolean shouldSkipSync) { + this.shouldSkipSync = shouldSkipSync; + } + + /** + * Inc active inventory work on hand counter. + */ + protected void incActiveInventoryWorkOnHandCounter() { + aaiWorkOnHand.incrementAndGet(); + } + + /** + * Dec active inventory work on hand counter. + */ + protected void decActiveInventoryWorkOnHandCounter() { + aaiWorkOnHand.decrementAndGet(); + } + + /** + * Inc elastic search work on hand counter. + */ + protected void incElasticSearchWorkOnHandCounter() { + esWorkOnHand.incrementAndGet(); + } + + /** + * Dec elastic search work on hand counter. + */ + protected void decElasticSearchWorkOnHandCounter() { + esWorkOnHand.decrementAndGet(); + } + + /** + * Shutdown executors. + */ + protected void shutdownExecutors() { + try { + + if (synchronizerExecutor != null) { + synchronizerExecutor.shutdown(); + } + + if (aaiExecutor != null) { + aaiExecutor.shutdown(); + } + + if (esExecutor != null) { + esExecutor.shutdown(); + } + + } catch (Exception exc) { + logger.error(AaiUiMsgs.ERROR_SHUTDOWN_EXECUTORS, exc ); + } + } + + /** + * Clear cache. 
+ */ + public void clearCache() {} + + public ElasticSearchAdapter getElasticSearchAdapter() { + return elasticSearchAdapter; + } + + public void setElasticSearchAdapter(ElasticSearchAdapter elasticSearchAdapter) { + this.elasticSearchAdapter = elasticSearchAdapter; + } + + public ActiveInventoryAdapter getAaiAdapter() { + return aaiAdapter; + } + + public void setAaiAdapter(ActiveInventoryAdapter aaiAdapter) { + this.aaiAdapter = aaiAdapter; + } + + public String getIndexName() { + return indexName; + } + + public void setIndexName(String indexName) { + this.indexName = indexName; + } + + + /** + * Gets the response length. + * + * @param txn the txn + * @return the response length + */ + private long getResponseLength(NetworkTransaction txn) { + + if (txn == null) { + return -1; + } + + OperationResult result = txn.getOperationResult(); + + if (result == null) { + return -1; + } + + if (result.getResult() != null) { + return result.getResult().length(); + } + + return -1; + } + + /** + * Update elastic search counters. + * + * @param method the method + * @param entityType the entity type + * @param or the or + */ + protected void updateElasticSearchCounters(HttpMethod method, String entityType, + OperationResult or) { + updateElasticSearchCounters(new NetworkTransaction(method, entityType, or)); + } + + /** + * Update elastic search counters. + * + * @param txn the txn + */ + protected void updateElasticSearchCounters(NetworkTransaction txn) { + + if (enabledStatFlags.contains(StatFlag.ES_REST_STATS)) { + esRestStats.updateCounters(txn); + } + + if (enabledStatFlags.contains(StatFlag.ES_ENTITY_STATS)) { + esEntityStats.updateCounters(txn); + } + + if (enabledStatFlags.contains(StatFlag.ES_TASK_PROCESSING_STATS)) { + + esTransactionRateController.trackResponseTime(txn.getOpTimeInMs()); + + esTaskProcessingStats + .updateTaskResponseStatsHistogram(txn.getOpTimeInMs()); + esTaskProcessingStats.updateTaskAgeStatsHistogram(txn.getTaskAgeInMs()); + + // don't know the cost of the lengh calc, we'll see if it causes a + // problem + + long responsePayloadSizeInBytes = getResponseLength(txn); + if (responsePayloadSizeInBytes >= 0) { + esTaskProcessingStats.updateResponseSizeInBytesHistogram(responsePayloadSizeInBytes); + } + + esTaskProcessingStats + .updateTransactionsPerSecondHistogram((long) esTransactionRateController.getCurrentTps()); + } + } + + /** + * Update active inventory counters. + * + * @param method the method + * @param entityType the entity type + * @param or the or + */ + protected void updateActiveInventoryCounters(HttpMethod method, String entityType, + OperationResult or) { + updateActiveInventoryCounters(new NetworkTransaction(method, entityType, or)); + } + + /** + * Update active inventory counters. 
+ * + * @param txn the txn + */ + protected void updateActiveInventoryCounters(NetworkTransaction txn) { + + if (enabledStatFlags.contains(StatFlag.AAI_REST_STATS)) { + aaiRestStats.updateCounters(txn); + } + + if (enabledStatFlags.contains(StatFlag.AAI_ENTITY_STATS)) { + aaiEntityStats.updateCounters(txn); + } + + if (enabledStatFlags.contains(StatFlag.AAI_PROCESSING_EXCEPTION_STATS)) { + aaiProcessingExceptionStats.updateCounters(txn); + } + + if (enabledStatFlags.contains(StatFlag.AAI_TASK_PROCESSING_STATS)) { + aaiTransactionRateController + .trackResponseTime(txn.getOpTimeInMs()); + + aaiTaskProcessingStats + .updateTaskResponseStatsHistogram(txn.getOpTimeInMs()); + aaiTaskProcessingStats.updateTaskAgeStatsHistogram(txn.getTaskAgeInMs()); + + // don't know the cost of the lengh calc, we'll see if it causes a + // problem + + long responsePayloadSizeInBytes = getResponseLength(txn); + if (responsePayloadSizeInBytes >= 0) { + aaiTaskProcessingStats.updateResponseSizeInBytesHistogram(responsePayloadSizeInBytes); + } + + aaiTaskProcessingStats.updateTransactionsPerSecondHistogram( + (long) aaiTransactionRateController.getCurrentTps()); + } + } + + /** + * Reset counters. + */ + protected void resetCounters() { + aaiRestStats.reset(); + aaiEntityStats.reset(); + aaiProcessingExceptionStats.reset(); + + esRestStats.reset(); + esEntityStats.reset(); + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/ElasticSearchIndexCleaner.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/ElasticSearchIndexCleaner.java new file mode 100644 index 0000000..3255e39 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/ElasticSearchIndexCleaner.java @@ -0,0 +1,604 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.sync; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; + +import javax.ws.rs.core.MediaType; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.entity.ObjectIdCollection; +import org.onap.aai.sparky.sync.entity.SearchableEntity; +import org.onap.aai.sparky.sync.enumeration.OperationState; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; + +/** + * The Class ElasticSearchIndexCleaner. + */ +public class ElasticSearchIndexCleaner implements IndexCleaner { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(ElasticSearchIndexCleaner.class); + + private static final String BULK_OP_LINE_TEMPLATE = "%s\n"; + private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; + + private ObjectIdCollection before; + private ObjectIdCollection after; + + private ObjectMapper mapper; + private ElasticSearchAdapter esAdapter; + private ElasticSearchEndpointConfig endpointConfig; + private ElasticSearchSchemaConfig schemaConfig; + + /** + * Instantiates a new elastic search index cleaner. + * + * @param restDataProvider the rest data provider + * @param indexName the index name + * @param indexType the index type + * @param host the host + * @param port the port + * @param scrollContextTimeToLiveInMinutes the scroll context time to live in minutes + * @param numItemsToGetBulkRequest the num items to get bulk request + */ + public ElasticSearchIndexCleaner(ElasticSearchAdapter esAdapter, ElasticSearchEndpointConfig endpointConfig, ElasticSearchSchemaConfig schemaConfig) { + this.esAdapter = esAdapter; + this.before = null; + this.after = null; + this.endpointConfig = endpointConfig; + this.schemaConfig = schemaConfig; + this.mapper = new ObjectMapper(); + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexCleaner#populatePreOperationCollection() + */ + @Override + public OperationState populatePreOperationCollection() { + + try { + before = retrieveAllDocumentIdentifiers(); + return OperationState.OK; + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_PRE_SYNC_FAILURE, schemaConfig.getIndexName(), exc.getMessage()); + return OperationState.ERROR; + } + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexCleaner#populatePostOperationCollection() + */ + @Override + public OperationState populatePostOperationCollection() { + try { + after = retrieveAllDocumentIdentifiers(); + return OperationState.OK; + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_PRE_SYNC_FAILURE, schemaConfig.getIndexName(), exc.getMessage()); + return OperationState.ERROR; + } + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexCleaner#performCleanup() + */ + @Override + public OperationState performCleanup() { + // TODO Auto-generated method stub + LOG.info(AaiUiMsgs.ES_SYNC_CLEAN_UP, 
schemaConfig.getIndexName()); + + int sizeBefore = before.getSize(); + int sizeAfter = after.getSize(); + + LOG.info(AaiUiMsgs.ES_SYNC_CLEAN_UP_SIZE, String.valueOf(sizeBefore), + String.valueOf(sizeAfter)); + + /* + * If the processedImportIds size <= 0, then something has failed in the sync operation and we + * shouldn't do the selective delete right now. + */ + + if (sizeAfter > 0) { + + Collection<String> presyncIds = before.getImportedObjectIds(); + presyncIds.removeAll(after.getImportedObjectIds()); + + try { + LOG.info(AaiUiMsgs.ES_SYNC_SELECTIVE_DELETE, schemaConfig.getIndexName(), schemaConfig.getIndexDocType(), + String.valueOf(presyncIds.size())); + + ObjectIdCollection bulkIds = new ObjectIdCollection(); + + Iterator<String> it = presyncIds.iterator(); + int numItemsInBulkRequest = 0; + int numItemsRemainingToBeDeleted = presyncIds.size(); + + while (it.hasNext()) { + + bulkIds.addObjectId(it.next()); + numItemsInBulkRequest++; + + if (numItemsInBulkRequest >= endpointConfig.getScrollContextBatchRequestSize()) { + LOG.info(AaiUiMsgs.ES_BULK_DELETE, schemaConfig.getIndexName(), String.valueOf(bulkIds.getSize())); + bulkDelete(bulkIds.getImportedObjectIds()); + numItemsRemainingToBeDeleted -= numItemsInBulkRequest; + numItemsInBulkRequest = 0; + bulkIds.clear(); + } + } + + if (numItemsRemainingToBeDeleted > 0) { + LOG.info(AaiUiMsgs.ES_BULK_DELETE, schemaConfig.getIndexName(), String.valueOf(bulkIds.getSize())); + bulkDelete(bulkIds.getImportedObjectIds()); + } + + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_BULK_DELETE_ERROR, schemaConfig.getIndexName(), exc.getLocalizedMessage()); + + } + } + + return OperationState.OK; + } + + @Override + public String getIndexName() { + return schemaConfig.getIndexName(); + } + + /** + * Builds the initial scroll request payload. + * + * @param numItemsToGetPerRequest the num items to get per request + * @param fieldList the field list + * @return the string + * @throws JsonProcessingException the json processing exception + */ + protected String buildInitialScrollRequestPayload(int numItemsToGetPerRequest, + List<String> fieldList) throws JsonProcessingException { + + ObjectNode rootNode = mapper.createObjectNode(); + rootNode.put("size", numItemsToGetPerRequest); + + ArrayNode fields = mapper.createArrayNode(); + + for (String f : fieldList) { + fields.add(f); + } + + rootNode.set("fields", fields); + + ObjectNode queryNode = mapper.createObjectNode(); + queryNode.set("match_all", mapper.createObjectNode()); + + rootNode.set("query", queryNode); + + return mapper.writeValueAsString(rootNode); + + } + + /** + * Builds the subsequent scroll context request payload. + * + * @param scrollId the scroll id + * @param contextTimeToLiveInMinutes the context time to live in minutes + * @return the string + * @throws JsonProcessingException the json processing exception + */ + protected String buildSubsequentScrollContextRequestPayload(String scrollId, + int contextTimeToLiveInMinutes) throws JsonProcessingException { + + ObjectNode rootNode = mapper.createObjectNode(); + + rootNode.put("scroll", contextTimeToLiveInMinutes + "m"); + rootNode.put("scroll_id", scrollId); + + return mapper.writeValueAsString(rootNode); + + } + + /** + * Parses the elastic search result. + * + * @param jsonResult the json result + * @return the json node + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + protected JsonNode parseElasticSearchResult(String jsonResult) + throws JsonProcessingException, IOException { + ObjectMapper mapper = new ObjectMapper(); + return mapper.readTree(jsonResult); + } + + /** + * Lookup index doc. + * + * @param ids the ids + * @param docs the docs + * @return the array list + */ + protected ArrayList<SearchableEntity> lookupIndexDoc(ArrayList<String> ids, + List<SearchableEntity> docs) { + ArrayList<SearchableEntity> objs = new ArrayList<SearchableEntity>(); + + if (ids != null && docs != null) { + for (SearchableEntity d : docs) { + if (ids.contains(d.getId())) { + objs.add(d); + } + } + } + + return objs; + } + + /** + * Builds the delete data object. + * + * @param index the index + * @param type the type + * @param id the id + * @return the object node + */ + protected ObjectNode buildDeleteDataObject(String index, String type, String id) { + + ObjectNode indexDocProperties = mapper.createObjectNode(); + + indexDocProperties.put("_index", index); + indexDocProperties.put("_type", type); + indexDocProperties.put("_id", id); + + ObjectNode rootNode = mapper.createObjectNode(); + rootNode.set("delete", indexDocProperties); + + return rootNode; + } + + /** + * This method might appear to be a little strange, and is simply an optimization to take an + * elipsed JsonNode key path and retrieve the node at the end of the path, if it exists. + * + * @param startNode the start node + * @param fieldPath the field path + * @return the node path + */ + protected JsonNode getNodePath(JsonNode startNode, String... fieldPath) { + + JsonNode jsonNode = null; + + for (String field : fieldPath) { + if (jsonNode == null) { + jsonNode = startNode.get(field); + } else { + jsonNode = jsonNode.get(field); + } + + /* + * This is our safety net in case any intermediate path returns a null + */ + + if (jsonNode == null) { + return null; + } + + } + + return jsonNode; + } + + /** + * Gets the full url. + * + * @param resourceUrl the resource url + * @return the full url + */ + private String getFullUrl(String resourceUrl) { + return String.format("http://%s:%s%s", endpointConfig.getEsIpAddress(), + endpointConfig.getEsServerPort(), resourceUrl); + } + + /** + * Retrieve all document identifiers. + * + * @return the object id collection + * @throws IOException Signals that an I/O exception has occurred. + */ + public ObjectIdCollection retrieveAllDocumentIdentifiers() throws IOException { + + ObjectIdCollection currentDocumentIds = new ObjectIdCollection(); + + long opStartTimeInMs = System.currentTimeMillis(); + + List<String> fields = new ArrayList<String>(); + fields.add("_id"); + // fields.add("entityType"); + + String scrollRequestPayload = + buildInitialScrollRequestPayload(endpointConfig.getScrollContextBatchRequestSize(), fields); + + final String fullUrlStr = getFullUrl("/" + schemaConfig.getIndexName()+ "/" + schemaConfig.getIndexDocType() + "/_search?scroll=" + + endpointConfig.getScrollContextTimeToLiveInMinutes() + "m"); + + OperationResult result = + esAdapter.doPost(fullUrlStr, scrollRequestPayload, MediaType.APPLICATION_JSON_TYPE); + + if (result.wasSuccessful()) { + + JsonNode rootNode = parseElasticSearchResult(result.getResult()); + + /* + * Check the result for success / failure, and enumerate all the index ids that resulted in + * success, and ignore the ones that failed or log them so we have a record of the failure. 
+ */ + int totalRecordsAvailable = 0; + String scrollId = null; + int numRecordsFetched = 0; + + if (rootNode != null) { + + scrollId = getFieldValue(rootNode, "_scroll_id"); + final String tookStr = getFieldValue(rootNode, "took"); + int tookInMs = (tookStr == null) ? 0 : Integer.parseInt(tookStr); + boolean timedOut = Boolean.parseBoolean(getFieldValue(rootNode, "timed_out")); + + if (timedOut) { + LOG.error(AaiUiMsgs.COLLECT_TIME_WITH_ERROR, "all document Identifiers", + String.valueOf(tookInMs)); + } else { + LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_SUCCESS, "all document Identifiers", + String.valueOf(tookInMs)); + } + + JsonNode hitsNode = rootNode.get("hits"); + totalRecordsAvailable = Integer.parseInt(hitsNode.get("total").asText()); + + LOG.info(AaiUiMsgs.COLLECT_TOTAL, "all document Identifiers", + String.valueOf(totalRecordsAvailable)); + + /* + * Collect all object ids + */ + + ArrayNode hitsArray = (ArrayNode) hitsNode.get("hits"); + + Iterator<JsonNode> nodeIterator = hitsArray.iterator(); + + String key = null; + String value = null; + JsonNode jsonNode = null; + + while (nodeIterator.hasNext()) { + + jsonNode = nodeIterator.next(); + + key = getFieldValue(jsonNode, "_id"); + + if (key != null) { + currentDocumentIds.addObjectId(key); + } + + } + + int totalRecordsRemainingToFetch = (totalRecordsAvailable - numRecordsFetched); + + int numRequiredAdditionalFetches = + (totalRecordsRemainingToFetch / endpointConfig.getScrollContextBatchRequestSize()); + + /* + * Do an additional fetch for the remaining items (if needed) + */ + + if (totalRecordsRemainingToFetch % endpointConfig.getScrollContextBatchRequestSize() != 0) { + numRequiredAdditionalFetches += 1; + } + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.SYNC_NUMBER_REQ_FETCHES, + String.valueOf(numRequiredAdditionalFetches)); + } + + + for (int x = 0; x < numRequiredAdditionalFetches; x++) { + + if (collectItemsFromScrollContext(scrollId, currentDocumentIds) != OperationState.OK) { + // abort the whole thing because now we can't reliably cleanup the orphans. + throw new IOException( + "Failed to collect pre-sync doc collection from index. Aborting operation"); + } + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.SYNC_NUMBER_TOTAL_FETCHES, + String.valueOf(currentDocumentIds.getSize()), + String.valueOf(totalRecordsAvailable)); + } + + } + + } + + } else { + // scroll context get failed, nothing else to do + LOG.error(AaiUiMsgs.ERROR_GENERIC, result.toString()); + } + + LOG.info(AaiUiMsgs.COLLECT_TOTAL_TIME, "all document Identifiers", + String.valueOf((System.currentTimeMillis() - opStartTimeInMs))); + + return currentDocumentIds; + + } + + /** + * Collect items from scroll context. + * + * @param scrollId the scroll id + * @param objectIds the object ids + * @return the operation state + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + private OperationState collectItemsFromScrollContext(String scrollId, + ObjectIdCollection objectIds) throws IOException { + + String requestPayload = buildSubsequentScrollContextRequestPayload(scrollId, + endpointConfig.getScrollContextTimeToLiveInMinutes()); + + final String fullUrlStr = getFullUrl("/_search/scroll"); + + OperationResult opResult = + esAdapter.doPost(fullUrlStr, requestPayload, MediaType.APPLICATION_JSON_TYPE); + + if (opResult.getResultCode() >= 300) { + LOG.warn(AaiUiMsgs.ES_SCROLL_CONTEXT_ERROR, opResult.getResult()); + return OperationState.ERROR; + } + + JsonNode rootNode = parseElasticSearchResult(opResult.getResult()); + boolean timedOut = Boolean.parseBoolean(getFieldValue(rootNode, "timed_out")); + final String tookStr = getFieldValue(rootNode, "took"); + int tookInMs = (tookStr == null) ? 0 : Integer.parseInt(tookStr); + + JsonNode hitsNode = rootNode.get("hits"); + + /* + * Check the result for success / failure, and enumerate all the index ids that resulted in + * success, and ignore the ones that failed or log them so we have a record of the failure. + */ + + if (rootNode != null) { + + if (timedOut) { + LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_ERROR, "Scroll Context", String.valueOf(tookInMs)); + } else { + LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_SUCCESS, "Scroll Context", String.valueOf(tookInMs)); + } + + /* + * Collect all object ids + */ + + ArrayNode hitsArray = (ArrayNode) hitsNode.get("hits"); + String key = null; + String value = null; + JsonNode jsonNode = null; + + Iterator<JsonNode> nodeIterator = hitsArray.iterator(); + + while (nodeIterator.hasNext()) { + + jsonNode = nodeIterator.next(); + + key = getFieldValue(jsonNode, "_id"); + + if (key != null) { + objectIds.addObjectId(key); + + } + + } + } + + return OperationState.OK; + } + + /** + * Gets the field value. + * + * @param node the node + * @param fieldName the field name + * @return the field value + */ + protected String getFieldValue(JsonNode node, String fieldName) { + + JsonNode field = node.get(fieldName); + + if (field != null) { + return field.asText(); + } + + return null; + + } + + /** + * Bulk delete. + * + * @param docIds the doc ids + * @return the operation result + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + public OperationResult bulkDelete(Collection<String> docIds) throws IOException { + + if (docIds == null || docIds.size() == 0) { + LOG.info(AaiUiMsgs.ES_BULK_DELETE_SKIP); + return new OperationResult(500, + "Skipping bulkDelete(); operation because docs to delete list is empty"); + } + + LOG.info(AaiUiMsgs.ES_BULK_DELETE_START, String.valueOf(docIds.size())); + + StringBuilder sb = new StringBuilder(128); + + for (String id : docIds) { + sb.append(String.format(BULK_OP_LINE_TEMPLATE, + buildDeleteDataObject(schemaConfig.getIndexName(), schemaConfig.getIndexDocType(), id))); + } + + sb.append("\n"); + + final String fullUrlStr = getFullUrl("/_bulk"); + + return esAdapter.doPost(fullUrlStr, sb.toString(), MediaType.APPLICATION_FORM_URLENCODED_TYPE); + + } + + /* + + */ + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/ElasticSearchSchemaFactory.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/ElasticSearchSchemaFactory.java new file mode 100644 index 0000000..a2ff6cf --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/ElasticSearchSchemaFactory.java @@ -0,0 +1,97 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.sync; + +import java.io.IOException; + +import org.onap.aai.sparky.config.SparkyResourceLoader; +import org.onap.aai.sparky.dal.exception.ElasticSearchOperationException; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; + +public class ElasticSearchSchemaFactory { + + private static final String SETTINGS = "settings"; + private static final String MAPPINGS = "mappings"; + + private static ObjectMapper mapper = new ObjectMapper(); + private SparkyResourceLoader resourceLoader; + + public String getIndexSchema(ElasticSearchSchemaConfig schemaConfig) + throws ElasticSearchOperationException { + + JsonNode esSettingsNode = null; + JsonNode esMappingsNodes = null; + + try { + + if (schemaConfig.getIndexSettingsFileName() != null) { + esSettingsNode = mapper + .readTree(resourceLoader.getResourceAsString(schemaConfig.getIndexSettingsFileName(),true)); + } + + if (schemaConfig.getIndexMappingsFileName() != null) { + esMappingsNodes = mapper + .readTree(resourceLoader.getResourceAsString(schemaConfig.getIndexMappingsFileName(),true)); + } + + } catch (IOException e1) { + + throw new ElasticSearchOperationException("Caught an exception building initial ES index. Error: " + e1.getMessage()); + } + + ObjectNode esConfig = null; + + ObjectNode mappings = + (ObjectNode) mapper.createObjectNode().set(schemaConfig.getIndexDocType(), esMappingsNodes); + + if (esSettingsNode == null) { + esConfig = (ObjectNode) mapper.createObjectNode().set(MAPPINGS, mappings); + } else { + esConfig = (ObjectNode) mapper.createObjectNode().set(SETTINGS, esSettingsNode); + esConfig.set(MAPPINGS, mappings); + } + + try { + return mapper.writeValueAsString(esConfig); + } catch (JsonProcessingException exc) { + throw new ElasticSearchOperationException("Error getting object node as string", exc); + } + + } + + public SparkyResourceLoader getResourceLoader() { + return resourceLoader; + } + + public void setResourceLoader(SparkyResourceLoader resourceLoader) { + this.resourceLoader = resourceLoader; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexCleaner.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexCleaner.java new file mode 100644 index 0000000..bc47fa8 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexCleaner.java @@ -0,0 +1,57 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +import org.onap.aai.sparky.sync.enumeration.OperationState; + +/** + * The Interface IndexCleaner. + */ +public interface IndexCleaner { + + /** + * Populate pre operation collection. + * + * @return the operation state + */ + public OperationState populatePreOperationCollection(); + + /** + * Populate post operation collection. + * + * @return the operation state + */ + public OperationState populatePostOperationCollection(); + + /** + * Perform cleanup. + * + * @return the operation state + */ + public OperationState performCleanup(); + + public String getIndexName(); + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexIntegrityValidator.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexIntegrityValidator.java new file mode 100644 index 0000000..2c1bb16 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexIntegrityValidator.java @@ -0,0 +1,178 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +import javax.ws.rs.core.MediaType; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; + +/** + * The Class IndexIntegrityValidator. + */ +public class IndexIntegrityValidator implements IndexValidator { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(IndexIntegrityValidator.class); + + private ElasticSearchEndpointConfig endpointConfig; + private ElasticSearchSchemaConfig schemaConfig; + private String tableConfigJson; + + private final ElasticSearchAdapter esAdapter; + + /** + * Instantiates a new index integrity validator. 
+ * + * @param restDataProvider the rest data provider + * @param indexName the index name + * @param indexType the index type + * @param host the host + * @param port the port + * @param tableConfigJson the table config json + */ + public IndexIntegrityValidator(ElasticSearchAdapter esAdapter, + ElasticSearchSchemaConfig esSchemaConfig, ElasticSearchEndpointConfig esEndpointConfig, + String tableConfigJson) { + + this.esAdapter = esAdapter; + this.schemaConfig = esSchemaConfig; + this.endpointConfig = esEndpointConfig; + this.tableConfigJson = tableConfigJson; + } + + public ElasticSearchEndpointConfig getEndpointConfig() { + return endpointConfig; + } + + public void setEndpointConfig(ElasticSearchEndpointConfig endpointConfig) { + this.endpointConfig = endpointConfig; + } + + public ElasticSearchSchemaConfig getSchemaConfig() { + return schemaConfig; + } + + public void setSchemaConfig(ElasticSearchSchemaConfig schemaConfig) { + this.schemaConfig = schemaConfig; + } + + public ElasticSearchAdapter getEsAdapter() { + return esAdapter; + } + + @Override + public String getIndexName() { + return schemaConfig.getIndexName(); + } + + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexValidator#exists() + */ + @Override + public boolean exists() { + final String fullUrlStr = getFullUrl("/" + schemaConfig.getIndexName() + "/"); + OperationResult existsResult = esAdapter.doHead(fullUrlStr, MediaType.APPLICATION_JSON_TYPE); + + int rc = existsResult.getResultCode(); + + if (rc >= 200 && rc < 300) { + LOG.info(AaiUiMsgs.INDEX_EXISTS, schemaConfig.getIndexName()); + return true; + } else { + LOG.info(AaiUiMsgs.INDEX_NOT_EXIST, schemaConfig.getIndexName()); + return false; + } + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexValidator#integrityValid() + */ + @Override + public boolean integrityValid() { + return true; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexValidator#createOrRepair() + */ + @Override + public void createOrRepair() { + + String message = + "IndexIntegrityValidator.createOrRepair() for indexName = " + schemaConfig.getIndexName(); + LOG.info(AaiUiMsgs.INFO_GENERIC, message); + + final String fullUrlStr = getFullUrl("/" + schemaConfig.getIndexName() + "/"); + OperationResult createResult = + esAdapter.doPut(fullUrlStr, tableConfigJson, MediaType.APPLICATION_JSON_TYPE); + + int rc = createResult.getResultCode(); + + if (rc >= 200 && rc < 300) { + LOG.info(AaiUiMsgs.INDEX_RECREATED, schemaConfig.getIndexName()); + } else if (rc == 400) { + LOG.info(AaiUiMsgs.INDEX_ALREADY_EXISTS, schemaConfig.getIndexName()); + } else { + LOG.warn(AaiUiMsgs.INDEX_INTEGRITY_CHECK_FAILED, schemaConfig.getIndexName(), + createResult.getResult()); + } + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexValidator#destroyIndex() + */ + @Override + public void destroyIndex() { + // we don't do this for now + } + + /** + * Gets the full url. 
+ * + * @param resourceUrl the resource url + * @return the full url + */ + private String getFullUrl(String resourceUrl) { + return String.format("http://%s:%s%s", endpointConfig.getEsIpAddress(), + endpointConfig.getEsServerPort(), resourceUrl); + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexSynchronizer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexSynchronizer.java new file mode 100644 index 0000000..05c7a5e --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexSynchronizer.java @@ -0,0 +1,67 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; + +/** + * The Interface IndexSynchronizer. + * + * @author davea. + */ +public interface IndexSynchronizer { + + /** + * Do sync. + * + * @return the operation state + */ + public OperationState doSync(); + + public SynchronizerState getState(); + + /** + * Gets the stat report. + * + * @param finalReport the final report + * @return the stat report + */ + public String getStatReport(boolean finalReport); + + /** + * Shutdown. + */ + public void shutdown(); + + public String getIndexName(); + + /** + * Clear cache. + */ + public void clearCache(); + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexValidator.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexValidator.java new file mode 100644 index 0000000..05e7d91 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/IndexValidator.java @@ -0,0 +1,58 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +/** + * The Interface IndexValidator. + */ +public interface IndexValidator { + + /** + * Exists. + * + * @return true, if successful + */ + public boolean exists(); + + /** + * Integrity valid. + * + * @return true, if successful + */ + public boolean integrityValid(); + + /** + * Creates the or repair. + */ + public void createOrRepair(); + + /** + * Destroy index. + */ + public void destroyIndex(); + + public String getIndexName(); + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncController.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncController.java new file mode 100644 index 0000000..bdd009b --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncController.java @@ -0,0 +1,96 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +import java.util.Calendar; +import java.util.Date; + +import org.onap.aai.sparky.sync.SyncControllerImpl.SyncActions; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; + +public interface SyncController { + + String getControllerName(); + boolean isPeriodicSyncEnabled(); + boolean isRunOnceSyncEnabled(); + + /** + * Perform action. + * + * @param requestedAction the requested action + * @return + */ + OperationState performAction(SyncActions requestedAction); + + /** + * Register entity synchronizer. + * + * @param entitySynchronizer the entity synchronizer + */ + void registerEntitySynchronizer(IndexSynchronizer entitySynchronizer); + + /** + * Register index validator. + * + * @param indexValidator the index validator + */ + void registerIndexValidator(IndexValidator indexValidator); + + /** + * Register index cleaner. 
+ * + * @param indexCleaner the index cleaner + */ + void registerIndexCleaner(IndexCleaner indexCleaner); + + /** + * Shutdown. + */ + void shutdown(); + + SynchronizerState getState(); + + long getDelayInMs(); + + void setDelayInMs(long delayInMs); + + long getSyncFrequencyInMs(); + + void setSyncFrequencyInMs(long syncFrequencyInMs); + + Date getSyncStartTime(); + + void setSyncStartTime(Date syncStartTime); + + Date getLastExecutionDate(); + + void setLastExecutionDate(Date lastExecutionDate); + + Calendar getCreationTime(); + + String getNextSyncTime(); + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerImpl.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerImpl.java new file mode 100644 index 0000000..0e0b193 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerImpl.java @@ -0,0 +1,682 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.util.Calendar; +import java.util.Collection; +import java.util.Date; +import java.util.LinkedHashSet; +import java.util.TimeZone; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Semaphore; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.config.SyncControllerConfig; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.onap.aai.sparky.util.NodeUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * The Class SyncController. + * + * @author davea. + */ +public class SyncControllerImpl implements SyncController { + private static final Logger LOG = LoggerFactory.getInstance().getLogger(SyncControllerImpl.class); + + /** + * The Enum InternalState. + */ + private enum InternalState { + IDLE, PRE_SYNC, SYNC_OPERATION, SELECTIVE_DELETE, ABORTING_SYNC, REPAIRING_INDEX, POST_SYNC, + TEST_INDEX_INTEGRITY, GENERATE_FINAL_REPORT + } + + /** + * The Enum SyncActions. 
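+ * SYNCHRONIZE is the only action currently handled by performAction(...); most of the other values are used as the "caused by" action when the internal state machine transitions between phases. 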
+ */ + public enum SyncActions { + SYNCHRONIZE, REPAIR_INDEX, INDEX_INTEGRITY_VALIDATION_COMPLETE, PRE_SYNC_COMPLETE, + SYNC_COMPLETE, SYNC_ABORTED, SYNC_FAILURE, POST_SYNC_COMPLETE, PURGE_COMPLETE, REPORT_COMPLETE + } + + private Collection<IndexSynchronizer> registeredSynchronizers; + private Collection<IndexValidator> registeredIndexValidators; + private Collection<IndexCleaner> registeredIndexCleaners; + private InternalState currentInternalState; + private ExecutorService syncControllerExecutor; + private ExecutorService statReporterExecutor; + + private long delayInMs; + private long syncFrequencyInMs; + private Date syncStartTime; + + private Date lastExecutionDate; + private AtomicInteger runCount; + private Semaphore performingActionGate; + private Calendar creationTime; + + private String syncStartTimeWithTimeZone; + private String controllerName; + + protected SyncControllerConfig syncControllerConfig; + + + + + /** + * Instantiates a new sync controller. + * + * @param name the name + * @throws Exception the exception + */ + public SyncControllerImpl(SyncControllerConfig syncControllerConfig) throws Exception { + this(syncControllerConfig,null); + } + + public SyncControllerImpl(SyncControllerConfig syncControllerConfig, String targetEntityType) + throws Exception { + + this.syncControllerConfig = syncControllerConfig; + + this.delayInMs = 0L; + this.syncFrequencyInMs = 86400000L; + this.syncStartTime = null; + this.lastExecutionDate = null; + this.runCount = new AtomicInteger(0); + this.performingActionGate = new Semaphore(1); + registeredSynchronizers = new LinkedHashSet<IndexSynchronizer>(); + registeredIndexValidators = new LinkedHashSet<IndexValidator>(); + registeredIndexCleaners = new LinkedHashSet<IndexCleaner>(); + + String controllerName = syncControllerConfig.getControllerName(); + + if (targetEntityType != null) { + controllerName += " (" + targetEntityType + ")"; + } + + this.controllerName = controllerName; + + this.syncControllerExecutor = NodeUtils.createNamedExecutor("SyncController-" + controllerName, + syncControllerConfig.getNumSyncControllerWorkers(), LOG); + this.statReporterExecutor = + NodeUtils.createNamedExecutor("StatReporter-" + controllerName, 1, LOG); + + this.currentInternalState = InternalState.IDLE; + + this.creationTime = + Calendar.getInstance(TimeZone.getTimeZone(syncControllerConfig.getTimeZoneOfSyncStartTimeStamp())); + + } + + + + + + + /** + * Change internal state. 
+ * + * @param newState the new state + * @param causedByAction the caused by action + */ + private void changeInternalState(InternalState newState, SyncActions causedByAction) { + LOG.info(AaiUiMsgs.SYNC_INTERNAL_STATE_CHANGED, controllerName, + currentInternalState.toString(), newState.toString(), causedByAction.toString()); + + this.currentInternalState = newState; + + performStateAction(); + } + + + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.SyncController2#getDelayInMs() + */ + @Override + public long getDelayInMs() { + return delayInMs; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.SyncController2#setDelayInMs(long) + */ + @Override + public void setDelayInMs(long delayInMs) { + this.delayInMs = delayInMs; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.SyncController2#getSyncFrequencyInMs() + */ + @Override + public long getSyncFrequencyInMs() { + return syncFrequencyInMs; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.SyncController2#setSyncFrequencyInMs(long) + */ + @Override + public void setSyncFrequencyInMs(long syncFrequencyInMs) { + this.syncFrequencyInMs = syncFrequencyInMs; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.SyncController2#getSyncStartTime() + */ + @Override + public Date getSyncStartTime() { + return syncStartTime; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.SyncController2#setSyncStartTime(java.util.Date) + */ + @Override + public void setSyncStartTime(Date syncStartTime) { + this.syncStartTime = syncStartTime; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.SyncController2#getLastExecutionDate() + */ + @Override + public Date getLastExecutionDate() { + return lastExecutionDate; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.SyncController2#setLastExecutionDate(java.util.Date) + */ + @Override + public void setLastExecutionDate(Date lastExecutionDate) { + this.lastExecutionDate = lastExecutionDate; + } + + @Override + public String getControllerName() { + return controllerName; + } + + + + + @Override + public OperationState performAction(SyncActions requestedAction) { + + if (currentInternalState == InternalState.IDLE) { + + try { + + /* + * non-blocking semaphore acquire used to guarantee only 1 execution of the synchronization + * at a time. 
+ */ + + switch (requestedAction) { + case SYNCHRONIZE: + + if (performingActionGate.tryAcquire()) { + try { + + long opStartTime = System.currentTimeMillis(); + + LOG.info(AaiUiMsgs.INFO_GENERIC, + getControllerName() + " started synchronization at " + + SynchronizerConstants.SIMPLE_DATE_FORMAT.format(opStartTime).replaceAll( + SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD)); + + runCount.incrementAndGet(); + + changeInternalState(InternalState.TEST_INDEX_INTEGRITY, requestedAction); + + long opEndTime = System.currentTimeMillis(); + + long opTime = (opEndTime - opStartTime); + + String durationMessage = + String.format(getControllerName() + " synchronization took '%d' ms.", opTime); + + LOG.info(AaiUiMsgs.SYNC_DURATION, durationMessage); + + if (syncControllerConfig.isPeriodicSyncEnabled()) { + + LOG.info(AaiUiMsgs.INFO_GENERIC, + getControllerName() + " next sync to begin at " + getNextSyncTime()); + + TimeZone tz = TimeZone.getTimeZone(syncControllerConfig.getTimeZoneOfSyncStartTimeStamp()); + + if (opTime > this.getSyncFrequencyInMs()) { + + String durationWasLongerMessage = String.format( + getControllerName() + " synchronization took '%d' ms which is larger than" + + " synchronization interval of '%d' ms.", + opTime, this.getSyncFrequencyInMs()); + + LOG.info(AaiUiMsgs.SYNC_DURATION, durationWasLongerMessage); + } + } + + } catch (Exception syncException) { + String message = "An error occurred while performing action = " + requestedAction + + ". Error = " + syncException.getMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } finally { + performingActionGate.release(); + } + } else { + return OperationState.IGNORED_SYNC_NOT_IDLE; + } + + break; + + default: + break; + } + + return OperationState.OK; + + } catch (Exception exc) { + String message = "An error occurred while performing action = " + requestedAction + + ". Error = " + exc.getMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + return OperationState.ERROR; + } finally { + + } + } else { + LOG.error(AaiUiMsgs.SYNC_NOT_VALID_STATE_DURING_REQUEST, currentInternalState.toString()); + return OperationState.IGNORED_SYNC_NOT_IDLE; + } + } + + /** + * Perform state action. + */ + private void performStateAction() { + + try { + switch (currentInternalState) { + + case TEST_INDEX_INTEGRITY: + performIndexIntegrityValidation(); + break; + + case PRE_SYNC: + performPreSyncCleanupCollection(); + break; + + case SYNC_OPERATION: + performSynchronization(); + break; + + case POST_SYNC: + performIndexSyncPostCollection(); + changeInternalState(InternalState.SELECTIVE_DELETE, SyncActions.POST_SYNC_COMPLETE); + break; + + case SELECTIVE_DELETE: + performIndexCleanup(); + changeInternalState(InternalState.GENERATE_FINAL_REPORT, SyncActions.PURGE_COMPLETE); + break; + + case GENERATE_FINAL_REPORT: + + dumpStatReport(true); + clearCaches(); + changeInternalState(InternalState.IDLE, SyncActions.REPORT_COMPLETE); + break; + + case ABORTING_SYNC: + performSyncAbort(); + break; + + default: + break; + } + } catch (Exception exc) { + /* + * Perhaps we should abort the sync on an exception + */ + String message = "Caught an error while performing action. 
Error = " + exc.getMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } + } + + @Override + public void registerEntitySynchronizer(IndexSynchronizer entitySynchronizer) { + + String indexName = entitySynchronizer.getIndexName(); + + if (indexName != null) { + registeredSynchronizers.add(entitySynchronizer); + } else { + String message = "Failed to register entity synchronizer because index name is null"; + LOG.error(AaiUiMsgs.FAILED_TO_REGISTER_DUE_TO_NULL, message); + } + + } + + @Override + public void registerIndexValidator(IndexValidator indexValidator) { + + String indexName = indexValidator.getIndexName(); + + if (indexName != null) { + registeredIndexValidators.add(indexValidator); + } else { + String message = "Failed to register index validator because index name is null"; + LOG.error(AaiUiMsgs.FAILED_TO_REGISTER_DUE_TO_NULL, message); + } + + } + + @Override + public void registerIndexCleaner(IndexCleaner indexCleaner) { + + String indexName = indexCleaner.getIndexName(); + + if (indexName != null) { + registeredIndexCleaners.add(indexCleaner); + } else { + String message = "Failed to register index cleaner because index name is null"; + LOG.error(AaiUiMsgs.FAILED_TO_REGISTER_DUE_TO_NULL, message); + } + } + + /* + * State machine should drive our flow dosync just dispatches an action and the state machine + * determines what is in play and what is next + */ + + /** + * Dump stat report. + * + * @param showFinalReport the show final report + */ + private void dumpStatReport(boolean showFinalReport) { + + for (IndexSynchronizer synchronizer : registeredSynchronizers) { + + String statReport = synchronizer.getStatReport(showFinalReport); + + if (statReport != null) { + LOG.info(AaiUiMsgs.INFO_GENERIC, statReport); + } + } + } + + /** + * Clear caches. + */ + private void clearCaches() { + + /* + * Any entity caches that were built as part of the sync operation should be cleared to save + * memory. The original intent of the caching was to provide a short-lived cache to satisfy + * entity requests from multiple synchronizers yet minimizing interactions with the AAI. + */ + + for (IndexSynchronizer synchronizer : registeredSynchronizers) { + synchronizer.clearCache(); + } + } + + /** + * Perform pre sync cleanup collection. + */ + private void performPreSyncCleanupCollection() { + + /* + * ask the index cleaners to collect the their pre-sync object id collections + */ + + for (IndexCleaner cleaner : registeredIndexCleaners) { + cleaner.populatePreOperationCollection(); + } + + changeInternalState(InternalState.SYNC_OPERATION, SyncActions.PRE_SYNC_COMPLETE); + + } + + /** + * Perform index sync post collection. + */ + private void performIndexSyncPostCollection() { + + /* + * ask the entity purgers to collect the their pre-sync object id collections + */ + + for (IndexCleaner cleaner : registeredIndexCleaners) { + cleaner.populatePostOperationCollection(); + } + + } + + /** + * Perform index cleanup. + */ + private void performIndexCleanup() { + + /* + * ask the entity purgers to collect the their pre-sync object id collections + */ + + for (IndexCleaner cleaner : registeredIndexCleaners) { + cleaner.performCleanup(); + } + + } + + /** + * Perform sync abort. + */ + private void performSyncAbort() { + changeInternalState(InternalState.IDLE, SyncActions.SYNC_ABORTED); + } + + /** + * Perform index integrity validation. 
+ */ + private void performIndexIntegrityValidation() { + + /* + * loop through registered index validators and test and fix, if needed + */ + + for (IndexValidator validator : registeredIndexValidators) { + try { + if (!validator.exists()) { + validator.createOrRepair(); + } + } catch (Exception exc) { + String message = "Index validator caused an error = " + exc.getMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } + } + + changeInternalState(InternalState.PRE_SYNC, SyncActions.INDEX_INTEGRITY_VALIDATION_COMPLETE); + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.SyncControllerInterface#shutdown() + */ + @Override + public void shutdown() { + + this.syncControllerExecutor.shutdown(); + for (IndexSynchronizer synchronizer : registeredSynchronizers) { + + try { + synchronizer.shutdown(); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "Synchronizer shutdown caused an error = " + exc.getMessage()); + } + + } + this.statReporterExecutor.shutdown(); + } + + /* + * Need some kind of task running that responds to a transient boolean to kill it or we just stop + * the executor that it is in? + */ + + + + /** + * Perform synchronization. + */ + private void performSynchronization() { + + /* + * Get all the synchronizers running in parallel + */ + + for (IndexSynchronizer synchronizer : registeredSynchronizers) { + supplyAsync(new Supplier<Void>() { + + @Override + public Void get() { + + synchronizer.doSync(); + return null; + } + + }, this.syncControllerExecutor).whenComplete((result, error) -> { + + /* + * We don't bother checking the result, because it will always be null as the doSync() is + * non-blocking. + */ + + if (error != null) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "doSync operation failed with an error = " + error.getMessage()); + } + }); + } + + boolean allDone = false; + long nextReportTimeStampInMs = System.currentTimeMillis() + 30000L; + boolean dumpPeriodicStatReport = false; + + while (!allDone) { + int totalFinished = 0; + + for (IndexSynchronizer synchronizer : registeredSynchronizers) { + if (dumpPeriodicStatReport) { + if (synchronizer.getState() == SynchronizerState.PERFORMING_SYNCHRONIZATION) { + String statReport = synchronizer.getStatReport(false); + + if (statReport != null) { + LOG.info(AaiUiMsgs.INFO_GENERIC, statReport); + } + } + } + + if (synchronizer.getState() == SynchronizerState.IDLE + || synchronizer.getState() == SynchronizerState.ABORTED) { + totalFinished++; + } + } + + if ( System.currentTimeMillis() > nextReportTimeStampInMs) { + dumpPeriodicStatReport = true; + nextReportTimeStampInMs = System.currentTimeMillis() + 30000L; + } else { + dumpPeriodicStatReport = false; + } + + allDone = (totalFinished == registeredSynchronizers.size()); + + try { + Thread.sleep(250); + } catch (InterruptedException exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "An error occurred while waiting for sync to complete. 
Error = " + exc.getMessage()); + } + + } + + changeInternalState(InternalState.POST_SYNC, SyncActions.SYNC_COMPLETE); + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.SyncControllerInterface#getState() + */ + @Override + public SynchronizerState getState() { + + switch (currentInternalState) { + + case IDLE: { + return SynchronizerState.IDLE; + } + + default: { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + + } + } + + } + + @Override + public Calendar getCreationTime() { + return creationTime; + } + + @Override + public String getNextSyncTime() { + // TODO Auto-generated method stub + return null; + } + + @Override + public boolean isPeriodicSyncEnabled() { + return syncControllerConfig.isPeriodicSyncEnabled(); + } + + @Override + public boolean isRunOnceSyncEnabled() { + return syncControllerConfig.isRunOnceSyncEnabled(); + } + +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistrar.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistrar.java new file mode 100644 index 0000000..f20eceb --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistrar.java @@ -0,0 +1,29 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +public interface SyncControllerRegistrar { + public void registerController(); +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistry.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistry.java new file mode 100644 index 0000000..d4013d3 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerRegistry.java @@ -0,0 +1,50 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.sync; + +import java.util.ArrayList; +import java.util.List; + +public class SyncControllerRegistry { + + private List<SyncController> controllers; + + public SyncControllerRegistry() { + controllers = new ArrayList<SyncController>(); + } + + public void registerSyncController(SyncController controller) { + controllers.add(controller); + } + + public List<SyncController> getControllers() { + return controllers; + } + + public void setControllers(List<SyncController> controllers) { + this.controllers = controllers; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerService.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerService.java new file mode 100644 index 0000000..07653b7 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SyncControllerService.java @@ -0,0 +1,222 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.sync; + +import java.lang.Thread.UncaughtExceptionHandler; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.SyncControllerImpl.SyncActions; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.springframework.context.ApplicationListener; +import org.springframework.context.event.ApplicationContextEvent; + +import com.google.common.util.concurrent.ThreadFactoryBuilder; + +public class SyncControllerService implements ApplicationListener<ApplicationContextEvent> { + + private SyncControllerRegistry syncControllerRegistry; + private ExecutorService runonceSyncExecutor; + private ScheduledExecutorService periodicSyncExecutor; + private boolean syncStarted; + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(SyncControllerService.class); + + private class SyncControllerTask implements Runnable { + + private SyncController controller; + + public SyncControllerTask(SyncController controller) { + this.controller = controller; + } + + @Override + public void run() { + + try { + + if (controller.getState() == SynchronizerState.IDLE) { + + /* + * This is a blocking call; it would be nicer if it were async internally within the + * controller, but at the moment that's not the way it works. + */ + + if (controller.performAction(SyncActions.SYNCHRONIZE) != OperationState.OK) { + + LOG.info(AaiUiMsgs.INFO_GENERIC, + controller.getControllerName() + " did not accept the sync request, sync attempt has been skipped."); + } + } else { + + LOG.info(AaiUiMsgs.INFO_GENERIC, + controller.getControllerName() + " is not idle, sync attempt has been skipped."); + } + + } catch (Exception exception) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "Error while attempting synchronization. 
Error = " + exception.getMessage()); + } + + } + + } + + public SyncControllerService(SyncControllerRegistry syncControllerRegistry, int numRunOnceWorkers, + int numPeriodicWorkers) { + this.syncControllerRegistry = syncControllerRegistry; + this.syncStarted = false; + + UncaughtExceptionHandler uncaughtExceptionHandler = new Thread.UncaughtExceptionHandler() { + + @Override + public void uncaughtException(Thread thread, Throwable exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, thread.getName() + ": " + exc); + } + }; + + runonceSyncExecutor = Executors.newFixedThreadPool(numRunOnceWorkers, + new ThreadFactoryBuilder().setNameFormat("RunonceSyncWorker-%d") + .setUncaughtExceptionHandler(uncaughtExceptionHandler).build()); + + + periodicSyncExecutor = Executors.newScheduledThreadPool(numPeriodicWorkers, + new ThreadFactoryBuilder().setNameFormat("PeriodicSyncWorker-%d") + .setUncaughtExceptionHandler(uncaughtExceptionHandler).build()); + + } + + public SyncControllerRegistry getSyncControllerRegistry() { + return syncControllerRegistry; + } + + public void startSync() { + + long syncInitialDelayInMs = 0; + + for (SyncController controller : syncControllerRegistry.getControllers()) { + + syncInitialDelayInMs = controller.getDelayInMs(); + + if (!controller.isPeriodicSyncEnabled()) { + + if (controller.isRunOnceSyncEnabled()) { + LOG.info(AaiUiMsgs.INFO_GENERIC, controller.getControllerName() + " is enabled."); + runonceSyncExecutor.submit(new SyncControllerTask(controller)); + } else { + LOG.info(AaiUiMsgs.INFO_GENERIC, controller.getControllerName() + " is disabled."); + } + + } else { + + /** + * Do both. We'll take one instance of the SyncController and wrap the object instance into + * two SyncControllerTasks. The responsibility for preventing a conflicting sync should live + * in the SyncController instance. If a sync is underway when the periodic sync kicks in, + * then it will be ignored by the SyncController which is already underway. + * + * The SyncController instance itself would then also be stateful such that it would know + * the last time it ran, and the next time it is supposed to run, the number times a sync + * has executed, etc. + */ + + if (controller.isRunOnceSyncEnabled()) { + LOG.info(AaiUiMsgs.INFO_GENERIC, + controller.getControllerName() + " run-once sync is enabled."); + runonceSyncExecutor.submit(new SyncControllerTask(controller)); + } else { + LOG.info(AaiUiMsgs.INFO_GENERIC, + controller.getControllerName() + " run-once sync is disabled."); + } + + /* + * The controller knows it's configuredfrequency and we can just ask it to tell us what the + * delay and frequency needs to be, rather than trying to calculate the configured frequency + * per controller which "could" be different for each controller. 
+ */ + + if (controller.isPeriodicSyncEnabled()) { + + LOG.info(AaiUiMsgs.INFO_GENERIC, + controller.getControllerName() + " periodic sync is enabled and scheduled to start @ " + + controller.getNextSyncTime()); + + periodicSyncExecutor.scheduleAtFixedRate(new SyncControllerTask(controller), + controller.getDelayInMs(), controller.getSyncFrequencyInMs(), TimeUnit.MILLISECONDS); + + } else { + + LOG.info(AaiUiMsgs.INFO_GENERIC, + controller.getControllerName() + " periodic sync is disabled."); + + } + + } + + } + + } + + public void shutdown() { + + if (runonceSyncExecutor != null) { + runonceSyncExecutor.shutdown(); + } + + if (periodicSyncExecutor != null) { + periodicSyncExecutor.shutdown(); + } + + if (syncControllerRegistry != null) { + for (SyncController controller : syncControllerRegistry.getControllers()) { + controller.shutdown(); + } + } + + } + + @Override + public synchronized void onApplicationEvent(ApplicationContextEvent arg0) { + + /* + * Start sync service processing when spring-context-initialization has finished + */ + + if (!syncStarted) { + syncStarted = true; + startSync(); + } + + } + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SynchronizerConstants.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SynchronizerConstants.java new file mode 100644 index 0000000..9201f07 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/SynchronizerConstants.java @@ -0,0 +1,65 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +import java.text.SimpleDateFormat; +import java.util.Date; + +/** + * The Class SynchronizerConstants. 
+ */ +public final class SynchronizerConstants { + // Error values for invalid user input + public static final int DEFAULT_CONFIG_ERROR_INT_VALUE = Integer.MAX_VALUE; + public static final Date DEFAULT_CONFIG_ERROR_DATE_VALUE = new Date(Long.MAX_VALUE); + public static final SimpleDateFormat SIMPLE_DATE_FORMAT = + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z"); + + public static final String DEPTH_MODIFIER = "?depth=0"; + public static final String DEPTH_ALL_MODIFIER = "?depth=all"; + public static final String DEPTH_AND_NODES_ONLY_MODIFIER = "?depth=0&nodes-only"; + public static final String NODES_ONLY_MODIFIER = "?nodes-only"; + + // constants for scheduling synchronizer + public static final int COMPONENTS_IN_TIMESTAMP = 2; + public static final String DEFAULT_INITIAL_DELAY_IN_MS = "0"; + public static final String DEFAULT_TASK_FREQUENCY_IN_DAY = "0"; + public static final String DEFAULT_START_TIMESTAMP = "05:00:00 UTC"; + public static final long DELAY_NO_STARTUP_SYNC_IN_MS = 0; + public static final long DELAY_NO_PERIODIC_SYNC_IN_MS = 0; + public static final int IDX_TIME_IN_TIMESTAMP = 0; + public static final int IDX_TIMEZONE_IN_TIMESTAMP = 1; + public static final long MILLISEC_IN_A_MIN = 60000; + public static final long MILLISEC_IN_A_DAY = 24 * 60 * 60 * 1000; + public static final String TIME_STD = "GMT"; + public static final String TIME_CONFIG_STD = "UTC"; + public static final String TIMESTAMP24HOURS_PATTERN = + "([01]?[0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9] UTC[+|-][0-5][0-9]:[0-5][0-9]"; + + /** + * Instantiates a new synchronizer constants. + */ + private SynchronizerConstants() {} +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/TaskProcessingStats.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/TaskProcessingStats.java new file mode 100644 index 0000000..48625c0 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/TaskProcessingStats.java @@ -0,0 +1,135 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +import org.onap.aai.sparky.analytics.AbstractStatistics; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; + +/** + * The Class TaskProcessingStats. 
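+ * Gathers task-age, response-time, response-size and transactions-per-second histograms for a sync run, using the label, axis, bin and precision settings supplied by NetworkStatisticsConfig. 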
+ */ +public class TaskProcessingStats extends AbstractStatistics { + + private static String TASK_AGE_STATS = "taskAgeStats"; + private static String TASK_RESPONSE_STATS = "taskResponseStats"; + private static String RESPONSE_SIZE_IN_BYTES = "taskResponseSizeInBytes"; + // private static String QUEUE_ITEM_LENGTH = "queueItemLength"; + private static String TPS = "transactionsPerSecond"; + + /** + * Instantiates a new task processing stats. + * + * @param config the config + */ + public TaskProcessingStats(NetworkStatisticsConfig config) { + + addHistogram(TASK_AGE_STATS, config.getTaskAgeHistogramLabel(), + config.getTaskAgeHistogramMaxYAxis(), config.getTaskAgeHistogramNumBins(), + config.getTaskAgeHistogramNumDecimalPoints()); + + addHistogram(TASK_RESPONSE_STATS, config.getResponseTimeHistogramLabel(), + config.getResponseTimeHistogramMaxYAxis(), config.getResponseTimeHistogramNumBins(), + config.getResponseTimeHistogramNumDecimalPoints()); + + addHistogram(RESPONSE_SIZE_IN_BYTES, config.getBytesHistogramLabel(), + config.getBytesHistogramMaxYAxis(), config.getBytesHistogramNumBins(), + config.getBytesHistogramNumDecimalPoints()); + + /* + * addHistogram(QUEUE_ITEM_LENGTH, config.getQueueLengthHistogramLabel(), + * config.getQueueLengthHistogramMaxYAxis(), config.getQueueLengthHistogramNumBins(), + * config.getQueueLengthHistogramNumDecimalPoints()); + */ + + addHistogram(TPS, config.getTpsHistogramLabel(), config.getTpsHistogramMaxYAxis(), + config.getTpsHistogramNumBins(), config.getTpsHistogramNumDecimalPoints()); + + } + + /* + * public void updateQueueItemLengthHistogram(long value) { updateHistogram(QUEUE_ITEM_LENGTH, + * value); } + */ + + /** + * Update task age stats histogram. + * + * @param value the value + */ + public void updateTaskAgeStatsHistogram(long value) { + updateHistogram(TASK_AGE_STATS, value); + } + + /** + * Update task response stats histogram. + * + * @param value the value + */ + public void updateTaskResponseStatsHistogram(long value) { + updateHistogram(TASK_RESPONSE_STATS, value); + } + + /** + * Update response size in bytes histogram. + * + * @param value the value + */ + public void updateResponseSizeInBytesHistogram(long value) { + updateHistogram(RESPONSE_SIZE_IN_BYTES, value); + } + + /** + * Update transactions per second histogram. + * + * @param value the value + */ + public void updateTransactionsPerSecondHistogram(long value) { + updateHistogram(TPS, value); + } + + /** + * Gets the statistics report. 
+ * + * @param verboseEnabled the verbose enabled + * @param indentPadding the indent padding + * @return the statistics report + */ + public String getStatisticsReport(boolean verboseEnabled, String indentPadding) { + + StringBuilder sb = new StringBuilder(); + + sb.append("\n").append(getHistogramStats(TASK_AGE_STATS, verboseEnabled, indentPadding)); + // sb.append("\n").append(getHistogramStats(QUEUE_ITEM_LENGTH, verboseEnabled, indentPadding)); + sb.append("\n").append(getHistogramStats(TASK_RESPONSE_STATS, verboseEnabled, indentPadding)); + sb.append("\n") + .append(getHistogramStats(RESPONSE_SIZE_IN_BYTES, verboseEnabled, indentPadding)); + sb.append("\n").append(getHistogramStats(TPS, verboseEnabled, indentPadding)); + + return sb.toString(); + + } + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/TransactionRateMonitor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/TransactionRateMonitor.java new file mode 100644 index 0000000..e639eff --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/TransactionRateMonitor.java @@ -0,0 +1,75 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync; + +import java.util.concurrent.atomic.AtomicInteger; + +import org.onap.aai.sparky.analytics.AveragingRingBuffer; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; + +public class TransactionRateMonitor { + + private AtomicInteger numTransactions; + private AveragingRingBuffer responseTimeTracker; + private long startTimeInMs; + + /** + * Instantiates a new transaction rate controller. + * + * @param config the config + */ + public TransactionRateMonitor(int numWorkerThreads, NetworkStatisticsConfig config) { + + this.responseTimeTracker = new AveragingRingBuffer( + config.getNumSamplesPerThreadForRunningAverage() * numWorkerThreads); + this.startTimeInMs = System.currentTimeMillis(); + this.numTransactions = new AtomicInteger(0); + } + + /** + * Track response time. 
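+ * Each call counts one completed transaction (feeding getCurrentTps()) and adds the response time to the averaging ring buffer behind getAvg(). 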
+ * + * @param responseTimeInMs the response time in ms + */ + public void trackResponseTime(long responseTimeInMs) { + this.numTransactions.incrementAndGet(); + responseTimeTracker.addSample(responseTimeInMs); + } + + public long getAvg() { + return responseTimeTracker.getAvg(); + } + + public double getCurrentTps() { + if (numTransactions.get() > 0) { + double timeDelta = System.currentTimeMillis() - startTimeInMs; + double numTxns = numTransactions.get(); + return (numTxns / timeDelta) * 1000.0; + } + + return 0.0; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchEndpointConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchEndpointConfig.java new file mode 100644 index 0000000..0197b9b --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchEndpointConfig.java @@ -0,0 +1,72 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.sync.config; + +public class ElasticSearchEndpointConfig { + + private String esIpAddress; + private String esServerPort; + private int scrollContextTimeToLiveInMinutes; + private int scrollContextBatchRequestSize; + + public ElasticSearchEndpointConfig() { + + } + + public String getEsIpAddress() { + return esIpAddress; + } + + public void setEsIpAddress(String esIpAddress) { + this.esIpAddress = esIpAddress; + } + + public String getEsServerPort() { + return esServerPort; + } + + public void setEsServerPort(String esServerPort) { + this.esServerPort = esServerPort; + } + + public int getScrollContextTimeToLiveInMinutes() { + return scrollContextTimeToLiveInMinutes; + } + + public void setScrollContextTimeToLiveInMinutes(int scrollContextTimeToLiveInMinutes) { + this.scrollContextTimeToLiveInMinutes = scrollContextTimeToLiveInMinutes; + } + + public int getScrollContextBatchRequestSize() { + return scrollContextBatchRequestSize; + } + + public void setScrollContextBatchRequestSize(int scrollContextBatchRequestSize) { + this.scrollContextBatchRequestSize = scrollContextBatchRequestSize; + } + + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchSchemaConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchSchemaConfig.java new file mode 100644 index 0000000..4f134c3 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/ElasticSearchSchemaConfig.java @@ -0,0 +1,77 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.sync.config; + +public class ElasticSearchSchemaConfig { + + private String indexName; + private String indexDocType; + private String indexSettingsFileName; + private String indexMappingsFileName; + + public String getIndexName() { + return indexName; + } + + public void setIndexName(String indexName) { + this.indexName = indexName; + } + + public String getIndexDocType() { + return indexDocType; + } + + public void setIndexDocType(String indexDocType) { + this.indexDocType = indexDocType; + } + + public String getIndexSettingsFileName() { + return indexSettingsFileName; + } + + public void setIndexSettingsFileName(String indexSettingsFileName) { + this.indexSettingsFileName = indexSettingsFileName; + } + + public String getIndexMappingsFileName() { + return indexMappingsFileName; + } + + public void setIndexMappingsFileName(String indexMappingsFileName) { + this.indexMappingsFileName = indexMappingsFileName; + } + + @Override + public String toString() { + return "ElasticSearchSchemaConfig [" + + (indexName != null ? "indexName=" + indexName + ", " : "") + + (indexDocType != null ? "indexDocType=" + indexDocType + ", " : "") + + (indexSettingsFileName != null ? "indexSettingsFileName=" + indexSettingsFileName + ", " + : "") + + (indexMappingsFileName != null ? "indexMappingsFileName=" + indexMappingsFileName : "") + + "]"; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/NetworkStatisticsConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/NetworkStatisticsConfig.java new file mode 100644 index 0000000..0d089a4 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/NetworkStatisticsConfig.java @@ -0,0 +1,239 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.sync.config; + +public class NetworkStatisticsConfig { + + private int numSamplesPerThreadForRunningAverage; + + private String bytesHistogramLabel; + + private long bytesHistogramMaxYAxis; + + private int bytesHistogramNumBins; + + private int bytesHistogramNumDecimalPoints; + + private String queueLengthHistogramLabel; + + private long queueLengthHistogramMaxYAxis; + + private int queueLengthHistogramNumBins; + + private int queueLengthHistogramNumDecimalPoints; + + private String taskAgeHistogramLabel; + + private long taskAgeHistogramMaxYAxis; + + private int taskAgeHistogramNumBins; + + private int taskAgeHistogramNumDecimalPoints; + + private String responseTimeHistogramLabel; + + private long responseTimeHistogramMaxYAxis; + + private int responseTimeHistogramNumBins; + + private int responseTimeHistogramNumDecimalPoints; + + private String tpsHistogramLabel; + + private long tpsHistogramMaxYAxis; + + private int tpsHistogramNumBins; + + private int tpsHistogramNumDecimalPoints; + + public int getNumSamplesPerThreadForRunningAverage() { + return numSamplesPerThreadForRunningAverage; + } + + public void setNumSamplesPerThreadForRunningAverage(int numSamplesPerThreadForRunningAverage) { + this.numSamplesPerThreadForRunningAverage = numSamplesPerThreadForRunningAverage; + } + + public String getBytesHistogramLabel() { + return bytesHistogramLabel; + } + + public void setBytesHistogramLabel(String bytesHistogramLabel) { + this.bytesHistogramLabel = bytesHistogramLabel; + } + + public long getBytesHistogramMaxYAxis() { + return bytesHistogramMaxYAxis; + } + + public void setBytesHistogramMaxYAxis(long bytesHistogramMaxYAxis) { + this.bytesHistogramMaxYAxis = bytesHistogramMaxYAxis; + } + + public int getBytesHistogramNumBins() { + return bytesHistogramNumBins; + } + + public void setBytesHistogramNumBins(int bytesHistogramNumBins) { + this.bytesHistogramNumBins = bytesHistogramNumBins; + } + + public int getBytesHistogramNumDecimalPoints() { + return bytesHistogramNumDecimalPoints; + } + + public void setBytesHistogramNumDecimalPoints(int bytesHistogramNumDecimalPoints) { + this.bytesHistogramNumDecimalPoints = bytesHistogramNumDecimalPoints; + } + + public String getQueueLengthHistogramLabel() { + return queueLengthHistogramLabel; + } + + public void setQueueLengthHistogramLabel(String queueLengthHistogramLabel) { + this.queueLengthHistogramLabel = queueLengthHistogramLabel; + } + + public long getQueueLengthHistogramMaxYAxis() { + return queueLengthHistogramMaxYAxis; + } + + public void setQueueLengthHistogramMaxYAxis(long queueLengthHistogramMaxYAxis) { + this.queueLengthHistogramMaxYAxis = queueLengthHistogramMaxYAxis; + } + + public int getQueueLengthHistogramNumBins() { + return queueLengthHistogramNumBins; + } + + public void setQueueLengthHistogramNumBins(int queueLengthHistogramNumBins) { + this.queueLengthHistogramNumBins = queueLengthHistogramNumBins; + } + + public int getQueueLengthHistogramNumDecimalPoints() { + return queueLengthHistogramNumDecimalPoints; + } + + public void setQueueLengthHistogramNumDecimalPoints(int queueLengthHistogramNumDecimalPoints) { + this.queueLengthHistogramNumDecimalPoints = queueLengthHistogramNumDecimalPoints; + } + + public String getTaskAgeHistogramLabel() { + return taskAgeHistogramLabel; + } + + public void setTaskAgeHistogramLabel(String taskAgeHistogramLabel) { + this.taskAgeHistogramLabel = taskAgeHistogramLabel; + } + + public long getTaskAgeHistogramMaxYAxis() { + return taskAgeHistogramMaxYAxis; + } 
+ + public void setTaskAgeHistogramMaxYAxis(long taskAgeHistogramMaxYAxis) { + this.taskAgeHistogramMaxYAxis = taskAgeHistogramMaxYAxis; + } + + public int getTaskAgeHistogramNumBins() { + return taskAgeHistogramNumBins; + } + + public void setTaskAgeHistogramNumBins(int taskAgeHistogramNumBins) { + this.taskAgeHistogramNumBins = taskAgeHistogramNumBins; + } + + public int getTaskAgeHistogramNumDecimalPoints() { + return taskAgeHistogramNumDecimalPoints; + } + + public void setTaskAgeHistogramNumDecimalPoints(int taskAgeHistogramNumDecimalPoints) { + this.taskAgeHistogramNumDecimalPoints = taskAgeHistogramNumDecimalPoints; + } + + public String getResponseTimeHistogramLabel() { + return responseTimeHistogramLabel; + } + + public void setResponseTimeHistogramLabel(String responseTimeHistogramLabel) { + this.responseTimeHistogramLabel = responseTimeHistogramLabel; + } + + public long getResponseTimeHistogramMaxYAxis() { + return responseTimeHistogramMaxYAxis; + } + + public void setResponseTimeHistogramMaxYAxis(long responseTimeHistogramMaxYAxis) { + this.responseTimeHistogramMaxYAxis = responseTimeHistogramMaxYAxis; + } + + public int getResponseTimeHistogramNumBins() { + return responseTimeHistogramNumBins; + } + + public void setResponseTimeHistogramNumBins(int responseTimeHistogramNumBins) { + this.responseTimeHistogramNumBins = responseTimeHistogramNumBins; + } + + public int getResponseTimeHistogramNumDecimalPoints() { + return responseTimeHistogramNumDecimalPoints; + } + + public void setResponseTimeHistogramNumDecimalPoints(int responseTimeHistogramNumDecimalPoints) { + this.responseTimeHistogramNumDecimalPoints = responseTimeHistogramNumDecimalPoints; + } + + public String getTpsHistogramLabel() { + return tpsHistogramLabel; + } + + public void setTpsHistogramLabel(String tpsHistogramLabel) { + this.tpsHistogramLabel = tpsHistogramLabel; + } + + public long getTpsHistogramMaxYAxis() { + return tpsHistogramMaxYAxis; + } + + public void setTpsHistogramMaxYAxis(long tpsHistogramMaxYAxis) { + this.tpsHistogramMaxYAxis = tpsHistogramMaxYAxis; + } + + public int getTpsHistogramNumBins() { + return tpsHistogramNumBins; + } + + public void setTpsHistogramNumBins(int tpsHistogramNumBins) { + this.tpsHistogramNumBins = tpsHistogramNumBins; + } + + public int getTpsHistogramNumDecimalPoints() { + return tpsHistogramNumDecimalPoints; + } + + public void setTpsHistogramNumDecimalPoints(int tpsHistogramNumDecimalPoints) { + this.tpsHistogramNumDecimalPoints = tpsHistogramNumDecimalPoints; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/SyncControllerConfig.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/SyncControllerConfig.java new file mode 100644 index 0000000..566c249 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/config/SyncControllerConfig.java @@ -0,0 +1,305 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.config; + +import java.text.DateFormat; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Arrays; +import java.util.Calendar; +import java.util.Date; +import java.util.List; +import java.util.TimeZone; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.onap.aai.sparky.sync.SynchronizerConstants; + +public class SyncControllerConfig { + + private static final String UNKNOWN_CONTROLLER_NAME = "UnknownControllerName"; + + private String controllerName; + private boolean enabled; + private int syncTaskDelayInMs; + private int syncTaskFrequencyInDays; + + private int numSyncControllerWorkers; + private boolean runOnceSyncEnabled; + private boolean periodicSyncEnabled; + + private String targetSyncStartTimeStamp; + + private int numInternalSyncWorkers; + private int numSyncElasticWorkers; + private int numSyncActiveInventoryWorkers; + + /* + * calculated variables based on incoming config + */ + private String timeZoneOfSyncStartTimeStamp; + private int syncTaskStartTimeHr; + private int syncTaskStartTimeMin; + private int syncTaskStartTimeSec; + + + + public SyncControllerConfig() { + controllerName = UNKNOWN_CONTROLLER_NAME; + enabled = false; + syncTaskDelayInMs = 0; + syncTaskFrequencyInDays = 365; + numSyncControllerWorkers = 1; + runOnceSyncEnabled = false; + periodicSyncEnabled = false; + targetSyncStartTimeStamp = SynchronizerConstants.DEFAULT_START_TIMESTAMP; + numInternalSyncWorkers = 2; + numSyncElasticWorkers = 5; + numSyncActiveInventoryWorkers = 5; + } + + protected void initializeSyncTimeParameters() { + + if (syncTaskDelayInMs < 0) { + throw new IllegalArgumentException("syncTaskDelayInMs must >= 0"); + } + + Pattern pattern = Pattern.compile(SynchronizerConstants.TIMESTAMP24HOURS_PATTERN); + Matcher matcher = pattern.matcher(targetSyncStartTimeStamp); + if (!matcher.matches()) { + throw new IllegalArgumentException("Invalid time format for targetSyncStartTimeStamp"); + } + + List<String> timestampVal = Arrays.asList(targetSyncStartTimeStamp.split(" ")); + + if (timestampVal.size() == SynchronizerConstants.COMPONENTS_IN_TIMESTAMP) { + + // Need both time and timezone offset + timeZoneOfSyncStartTimeStamp = timestampVal + .get(SynchronizerConstants.IDX_TIMEZONE_IN_TIMESTAMP).replaceAll("UTC", "GMT"); + + String time = timestampVal.get(SynchronizerConstants.IDX_TIME_IN_TIMESTAMP); + DateFormat format = new SimpleDateFormat("HH:mm:ss"); + + Date date = null; + + try { + date = format.parse(time); + } catch (ParseException parseException) { + throw new IllegalArgumentException(parseException); + } + + Calendar calendar = Calendar.getInstance(); + calendar.setTime(date); + + syncTaskStartTimeHr = calendar.get(Calendar.HOUR_OF_DAY); + syncTaskStartTimeMin = calendar.get(Calendar.MINUTE); + syncTaskStartTimeSec = calendar.get(Calendar.SECOND); + } else { + throw new 
IllegalArgumentException("Invalid timestamp format from targetSyncStartTimeStamp"); + } + + } + + + public int getNumInternalSyncWorkers() { + return numInternalSyncWorkers; + } + + public void setNumInternalSyncWorkers(int numInternalSyncWorkers) { + this.numInternalSyncWorkers = numInternalSyncWorkers; + } + + public int getNumSyncElasticWorkers() { + return numSyncElasticWorkers; + } + + public void setNumSyncElasticWorkers(int numSyncElasticWorkers) { + this.numSyncElasticWorkers = numSyncElasticWorkers; + } + + public int getNumSyncActiveInventoryWorkers() { + return numSyncActiveInventoryWorkers; + } + + public void setNumSyncActiveInventoryWorkers(int numSyncActiveInventoryWorkers) { + this.numSyncActiveInventoryWorkers = numSyncActiveInventoryWorkers; + } + + public String getTargetSyncStartTimeStamp() { + return targetSyncStartTimeStamp; + } + + public void setTargetSyncStartTimeStamp(String targetSyncStartTimeStamp) { + this.targetSyncStartTimeStamp = targetSyncStartTimeStamp; + initializeSyncTimeParameters(); + } + + public String getControllerName() { + return controllerName; + } + + public void setControllerName(String controllerName) { + this.controllerName = controllerName; + } + + public boolean isEnabled() { + return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } + + public int getSyncTaskDelayInMs() { + return syncTaskDelayInMs; + } + + public void setSyncTaskDelayInMs(int syncTaskDelayInMs) { + this.syncTaskDelayInMs = syncTaskDelayInMs; + } + + public int getSyncTaskFrequencyInDays() { + return syncTaskFrequencyInDays; + } + + public void setSyncTaskFrequencyInDays(int syncTaskFrequencyInDays) { + this.syncTaskFrequencyInDays = syncTaskFrequencyInDays; + } + + public int getNumSyncControllerWorkers() { + return numSyncControllerWorkers; + } + + public void setNumSyncControllerWorkers(int numSyncControllerWorkers) { + this.numSyncControllerWorkers = numSyncControllerWorkers; + } + + public boolean isRunOnceSyncEnabled() { + return runOnceSyncEnabled; + } + + public void setRunOnceSyncEnabled(boolean runOnceSyncEnabled) { + this.runOnceSyncEnabled = runOnceSyncEnabled; + } + + public boolean isPeriodicSyncEnabled() { + return periodicSyncEnabled; + } + + public void setPeriodicSyncEnabled(boolean periodicSyncEnabled) { + this.periodicSyncEnabled = periodicSyncEnabled; + } + + public long getSyncFrequencyInMs() { + + return (syncTaskFrequencyInDays * SynchronizerConstants.MILLISEC_IN_A_DAY); + + } + + public Calendar getTargetSyncTime() { + + TimeZone tz = TimeZone.getTimeZone(timeZoneOfSyncStartTimeStamp); + Calendar targetSyncTime = Calendar.getInstance(tz); + + targetSyncTime.set(Calendar.HOUR_OF_DAY, syncTaskStartTimeHr); + targetSyncTime.set(Calendar.MINUTE, syncTaskStartTimeMin); + targetSyncTime.set(Calendar.SECOND, syncTaskStartTimeSec); + + return targetSyncTime; + + } + + + public String getNextSyncTime() { + + int taskFrequencyInSeconds = 0; + if (getSyncFrequencyInMs() > 0) { + taskFrequencyInSeconds = (int) (getSyncFrequencyInMs() / 1000); + } + + if (taskFrequencyInSeconds < 86400) { + + TimeZone tz = TimeZone.getTimeZone(timeZoneOfSyncStartTimeStamp); + Calendar targetSyncTime = Calendar.getInstance(tz); + targetSyncTime.add(Calendar.SECOND, taskFrequencyInSeconds); + + return SynchronizerConstants.SIMPLE_DATE_FORMAT.format(targetSyncTime.getTimeInMillis()) + .replaceAll(SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD); + + } else { + + return SynchronizerConstants.SIMPLE_DATE_FORMAT + 
.format(getNextSyncTime(getTargetSyncTime(), taskFrequencyInSeconds)) + .replaceAll(SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD); + + } + + } + + public long getNextSyncTime(Calendar syncTime, int taskFrequencyInSeconds) { + + TimeZone tz = TimeZone.getTimeZone(timeZoneOfSyncStartTimeStamp); + Calendar timeNow = Calendar.getInstance(tz); + + return getNextSyncTime(syncTime, timeNow.getTimeInMillis(), taskFrequencyInSeconds); + } + + /** + * Gets the first sync time. + * + * @param calendar the calendar + * @param timeNow the time now in ms + * @param taskFrequencyInMs task period in ms + * @return the first sync time + */ + + public long getNextSyncTime(Calendar syncTime, long timeNowInMs, int taskFrequencyInSeconds) { + if (taskFrequencyInSeconds == 0) { + return 0; + } else if (timeNowInMs > syncTime.getTimeInMillis()) { + + /* + * If current time is after the scheduled sync start time, then we'll skip ahead to the next + * sync time period + */ + + syncTime.add(Calendar.SECOND, taskFrequencyInSeconds); + } + + return syncTime.getTimeInMillis(); + } + + public String getTimeZoneOfSyncStartTimeStamp() { + return timeZoneOfSyncStartTimeStamp; + } + + public void setTimeZoneOfSyncStartTimeStamp(String timeZoneOfSyncStartTimeStamp) { + this.timeZoneOfSyncStartTimeStamp = timeZoneOfSyncStartTimeStamp; + } + + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/AggregationEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/AggregationEntity.java new file mode 100644 index 0000000..30d4e71 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/AggregationEntity.java @@ -0,0 +1,99 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +import java.util.HashMap; +import java.util.Map; + +import org.onap.aai.sparky.util.NodeUtils; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; + +/** + * The Class AggregationEntity. + */ +public class AggregationEntity extends IndexableEntity implements IndexDocument { + private Map<String, String> attributes = new HashMap<String, String>(); + protected ObjectMapper mapper = new ObjectMapper(); + + /** + * Instantiates a new aggregation entity. 
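+   * Attribute key/value pairs are collected into a flat map and emitted, together with the
+   * link and lastmodTimestamp, by {@link #getAsJson()}.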
+ */ + public AggregationEntity() { + super(); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields() + */ + @Override + public void deriveFields() { + + /* + * We'll try and create a unique identity key that we can use for differencing the previously + * imported record sets as we won't have granular control of what is created/removed and when. + * The best we can hope for is identification of resources by generated Id until the + * Identity-Service UUID is tagged against all resources, then we can use that instead. + */ + this.id = + NodeUtils.generateUniqueShaDigest(link); + } + + public void copyAttributeKeyValuePair(Map<String, Object> map){ + for(String key: map.keySet()){ + if (!key.equalsIgnoreCase("relationship-list")){ // ignore relationship data which is not required in aggregation + this.attributes.put(key, map.get(key).toString()); // not sure if entity attribute can contain an object as value + } + } + } + + public void addAttributeKeyValuePair(String key, String value){ + this.attributes.put(key, value); + } + + @Override + public String getAsJson() { + ObjectNode rootNode = mapper.createObjectNode(); + rootNode.put("link", this.getLink()); + rootNode.put("lastmodTimestamp", this.getEntityTimeStamp()); + for (String key: this.attributes.keySet()){ + rootNode.put(key, this.attributes.get(key)); + } + return rootNode.toString(); + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "IndexDocument [" + (entityType != null ? "entityType=" + entityType + ", " : "") + + (entityPrimaryKeyValue != null ? "entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", " + : "") + + (mapper != null ? "mapper=" + mapper + ", " : "") + (id != null ? "id=" + id + ", " : "") + + (lastmodTimestamp != null ? "lastmodTimestamp=" + lastmodTimestamp + ", " : "") + "]"; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/AggregationSuggestionEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/AggregationSuggestionEntity.java new file mode 100644 index 0000000..06f60b3 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/AggregationSuggestionEntity.java @@ -0,0 +1,111 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.sync.entity; + +import java.util.ArrayList; +import java.util.List; + +import org.json.JSONArray; +import org.json.JSONObject; +import org.onap.aai.sparky.search.filters.config.FiltersConfig; +import org.onap.aai.sparky.search.filters.config.UiFilterListItemConfig; +import org.onap.aai.sparky.search.filters.config.UiViewListItemConfig; +import org.onap.aai.sparky.util.NodeUtils; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.databind.ObjectMapper; + +public class AggregationSuggestionEntity extends IndexableEntity implements IndexDocument { + + private static final String FILTER_ID = "filterId"; + private static final String FILTER_LIST = "filterList"; + + private List<String> inputs = new ArrayList<>(); + private final String outputString = "VNFs"; + protected ObjectMapper mapper = new ObjectMapper(); + List<String> filterIds = new ArrayList<>(); + + @JsonIgnore + private FiltersConfig filtersConfig; + + public AggregationSuggestionEntity(FiltersConfig filtersConfig) { + super(); + this.filtersConfig = filtersConfig; + inputs.add("VNFs"); + inputs.add("generic-vnfs"); + } + + @Override + public void deriveFields() { + this.id = NodeUtils.generateUniqueShaDigest(this.outputString); + } + + @Override + public String getAsJson() { + JSONArray inputArray = new JSONArray(); + for (String input: inputs) { + input = input.replace(",","" ); + input = input.replace("[","" ); + input = input.replace("]","" ); + inputArray.put(input); + } + + JSONObject entitySuggest = new JSONObject(); + entitySuggest.put("input", inputArray); + entitySuggest.put("output", this.outputString); + entitySuggest.put("weight", 100); + + JSONArray payloadFilters = new JSONArray(); + + for (String filterId : filterIds) { + JSONObject filterPayload = new JSONObject(); + filterPayload.put(FILTER_ID, filterId); + payloadFilters.put(filterPayload); + } + + JSONObject payloadNode = new JSONObject(); + payloadNode.put(FILTER_LIST, payloadFilters); + entitySuggest.put("payload", payloadNode); + + JSONObject rootNode = new JSONObject(); + rootNode.put("entity_suggest", entitySuggest); + + return rootNode.toString(); + } + + public void initializeFilters() { + for (UiViewListItemConfig view : filtersConfig.getViewsConfig().getViews()) { + if (view.getViewName().equals("vnfSearch")) { + for (UiFilterListItemConfig currentViewFilter : view.getFilters()) { + filterIds.add(currentViewFilter.getFilterId()); + } + } + } + } + + public void setFilterIds(List<String> filterIds) { + this.filterIds = filterIds; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexDocument.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexDocument.java new file mode 100644 index 0000000..6ee3351 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexDocument.java @@ -0,0 +1,41 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +/** + * The Interface IndexDocument. + */ +public interface IndexDocument { + + /** + * Derive fields. + */ + public void deriveFields(); + + public String getId(); + + public String getAsJson() throws Exception; + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexableCrossEntityReference.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexableCrossEntityReference.java new file mode 100644 index 0000000..0c94227 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexableCrossEntityReference.java @@ -0,0 +1,97 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +import java.util.ArrayList; + +import org.onap.aai.sparky.util.NodeUtils; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonProcessingException; + + +/** + * The Class IndexableCrossEntityReference. + */ + +public class IndexableCrossEntityReference extends IndexableEntity implements IndexDocument { + + protected String crossReferenceEntityValues; + protected ArrayList<String> crossEntityReferenceCollection = new ArrayList<String>(); + + /** + * Instantiates a new indexable cross entity reference. + */ + public IndexableCrossEntityReference() { + super(); + } + + /** + * Adds the cross entity reference value. 
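+   * Duplicate values are ignored so the reference collection stays unique.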
+ * + * @param crossEntityReferenceValue the cross entity reference value + */ + public void addCrossEntityReferenceValue(String crossEntityReferenceValue) { + if (!crossEntityReferenceCollection.contains(crossEntityReferenceValue)) { + crossEntityReferenceCollection.add(crossEntityReferenceValue); + } + } + + public String getCrossReferenceEntityValues() { + return crossReferenceEntityValues; + } + + public void setCrossReferenceEntityValues(String crossReferenceEntityValues) { + this.crossReferenceEntityValues = crossReferenceEntityValues; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields() + */ + @Override + public void deriveFields() { + this.id = NodeUtils.generateUniqueShaDigest(link); + this.crossReferenceEntityValues = NodeUtils.concatArray(crossEntityReferenceCollection, ";"); + } + + @JsonIgnore // Since this is a "get" we need to JSON ignore otherwise it will be called when converting to JSON + @Override + public String getAsJson() throws JsonProcessingException { + + return NodeUtils.convertObjectToJson(this, false); + + } + + @Override + public String toString() { + return "IndexableCrossEntityReference [" + + (crossReferenceEntityValues != null + ? "crossReferenceEntityValues=" + crossReferenceEntityValues + ", " : "") + + (crossEntityReferenceCollection != null + ? "crossEntityReferenceCollection=" + crossEntityReferenceCollection + ", " : ""); + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexableEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexableEntity.java new file mode 100644 index 0000000..9a7f865 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/IndexableEntity.java @@ -0,0 +1,100 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +import java.sql.Timestamp; +import java.text.SimpleDateFormat; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * The Class IndexableEntity. + */ +public abstract class IndexableEntity { + protected String id; // generated, SHA-256 digest + protected String entityType; + protected String entityPrimaryKeyValue; + protected String lastmodTimestamp; + protected String link; + + private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; + + /** + * Instantiates a new indexable entity. 
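+   * The last-modified timestamp is initialized to the current time in
+   * yyyy-MM-dd'T'HH:mm:ss.SSSZ format.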
+ */ + public IndexableEntity() { + SimpleDateFormat dateFormat = new SimpleDateFormat(TIMESTAMP_FORMAT); + Timestamp timestamp = new Timestamp(System.currentTimeMillis()); + String currentFormattedTimeStamp = dateFormat.format(timestamp); + this.setEntityTimeStamp(currentFormattedTimeStamp); + } + + @JsonIgnore + public String getId() { + return id; + } + + @JsonProperty("entityType") + public String getEntityType() { + return entityType; + } + + @JsonProperty("entityPrimaryKeyValue") + public String getEntityPrimaryKeyValue() { + return entityPrimaryKeyValue; + } + + @JsonProperty("lastmodTimestamp") + public String getEntityTimeStamp() { + return lastmodTimestamp; + } + + public void setId(String id) { + this.id = id; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public void setEntityPrimaryKeyValue(String fieldValue) { + this.entityPrimaryKeyValue = fieldValue; + } + + public void setEntityTimeStamp(String lastmodTimestamp) { + this.lastmodTimestamp = lastmodTimestamp; + } + + @JsonProperty("link") + public String getLink() { + return link; + } + + public void setLink(String link) { + this.link = link; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/MergableEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/MergableEntity.java new file mode 100644 index 0000000..82c874a --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/MergableEntity.java @@ -0,0 +1,59 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; + +/** + * The Class MergableEntity. + */ +public class MergableEntity { + private Map<String, String> other = new HashMap<String, String>(); + + /** + * Any. + * + * @return the map + */ + @JsonAnyGetter + public Map<String, String> any() { + return other; + } + + /** + * Sets the. 
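+   * Stores the given name/value pair in the map returned by {@link #any()}; Jackson routes any
+   * JSON property without an explicit field here via {@code @JsonAnySetter}.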
+ * + * @param name the name + * @param value the value + */ + @JsonAnySetter + public void set(String name, String value) { + other.put(name, value); + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/ObjectIdCollection.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/ObjectIdCollection.java new file mode 100644 index 0000000..8462bc9 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/ObjectIdCollection.java @@ -0,0 +1,78 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +import java.util.Collection; +import java.util.List; +import java.util.concurrent.ConcurrentHashMap; + +/** + * The Class ObjectIdCollection. + */ +public class ObjectIdCollection { + + protected ConcurrentHashMap<String, String> importedObjectIds = + new ConcurrentHashMap<String, String>(); + + public Collection<String> getImportedObjectIds() { + return importedObjectIds.values(); + } + + /** + * Adds the object id. + * + * @param id the id + */ + public void addObjectId(String id) { + importedObjectIds.putIfAbsent(id, id); + } + + public int getSize() { + return importedObjectIds.values().size(); + } + + /** + * Adds the all. + * + * @param items the items + */ + public void addAll(List<String> items) { + if (items == null) { + return; + } + + items.stream().forEach((item) -> { + importedObjectIds.putIfAbsent(item, item); + }); + + } + + /** + * Clear. + */ + public void clear() { + importedObjectIds.clear(); + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SearchableEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SearchableEntity.java new file mode 100644 index 0000000..84fc093 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SearchableEntity.java @@ -0,0 +1,142 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +import java.util.ArrayList; +import java.util.List; + +import org.onap.aai.sparky.util.NodeUtils; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * The Class SearchableEntity. + */ +public class SearchableEntity extends IndexableEntity implements IndexDocument { + + @JsonIgnore + protected List<String> searchTagCollection = new ArrayList<String>(); + + @JsonIgnore + protected List<String> searchTagIdCollection = new ArrayList<String>(); + + @JsonIgnore + protected ObjectMapper mapper = new ObjectMapper(); + + /** + * Instantiates a new searchable entity. + */ + public SearchableEntity() { + super(); + } + + /* + * Generated fields, leave the settings for junit overrides + */ + + protected String searchTags; // generated based on searchTagCollection values + + protected String searchTagIDs; + + /** + * Generates the sha based id. + */ + public void generateId() { + this.id = NodeUtils.generateUniqueShaDigest(link); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields() + */ + @Override + public void deriveFields() { + + /* + * We'll try and create a unique identity key that we can use for differencing the previously + * imported record sets as we won't have granular control of what is created/removed and when. + * The best we can hope for is identification of resources by generated Id until the + * Identity-Service UUID is tagged against all resources, then we can use that instead. + */ + generateId(); + this.searchTags = NodeUtils.concatArray(searchTagCollection, ";"); + this.searchTagIDs = NodeUtils.concatArray(this.searchTagIdCollection, ";"); + } + + /** + * Adds the search tag with key. + * + * @param searchTag the search tag + * @param searchTagKey the key associated with the search tag (key:value) + */ + public void addSearchTagWithKey(String searchTag, String searchTagKey) { + searchTagIdCollection.add(searchTagKey); + searchTagCollection.add(searchTag); + } + + public List<String> getSearchTagCollection() { + return searchTagCollection; + } + + @JsonProperty("searchTags") + public String getSearchTags() { + return searchTags; + } + + @JsonProperty("searchTagIDs") + public String getSearchTagIDs() { + return searchTagIDs; + } + + @JsonIgnore + public List<String> getSearchTagIdCollection() { + return searchTagIdCollection; + } + + @Override + @JsonIgnore + public String getAsJson() throws JsonProcessingException { + return NodeUtils.convertObjectToJson(this, false); + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "IndexDocument [" + (entityType != null ? "entityType=" + entityType + ", " : "") + + (entityPrimaryKeyValue != null ? 
"entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", " + : "") + + (searchTagCollection != null ? "searchTagCollection=" + searchTagCollection + ", " : "") + + (searchTagIdCollection != null ? "searchTagIDCollection=" + searchTagIdCollection + ", " + : "") + + (mapper != null ? "mapper=" + mapper + ", " : "") + (id != null ? "id=" + id + ", " : "") + + (lastmodTimestamp != null ? "lastmodTimestamp=" + lastmodTimestamp + ", " : "") + + (searchTags != null ? "searchTags=" + searchTags + ", " : "") + + (searchTagIDs != null ? "searchTagIDs=" + searchTagIDs : "") + "]"; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SelfLinkDescriptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SelfLinkDescriptor.java new file mode 100644 index 0000000..2b3f858 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SelfLinkDescriptor.java @@ -0,0 +1,90 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +/** + * The Class SelfLinkDescriptor. + */ +public class SelfLinkDescriptor { + private String selfLink; + private String entityType; + private String depthModifier; + + public String getDepthModifier() { + return depthModifier; + } + + public void setDepthModifier(String depthModifier) { + this.depthModifier = depthModifier; + } + + public String getSelfLink() { + return selfLink; + } + + public void setSelfLink(String selfLink) { + this.selfLink = selfLink; + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public SelfLinkDescriptor(String selfLink) { + this(selfLink, null, null); + } + + /** + * Instantiates a new self link descriptor. + * + * @param selfLink the self link + * @param entityType the entity type + */ + public SelfLinkDescriptor(String selfLink, String entityType) { + this(selfLink, null, entityType); + } + + public SelfLinkDescriptor(String selfLink, String depthModifier, String entityType) { + this.selfLink = selfLink; + this.entityType = entityType; + this.depthModifier = depthModifier; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "SelfLinkDescriptor [" + (selfLink != null ? "selfLink=" + selfLink + ", " : "") + + (entityType != null ? "entityType=" + entityType + ", " : "") + + (depthModifier != null ? 
"depthModifier=" + depthModifier : "") + "]"; + } + +} + diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SuggestionSearchEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SuggestionSearchEntity.java new file mode 100644 index 0000000..3648b53 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/SuggestionSearchEntity.java @@ -0,0 +1,327 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.entity; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.json.JSONArray; +import org.json.JSONObject; +import org.onap.aai.sparky.config.oxm.SuggestionEntityLookup; +import org.onap.aai.sparky.search.filters.config.FiltersConfig; +import org.onap.aai.sparky.search.filters.config.FiltersDetailsConfig; +import org.onap.aai.sparky.search.filters.config.UiFilterConfig; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.util.SuggestionsPermutation; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +public class SuggestionSearchEntity extends IndexableEntity implements IndexDocument { + private static final String FILTER_ID = "filterId"; + private static final String FILTER_VALUE = "filterValue"; + private static final String FILTER_LIST = "filterList"; + + private String entityType; + private List<String> suggestionConnectorWords = new ArrayList<String>(); + private List<String> suggestionAttributeTypes = new ArrayList<String>(); + private List<String> suggestionAttributeValues = new ArrayList<String>(); + private List<String> suggestionTypeAliases = new ArrayList<String>(); + private List<String> suggestionInputPermutations = new ArrayList<String>(); + private List<String> suggestableAttr = new ArrayList<String>(); + + private Map<String, String> inputOutputData = new HashMap<String, String>(); + Map<String, UiFilterConfig> filters = new HashMap<String, UiFilterConfig>(); + private JSONObject payload = new JSONObject(); + private JSONArray payloadFilters = new JSONArray(); + private StringBuffer outputString = new StringBuffer(); + private String aliasToUse; + + private SuggestionEntityLookup entityLookup; + + public JSONObject getPayload() { + return payload; + } + + public void setPayload(JSONObject payload) { + this.payload = payload; + } + + protected 
ObjectMapper mapper = new ObjectMapper(); + + public SuggestionSearchEntity(FiltersConfig filtersConfig) { + super(); + + FiltersDetailsConfig filterConfigList = filtersConfig.getFiltersConfig(); + // Populate the map with keys that will match the suggestableAttr values + for(UiFilterConfig filter : filterConfigList.getFilters()) { + if(filter.getDataSource() != null) { + filters.put(filter.getDataSource().getFieldName(), filter); + } + } + } + + public SuggestionSearchEntity(FiltersConfig filtersConfig, SuggestionEntityLookup entityLookup) { + + this.entityLookup = entityLookup; + + FiltersDetailsConfig filterConfigList = filtersConfig.getFiltersConfig(); + // Populate the map with keys that will match the suggestableAttr values + for(UiFilterConfig filter : filterConfigList.getFilters()) { + if(filter.getDataSource() != null) { + filters.put(filter.getDataSource().getFieldName(), filter); + } + } + } + + public SuggestionSearchEntity(SuggestionEntityLookup entityLookup, FiltersConfig config) { + + FiltersDetailsConfig filterConfigList = config.getFiltersConfig(); + // Populate the map with keys that will match the suggestableAttr values + for(UiFilterConfig filter : filterConfigList.getFilters()) { + if(filter.getDataSource() != null) { + filters.put(filter.getDataSource().getFieldName(), filter); + } + } + } + + public void setSuggestableAttr(ArrayList<String> attributes) { + for (String attribute : attributes) { + this.suggestableAttr.add(attribute); + } + } + + public void setPayloadFromResponse(JsonNode node) { + if (suggestableAttr != null) { + JSONObject nodePayload = new JSONObject(); + for (String attribute : suggestableAttr) { + if (node.get(attribute) != null) { + inputOutputData.put(attribute, node.get(attribute).asText()); + this.payload.put(attribute, node.get(attribute).asText()); + } + } + } + } + + public void setFilterBasedPayloadFromResponse(JsonNode node, String entityName, ArrayList<String> uniqueList) { + + HashMap<String, String> desc = entityLookup.getSuggestionSearchEntityOxmModel().get(entityName); + + if ( desc == null ) { + return; + } + + String attr = desc.get("suggestibleAttributes"); + + if ( attr == null ) { + return; + } + + List<String> suggestableAttrOxm = Arrays.asList(attr.split(",")); + + /* + * Note: + * (1) 'uniqueList' is one item within the power set of the suggestable attributes. 
+ * (2) 'inputeOutputData' is used to generate permutations of strings + */ + for (String selectiveAttr: uniqueList) { + if (node.get(selectiveAttr) != null) { + inputOutputData.put(selectiveAttr, node.get(selectiveAttr).asText()); + } + } + + if (suggestableAttrOxm != null) { + for (String attribute : suggestableAttrOxm) { + if (node.get(attribute) != null && uniqueList.contains(attribute)) { + UiFilterConfig filterConfig = filters.get(attribute); + if(filterConfig != null) { + JSONObject filterPayload = new JSONObject(); + filterPayload.put(FILTER_ID, filterConfig.getFilterId()); + filterPayload.put(FILTER_VALUE, node.get(attribute).asText()); + this.payloadFilters.put(filterPayload); + } else { + this.payload.put(attribute, node.get(attribute).asText()); + } + } else { + UiFilterConfig emptyValueFilterConfig = filters.get(attribute); + if(emptyValueFilterConfig != null) { + JSONObject emptyValueFilterPayload = new JSONObject(); + emptyValueFilterPayload.put(FILTER_ID, emptyValueFilterConfig.getFilterId()); + this.payloadFilters.put(emptyValueFilterPayload); + } + } + } + this.payload.put(FILTER_LIST, this.payloadFilters); + } + } + + @Override + public String getEntityType() { + return entityType; + } + + @Override + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public List<String> getSuggestionConnectorWords() { + return suggestionConnectorWords; + } + + public void setSuggestionConnectorWords(List<String> suggestionConnectorWords) { + this.suggestionConnectorWords = suggestionConnectorWords; + } + + public List<String> getSuggestionPropertyTypes() { + return this.suggestionAttributeTypes; + } + + public void setSuggestionPropertyTypes(List<String> suggestionPropertyTypes) { + this.suggestionAttributeTypes = suggestionPropertyTypes; + } + + public List<String> getSuggestionAttributeValues() { + return this.suggestionAttributeValues; + } + + public void setSuggestionAttributeValues(List<String> suggestionAttributeValues) { + this.suggestionAttributeValues = suggestionAttributeValues; + } + + public List<String> getSuggestionAliases() { + return this.suggestionTypeAliases; + } + + public void setSuggestionAliases(List<String> suggestionAliases) { + this.suggestionTypeAliases = suggestionAliases; + } + + public List<String> getSuggestionInputPermutations() { + return this.suggestionInputPermutations; + } + + public void setSuggestionInputPermutations(List<String> permutations) { + this.suggestionInputPermutations = permutations; + } + + public void generateSuggestionInputPermutations() { + + List<String> entityNames = new ArrayList<>(); + entityNames.add(entityType); + HashMap<String, String> desc = entityLookup.getSuggestionSearchEntityOxmModel().get(this.entityType); + String attr = desc.get("suggestionAliases"); + String[] suggestionAliasesArray = attr.split(","); + suggestionTypeAliases = Arrays.asList(suggestionAliasesArray); + this.setAliasToUse(suggestionAliasesArray[suggestionAliasesArray.length - 1]); + for (String alias : suggestionTypeAliases) { + entityNames.add(alias); + } + + ArrayList<String> listToPermutate = new ArrayList<>(inputOutputData.values()); + + for (String entity : entityNames){ + listToPermutate.add(entity); // add entity-name or alias in list to permutate + List<List<String>> lists = SuggestionsPermutation.getListPermutations(listToPermutate); + for (List<String> li : lists){ + suggestionInputPermutations.add(String.join(" ", li)); + } + // prepare for the next pass: remove the entity-name or alias from the list + 
listToPermutate.remove(entity); + } + } + + public boolean isSuggestableDoc() { + return this.getPayload().length() != 0; + } + + + @Override + public void deriveFields() { + + int entryCounter = 1; + for (Map.Entry<String, String> outputValue : inputOutputData.entrySet()) { + if (outputValue.getValue() != null && outputValue.getValue().length() > 0) { + this.outputString.append(outputValue.getValue()); + if (entryCounter < inputOutputData.entrySet().size()) { + this.outputString.append(" and "); + } else{ + this.outputString.append(" "); + } + } + entryCounter++; + } + + this.outputString.append(this.getAliasToUse()); + this.id = NodeUtils.generateUniqueShaDigest(outputString.toString()); + } + + @Override + public String getAsJson() { + // TODO Auto-generated method stub + JSONObject rootNode = new JSONObject(); + + JSONArray suggestionsArray = new JSONArray(); + for (String suggestion : suggestionInputPermutations) { + suggestionsArray.put(suggestion); + } + + JSONObject entitySuggest = new JSONObject(); + + entitySuggest.put("input", suggestionsArray); + entitySuggest.put("output", this.outputString); + entitySuggest.put("payload", this.payload); + rootNode.put("entity_suggest", entitySuggest); + + return rootNode.toString(); + } + + public String getAliasToUse() { + return aliasToUse; + } + + public void setAliasToUse(String aliasToUse) { + this.aliasToUse = aliasToUse; + } + + public Map<String, String> getInputOutputData() { + return inputOutputData; + } + + public void setInputOutputData(Map<String, String> inputOutputData) { + this.inputOutputData = inputOutputData; + } + + @Override + public String toString() { + return "SuggestionSearchEntity [entityType=" + entityType + ", suggestionConnectorWords=" + + suggestionConnectorWords + ", suggestionAttributeTypes=" + suggestionAttributeTypes + + ", suggestionAttributeValues=" + suggestionAttributeValues + ", suggestionTypeAliases=" + + suggestionTypeAliases + ", mapper=" + mapper + "]"; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/TransactionStorageType.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/TransactionStorageType.java new file mode 100644 index 0000000..52ea891 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/entity/TransactionStorageType.java @@ -0,0 +1,56 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.sync.entity; + +/** + * The Enum TransactionStorageType. + */ +public enum TransactionStorageType { + EDGE_TAG_QUERY(0, "aaiOffline/edge-tag-query"), ACTIVE_INVENTORY_QUERY(1, + "aaiOffline/active-inventory-query"); + + private Integer index; + private String outputFolder; + + /** + * Instantiates a new transaction storage type. + * + * @param index the index + * @param outputFolder the output folder + */ + TransactionStorageType(Integer index, String outputFolder) { + this.index = index; + this.outputFolder = outputFolder; + } + + public Integer getIndex() { + return index; + } + + public String getOutputFolder() { + return outputFolder; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/enumeration/OperationState.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/enumeration/OperationState.java new file mode 100644 index 0000000..b8c12c9 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/enumeration/OperationState.java @@ -0,0 +1,32 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.enumeration; + +/** + * The Enum OperationState. + */ +public enum OperationState { + INIT, OK, ERROR, ABORT, PENDING, IGNORED_SYNC_NOT_IDLE +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/enumeration/SynchronizerState.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/enumeration/SynchronizerState.java new file mode 100644 index 0000000..01b28e0 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/enumeration/SynchronizerState.java @@ -0,0 +1,32 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.enumeration; + +/** + * The Enum SynchronizerState. + */ +public enum SynchronizerState { + IDLE, PERFORMING_SYNCHRONIZATION, ABORTED +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformActiveInventoryRetrieval.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformActiveInventoryRetrieval.java new file mode 100644 index 0000000..a986dfc --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformActiveInventoryRetrieval.java @@ -0,0 +1,97 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.task; + +import java.util.Map; +import java.util.function.Supplier; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.slf4j.MDC; + +/* + * Consider abstraction the tasks into common elemnts, because most of them repeat a generic call + * flow pattern + */ + +/** + * The Class PerformActiveInventoryRetrieval. + */ +public class PerformActiveInventoryRetrieval implements Supplier<NetworkTransaction> { + + private static Logger logger = LoggerFactory.getInstance().getLogger(PerformActiveInventoryRetrieval.class); + + private NetworkTransaction txn; + private ActiveInventoryAdapter aaiAdapter; + private Map<String, String> contextMap; + + /** + * Instantiates a new perform active inventory retrieval. 
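+   * The caller's MDC context map is captured at construction and re-applied on the worker
+   * thread in {@link #get()}.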
+ * + * @param txn the txn + * @param aaiProvider the aai provider + */ + public PerformActiveInventoryRetrieval(NetworkTransaction txn, + ActiveInventoryAdapter aaiAdapter) { + this.txn = txn; + this.aaiAdapter = aaiAdapter; + this.contextMap = MDC.getCopyOfContextMap(); + } + + /* (non-Javadoc) + * @see java.util.function.Supplier#get() + */ + @Override + public NetworkTransaction get() { + + txn.setTaskAgeInMs(); + + long startTimeInMs = System.currentTimeMillis(); + MDC.setContextMap(contextMap); + OperationResult result = null; + try { + + final String absoluteSelfLink = aaiAdapter.repairSelfLink(txn.getLink(), txn.getQueryParameters()); + result = aaiAdapter.queryActiveInventoryWithRetries(absoluteSelfLink, "application/json", 5); + } catch (Exception exc) { + logger.error(AaiUiMsgs.ERROR_GENERIC,"Failure to resolve self link from AAI. Error = " + exc.getMessage()); + result = new OperationResult(500, + "Caught an exception while trying to resolve link = " + exc.getMessage()); + } finally { + txn.setOperationResult(result); + txn.setOpTimeInMs(System.currentTimeMillis() - startTimeInMs); + } + + return txn; + } + + protected void setContextMap(Map<String, String> contextMap) { + this.contextMap = contextMap; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchPut.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchPut.java new file mode 100644 index 0000000..cbc9ccb --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchPut.java @@ -0,0 +1,90 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.task; + +import java.util.Map; +import java.util.function.Supplier; + +import javax.ws.rs.core.MediaType; + +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.slf4j.MDC; + +/** + * The Class PerformElasticSearchPut. + */ +public class PerformElasticSearchPut implements Supplier<NetworkTransaction> { + + private ElasticSearchAdapter esAdapter; + private String jsonPayload; + private NetworkTransaction txn; + private Map<String, String> contextMap; + + /** + * Instantiates a new perform elastic search put. 
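+   * Captures the caller's MDC context map so that {@link #get()} can restore it before
+   * issuing the PUT.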
+ * + * @param jsonPayload the json payload + * @param txn the txn + * @param restDataProvider the rest data provider + */ + public PerformElasticSearchPut(String jsonPayload, NetworkTransaction txn, + ElasticSearchAdapter esAdapter) { + this.jsonPayload = jsonPayload; + this.txn = txn; + this.esAdapter = esAdapter; + this.contextMap = MDC.getCopyOfContextMap(); + } + + public PerformElasticSearchPut(String jsonPayload, NetworkTransaction txn, + ElasticSearchAdapter esAdapter, Map<String, String> contextMap) { + this.jsonPayload = jsonPayload; + this.txn = txn; + this.esAdapter = esAdapter; + this.contextMap = contextMap; + } + + /* + * (non-Javadoc) + * + * @see java.util.function.Supplier#get() + */ + @Override + public NetworkTransaction get() { + txn.setTaskAgeInMs(); + MDC.setContextMap(contextMap); + + long startTimeInMs = System.currentTimeMillis(); + + OperationResult or = + esAdapter.doPut(txn.getLink(), jsonPayload, MediaType.APPLICATION_JSON_TYPE); + + txn.setOperationResult(or); + txn.setOpTimeInMs(System.currentTimeMillis() - startTimeInMs); + + return txn; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchRetrieval.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchRetrieval.java new file mode 100644 index 0000000..048d19e --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchRetrieval.java @@ -0,0 +1,72 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.task; + +import java.util.Map; +import java.util.function.Supplier; + +import javax.ws.rs.core.MediaType; + +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.slf4j.MDC; + +/** + * The Class PerformElasticSearchRetrieval. + */ +public class PerformElasticSearchRetrieval implements Supplier<NetworkTransaction> { + + private NetworkTransaction txn; + private ElasticSearchAdapter esAdapter; + private Map<String, String> contextMap; + + /** + * Instantiates a new perform elastic search retrieval. 
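+   * <p>
+   * Illustrative sketch; {@code indexName}, {@code docId}, {@code esAdapter} and
+   * {@code esExecutor} are assumed inputs:
+   * <pre>{@code
+   * NetworkTransaction esTxn = new NetworkTransaction();
+   * esTxn.setLink(esAdapter.buildElasticSearchGetDocUrl(indexName, docId));
+   * esTxn.setOperationType(HttpMethod.GET);
+   * NetworkTransaction completed =
+   *     supplyAsync(new PerformElasticSearchRetrieval(esTxn, esAdapter), esExecutor).join();
+   * }</pre>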
+ * + * @param elasticSearchTxn the elastic search txn + * @param restDataProvider the rest data provider + */ + public PerformElasticSearchRetrieval(NetworkTransaction elasticSearchTxn, + ElasticSearchAdapter esAdapter) { + this.txn = elasticSearchTxn; + this.esAdapter = esAdapter; + this.contextMap = MDC.getCopyOfContextMap(); + } + + /* (non-Javadoc) + * @see java.util.function.Supplier#get() + */ + @Override + public NetworkTransaction get() { + MDC.setContextMap(contextMap); + long startTimeInMs = System.currentTimeMillis(); + OperationResult or = esAdapter.doGet(txn.getLink(), MediaType.APPLICATION_JSON_TYPE); + txn.setOperationResult(or); + txn.setOpTimeInMs(System.currentTimeMillis() - startTimeInMs); + return txn; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchUpdate.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchUpdate.java new file mode 100644 index 0000000..2e329e5 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformElasticSearchUpdate.java @@ -0,0 +1,82 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.sync.task; + +import java.util.Map; +import java.util.function.Supplier; + +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.slf4j.MDC; + +/** + * The Class PerformElasticSearchUpdate. + */ +public class PerformElasticSearchUpdate implements Supplier<NetworkTransaction> { + + private ElasticSearchAdapter esAdapter; + private NetworkTransaction operationTracker; + private String updatePayload; + private String updateUrl; + private Map<String, String> contextMap; + + /** + * Instantiates a new perform elastic search update. 
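+   * <p>
+   * Illustrative sketch; {@code bulkUrl}, {@code bulkPayload}, {@code esAdapter},
+   * {@code templateTxn} and {@code esExecutor} are assumed. The transaction passed in is
+   * only used as a template for entity type, descriptor and operation type; results are
+   * reported on the task's own internal tracker:
+   * <pre>{@code
+   * NetworkTransaction completed = supplyAsync(
+   *     new PerformElasticSearchUpdate(bulkUrl, bulkPayload, esAdapter, templateTxn),
+   *     esExecutor).join();
+   * }</pre>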
+   *
+   * @param updateUrl the update url
+   * @param updatePayload the update payload
+   * @param esAdapter the elastic search adapter
+   * @param transactionTracker the transaction tracker
+   */
+  public PerformElasticSearchUpdate(String updateUrl, String updatePayload,
+      ElasticSearchAdapter esAdapter, NetworkTransaction transactionTracker) {
+    this.updateUrl = updateUrl;
+    this.updatePayload = updatePayload;
+    this.esAdapter = esAdapter;
+    this.contextMap = MDC.getCopyOfContextMap();
+    this.operationTracker = new NetworkTransaction();
+    operationTracker.setEntityType(transactionTracker.getEntityType());
+    operationTracker.setDescriptor(transactionTracker.getDescriptor());
+    operationTracker.setOperationType(transactionTracker.getOperationType());
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see java.util.function.Supplier#get()
+   */
+  @Override
+  public NetworkTransaction get() {
+    operationTracker.setTaskAgeInMs();
+    MDC.setContextMap(contextMap);
+    long startTimeInMs = System.currentTimeMillis();
+    OperationResult or = esAdapter.doBulkOperation(updateUrl, updatePayload);
+    operationTracker.setOperationResult(or);
+    operationTracker.setOpTimeInMs(System.currentTimeMillis() - startTimeInMs);
+    return operationTracker;
+  }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformGizmoRetrieval.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformGizmoRetrieval.java
new file mode 100644
index 0000000..972b049
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/PerformGizmoRetrieval.java
@@ -0,0 +1,95 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.task;
+
+import java.util.Map;
+import java.util.function.Supplier;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.GizmoAdapter;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.slf4j.MDC;
+
+/*
+ * Consider abstracting these tasks into common elements, because most of them repeat a
+ * generic call flow pattern.
+ */
+
+/**
+ * The Class PerformGizmoRetrieval.
+ */
+public class PerformGizmoRetrieval implements Supplier<NetworkTransaction> {
+
+  private static Logger logger = LoggerFactory.getInstance().getLogger(PerformGizmoRetrieval.class);
+
+  private NetworkTransaction txn;
+  private GizmoAdapter gizmoAdapter;
+  private Map<String, String> contextMap;
+
+  /**
+   * Instantiates a new perform gizmo retrieval.
+   *
+   * @param txn the txn
+   * @param gizmoAdapter the gizmo adapter
+   */
+  public PerformGizmoRetrieval(NetworkTransaction txn,
+      GizmoAdapter gizmoAdapter) {
+    this.txn = txn;
+    this.gizmoAdapter = gizmoAdapter;
+    this.contextMap = MDC.getCopyOfContextMap();
+  }
+
+  /* (non-Javadoc)
+   * @see java.util.function.Supplier#get()
+   */
+  @Override
+  public NetworkTransaction get() {
+
+    txn.setTaskAgeInMs();
+
+    long startTimeInMs = System.currentTimeMillis();
+    MDC.setContextMap(contextMap);
+    OperationResult result = null;
+    try {
+      result = gizmoAdapter.queryGizmoWithRetries(txn.getLink(), "application/json", 5);
+    } catch (Exception exc) {
+      logger.error(AaiUiMsgs.ERROR_GENERIC, "Failure to resolve self link from Gizmo. Error = " + exc.getMessage());
+      result = new OperationResult(500,
+          "Caught an exception while trying to resolve link = " + exc.getMessage());
+    } finally {
+      txn.setOperationResult(result);
+      txn.setOpTimeInMs(System.currentTimeMillis() - startTimeInMs);
+    }
+
+    return txn;
+  }
+
+  protected void setContextMap(Map<String, String> contextMap) {
+    this.contextMap = contextMap;
+  }
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/StoreDocumentTask.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/StoreDocumentTask.java
new file mode 100644
index 0000000..f92ccd3
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/StoreDocumentTask.java
@@ -0,0 +1,90 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.onap.aai.sparky.sync.task;
+
+import java.util.Map;
+import java.util.function.Supplier;
+
+import javax.ws.rs.core.MediaType;
+
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.ElasticSearchAdapter;
+import org.onap.aai.sparky.dal.NetworkTransaction;
+import org.onap.aai.sparky.sync.entity.IndexDocument;
+import org.slf4j.MDC;
+
+/**
+ * The Class StoreDocumentTask.
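+ * <p>
+ * Illustrative usage sketch, following the pattern in GeoSynchronizer; {@code doc} (an
+ * IndexDocument implementation), {@code indexName}, {@code elasticSearchAdapter} and
+ * {@code esExecutor} are assumed to be supplied by the calling synchronizer:
+ * <pre>{@code
+ * NetworkTransaction storeTxn = new NetworkTransaction();
+ * storeTxn.setLink(elasticSearchAdapter.buildElasticSearchGetDocUrl(indexName, doc.getId()));
+ * storeTxn.setOperationType(HttpMethod.PUT);
+ * NetworkTransaction completed =
+ *     supplyAsync(new StoreDocumentTask(doc, storeTxn, elasticSearchAdapter), esExecutor).join();
+ * }</pre>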
+ */
+public class StoreDocumentTask implements Supplier<NetworkTransaction> {
+
+  private IndexDocument doc;
+
+  private NetworkTransaction txn;
+
+  private ElasticSearchAdapter esAdapter;
+  private Map<String, String> contextMap;
+
+  /**
+   * Instantiates a new store document task.
+   *
+   * @param doc the doc
+   * @param txn the txn
+   * @param esAdapter the elastic search adapter
+   */
+  public StoreDocumentTask(IndexDocument doc, NetworkTransaction txn,
+      ElasticSearchAdapter esAdapter) {
+    this.doc = doc;
+    this.txn = txn;
+    this.esAdapter = esAdapter;
+    this.contextMap = MDC.getCopyOfContextMap();
+  }
+
+  /* (non-Javadoc)
+   * @see java.util.function.Supplier#get()
+   */
+  @Override
+  public NetworkTransaction get() {
+    txn.setTaskAgeInMs();
+
+    long startTimeInMs = System.currentTimeMillis();
+    MDC.setContextMap(contextMap);
+    OperationResult operationResult = null;
+
+    try {
+
+      operationResult =
+          esAdapter.doPut(txn.getLink(), doc.getAsJson(), MediaType.APPLICATION_JSON_TYPE);
+      txn.setOpTimeInMs(System.currentTimeMillis() - startTimeInMs);
+    } catch (Exception exception) {
+      // the put failed before an OperationResult was produced; build one here rather than
+      // dereferencing the still-null local
+      operationResult = new OperationResult(500, exception.getMessage());
+    }
+
+    txn.setOperationResult(operationResult);
+
+    return txn;
+  }
+
+}
diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/SyncControllerTask.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/SyncControllerTask.java
new file mode 100644
index 0000000..92f3683
--- /dev/null
+++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/sync/task/SyncControllerTask.java
@@ -0,0 +1,55 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */ +package org.onap.aai.sparky.sync.task; + +import org.onap.aai.sparky.sync.SyncController; +import org.onap.aai.sparky.sync.SyncControllerImpl.SyncActions; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; + +public class SyncControllerTask implements Runnable { + + private SyncController controller; + + public SyncControllerTask(SyncController controller) { + this.controller = controller; + } + + @Override + public void run() { + + controller.performAction(SyncActions.SYNCHRONIZE); + + while (controller.getState() == SynchronizerState.PERFORMING_SYNCHRONIZATION) { + try { + Thread.sleep(1000); + } catch (InterruptedException e) { + // exit out of the sync-wait-loop + break; + } + } + + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/topology/sync/GeoSyncController.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/topology/sync/GeoSyncController.java new file mode 100644 index 0000000..93b6eb4 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/topology/sync/GeoSyncController.java @@ -0,0 +1,101 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.topology.sync; + +import org.onap.aai.sparky.config.oxm.GeoEntityLookup; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.sync.ElasticSearchIndexCleaner; +import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory; +import org.onap.aai.sparky.sync.IndexCleaner; +import org.onap.aai.sparky.sync.IndexIntegrityValidator; +import org.onap.aai.sparky.sync.SyncControllerImpl; +import org.onap.aai.sparky.sync.SyncControllerRegistrar; +import org.onap.aai.sparky.sync.SyncControllerRegistry; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.config.SyncControllerConfig; + +public class GeoSyncController extends SyncControllerImpl implements SyncControllerRegistrar { + + private SyncControllerRegistry syncControllerRegistry; + + public GeoSyncController(SyncControllerConfig syncControllerConfig, + ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter, + ElasticSearchSchemaConfig schemaConfig, ElasticSearchEndpointConfig endpointConfig, + NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig, + GeoEntityLookup geoEntityLookup, OxmEntityLookup oxmEntityLookup, + ElasticSearchSchemaFactory elasticSearchSchemaFactory) throws Exception { + super(syncControllerConfig); + + // final String controllerName = "Inventory Geo Synchronizer"; + + IndexIntegrityValidator indexValidator = new IndexIntegrityValidator(esAdapter, schemaConfig, + endpointConfig, elasticSearchSchemaFactory.getIndexSchema(schemaConfig)); + + registerIndexValidator(indexValidator); + + GeoSynchronizer synchronizer = + new GeoSynchronizer(schemaConfig, syncControllerConfig.getNumInternalSyncWorkers(), + syncControllerConfig.getNumSyncActiveInventoryWorkers(), + syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig, + geoEntityLookup, oxmEntityLookup); + + synchronizer.setAaiAdapter(aaiAdapter); + synchronizer.setElasticSearchAdapter(esAdapter); + + registerEntitySynchronizer(synchronizer); + + + IndexCleaner indexCleaner = + new ElasticSearchIndexCleaner(esAdapter, endpointConfig, schemaConfig); + + registerIndexCleaner(indexCleaner); + + } + + public SyncControllerRegistry getSyncControllerRegistry() { + return syncControllerRegistry; + } + + public void setSyncControllerRegistry(SyncControllerRegistry syncControllerRegistry) { + this.syncControllerRegistry = syncControllerRegistry; + } + + @Override + public void registerController() { + + if ( syncControllerRegistry != null ) { + if ( syncControllerConfig.isEnabled()) { + syncControllerRegistry.registerSyncController(this); + } + } + } + + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/topology/sync/GeoSynchronizer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/topology/sync/GeoSynchronizer.java new file mode 100644 index 0000000..fcc8a7a --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/topology/sync/GeoSynchronizer.java @@ -0,0 +1,487 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. 
+ * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.topology.sync; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Deque; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.function.Supplier; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.GeoEntityLookup; +import org.onap.aai.sparky.config.oxm.GeoOxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.dal.rest.HttpMethod; +import org.onap.aai.sparky.inventory.entity.GeoIndexDocument; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.AbstractEntitySynchronizer; +import org.onap.aai.sparky.sync.IndexSynchronizer; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.entity.SelfLinkDescriptor; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.onap.aai.sparky.sync.task.PerformActiveInventoryRetrieval; +import org.onap.aai.sparky.sync.task.StoreDocumentTask; +import org.onap.aai.sparky.util.NodeUtils; +import org.slf4j.MDC; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; + + +/** + * The Class GeoSynchronizer. + */ +public class GeoSynchronizer extends AbstractEntitySynchronizer implements IndexSynchronizer { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(GeoSynchronizer.class); + + private boolean allWorkEnumerated; + private Deque<SelfLinkDescriptor> selflinks; + private GeoEntityLookup geoEntityLookup; + private OxmEntityLookup oxmEntityLookup; + + private Map<String, GeoOxmEntityDescriptor> geoDescriptorMap = null; + + /** + * Instantiates a new geo synchronizer. 
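+   * <p>
+   * Construction sketch, as wired up by GeoSyncController; the AAI and ElasticSearch
+   * adapters are injected through setters after construction:
+   * <pre>{@code
+   * GeoSynchronizer synchronizer =
+   *     new GeoSynchronizer(schemaConfig, syncControllerConfig.getNumInternalSyncWorkers(),
+   *         syncControllerConfig.getNumSyncActiveInventoryWorkers(),
+   *         syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig,
+   *         geoEntityLookup, oxmEntityLookup);
+   * synchronizer.setAaiAdapter(aaiAdapter);
+   * synchronizer.setElasticSearchAdapter(esAdapter);
+   * }</pre>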
+ * + * @param indexName the index name + * @throws Exception the exception + */ + public GeoSynchronizer(ElasticSearchSchemaConfig schemaConfig, int internalSyncWorkers, + int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig, + NetworkStatisticsConfig esStatConfig, GeoEntityLookup geoEntityLookup, + OxmEntityLookup oxmEntityLookup) throws Exception { + + super(LOG, "GEO", internalSyncWorkers, aaiWorkers, esWorkers, schemaConfig.getIndexName(),aaiStatConfig, esStatConfig); + this.geoEntityLookup = geoEntityLookup; + this.oxmEntityLookup = oxmEntityLookup; + this.allWorkEnumerated = false; + this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>(); + this.synchronizerName = "Geo Synchronizer"; + this.geoDescriptorMap = geoEntityLookup.getGeoEntityDescriptors(); + this.aaiEntityStats.intializeEntityCounters(geoDescriptorMap.keySet()); + this.esEntityStats.intializeEntityCounters(geoDescriptorMap.keySet()); + this.syncDurationInMs = -1; + } + + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() + */ + @Override + public OperationState doSync() { + this.syncDurationInMs = -1; + resetCounters(); + setShouldSkipSync(false); + allWorkEnumerated = false; + syncStartedTimeStampInMs = System.currentTimeMillis(); + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "GeoSynchronizer", "", "Sync", ""); + + collectAllTheWork(); + return OperationState.OK; + } + + + /** + * Collect all the work. + * + * @return the operation state + */ + public OperationState collectAllTheWork() { + final Map<String,String> contextMap = MDC.getCopyOfContextMap(); + + if (geoDescriptorMap.isEmpty()) { + setShouldSkipSync(true); + LOG.error(AaiUiMsgs.OXM_FAILED_RETRIEVAL, "geo entities"); + return OperationState.ERROR; + } + + Collection<String> syncTypes = geoDescriptorMap.keySet(); + + try { + + /* + * launch a parallel async thread to process the documents for each entity-type (to max the of + * the configured executor anyway) + */ + + aaiWorkOnHand.set(syncTypes.size()); + + for (String key : syncTypes) { + + supplyAsync(new Supplier<Void>() { + + @Override + public Void get() { + MDC.setContextMap(contextMap); + OperationResult typeLinksResult = null; + try { + typeLinksResult = aaiAdapter.getSelfLinksByEntityType(key); + aaiWorkOnHand.decrementAndGet(); + processEntityTypeSelfLinks(typeLinksResult); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, exc); + } + + return null; + } + + }, aaiExecutor).whenComplete((result, error) -> { + + if (error != null) { + LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, error.getMessage()); + } + }); + + } + + while (aaiWorkOnHand.get() != 0) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); + } + + Thread.sleep(1000); + } + + aaiWorkOnHand.set(selflinks.size()); + allWorkEnumerated = true; + syncEntityTypes(); + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, exc); + } + return OperationState.OK; + } + + /** + * Sync entity types. 
+ */ + private void syncEntityTypes() { + + while (selflinks.peek() != null) { + + SelfLinkDescriptor linkDescriptor = selflinks.poll(); + aaiWorkOnHand.decrementAndGet(); + + OxmEntityDescriptor descriptor = null; + + if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { + + descriptor = oxmEntityLookup.getEntityDescriptors().get(linkDescriptor.getEntityType()); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); + // go to next element in iterator + continue; + } + + NetworkTransaction txn = new NetworkTransaction(); + txn.setDescriptor(descriptor); + txn.setLink(linkDescriptor.getSelfLink()); + txn.setOperationType(HttpMethod.GET); + txn.setEntityType(linkDescriptor.getEntityType()); + + aaiWorkOnHand.incrementAndGet(); + + supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiAdapter), aaiExecutor) + .whenComplete((result, error) -> { + + aaiWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, error.getMessage()); + } else { + if (result == null) { + LOG.error(AaiUiMsgs.SELF_LINK_GET_NO_RESPONSE, linkDescriptor.getSelfLink()); + } else { + processEntityTypeSelfLinkResult(result); + } + } + }); + } + } + } + + /** + * Process entity type self links. + * + * @param operationResult the operation result + */ + private void processEntityTypeSelfLinks(OperationResult operationResult) { + + JsonNode rootNode = null; + + final String jsonResult = operationResult.getResult(); + + if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { + + try { + rootNode = mapper.readTree(jsonResult); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, exc); + } + + JsonNode resultData = rootNode.get("result-data"); + ArrayNode resultDataArrayNode = null; + + if (resultData.isArray()) { + resultDataArrayNode = (ArrayNode) resultData; + + Iterator<JsonNode> elementIterator = resultDataArrayNode.elements(); + JsonNode element = null; + + while (elementIterator.hasNext()) { + element = elementIterator.next(); + + final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); + final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); + + if (resourceType != null && resourceLink != null) { + + if (geoDescriptorMap.containsKey(resourceType)) { + selflinks.add(new SelfLinkDescriptor(resourceLink + "?nodes-only", resourceType)); + } else { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); + // go to next element in iterator + continue; + } + + } + } + } + } + + } + + /** + * Process entity type self link result. 
+ * + * @param txn the txn + */ + private void processEntityTypeSelfLinkResult(NetworkTransaction txn) { + + updateActiveInventoryCounters(txn); + + if (!txn.getOperationResult().wasSuccessful()) { + return; + } + + GeoOxmEntityDescriptor descriptor = geoDescriptorMap.get(txn.getEntityType()); + + if ( descriptor == null ) { + return; + } + + try { + if (descriptor.hasGeoEntity()) { + + GeoIndexDocument geoDoc = new GeoIndexDocument(); + + final String jsonResult = txn.getOperationResult().getResult(); + + if (jsonResult != null && jsonResult.length() > 0) { + + populateGeoDocument(geoDoc, jsonResult, txn.getDescriptor(), txn.getLink()); + + if (!geoDoc.isValidGeoDocument()) { + + LOG.info(AaiUiMsgs.GEO_SYNC_IGNORING_ENTITY, geoDoc.getEntityType(), geoDoc.toString()); + + } else { + + String link = null; + try { + link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), geoDoc.getId()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc); + } + + if (link != null) { + + NetworkTransaction n2 = new NetworkTransaction(); + n2.setLink(link); + n2.setEntityType(txn.getEntityType()); + n2.setDescriptor(txn.getDescriptor()); + n2.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + + supplyAsync(new StoreDocumentTask(geoDoc, n2, elasticSearchAdapter), esExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_STORE_FAILURE, error.getMessage()); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result); + } + }); + } + } + } + } + } catch (JsonProcessingException exc) { + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc); + } + + return; + } + + + /** + * Process store document result. + * + * @param txn the txn + */ + private void processStoreDocumentResult(NetworkTransaction txn) { + + OperationResult or = txn.getOperationResult(); + + if (!or.wasSuccessful()) { + LOG.error(AaiUiMsgs.ES_STORE_FAILURE, or.toString()); + /* + * if(or.getResultCode() != 404 || (or.getResultCode() == 404 && + * !synchronizerConfig.isResourceNotFoundErrorsSupressed())) { logger.error( + * "Skipping failed resource = " + "link" + " RC=[" + or.getResultCode() + "]. Message: " + + * or.getResult()); } + */ + + } + + } + + + @Override + public SynchronizerState getState() { + + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) + */ + @Override + public String getStatReport(boolean showFinalReport) { + syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; + return this.getStatReport(syncDurationInMs, showFinalReport); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() + */ + @Override + public void shutdown() { + this.shutdownExecutors(); + } + + /** + * Populate geo document. + * + * @param doc the doc + * @param result the result + * @param resultDescriptor the result descriptor + * @param entityLink the entity link + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + protected void populateGeoDocument(GeoIndexDocument doc, String result, + OxmEntityDescriptor resultDescriptor, String entityLink) + throws JsonProcessingException, IOException { + + doc.setSelfLink(entityLink); + doc.setEntityType(resultDescriptor.getEntityName()); + + JsonNode entityNode = mapper.readTree(result); + + List<String> primaryKeyValues = new ArrayList<String>(); + String pkeyValue = null; + + for (String keyName : resultDescriptor.getPrimaryKeyAttributeNames()) { + pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName); + if (pkeyValue != null) { + primaryKeyValues.add(pkeyValue); + } else { + LOG.warn(AaiUiMsgs.ES_PKEYVALUE_NULL, resultDescriptor.getEntityName()); + } + } + + final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/"); + doc.setEntityPrimaryKeyValue(primaryCompositeKeyValue); + + GeoOxmEntityDescriptor descriptor = geoDescriptorMap.get(resultDescriptor.getEntityName()); + + String geoLatKey = descriptor.getGeoLatName(); + String geoLongKey = descriptor.getGeoLongName(); + + doc.setLatitude(NodeUtils.getNodeFieldAsText(entityNode, geoLatKey)); + doc.setLongitude(NodeUtils.getNodeFieldAsText(entityNode, geoLongKey)); + doc.deriveFields(); + + } + + @Override + protected boolean isSyncDone() { + if (shouldSkipSync()) { + syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; + return true; + } + + int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); + + if (totalWorkOnHand > 0 || !allWorkEnumerated) { + return false; + } + + return true; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/ConfigHelper.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/ConfigHelper.java new file mode 100644 index 0000000..cb6cc53 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/ConfigHelper.java @@ -0,0 +1,193 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.util; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.io.InputStream; +import java.util.Properties; +import java.util.Set; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; + +/** + * The Class ConfigHelper. 
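+ * <p>
+ * Example of prefix-scoped property extraction (the property key shown is hypothetical):
+ * <pre>{@code
+ * Properties all = new Properties();
+ * all.setProperty("synchronizer.syncTask.initialDelayInMs", "60000");
+ * Properties syncProps = ConfigHelper.getConfigWithPrefix("synchronizer.syncTask", all);
+ * String delay = syncProps.getProperty("initialDelayInMs"); // "60000"
+ * }</pre>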
+ */ +public class ConfigHelper { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(ConfigHelper.class); + + /** + * Gets the config with prefix. + * + * @param configPrefix the config prefix + * @param properties the properties + * @return the config with prefix + */ + public static Properties getConfigWithPrefix(String configPrefix, Properties properties) { + + /* + * The idea here is collect properties groups prefixed with the same origin + */ + + Set<Object> set = properties.keySet(); + Properties newProps = new Properties(); + + for (Object k : set) { + String ks = (String) k; + if (ks.startsWith(configPrefix)) { + + String temp = ks.replaceFirst(configPrefix + ".", ""); + newProps.setProperty(temp, properties.getProperty(ks)); + } + } + + return newProps; + } + + /** + * Load config. + * + * @param fileName the file name + * @return the properties + * @throws Exception the exception + */ + public static Properties loadConfig(String fileName) throws Exception { + + String basePath = System.getProperty("user.dir"); + InputStream fileInputStream = new FileInputStream(basePath + "//" + fileName); + + Properties props = new Properties(); + props.load(fileInputStream); + + return props; + } + + /** + * Load config from explicit path. + * + * @param fileName the file name + * @return the properties + */ + public static Properties loadConfigFromExplicitPath(String fileName) { + + Properties props = new Properties(); + + try { + InputStream fileInputStream = new FileInputStream(fileName); + props.load(fileInputStream); + } catch (Exception exc) { + LOG.warn(AaiUiMsgs.CONFIG_NOT_FOUND_VERBOSE, fileName, exc.getLocalizedMessage()); + } + + return props; + } + + /** + * Property fetch. + * + * @param config the config + * @param propName the prop name + * @param defaultValue the default value + * @return the string + */ + public static String propertyFetch(Properties config, String propName, String defaultValue) { + return config.getProperty(propName, defaultValue); + } + + public static boolean isEssDevModeEnabled() { + return Boolean.parseBoolean(System.getProperty("isEssDevMode", "false")); + } + + /** + * Gets the filepath. + * + * @param fileName the file name + * @param isRelativePath the is relative path + * @return the filepath + */ + public static String getFilepath(String fileName, boolean isRelativePath) { + + String filepath = null; + + if (isRelativePath) { + filepath = System.getProperty("user.dir") + "/" + fileName; + + } else { + filepath = fileName; + } + + return filepath; + + } + + /** + * Gets the file contents. + * + * @param fileName the file name + * @return the file contents + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + public static String getFileContents(String fileName) throws IOException { + + LOG.debug(AaiUiMsgs.FILE_READ_IN_PROGRESS, fileName); + + File file = new File(fileName); + + if (!file.exists()) { + throw new FileNotFoundException("Failed to load file = " + fileName); + } + + if (file.exists() && !file.isDirectory()) { + BufferedReader br = new BufferedReader(new FileReader(file)); + try { + StringBuilder sb = new StringBuilder(); + String line = br.readLine(); + + while (line != null) { + sb.append(line); + sb.append(System.lineSeparator()); + line = br.readLine(); + } + + return sb.toString(); + } finally { + br.close(); + } + } else { + LOG.warn(AaiUiMsgs.FILE_NOT_FOUND, fileName); + } + + return null; + + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/EncryptConvertor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/EncryptConvertor.java new file mode 100644 index 0000000..623ce38 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/EncryptConvertor.java @@ -0,0 +1,149 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.util; + +/** + * The Class EncryptConvertor. + */ +public class EncryptConvertor { + + private static final char[] HEX_CHARS = "0123456789abcdef".toCharArray(); + + /** + * toHexString(String) - convert a string into its hex equivalent. + * + * @param buf the buf + * @return the string + */ + public static final String toHexString(String buf) { + if (buf == null) { + return ""; + } + return toHexString(buf.getBytes()); + } + + /** + * toHexString(byte[]) - convert a byte-string into its hex equivalent. + * + * @param buf the buf + * @return the string + */ + public static final String toHexString(byte[] buf) { + + if (buf == null) { + return ""; + } + char[] chars = new char[2 * buf.length]; + for (int i = 0; i < buf.length; ++i) { + chars[2 * i] = HEX_CHARS[(buf[i] & 0xF0) >>> 4]; + chars[2 * i + 1] = HEX_CHARS[buf[i] & 0x0F]; + } + return new String(chars); + } + + /** + * Convert a hex string to its equivalent value. + * + * @param hexString the hex string + * @return the string + * @throws Exception the exception + */ + public static final String stringFromHex(String hexString) throws Exception { + if (hexString == null) { + return ""; + } + return stringFromHex(hexString.toCharArray()); + } + + /** + * String from hex. 
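+   * <p>
+   * Round-trip example with the companion {@code toHexString} helper:
+   * <pre>{@code
+   * String hex = EncryptConvertor.toHexString("my-password"); // "6d792d70617373776f7264"
+   * String clear = EncryptConvertor.stringFromHex(hex.toCharArray()); // "my-password"
+   * }</pre>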
+ * + * @param hexCharArray the hex char array + * @return the string + * @throws Exception the exception + */ + public static final String stringFromHex(char[] hexCharArray) throws Exception { + if (hexCharArray == null) { + return ""; + } + return new String(bytesFromHex(hexCharArray)); + } + + /** + * Bytes from hex. + * + * @param hexString the hex string + * @return the byte[] + * @throws Exception the exception + */ + public static final byte[] bytesFromHex(String hexString) throws Exception { + if (hexString == null) { + return new byte[0]; + } + return bytesFromHex(hexString.toCharArray()); + } + + /** + * Bytes from hex. + * + * @param hexCharArray the hex char array + * @return the byte[] + * @throws Exception the exception + */ + public static final byte[] bytesFromHex(char[] hexCharArray) throws Exception { + if (hexCharArray == null) { + return new byte[0]; + } + int len = hexCharArray.length; + if ((len % 2) != 0) { + throw new Exception("Odd number of characters: '" + String.valueOf(hexCharArray) + "'"); + } + byte[] txtInByte = new byte[len / 2]; + int counter = 0; + for (int i = 0; i < len; i += 2) { + txtInByte[counter++] = + (byte) (((fromHexDigit(hexCharArray[i], i) << 4) | fromHexDigit(hexCharArray[i + 1], i)) + & 0xFF); + } + return txtInByte; + } + + /** + * From hex digit. + * + * @param ch the ch + * @param index the index + * @return the int + * @throws Exception the exception + */ + protected static final int fromHexDigit(char ch, int index) throws Exception { + int digit = Character.digit(ch, 16); + if (digit == -1) { + throw new Exception("Illegal hex character '" + ch + "' at index " + index); + } + return digit; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/Encryptor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/Encryptor.java new file mode 100644 index 0000000..948df51 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/Encryptor.java @@ -0,0 +1,155 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.util; + +import org.apache.commons.cli.BasicParser; +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.CommandLineParser; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; +import org.eclipse.jetty.util.security.Password; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.logging.AaiUiMsgs; + +/** + * The Class Encryptor. + */ +public class Encryptor { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(Encryptor.class); + /** + * Instantiates a new encryptor. + */ + public Encryptor() { + } + + /** + * Encrypt value. + * + * @param value to encrypt + * @return the encrypted string + */ + public String encryptValue(String value) { + String encyptedValue = ""; + try { + encyptedValue = Password.obfuscate(value); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ENCRYPTION_ERROR, value, exc.toString()); + } + return encyptedValue; + } + + /** + * Decrypt value. + * + * @param value the value + * @return the string + */ + public String decryptValue(String value) { + String decyptedValue = ""; + try { + decyptedValue = Password.deobfuscate(value); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.DECRYPTION_ERROR, value, exc.toString()); + } + + return decyptedValue; + } + + /** + * Usage. + */ + public static void usage() { + usage(null); + } + + /** + * Usage. + * + * @param msg the msg + */ + public static void usage(String msg) { + if (msg != null) { + System.err.println(msg); + } + System.err.println("Usage: java Encryptor -e value"); + System.err.println("\tEncrypt the given value"); + System.err.println("Usage: java Encryptor -d value"); + System.err.println("\tDecrypt the given value"); + System.exit(1); + } + + /** + * The main method. + * + * @param args the arguments + */ + public static void main(String[] args) { + + Options options = new Options(); + options.addOption("d", true, "value to decrypt"); + options.addOption("h", false, "show help"); + options.addOption("?", false, "show help"); + + String value = null; + boolean encrypt = false; + boolean decrypt = false; + + CommandLineParser parser = new BasicParser(); + CommandLine cmd = null; + + try { + cmd = parser.parse(options, args); + + if (cmd.hasOption("d")) { + value = cmd.getOptionValue("d"); + decrypt = true; + } + + if (cmd.hasOption("?") || cmd.hasOption("h")) { + usage(); + System.exit(0); + } + + if ((encrypt && decrypt) || (!encrypt && !decrypt)) { + usage("Must specify one (and only one) of the -e or -d options"); + } + + Encryptor encryptor = new Encryptor(); + + if (decrypt) { + String out = encryptor.decryptValue(value); + System.out.println(out); + } + } catch (ParseException exc) { + System.out.println("Failed to parse command line properties: " + exc.toString()); + } catch (Exception exc) { + System.out.println("Failure: " + exc.toString()); + } + + System.exit(0); + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/ErrorUtil.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/ErrorUtil.java new file mode 100644 index 0000000..d2bea64 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/ErrorUtil.java @@ -0,0 +1,61 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. 
+ * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.util; + +/** + * The Class ErrorUtil. + */ +public class ErrorUtil { + + /** + * Extract stack trace elements. + * + * @param maxNumberOfElementsToCapture the max number of elements to capture + * @param exc the exc + * @return the string + */ + public static String extractStackTraceElements(int maxNumberOfElementsToCapture, Exception exc) { + StringBuilder sb = new StringBuilder(128); + + StackTraceElement[] stackTraceElements = exc.getStackTrace(); + + if (stackTraceElements != null) { + + /* + * We want to avoid an index out-of-bounds error, so we will make sure to only extract the + * number of frames from the stack trace that actually exist. + */ + + int numFramesToExtract = Math.min(maxNumberOfElementsToCapture, stackTraceElements.length); + + for (int x = 0; x < numFramesToExtract; x++) { + sb.append(stackTraceElements[x]).append("\n"); + } + + } + + return sb.toString(); + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/JsonXmlConverter.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/JsonXmlConverter.java new file mode 100644 index 0000000..af2e8ca --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/JsonXmlConverter.java @@ -0,0 +1,79 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.util; + +import org.json.JSONArray; +import org.json.JSONException; +import org.json.JSONObject; +import org.json.XML; + +/** + * The Class JsonXmlConverter. + */ +public class JsonXmlConverter { + + /** + * Checks if is valid json. 
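+   * <p>
+   * Typical guard before attempting a conversion ({@code payload} is an assumed input):
+   * <pre>{@code
+   * if (JsonXmlConverter.isValidJson(payload)) {
+   *   String xml = JsonXmlConverter.convertJsontoXml(payload);
+   * }
+   * }</pre>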
+ * + * @param text the text + * @return true, if is valid json + */ + public static boolean isValidJson(String text) { + try { + new JSONObject(text); + } catch (JSONException ex) { + try { + new JSONArray(text); + } catch (JSONException ex1) { + return false; + } + } + + return true; + } + + /** + * Convert jsonto xml. + * + * @param jsonText the json text + * @return the string + */ + public static String convertJsontoXml(String jsonText) { + JSONObject jsonObj = new JSONObject(jsonText); + String xmlText = XML.toString(jsonObj); + return xmlText; + } + + /** + * Convert xmlto json. + * + * @param xmlText the xml text + * @return the string + */ + public static String convertXmltoJson(String xmlText) { + JSONObject jsonObj = XML.toJSONObject(xmlText); + return jsonObj.toString(); + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/NodeUtils.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/NodeUtils.java new file mode 100644 index 0000000..68645e2 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/NodeUtils.java @@ -0,0 +1,896 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.util; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.lang.Thread.UncaughtExceptionHandler; +import java.net.URI; +import java.nio.ByteBuffer; +import java.security.SecureRandom; +import java.sql.Timestamp; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.TimeZone; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import javax.servlet.http.HttpServletRequest; +import javax.xml.stream.XMLStreamConstants; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.viewandinspect.config.SparkyConstants; +import org.restlet.Request; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectWriter; +import com.fasterxml.jackson.databind.SerializationFeature; +import com.fasterxml.jackson.databind.ser.FilterProvider; +import com.google.common.util.concurrent.ThreadFactoryBuilder; + + +/** + * The Class NodeUtils. + */ +public class NodeUtils { + private static SecureRandom sRandom = new SecureRandom(); + + private static final Pattern AAI_VERSION_PREFIX = Pattern.compile("/aai/v[0-9]+/(.*)"); + private static final Pattern GIZMO_VERSION_PREFIX = Pattern.compile("[/]*services/inventory/v[0-9]+/(.*)"); + private static final Pattern GIZMO_RELATIONSHIP_VERSION_PREFIX = Pattern.compile("services/inventory/relationships/v[0-9]+/(.*)"); + + + public static synchronized String getRandomTxnId(){ + byte bytes[] = new byte[6]; + sRandom.nextBytes(bytes); + return Integer.toUnsignedString(ByteBuffer.wrap(bytes).getInt()); + } + + /** + * Builds the depth padding. 
+ * + * @param depth the depth + * @return the string + */ + public static String buildDepthPadding(int depth) { + StringBuilder sb = new StringBuilder(32); + + for (int x = 0; x < depth; x++) { + sb.append(" "); + } + + return sb.toString(); + } + + + public static String extractRawPathWithoutVersion(String selfLinkUri) { + + try { + + String rawPath = new URI(selfLinkUri).getRawPath(); + + Matcher m = AAI_VERSION_PREFIX.matcher(rawPath); + + if (m.matches()) { + + if ( m.groupCount() >= 1) { + return m.group(1); + } + + } + } catch (Exception e) { + } + + return null; + + } + + public static String extractRawGizmoPathWithoutVersion(String resourceLink) { + + try { + + String rawPath = new URI(resourceLink).getRawPath(); + + Matcher m = GIZMO_VERSION_PREFIX.matcher(rawPath); + + if (m.matches()) { + + if ( m.groupCount() >= 1) { + return m.group(1); + } + + } + } catch (Exception e) { + } + + return null; + + } + + public static String extractRawGizmoRelationshipPathWithoutVersion(String resourceLink) { + + try { + + String rawPath = new URI(resourceLink).getRawPath(); + + Matcher m = GIZMO_RELATIONSHIP_VERSION_PREFIX.matcher(rawPath); + + if (m.matches()) { + + if ( m.groupCount() >= 1) { + return m.group(1); + } + + } + } catch (Exception e) { + } + + return null; + + } + + + + + /** + * Checks if is numeric. + * + * @param numberStr the number str + * @return true, if is numeric + */ + public static boolean isNumeric(String numberStr) { + + try { + Double.parseDouble(numberStr); + } catch (Exception exc) { + return false; + } + + return true; + + } + + /** + * Creates the named executor. + * + * @param name the name + * @param numWorkers the num workers + * @param logger the logger + * @return the executor service + */ + public static ExecutorService createNamedExecutor(String name, int numWorkers, final Logger logger) { + UncaughtExceptionHandler uncaughtExceptionHandler = new Thread.UncaughtExceptionHandler() { + + @Override + public void uncaughtException(Thread thread, Throwable exc) { + + logger.error(AaiUiMsgs.ERROR_GENERIC, thread.getName() + ": " + exc); + + } + }; + + ThreadFactory namedThreadFactory = new ThreadFactoryBuilder().setNameFormat(name + "-%d") + .setUncaughtExceptionHandler(uncaughtExceptionHandler).build(); + + return Executors.newScheduledThreadPool(numWorkers + 1, namedThreadFactory); + } + + + public static String calculateEditAttributeUri(String link) { + String uri = null; + + if (link != null) { + + Pattern pattern = Pattern.compile(SparkyConstants.URI_VERSION_REGEX_PATTERN); + Matcher matcher = pattern.matcher(link); + if (matcher.find()) { + uri = link.substring(matcher.end()); + } + } + return uri; + } + + + /** + * Generate unique sha digest. + * + * @param keys the keys + * @return the string + */ + public static String generateUniqueShaDigest(String... keys) { + + if ((keys == null) || keys.length == 0) { + return null; + } + + final String keysStr = Arrays.asList(keys).toString(); + final String hashedId = org.apache.commons.codec.digest.DigestUtils.sha256Hex(keysStr); + + return hashedId; + } + + /** + * Gets the node field as text. 
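A usage sketch for the self-link helpers above; the host name and resource path are hypothetical, chosen only so the /aai/vN/ prefix pattern applies.

import org.onap.aai.sparky.util.NodeUtils;

public class SelfLinkHelperExample {
  public static void main(String[] args) {
    // Hypothetical AAI self-link; only its path shape matters here.
    String selfLink =
        "https://aai.example.org:8443/aai/v13/network/generic-vnfs/generic-vnf/example-vnf";

    // Drops the "/aai/vNN/" prefix: "network/generic-vnfs/generic-vnf/example-vnf"
    String rawPath = NodeUtils.extractRawPathWithoutVersion(selfLink);

    // Deterministic SHA-256 hex digest of the supplied keys, usable as a document id.
    String docId = NodeUtils.generateUniqueShaDigest("generic-vnf", "vnf-name", "example-vnf");

    // Short random transaction id plus an indentation helper for depth-based logging.
    String txnId = NodeUtils.getRandomTxnId();
    System.out.println(NodeUtils.buildDepthPadding(1) + rawPath + " -> " + docId + " [" + txnId + "]");
  }
}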
+ * + * @param node the node + * @param fieldName the field name + * @return the node field as text + */ + public static String getNodeFieldAsText(JsonNode node, String fieldName) { + + String fieldValue = null; + + JsonNode valueNode = node.get(fieldName); + + if (valueNode != null) { + fieldValue = valueNode.asText(); + } + + return fieldValue; + } + + private static final String ENTITY_RESOURCE_KEY_FORMAT = "%s.%s"; + + /** + * Convert a millisecond duration to a string format + * + * @param millis A duration to convert to a string form + * @return A string of the form "X Days Y Hours Z Minutes A Seconds". + */ + + private static final String TIME_BREAK_DOWN_FORMAT = + "[ %d days, %d hours, %d minutes, %d seconds ]"; + + /** + * Gets the duration breakdown. + * + * @param millis the millis + * @return the duration breakdown + */ + public static String getDurationBreakdown(long millis) { + + if (millis < 0) { + return String.format(TIME_BREAK_DOWN_FORMAT, 0, 0, 0, 0); + } + + long days = TimeUnit.MILLISECONDS.toDays(millis); + millis -= TimeUnit.DAYS.toMillis(days); + long hours = TimeUnit.MILLISECONDS.toHours(millis); + millis -= TimeUnit.HOURS.toMillis(hours); + long minutes = TimeUnit.MILLISECONDS.toMinutes(millis); + millis -= TimeUnit.MINUTES.toMillis(minutes); + long seconds = TimeUnit.MILLISECONDS.toSeconds(millis); + + return String.format(TIME_BREAK_DOWN_FORMAT, days, hours, minutes, seconds); + + } + + /** + * Checks if is equal. + * + * @param n1 the n 1 + * @param n2 the n 2 + * @return true, if is equal + */ + public static boolean isEqual(JsonNode n1, JsonNode n2) { + + /* + * due to the inherent nature of json being unordered, comparing object representations of the + * same keys and values but different order makes comparison challenging. Let's try an + * experiment where we compare the structure of the json, and then simply compare the sorted + * order of that structure which should be good enough for what we are trying to accomplish. + */ + + TreeWalker walker = new TreeWalker(); + List<String> n1Paths = new ArrayList<String>(); + List<String> n2Paths = new ArrayList<String>(); + + walker.walkTree(n1Paths, n1); + walker.walkTree(n2Paths, n2); + + Collections.sort(n1Paths); + Collections.sort(n2Paths); + + return n1Paths.equals(n2Paths); + + } + + /** + * Concat array. + * + * @param list the list + * @return the string + */ + public static String concatArray(List<String> list) { + return concatArray(list, " "); + } + + private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; + + public static String getCurrentTimeStamp() { + SimpleDateFormat dateFormat = new SimpleDateFormat(TIMESTAMP_FORMAT); + Timestamp timestamp = new Timestamp(System.currentTimeMillis()); + return dateFormat.format(timestamp); + } + + /** + * Concat array. + * + * @param list the list + * @param delimiter the delimiter + * @return the string + */ + public static String concatArray(List<String> list, String delimiter) { + + if (list == null || list.size() == 0) { + return ""; + } + + StringBuilder result = new StringBuilder(64); + + boolean firstValue = true; + + for (String item : list) { + + if (firstValue) { + result.append(item); + firstValue = false; + } else { + result.append(delimiter).append(item); + } + + } + + return result.toString(); + + } + + /** + * Concat array. 
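A usage sketch for the comparison and formatting helpers above; the documents and the duration value are illustrative.

import java.util.Arrays;

import org.onap.aai.sparky.util.NodeUtils;

import com.fasterxml.jackson.databind.JsonNode;

public class CompareAndFormatExample {
  public static void main(String[] args) throws Exception {
    // Same content, different key order: isEqual() compares sorted path=value pairs,
    // so these two documents are reported as equal.
    JsonNode first = NodeUtils.convertJsonStrToJsonNode("{\"hostname\":\"h1\",\"in-maint\":false}");
    JsonNode second = NodeUtils.convertJsonStrToJsonNode("{\"in-maint\":false,\"hostname\":\"h1\"}");
    System.out.println(NodeUtils.isEqual(first, second));   // true

    // 90,061,000 ms -> "[ 1 days, 1 hours, 1 minutes, 1 seconds ]"
    System.out.println(NodeUtils.getDurationBreakdown(90_061_000L));

    // Delimiter overload: "complex.pserver.vserver"
    System.out.println(NodeUtils.concatArray(Arrays.asList("complex", "pserver", "vserver"), "."));
  }
}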
+ * + * @param values the values + * @return the string + */ + public static String concatArray(String[] values) { + + if (values == null || values.length == 0) { + return ""; + } + + StringBuilder result = new StringBuilder(64); + + boolean firstValue = true; + + for (String item : values) { + + if (firstValue) { + result.append(item); + firstValue = false; + } else { + result.append(".").append(item); + } + + } + + return result.toString(); + + } + + /** + * Builds the entity resource key. + * + * @param entityType the entity type + * @param resourceId the resource id + * @return the string + */ + public static String buildEntityResourceKey(String entityType, String resourceId) { + return String.format(ENTITY_RESOURCE_KEY_FORMAT, entityType, resourceId); + } + + /** + * Extract resource id from link. + * + * @param link the link + * @return the string + */ + public static String extractResourceIdFromLink(String link) { + + if (link == null) { + return null; + } + + int linkLength = link.length(); + if (linkLength == 0) { + return null; + } + + /* + * if the last character != / then we need to change the lastIndex position + */ + + int startIndex = 0; + String resourceId = null; + if ("/".equals(link.substring(linkLength - 1))) { + // Use-case: + // https://ext1.test.onap.com:9292/aai/v7/business/customers/customer/customer-1/service-subscriptions/service-subscription/service-subscription-1/ + startIndex = link.lastIndexOf("/", linkLength - 2); + resourceId = link.substring(startIndex + 1, linkLength - 1); + } else { + // Use-case: + // https://ext1.test.onap.com:9292/aai/v7/business/customers/customer/customer-1/service-subscriptions/service-subscription/service-subscription-1 + startIndex = link.lastIndexOf("/"); + resourceId = link.substring(startIndex + 1, linkLength); + } + + String result = null; + + if (resourceId != null) { + try { + result = java.net.URLDecoder.decode(resourceId, "UTF-8"); + } catch (Exception exc) { + /* + * if there is a failure decoding the parameter we will just return the original value. + */ + result = resourceId; + } + } + + return result; + + } + + /** + * Gets the xml stream constant as str. + * + * @param value the value + * @return the xml stream constant as str + */ + public static String getXmlStreamConstantAsStr(int value) { + switch (value) { + case XMLStreamConstants.ATTRIBUTE: + return "ATTRIBUTE"; + case XMLStreamConstants.CDATA: + return "CDATA"; + case XMLStreamConstants.CHARACTERS: + return "CHARACTERS"; + case XMLStreamConstants.COMMENT: + return "COMMENT"; + case XMLStreamConstants.DTD: + return "DTD"; + case XMLStreamConstants.END_DOCUMENT: + return "END_DOCUMENT"; + case XMLStreamConstants.END_ELEMENT: + return "END_ELEMENT"; + case XMLStreamConstants.ENTITY_DECLARATION: + return "ENTITY_DECLARATION"; + case XMLStreamConstants.ENTITY_REFERENCE: + return "ENTITY_REFERENCE"; + case XMLStreamConstants.NAMESPACE: + return "NAMESPACE"; + case XMLStreamConstants.NOTATION_DECLARATION: + return "NOTATION_DECLARATION"; + case XMLStreamConstants.PROCESSING_INSTRUCTION: + return "PROCESSING_INSTRUCTION"; + case XMLStreamConstants.SPACE: + return "SPACE"; + case XMLStreamConstants.START_DOCUMENT: + return "START_DOCUMENT"; + case XMLStreamConstants.START_ELEMENT: + return "START_ELEMENT"; + + default: + return "Unknown(" + value + ")"; + } + } + + /** + * Convert object to json. 
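A usage sketch for extractResourceIdFromLink and buildEntityResourceKey; the self-link is hypothetical and the %20 is there only to show the URL-decoding step.

import org.onap.aai.sparky.util.NodeUtils;

public class ResourceIdExample {
  public static void main(String[] args) {
    // Hypothetical self-link; trailing-slash and non-trailing-slash forms are both handled.
    String link =
        "https://aai.example.org:8443/aai/v13/cloud-infrastructure/pservers/pserver/example%20host";

    // Last path segment, URL-decoded: "example host"
    String resourceId = NodeUtils.extractResourceIdFromLink(link);

    // "<entityType>.<resourceId>": "pserver.example host"
    String entityKey = NodeUtils.buildEntityResourceKey("pserver", resourceId);

    System.out.println(resourceId);
    System.out.println(entityKey);
  }
}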
+ * + * @param object the object + * @param pretty the pretty + * @return the string + * @throws JsonProcessingException the json processing exception + */ + public static String convertObjectToJson(Object object, boolean pretty) + throws JsonProcessingException { + ObjectWriter ow = null; + + ObjectMapper mapper = new ObjectMapper(); + mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); + + if (pretty) { + ow = mapper.writer().withDefaultPrettyPrinter(); + + } else { + ow = mapper.writer(); + } + + return ow.writeValueAsString(object); + } + + /** + * Convert object to json by selectively choosing certain fields thru filters. + * Example use case: + * based on request type we might need to send different serialization of the UiViewFilterEntity + * + * @param object the object + * @param pretty the pretty + * @return the string + * @throws JsonProcessingException the json processing exception + */ + public static String convertObjectToJson(Object object, boolean pretty, FilterProvider filters) + throws JsonProcessingException { + ObjectWriter ow = null; + + ObjectMapper mapper = new ObjectMapper(); + mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); + + if (pretty) { + ow = mapper.writer(filters).withDefaultPrettyPrinter(); + + } else { + ow = mapper.writer(filters); + } + + return ow.writeValueAsString(object); + } + + + /** + * Convert json str to json node. + * + * @param jsonStr the json str + * @return the json node + * @throws IOException Signals that an I/O exception has occurred. + */ + public static JsonNode convertJsonStrToJsonNode(String jsonStr) throws IOException { + ObjectMapper mapper = new ObjectMapper(); + if (jsonStr == null || jsonStr.length() == 0) { + return null; + } + + return mapper.readTree(jsonStr); + } + + /** + * Convert object to xml. + * + * @param object the object + * @return the string + * @throws JsonProcessingException the json processing exception + */ + public static String convertObjectToXml(Object object) throws JsonProcessingException { + ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); + String jsonOutput = ow.writeValueAsString(object); + + if (jsonOutput == null) { + return null; + } + + return JsonXmlConverter.convertJsontoXml(jsonOutput); + + } + + /** + * Extract objects by key. 
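A usage sketch for the Jackson-based serialization helpers above; SearchHit is an invented bean, not a sparky type, and the link value is a made-up path.

import org.onap.aai.sparky.util.NodeUtils;

public class SerializationExample {

  // Invented bean; any POJO with getters serializes the same way.
  public static class SearchHit {
    private String entityType = "vserver";
    private String link = "/aai/v13/vservers/vserver/example-id";

    public String getEntityType() { return entityType; }
    public String getLink() { return link; }
  }

  public static void main(String[] args) throws Exception {
    SearchHit hit = new SearchHit();

    // Pretty-printed JSON; FAIL_ON_EMPTY_BEANS is disabled inside the helper.
    String json = NodeUtils.convertObjectToJson(hit, true);

    // Serialized to JSON first, then converted to XML through JsonXmlConverter.
    String xml = NodeUtils.convertObjectToXml(hit);

    System.out.println(json);
    System.out.println(xml);
  }
}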
+ * + * @param node the node + * @param searchKey the search key + * @param foundObjects the found objects + */ + public static void extractObjectsByKey(JsonNode node, String searchKey, + Collection<JsonNode> foundObjects) { + + if ( node == null ) { + return; + } + + if (node.isObject()) { + Iterator<Map.Entry<String, JsonNode>> nodeIterator = node.fields(); + + while (nodeIterator.hasNext()) { + Map.Entry<String, JsonNode> entry = nodeIterator.next(); + if (!entry.getValue().isValueNode()) { + extractObjectsByKey(entry.getValue(), searchKey, foundObjects); + } + + String name = entry.getKey(); + if (name.equalsIgnoreCase(searchKey)) { + + JsonNode entryNode = entry.getValue(); + + if (entryNode.isArray()) { + + Iterator<JsonNode> arrayItemsIterator = entryNode.elements(); + while (arrayItemsIterator.hasNext()) { + foundObjects.add(arrayItemsIterator.next()); + } + + } else { + foundObjects.add(entry.getValue()); + } + + + } + } + } else if (node.isArray()) { + Iterator<JsonNode> arrayItemsIterator = node.elements(); + while (arrayItemsIterator.hasNext()) { + extractObjectsByKey(arrayItemsIterator.next(), searchKey, foundObjects); + } + + } + } + + public static String extractObjectValueByKey(JsonNode node, String searchKey) { + + if (node == null) { + return null; + } + + if (node.isObject()) { + Iterator<Map.Entry<String, JsonNode>> nodeIterator = node.fields(); + + while (nodeIterator.hasNext()) { + Map.Entry<String, JsonNode> entry = nodeIterator.next(); + if (!entry.getValue().isValueNode()) { + return extractObjectValueByKey(entry.getValue(), searchKey); + } + + String name = entry.getKey(); + if (name.equalsIgnoreCase(searchKey)) { + + JsonNode entryNode = entry.getValue(); + + if (entryNode.isArray()) { + + Iterator<JsonNode> arrayItemsIterator = entryNode.elements(); + while (arrayItemsIterator.hasNext()) { + return arrayItemsIterator.next().asText(); + } + + } else { + return entry.getValue().asText(); + } + + + } + } + } else if (node.isArray()) { + Iterator<JsonNode> arrayItemsIterator = node.elements(); + while (arrayItemsIterator.hasNext()) { + return extractObjectValueByKey(arrayItemsIterator.next(), searchKey); + } + + } + + return null; + + } + + /** + * Convert array into list. + * + * @param node the node + * @param instances the instances + */ + public static void convertArrayIntoList(JsonNode node, Collection<JsonNode> instances) { + + if (node.isArray()) { + Iterator<JsonNode> arrayItemsIterator = node.elements(); + while (arrayItemsIterator.hasNext()) { + instances.add(arrayItemsIterator.next()); + } + + } else { + instances.add(node); + } + + } + + /** + * Extract field values from object. + * + * @param node the node + * @param attributesToExtract the attributes to extract + * @param fieldValues the field values + */ + public static void extractFieldValuesFromObject(JsonNode node, + Collection<String> attributesToExtract, Collection<String> fieldValues) { + + if (node == null) { + return; + } + + if (node.isObject()) { + + JsonNode valueNode = null; + + for (String attrToExtract : attributesToExtract) { + + valueNode = node.get(attrToExtract); + + if (valueNode != null) { + + if (valueNode.isValueNode()) { + fieldValues.add(valueNode.asText()); + } + } + } + } + } + + /** + * Extract field value from object. 
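A usage sketch for extractObjectsByKey and extractFieldValuesFromObject; the nested document is an invented, AAI-flavoured example.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;

import org.onap.aai.sparky.util.NodeUtils;

import com.fasterxml.jackson.databind.JsonNode;

public class ExtractByKeyExample {
  public static void main(String[] args) throws Exception {
    // Invented results document wrapping two entities.
    String json = "{ \"results\": ["
        + "  { \"vserver\": { \"vserver-id\": \"vs-1\" } },"
        + "  { \"vserver\": { \"vserver-id\": \"vs-2\" } }"
        + "] }";

    JsonNode root = NodeUtils.convertJsonStrToJsonNode(json);

    // Collects every object stored under a "vserver" key, at any depth.
    Collection<JsonNode> vservers = new ArrayList<>();
    NodeUtils.extractObjectsByKey(root, "vserver", vservers);

    // Pulls selected scalar fields out of each matched object.
    Collection<String> ids = new ArrayList<>();
    for (JsonNode vserver : vservers) {
      NodeUtils.extractFieldValuesFromObject(vserver, Arrays.asList("vserver-id"), ids);
    }
    System.out.println(ids);   // [vs-1, vs-2]
  }
}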
+ * + * @param node the node + * @param fieldName the field name + * @return the string + */ + public static String extractFieldValueFromObject(JsonNode node, String fieldName) { + + if (node == null) { + return null; + } + + if (node.isObject()) { + + JsonNode valueNode = node.get(fieldName); + + if (valueNode != null) { + + if (valueNode.isValueNode()) { + return valueNode.asText(); + } + } + + } + return null; + + } + + /** + * Format timestamp. + * + * @param timestamp the timestamp + * @return the string + */ + public static String formatTimestamp(String timestamp) { + try { + SimpleDateFormat originalFormat = new SimpleDateFormat("yyyyMMdd'T'HHmmss'Z'"); + originalFormat.setTimeZone(TimeZone.getTimeZone("UTC")); + Date toDate = originalFormat.parse(timestamp); + SimpleDateFormat newFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"); + newFormat.setTimeZone(TimeZone.getTimeZone("UTC")); + return newFormat.format(toDate); + + } catch (ParseException pe) { + return timestamp; + } + } + + /** + * Gets the HttpRequest payload. + * + * @param request the request + * @return the body + * @throws IOException Signals that an I/O exception has occurred. + */ + public static String getBody(HttpServletRequest request) throws IOException { + InputStream inputStream = request.getInputStream(); + return getBodyFromStream(inputStream); + } + + + + /** + * Gets the Restlet Request payload. + * + * @param request the request + * @return the body + * @throws IOException Signals that an I/O exception has occurred. + */ + public static String getBody(Request request) throws IOException { + InputStream inputStream = request.getEntity().getStream(); + return getBodyFromStream(inputStream); + } + + + /** + * Gets the payload from the input stream of a request. + * + * @param request the request + * @return the body + * @throws IOException Signals that an I/O exception has occurred. + */ + public static String getBodyFromStream(InputStream inputStream) throws IOException { + + String body = null; + StringBuilder stringBuilder = new StringBuilder(); + BufferedReader bufferedReader = null; + + try { + if (inputStream != null) { + bufferedReader = new BufferedReader(new InputStreamReader(inputStream)); + char[] charBuffer = new char[128]; + int bytesRead = -1; + while ((bytesRead = bufferedReader.read(charBuffer)) > 0) { + stringBuilder.append(charBuffer, 0, bytesRead); + } + } else { + stringBuilder.append(""); + } + } catch (IOException ex) { + throw ex; + } finally { + if (bufferedReader != null) { + try { + bufferedReader.close(); + } catch (IOException ex) { + throw ex; + } + } + } + + body = stringBuilder.toString(); + return body; + } + + + /** + * The main method. 
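A usage sketch for getBodyFromStream and formatTimestamp; in the service the stream comes from a servlet or Restlet request, so a ByteArrayInputStream stands in for it here.

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.onap.aai.sparky.util.NodeUtils;

public class RequestBodyExample {
  public static void main(String[] args) throws Exception {
    // Stand-in for request.getInputStream() / request.getEntity().getStream().
    InputStream body =
        new ByteArrayInputStream("{\"hashId\":\"example-hash\"}".getBytes(StandardCharsets.UTF_8));
    System.out.println(NodeUtils.getBodyFromStream(body));

    // Compact AAI timestamp -> "2017-01-10T11:23:12Z"
    System.out.println(NodeUtils.formatTimestamp("20170110T112312Z"));
  }
}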
+ * + * @param args the arguments + * @throws ParseException the parse exception + */ + public static void main(String[] args) throws ParseException { + String date = "20170110T112312Z"; + SimpleDateFormat originalFormat = new SimpleDateFormat("yyyyMMdd'T'hhmmss'Z'"); + Date toDate = originalFormat.parse(date); + SimpleDateFormat newFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss'Z'"); + System.out.println(newFormat.format(toDate)); + + } + + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/RawByteHelper.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/RawByteHelper.java new file mode 100644 index 0000000..99166ca --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/RawByteHelper.java @@ -0,0 +1,176 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.util; + +/** + * The Class RawByteHelper. + */ +public class RawByteHelper { + private static final byte[] HEX_CHAR = + new byte[] {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'}; + + /** + * Dump bytes. + * + * @param buffer the buffer + * @return the string + */ + /* + * TODO -> DOCUMENT ME! + * + * @param buffer DOCUMENT ME! + * + * @return DOCUMENT ME! + */ + public static String dumpBytes(byte[] buffer) { + if (buffer == null) { + return ""; + } + String newLine = System.getProperty("line.separator"); + StringBuffer sb = new StringBuffer(); + + for (int i = 0; i < buffer.length; i++) { + if (i != 0 && i % 16 == 0) { + sb.append(newLine); + } + // sb.append("0x").append((char) (HEX_CHAR[(buffer[i] & 0x00F0) >> 4])).append((char) + // (HEX_CHAR[buffer[i] & 0x000F])).append(" "); + sb.append((char) (HEX_CHAR[(buffer[i] & 0x00F0) >> 4])) + .append((char) (HEX_CHAR[buffer[i] & 0x000F])).append(" "); + } + + return sb.toString(); + } + + // if you're trying to figure out why or's w/ FF's see: + /** + * Bytes to int. + * + * @param one the one + * @param two the two + * @param three the three + * @param four the four + * @return the int + */ + // http://www.darksleep.com/player/JavaAndUnsignedTypes.html + public static int bytesToInt(byte one, byte two, byte three, byte four) { + return (((0xFF & one) << 24) | ((0xFF & two) << 16) | ((0xFF & three) << 8) | ((0xFF & four))); + } + + /** + * Bytes to short. 
+ * + * @param one the one + * @param two the two + * @return the short + */ + public static short bytesToShort(byte one, byte two) { + return (short) (((0xFF & one) << 8) | (0xFF & two)); + } + + /** + * First byte. + * + * @param num the num + * @return the byte + */ + // short helper functions + static byte firstByte(short num) { + return (byte) ((num >> 8) & 0xFF); + } + + /** + * First byte. + * + * @param num the num + * @return the byte + */ + // Int helper functions + static byte firstByte(int num) { + return (byte) ((num >> 24) & 0xFF); + } + + /** + * Second byte. + * + * @param num the num + * @return the byte + */ + static byte secondByte(short num) { + return (byte) (num & 0xFF); + } + + /** + * Second byte. + * + * @param num the num + * @return the byte + */ + static byte secondByte(int num) { + return (byte) ((num >> 16) & 0xFF); + } + + /** + * Third byte. + * + * @param num the num + * @return the byte + */ + static byte thirdByte(int num) { + return (byte) ((num >> 8) & 0xFF); + } + + /** + * Fourth byte. + * + * @param num the num + * @return the byte + */ + static byte fourthByte(int num) { + return (byte) (num & 0xFF); + } + + /** + * Int to byte. + * + * @param value the value + * @return the byte + */ + public static byte intToByte(int value) { + return fourthByte(value); + } + + /** + * Int to short. + * + * @param value the value + * @return the short + */ + public static short intToShort(int value) { + return (short) ((value & 0xFF00) | (value & 0xFF)); + } + +} + diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/RestletUtils.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/RestletUtils.java new file mode 100644 index 0000000..26dbf62 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/RestletUtils.java @@ -0,0 +1,119 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
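A usage sketch for the public RawByteHelper conversions above; the byte and integer values are arbitrary.

import org.onap.aai.sparky.util.RawByteHelper;

public class RawByteHelperExample {
  public static void main(String[] args) {
    byte[] buffer = new byte[] { 0x00, 0x00, 0x01, (byte) 0xFF };

    // Hex dump, 16 bytes per row: "00 00 01 FF "
    System.out.println(RawByteHelper.dumpBytes(buffer));

    // Big-endian reassembly: 0x000001FF == 511
    System.out.println(RawByteHelper.bytesToInt(buffer[0], buffer[1], buffer[2], buffer[3]));

    // Two-byte variant: 0x01FF == 511 as a short
    System.out.println(RawByteHelper.bytesToShort((byte) 0x01, (byte) 0xFF));

    // Truncation helpers keep the low-order byte / low two bytes of an int.
    System.out.println(RawByteHelper.intToByte(0x1234));    // 52   (0x34)
    System.out.println(RawByteHelper.intToShort(0x12345));  // 9029 (0x2345)
  }
}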
+ */ +package org.onap.aai.sparky.util; + +import javax.servlet.http.HttpServletResponse; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.search.SearchServiceAdapter; +import org.restlet.Response; +import org.restlet.data.MediaType; +import org.restlet.data.Status; + +public class RestletUtils { + /** + * Returns an HttpServletResponse based on values from a Restlet Response + * + * @param restletResponse Restlet Response to be converted to an HttpServletResponse + * @return An HttpServletResponse object built from the values of a Restlet Response + */ + public HttpServletResponse convertRestletResponseToHttpServletResponse(Response restletResponse) { + return org.restlet.ext.servlet.ServletUtils.getResponse(restletResponse); + } + + /** + * Execute post query + * + * @param logger The logger + * @param search The searchAdapter + * @param response The response + * @param requestUrl The request URL + * @param requestJsonPayload The request JSON payload + * @return The operation result + */ + public OperationResult executePostQuery(Logger logger, SearchServiceAdapter search, + Response response, String requestUrl, String requestJsonPayload) { + + OperationResult opResult = search.doPost(requestUrl, requestJsonPayload, "application/json"); + + if (opResult.getResultCode() > 300) { + setRestletResponse(logger, true, opResult.getResultCode(), response, opResult.getResult()); + } else { + response.setStatus(new Status(opResult.getResultCode())); + } + + return opResult; + } + + /** + * Generate JSON error response + * + * @param message The error message + * @return The error message formatted as a JSON string + */ + public String generateJsonErrorResponse(String message) { + return String.format("{ \"errorMessage\" : \"%s\" }", message); + } + + /** + * Log Restlet exceptions/errors & prepare Response object with exception/errors info + * + * @param logger The logger + * @param errorMsg The error message + * @param exc The exception + * @param response The response + */ + public void handleRestletErrors(Logger logger, String errorMsg, Exception exc, + Response response) { + String errorLogMsg = (exc == null ? errorMsg : errorMsg + ". 
Error:" + exc.getLocalizedMessage()); + logger.error(AaiUiMsgs.ERROR_GENERIC, errorLogMsg); + response.setEntity(generateJsonErrorResponse(errorMsg), MediaType.APPLICATION_JSON); + } + + /** + * Sets the Restlet response + * + * @param logger The logger + * @param isError The error + * @param responseCode The response code + * @param response The response + * @param postPayload The post payload + */ + public void setRestletResponse(Logger logger, boolean isError, int responseCode, + Response response, String postPayload) { + + if (isError) { + logger.error(AaiUiMsgs.ERROR_GENERIC, postPayload); + } + + response.setStatus(new Status(responseCode)); + + if (postPayload != null) { + response.setEntity(postPayload, MediaType.APPLICATION_JSON); + } + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/SuggestionsPermutation.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/SuggestionsPermutation.java new file mode 100644 index 0000000..05f6996 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/SuggestionsPermutation.java @@ -0,0 +1,100 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.util; + +import java.util.ArrayList; +import java.util.List; + +public class SuggestionsPermutation { + + /* + * Will return all the unique combinations of the suggestions provided. The order of the + * permutation is not taken into account when computing the uniqueness. 
eg: A list of A,B,C,D will + * return [[A], [A, B, C, D], [A, C, D], [A, D], [B], [B, C, D], [B, D], [C], [C, D], [D]] + * + * @param list The list to create the unique permutations + * + * @return A Arraylist which contains a array list of all possible combinations + */ + public static ArrayList<ArrayList<String>> getUniqueListForSuggestions( + List<String> originalList) { + ArrayList<ArrayList<String>> lists = new ArrayList<ArrayList<String>>(); + if (originalList.isEmpty()) { + lists.add(new ArrayList<String>()); + return lists; + } + List<String> list = new ArrayList<String>(originalList); + String head = list.get(0); + ArrayList<String> rest = new ArrayList<String>(list.subList(1, list.size())); + + for (ArrayList<String> activeList : getUniqueListForSuggestions(rest)) { + ArrayList<String> newList = new ArrayList<String>(); + newList.add(head); + newList.addAll(activeList); + lists.add(newList); + lists.add(activeList); + } + return lists; + } + + public static ArrayList<ArrayList<String>> getNonEmptyUniqueLists(List<String> list){ + ArrayList<ArrayList<String>> lists = getUniqueListForSuggestions(list); + // remove empty list from the power set + for (ArrayList<String> emptyList : lists ){ + if ( emptyList.isEmpty() ) { + lists.remove(emptyList); + break; + } + } + return lists; + } + + public static List<List<String>> getListPermutations(List<String> list) { + List<String> inputList = new ArrayList<String>(); + inputList.addAll(list); + if (inputList.size() == 0) { + List<List<String>> result = new ArrayList<List<String>>(); + result.add(new ArrayList<String>()); + return result; + } + + List<List<String>> listOfLists = new ArrayList<List<String>>(); + + String firstElement = inputList.remove(0); + + List<List<String>> recursiveReturn = getListPermutations(inputList); + for (List<String> li : recursiveReturn) { + + for (int index = 0; index <= li.size(); index++) { + List<String> temp = new ArrayList<String>(li); + temp.add(index, firstElement); + listOfLists.add(temp); + } + + } + return listOfLists; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/TreeWalker.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/TreeWalker.java new file mode 100644 index 0000000..d8bb7b9 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/util/TreeWalker.java @@ -0,0 +1,136 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
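A usage sketch for SuggestionsPermutation; the attribute names are placeholders.

import java.util.Arrays;
import java.util.List;

import org.onap.aai.sparky.util.SuggestionsPermutation;

public class SuggestionsPermutationExample {
  public static void main(String[] args) {
    List<String> attributes = Arrays.asList("A", "B", "C");

    // The 7 non-empty subsets of [A, B, C]; subset order is implementation-defined.
    System.out.println(SuggestionsPermutation.getNonEmptyUniqueLists(attributes));

    // All orderings of the input: 3! == 6 permutations.
    System.out.println(SuggestionsPermutation.getListPermutations(attributes).size());
  }
}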
+ */ +package org.onap.aai.sparky.util; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.Lists; + +/** + * The Class TreeWalker. + */ +public class TreeWalker { + + /** + * Convert json to node. + * + * @param json the json + * @return the json node + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. + */ + public JsonNode convertJsonToNode(String json) throws JsonProcessingException, IOException { + ObjectMapper mapper = new ObjectMapper(); + + if (json == null) { + return null; + } + + return mapper.readTree(json); + + } + + /** + * Walk tree. + * + * @param paths the paths + * @param root the root + */ + public void walkTree(List<String> paths, JsonNode root) { + walker(paths, null, root); + } + + /** + * Walker. + * + * @param paths the paths + * @param nodename the nodename + * @param node the node + */ + private void walker(List<String> paths, String nodename, JsonNode node) { + + if (node == null) { + return; + } + + /* + * if ( nodename != null ) { paths.add(nodename); } + */ + + // System.out.println("path: " + nameToPrint); + if (node.isObject()) { + Iterator<Map.Entry<String, JsonNode>> iterator = node.fields(); + + ArrayList<Map.Entry<String, JsonNode>> nodesList = Lists.newArrayList(iterator); + // System.out.println("Walk Tree - root:" + node + ", elements + // keys:" + nodesList); + + if (nodesList.isEmpty()) { + + if (nodename != null) { + paths.add(nodename); + } + + } else { + + for (Map.Entry<String, JsonNode> nodEntry : nodesList) { + String name = nodEntry.getKey(); + JsonNode newNode = nodEntry.getValue(); + + if (newNode.isValueNode()) { + if (nodename == null) { + paths.add(name + "=" + newNode.asText()); + } else { + paths.add(nodename + "." + name + "=" + newNode.asText()); + } + } else { + + if (nodename == null) { + walker(paths, name, newNode); + } else { + walker(paths, nodename + "." + name, newNode); + } + } + + } + } + } else if (node.isArray()) { + Iterator<JsonNode> arrayItemsIterator = node.elements(); + ArrayList<JsonNode> arrayItemsList = Lists.newArrayList(arrayItemsIterator); + for (JsonNode arrayNode : arrayItemsList) { + walker(paths, nodename, arrayNode); + } + } else if (node.isValueNode()) { + paths.add(nodename + "=" + node.asText()); + } + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/EntityTypeAggregation.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/EntityTypeAggregation.java new file mode 100644 index 0000000..c6d4666 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/EntityTypeAggregation.java @@ -0,0 +1,61 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
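A usage sketch for TreeWalker, the helper NodeUtils.isEqual builds on; the document is illustrative.

import java.util.ArrayList;
import java.util.List;

import org.onap.aai.sparky.util.TreeWalker;

import com.fasterxml.jackson.databind.JsonNode;

public class TreeWalkerExample {
  public static void main(String[] args) throws Exception {
    TreeWalker walker = new TreeWalker();

    JsonNode root = walker.convertJsonToNode(
        "{ \"pserver\": { \"hostname\": \"example-host\", \"in-maint\": false } }");

    // Flattens the tree into dotted path=value strings:
    // [pserver.hostname=example-host, pserver.in-maint=false]
    List<String> paths = new ArrayList<>();
    walker.walkTree(paths, root);
    System.out.println(paths);
  }
}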
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect; + +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; + + + + +/** + * The Class EntityTypeAggregation. + */ +public class EntityTypeAggregation { + + private ConcurrentHashMap<String, AtomicInteger> counters; + + /** + * Instantiates a new entity type aggregation. + */ + public EntityTypeAggregation() { + counters = new ConcurrentHashMap<String, AtomicInteger>(); + } + + /** + * Peg counter. + * + * @param counterName the counter name + */ + public void pegCounter(String counterName) { + counters.putIfAbsent(counterName, new AtomicInteger(0)); + counters.get(counterName).incrementAndGet(); + } + + public ConcurrentHashMap<String, AtomicInteger> getCounters() { + return counters; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/SchemaVisualizationProcessor.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/SchemaVisualizationProcessor.java new file mode 100644 index 0000000..0133c9d --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/SchemaVisualizationProcessor.java @@ -0,0 +1,99 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
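A usage sketch for EntityTypeAggregation; the entity-type names are examples.

import org.onap.aai.sparky.viewandinspect.EntityTypeAggregation;

public class EntityTypeAggregationExample {
  public static void main(String[] args) {
    EntityTypeAggregation aggregation = new EntityTypeAggregation();

    // pegCounter() lazily creates each counter and increments it atomically,
    // so concurrent callers can share one aggregation instance.
    aggregation.pegCounter("pserver");
    aggregation.pegCounter("pserver");
    aggregation.pegCounter("vserver");

    // Counts: pserver=2, vserver=1 (map ordering is not guaranteed).
    System.out.println(aggregation.getCounters());
  }
}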
+ */ +package org.onap.aai.sparky.viewandinspect; + + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.camel.Exchange; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.logging.util.ServletUtils; +import org.onap.aai.sparky.viewandinspect.entity.QueryRequest; +import org.onap.aai.sparky.viewandinspect.services.VisualizationService; +import org.restlet.data.Status; + +public class SchemaVisualizationProcessor { + + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(SchemaVisualizationProcessor.class); + + private VisualizationService visualizationService; + + public SchemaVisualizationProcessor()throws Exception{} + + protected String generateJsonErrorResponse(String message) { + return String.format("{ \"errorMessage\" : %s }", message); + } + + public void setVisualizationService(VisualizationService visualizationService){ + this.visualizationService = visualizationService; + } + public VisualizationService getVisualizationService(){ + return visualizationService; + } + + public void processVisualizationRequest(Exchange exchange) { + + HttpServletRequest request = exchange.getIn().getBody(HttpServletRequest.class); + ServletUtils.setUpMdcContext(exchange, request); + + QueryRequest hashId = null; + OperationResult operationResult = null; + + String visualizationPayload = exchange.getIn().getBody(String.class); + hashId = this.getVisualizationService().analyzeQueryRequestBody(visualizationPayload); + + if (hashId != null) { + + operationResult = this.getVisualizationService().buildVisualizationUsingGenericQuery(hashId); + + if (operationResult.getResultCode() != Status.SUCCESS_OK.getCode()) { + exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, operationResult.getResultCode()); + LOG.error(AaiUiMsgs.FAILURE_TO_PROCESS_REQUEST, String + .format("Failed to process Visualization Schema Payload = '%s'", visualizationPayload)); + return; + } + + } else { + operationResult = new OperationResult(); + operationResult.setResult(String + .format("Failed to analyze Visualization Schema Payload = '%s'", visualizationPayload)); + exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, operationResult.getResultCode()); + LOG.error(AaiUiMsgs.FAILED_TO_ANALYZE, String + .format("Failed to analyze Visualization Schema Payload = '%s'", visualizationPayload)); + return; + + } + + exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, operationResult.getResultCode()); + exchange.getOut().setBody(operationResult.getResult()); + + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/SparkyConstants.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/SparkyConstants.java new file mode 100644 index 0000000..5624b20 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/SparkyConstants.java @@ -0,0 +1,102 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
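A sketch of how SchemaVisualizationProcessor might be attached to a Camel route; the endpoint URI, route id, and constructor wiring below are illustrative assumptions, not the route definition this patch actually ships.

import org.apache.camel.builder.RouteBuilder;
import org.onap.aai.sparky.viewandinspect.SchemaVisualizationProcessor;

public class VisualizationRouteSketch extends RouteBuilder {

  private final SchemaVisualizationProcessor processor;

  // Assumes the processor already had a VisualizationService injected via setVisualizationService().
  public VisualizationRouteSketch(SchemaVisualizationProcessor processor) {
    this.processor = processor;
  }

  @Override
  public void configure() {
    // Hypothetical servlet endpoint; the real sparky routes are declared elsewhere.
    from("servlet:///visualization/prepareVisualization?httpMethodRestrict=POST")
        .routeId("schema-visualization-sketch")
        .process(processor::processVisualizationRequest);
  }
}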
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.config; + +/** + * The Class TierSupportUiConstants. + */ +public class SparkyConstants { + + public static String APP_NAME = "AAIUI"; + + /** Default to unix file separator if system property file.separator is null */ + public static final String FILESEP = + (System.getProperty("file.separator") == null) ? "/" : System.getProperty("file.separator"); + + public static String CONFIG_HOME = System.getProperty("CONFIG_HOME") + FILESEP; + public static String DYNAMIC_CONFIG_APP_LOCATION = CONFIG_HOME; + + public static String CONFIG_OXM_LOCATION = CONFIG_HOME + "model" + FILESEP; + public static String CONFIG_FILTERS_BASE_LOCATION = CONFIG_HOME + FILESEP; + //public static String CONFIG_AUTH_LOCATION = CONFIG_HOME + "auth" + FILESEP; + + public static String HOST = "host"; + public static String IP_ADDRESS = "ipAddress"; + public static String PORT = "port"; + public static String HTTP_PORT = "httpPort"; + public static String RETRIES = "numRequestRetries"; + public static String RESOURCE_VERSION = "resource-version"; + public static String URI = "URI"; + + public static String AUTHORIZED_USERS_FILE_LOCATION = DYNAMIC_CONFIG_APP_LOCATION + "authorized-users.config"; + public static String USERS_FILE_LOCATION = DYNAMIC_CONFIG_APP_LOCATION + "users.config"; + public static String ROLES_FILE_LOCATION = DYNAMIC_CONFIG_APP_LOCATION + "roles.config"; + public static String PORTAL_AUTHENTICATION_FILE_LOCATION = DYNAMIC_CONFIG_APP_LOCATION + "portal" + FILESEP + "portal-authentication.properties"; + + // Related to data-router properties + public static String DR_URI_SUFFIX = "uriSuffix"; + public static String DR_CERT_NAME = "cert-name"; + public static String DR_KEYSTORE_PASSWORD = "keystore-password"; + public static String DR_KEYSTORE = "keystore"; + public static String DR_CONNECT_TIMEOUT = "connectTimeoutMs"; + public static String DR_READ_TIMEOUT = "readTimeoutMs"; + + public static final String APP_JSON = "application/json"; + + public static final String ES_SUGGEST_API = "_suggest"; + public static final String ES_COUNT_API = "_count"; + public static final String ES_SEARCH_API = "_search"; + + public static final String UI_FILTER_VIEW_NAME_PARAMETER = "viewName"; + public static final String UI_FILTER_ID_LIST_PARAMETER = "filterIdList"; + + public static final String ENTITY_AUTO_SUGGEST_INDEX_NAME_DEFAULT = + "entityautosuggestindex-localhost"; + public static final String ENTITY_AUTO_SUGGEST_SETTINGS_FILE_DEFAULT = + "/etc/autoSuggestSettings.json"; + public static final String ENTITY_AUTO_SUGGEST_MAPPINGS_FILE_DEFAULT = + "/etc/autoSuggestMappings.json"; + public static final String ENTITY_DYNAMIC_MAPPINGS_FILE_DEFAULT = + 
"/etc/dynamicMappings.json"; + /*public static final String FILTER_LIST_FILE_DEFAULT = + CONFIG_FILTERS_BASE_LOCATION + "filters" + FILESEP + "aaiui_filters.json"; + public static final String FILTER_MAPPING_FILE_DEFAULT = + CONFIG_FILTERS_BASE_LOCATION + "filters" + FILESEP + "aaiui_views.json";*/ + + public static final String SUBSCRIPTION_OI_MAPPING = + CONFIG_FILTERS_BASE_LOCATION + "subscription_object_inspector_mapping.json"; + + public static final String SUGGESTION_TEXT_SEPARATOR = " -- "; + + // Injected Attributes + public static String URI_ATTR_NAME = "uri"; + + public static final String URI_VERSION_REGEX_PATTERN = "aai/v[\\d]+/"; + + public static final String getConfigPath(String configFile){ + return CONFIG_HOME + FILESEP + configFile; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/TierSupportUiConstants.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/TierSupportUiConstants.java new file mode 100644 index 0000000..e0cc9c6 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/TierSupportUiConstants.java @@ -0,0 +1,102 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.config; + +/** + * The Class TierSupportUiConstants. + */ +public class TierSupportUiConstants { + + public static String APP_NAME = "AAIUI"; + + /** Default to unix file separator if system property file.separator is null */ + public static final String FILESEP = + (System.getProperty("file.separator") == null) ? 
"/" : System.getProperty("file.separator"); + + public static String CONFIG_HOME = System.getProperty("CONFIG_HOME") + FILESEP; + public static String AJSC_HOME = System.getProperty("AJSC_HOME") + FILESEP; + public static String CONFIG_ROOT_LOCATION = + AJSC_HOME + "bundleconfig" + FILESEP + "etc" + FILESEP; + public static String STATIC_CONFIG_APP_LOCATION = CONFIG_ROOT_LOCATION + "appprops" + FILESEP; + public static String DYNAMIC_CONFIG_APP_LOCATION = CONFIG_HOME; + + public static String CONFIG_OXM_LOCATION = CONFIG_HOME + "model" + FILESEP; + public static String CONFIG_FILTERS_BASE_LOCATION = CONFIG_HOME + FILESEP; + public static String CONFIG_AUTH_LOCATION = CONFIG_HOME + "auth" + FILESEP; + + public static String HOST = "host"; + public static String IP_ADDRESS = "ipAddress"; + public static String PORT = "port"; + public static String HTTP_PORT = "httpPort"; + public static String RETRIES = "numRequestRetries"; + public static String RESOURCE_VERSION = "resource-version"; + public static String URI = "URI"; + + public static String AUTHORIZED_USERS_FILE_LOCATION = + DYNAMIC_CONFIG_APP_LOCATION + "authorized-users.config"; + public static String USERS_FILE_LOCATION = DYNAMIC_CONFIG_APP_LOCATION + "users.config"; + public static String ROLES_FILE_LOCATION = DYNAMIC_CONFIG_APP_LOCATION + "roles.config"; + public static String PORTAL_AUTHENTICATION_FILE_LOCATION = + DYNAMIC_CONFIG_APP_LOCATION + "portal" + FILESEP + "portal-authentication.properties"; + + // Related to data-router properties + public static String DR_URI_SUFFIX = "uriSuffix"; + public static String DR_CERT_NAME = "cert-name"; + public static String DR_KEYSTORE_PASSWORD = "keystore-password"; + public static String DR_KEYSTORE = "keystore"; + public static String DR_CONNECT_TIMEOUT = "connectTimeoutMs"; + public static String DR_READ_TIMEOUT = "readTimeoutMs"; + + public static final String ES_SUGGEST_API = "_suggest"; + public static final String ES_COUNT_API = "_count"; + public static final String ES_SEARCH_API = "_search"; + + public static final String UI_FILTER_VIEW_NAME_PARAMETER = "viewName"; + public static final String UI_FILTER_ID_LIST_PARAMETER = "filterIdList"; + + public static final String ENTITY_AUTO_SUGGEST_INDEX_NAME_DEFAULT = + "entityautosuggestindex-localhost"; + public static final String ENTITY_AUTO_SUGGEST_SETTINGS_FILE_DEFAULT = + "/etc/autoSuggestSettings.json"; + public static final String ENTITY_AUTO_SUGGEST_MAPPINGS_FILE_DEFAULT = + "/etc/autoSuggestMappings.json"; + public static final String ENTITY_DYNAMIC_MAPPINGS_FILE_DEFAULT = "/etc/dynamicMappings.json"; + public static final String FILTER_LIST_FILE_DEFAULT = + CONFIG_FILTERS_BASE_LOCATION + "filters" + FILESEP + "aaiui_filters.json"; + public static final String FILTER_MAPPING_FILE_DEFAULT = + CONFIG_FILTERS_BASE_LOCATION + "filters" + FILESEP + "aaiui_views.json"; + + public static final String SUGGESTION_TEXT_SEPARATOR = " -- "; + + // Injected Attributes + public static String URI_ATTR_NAME = "uri"; + + public static final String URI_VERSION_REGEX_PATTERN = "aai/v[\\d]+/"; + + public static final String getConfigPath(String configFile) { + return AJSC_HOME + FILESEP + configFile; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/VisualizationConfigs.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/VisualizationConfigs.java new file mode 100644 index 0000000..169dbc6 --- /dev/null +++ 
b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/config/VisualizationConfigs.java @@ -0,0 +1,174 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.config; + +import java.util.ArrayList; + +import org.onap.aai.sparky.config.SparkyResourceLoader; + +/** + * The Class VisualizationConfig. + */ +public class VisualizationConfigs { + + private int maxSelfLinkTraversalDepth; + + private boolean visualizationDebugEnabled; + + private String aaiEntityNodeDescriptors; + + private String generalNodeClassName; + + private String searchNodeClassName; + + private String selectedSearchedNodeClassName; + + private int numOfThreadsToFetchNodeIntegrity; + + private boolean makeAllNeighborsBidirectional; + + private ArrayList<String> shallowEntities; + + private boolean gizmoEnabled; + + private SparkyResourceLoader resourceLoader; + + /** + * Instantiates a new visualization config. + */ + public VisualizationConfigs() { + this.gizmoEnabled = false; + } + + public ArrayList<String> getShallowEntities() { + return shallowEntities; + } + + public void setShallowEntities(ArrayList<String> shallowEntities) { + this.shallowEntities = shallowEntities; + } + + /** + * Make all neighbors bidirectional. 
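A sketch of populating VisualizationConfigs through Spring Boot Java config, in keeping with the Spring-Boot conversion this patch performs; the @Configuration class and the property values are illustrative assumptions, not sparky's shipped wiring.

import java.util.ArrayList;
import java.util.Arrays;

import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class VisualizationConfigsSketch {

  @Bean
  public VisualizationConfigs visualizationConfigs() {
    VisualizationConfigs configs = new VisualizationConfigs();

    // Illustrative values only; real deployments drive these from configuration.
    configs.setMaxSelfLinkTraversalDepth(2);
    configs.setVisualizationDebugEnabled(false);
    configs.setMakeAllNeighborsBidirectional(false);
    configs.setShallowEntities(new ArrayList<>(Arrays.asList("cloud-region", "complex", "vnf-image")));

    return configs;
  }
}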
+ * + * @return true, if successful + */ + public boolean makeAllNeighborsBidirectional() { + return makeAllNeighborsBidirectional; + } + + public void setMakeAllNeighborsBidirectional(boolean makeAllNeighborsBidirectional) { + this.makeAllNeighborsBidirectional = makeAllNeighborsBidirectional; + } + + public String getSelectedSearchedNodeClassName() { + return selectedSearchedNodeClassName; + } + + public void setSelectedSearchedNodeClassName(String selectedSearchedNodeClassName) { + this.selectedSearchedNodeClassName = selectedSearchedNodeClassName; + } + + public String getGeneralNodeClassName() { + return generalNodeClassName; + } + + public void setGeneralNodeClassName(String generalNodeClassName) { + this.generalNodeClassName = generalNodeClassName; + } + + public String getSearchNodeClassName() { + return searchNodeClassName; + } + + public void setSearchNodeClassName(String searchNodeClassName) { + this.searchNodeClassName = searchNodeClassName; + } + + public String getAaiEntityNodeDescriptors() { + return aaiEntityNodeDescriptors; + } + + public void setAaiEntityNodeDescriptors(String aaiEntityNodeDescriptors) { + this.aaiEntityNodeDescriptors = aaiEntityNodeDescriptors; + } + + public boolean isVisualizationDebugEnabled() { + return visualizationDebugEnabled; + } + + public void setVisualizationDebugEnabled(boolean visualizationDebugEnabled) { + this.visualizationDebugEnabled = visualizationDebugEnabled; + } + + public void setMaxSelfLinkTraversalDepth(int maxSelfLinkTraversalDepth) { + this.maxSelfLinkTraversalDepth = maxSelfLinkTraversalDepth; + } + + public int getMaxSelfLinkTraversalDepth() { + return maxSelfLinkTraversalDepth; + } + + public int getNumOfThreadsToFetchNodeIntegrity() { + return numOfThreadsToFetchNodeIntegrity; + } + + public void setNumOfThreadsToFetchNodeIntegrity(int numOfThreadsToFetchNodeIntegrity) { + this.numOfThreadsToFetchNodeIntegrity = numOfThreadsToFetchNodeIntegrity; + } + + public boolean isGizmoEnabled() { + return gizmoEnabled; + } + + public void setGizmoEnabled(boolean gizmoEnabled) { + this.gizmoEnabled = gizmoEnabled; + } + + public SparkyResourceLoader getResourceLoader() { + return resourceLoader; + } + + public void setResourceLoader(SparkyResourceLoader resourceLoader) { + this.resourceLoader = resourceLoader; + } + + @Override + public String toString() { + return "VisualizationConfigs [maxSelfLinkTraversalDepth=" + maxSelfLinkTraversalDepth + + ", visualizationDebugEnabled=" + visualizationDebugEnabled + ", " + + (aaiEntityNodeDescriptors != null ? "aaiEntityNodeDescriptors=" + aaiEntityNodeDescriptors + ", " + : "") + + (generalNodeClassName != null ? "generalNodeClassName=" + generalNodeClassName + ", " : "") + + (searchNodeClassName != null ? "searchNodeClassName=" + searchNodeClassName + ", " : "") + + (selectedSearchedNodeClassName != null + ? "selectedSearchedNodeClassName=" + selectedSearchedNodeClassName + ", " : "") + + "numOfThreadsToFetchNodeIntegrity=" + numOfThreadsToFetchNodeIntegrity + + ", makeAllNeighborsBidirectional=" + makeAllNeighborsBidirectional + ", " + + (shallowEntities != null ? 
"shallowEntities=" + shallowEntities + ", " : "") + "gizmoEnabled=" + + gizmoEnabled + "]"; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/ActiveInventoryNode.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/ActiveInventoryNode.java new file mode 100644 index 0000000..3981626 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/ActiveInventoryNode.java @@ -0,0 +1,831 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; +import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingAction; +import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingState; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * The Class ActiveInventoryNode. 
+ */ +public class ActiveInventoryNode { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger( + ActiveInventoryNode.class); + private static final String URIRegexPattern = "aai/v[\\d]/"; + + public static final int DEFAULT_INIT_NODE_DEPTH = 1000; + + private String nodeId; + private String selfLink; + + private boolean isRootNode; + private ConcurrentLinkedDeque<String> inboundNeighbors; + private ConcurrentLinkedDeque<String> outboundNeighbors; + + private ConcurrentLinkedDeque<String> inboundNeighborSelfLinks; + private ConcurrentLinkedDeque<String> outboundNeighborSelfLinks; + + private List<JsonNode> complexGroups; + private List<RelationshipList> relationshipLists; + private int nodeDepth; + private OperationResult opResult; + + private boolean processingErrorOccurred; + private List<String> errorCauses; + private boolean selflinkRetrievalFailure; + private NodeProcessingState state; + + private boolean processedNeighbors; + + private boolean selfLinkPendingResolve; + + /* + * I think we shouldn't be using this crutch flags. If these things are meant + * to represent the current state of the node, then they should be legitimate + * state transitions. + */ + + private boolean selfLinkDeterminationPending; + + private AtomicBoolean selfLinkProcessed; + private AtomicBoolean nodeIntegrityProcessed; + + private OxmModelLoader oxmModelLoader; + private VisualizationConfigs visualizationConfigs; + + private String entityType; + private String primaryKeyName; + private String primaryKeyValue; + + private boolean nodeValidated; + private boolean nodeIssue; + private boolean ignoredByFilter; + + private boolean resolvedSelfLink; + + private Map<String, String> properties; + private ArrayList<String> queryParams; + + private ObjectMapper mapper; + + private OxmEntityLookup oxmEntityLookup; + + /** + * Instantiates a new active inventory node. 
+ * + * @param visualizationConfigs the visualization configs + * @param oxmEntityLookup the oxm entity lookup + */ + public ActiveInventoryNode(VisualizationConfigs visualizationConfigs, OxmEntityLookup oxmEntityLookup) { + this.oxmEntityLookup = oxmEntityLookup; + this.nodeId = null; + this.entityType = null; + this.selfLink = null; + this.properties = new HashMap<String, String>(); + this.processingErrorOccurred = false; + this.errorCauses = new ArrayList<String>(); + this.selflinkRetrievalFailure = false; + this.nodeIssue = false; + this.nodeValidated = false; + this.state = NodeProcessingState.INIT; + this.selfLinkPendingResolve = false; + this.selfLinkDeterminationPending = false; + + selfLinkProcessed = new AtomicBoolean(Boolean.FALSE); + nodeIntegrityProcessed = new AtomicBoolean(Boolean.FALSE); + oxmModelLoader = null; + this.visualizationConfigs = visualizationConfigs; + + isRootNode = false; + inboundNeighbors = new ConcurrentLinkedDeque<String>(); + outboundNeighbors = new ConcurrentLinkedDeque<String>(); + + inboundNeighborSelfLinks = new ConcurrentLinkedDeque<String>(); + outboundNeighborSelfLinks = new ConcurrentLinkedDeque<String>(); + + complexGroups = new ArrayList<JsonNode>(); + relationshipLists = new ArrayList<RelationshipList>(); + nodeDepth = DEFAULT_INIT_NODE_DEPTH; + queryParams = new ArrayList<String>(); + + mapper = new ObjectMapper(); + + processedNeighbors = false; + resolvedSelfLink = false; + + + } + + public void clearQueryParams() { + queryParams.clear(); + } + + public void addQueryParam(String queryParam) { + if (queryParam != null) { + if (!queryParams.contains(queryParam)) { + queryParams.add(queryParam); + } + } + } + + public void addInboundSelfLink(String link) { + + if (link == null) { + return; + } + + if (!inboundNeighborSelfLinks.contains(link)) { + inboundNeighborSelfLinks.add(link); + } + + } + + public void addOutboundSelfLink(String link) { + + if (link == null) { + return; + } + + if (!outboundNeighborSelfLinks.contains(link)) { + outboundNeighborSelfLinks.add(link); + } + + } + + public Collection<String> getInboundNeighborSelfLinks() { + return inboundNeighborSelfLinks; + } + + public Collection<String> getOutboundNeighborSelfLinks() { + return outboundNeighborSelfLinks; + } + + public void addQueryParams(Collection<String> params) { + + if (params != null && !params.isEmpty()) { + + for (String param : params) { + addQueryParam(param); + } + } + } + + + public List<String> getQueryParams() { + return queryParams; + } + + public void setSelfLinkDeterminationPending(boolean selfLinkDeterminationPending) { + this.selfLinkDeterminationPending = selfLinkDeterminationPending; + } + + public boolean isSelfLinkDeterminationPending() { + return selfLinkDeterminationPending; + } + + public NodeProcessingState getState() { + return state; + } + + public List<JsonNode> getComplexGroups() { + return complexGroups; + } + + public List<RelationshipList> getRelationshipLists() { + return relationshipLists; + } + + public OperationResult getOpResult() { + return opResult; + } + + public void setOpResult(OperationResult opResult) { + this.opResult = opResult; + } + + public String getPrimaryKeyName() { + return primaryKeyName; + } + + /** + * Gets the visualization config. + * + * @return the visualization config + */ + public VisualizationConfigs getvisualizationConfigs() { + return visualizationConfigs; + } + + public int getNodeDepth() { + return nodeDepth; + } + + public void setNodeDepth(int nodeDepth) { + this.nodeDepth = nodeDepth; + } + + /** + * Sets the visualization config. 
+ * + * @param visualizationConfig the new visualization config + */ + public void setvisualizationConfig(VisualizationConfigs visualizationConfigs) { + this.visualizationConfigs = visualizationConfigs; + } + + public OxmModelLoader getOxmModelLoader() { + return oxmModelLoader; + } + + public void setPrimaryKeyName(String primaryKeyName) { + this.primaryKeyName = primaryKeyName; + } + + public String getPrimaryKeyValue() { + return primaryKeyValue; + } + + public void setPrimaryKeyValue(String primaryKeyValue) { + this.primaryKeyValue = primaryKeyValue; + } + + public boolean isNodeValidated() { + return nodeValidated; + } + + public void setNodeValidated(boolean nodeValidated) { + this.nodeValidated = nodeValidated; + } + + public boolean isNodeIssue() { + return nodeIssue; + } + + public boolean isIgnoredByFilter() { + return ignoredByFilter; + } + + public void setIgnoredByFilter(boolean ignoredByFilter) { + this.ignoredByFilter = ignoredByFilter; + } + + public void setNodeIssue(boolean nodeIssue) { + this.nodeIssue = nodeIssue; + } + + /** + * Checks for processed neighbors. + * + * @return true, if successful + */ + public boolean hasProcessedNeighbors() { + return processedNeighbors; + } + + public void setProcessedNeighbors(boolean processedNeighbors) { + this.processedNeighbors = processedNeighbors; + } + + /** + * Checks for resolved self link. + * + * @return true, if successful + */ + public boolean hasResolvedSelfLink() { + return resolvedSelfLink; + } + + public void setResolvedSelfLink(boolean resolvedSelfLink) { + this.resolvedSelfLink = resolvedSelfLink; + } + + /** + * Checks for neighbors. + * + * @return true, if successful + */ + public boolean hasNeighbors() { + return (inboundNeighbors.size() > 0 || outboundNeighbors.size() > 0); + } + + /** + * Adds the inbound neighbor. + * + * @param nodeId the node id + */ + public void addInboundNeighbor(String nodeId) { + + if (nodeId == null) { + return; + } + + if (!inboundNeighbors.contains(nodeId)) { + inboundNeighbors.add(nodeId); + } + + } + + /** + * Adds the outbound neighbor. + * + * @param nodeId the node id + */ + public void addOutboundNeighbor(String nodeId) { + + if (nodeId == null) { + return; + } + + if (!outboundNeighbors.contains(nodeId)) { + outboundNeighbors.add(nodeId); + } + + } + + public boolean isAtMaxDepth() { + return (nodeDepth >= this.visualizationConfigs.getMaxSelfLinkTraversalDepth()); + } + + public ConcurrentLinkedDeque<String> getInboundNeighbors() { + return inboundNeighbors; + } + + public void setInboundNeighbors(ConcurrentLinkedDeque<String> inboundNeighbors) { + this.inboundNeighbors = inboundNeighbors; + } + + public Collection<String> getOutboundNeighbors() { + List<String> result = new ArrayList<String>(); + + Iterator<String> neighborIterator = outboundNeighbors.iterator(); + + while (neighborIterator.hasNext()) { + result.add(neighborIterator.next()); + } + + return result; + } + + /** + * Change depth. 
+ * + * @param newDepth the new depth + * @return true, if successful + */ + public boolean changeDepth(int newDepth) { + + boolean nodeDepthWasChanged = false; + + if (newDepth < nodeDepth) { + LOG.info(AaiUiMsgs.ACTIVE_INV_NODE_CHANGE_DEPTH, nodeId, + String.valueOf(this.nodeDepth), String.valueOf(newDepth)); + this.nodeDepth = newDepth; + nodeDepthWasChanged = true; + } + + return nodeDepthWasChanged; + + } + + public void setOutboundNeighbors(ConcurrentLinkedDeque<String> outboundNeighbors) { + this.outboundNeighbors = outboundNeighbors; + } + + public boolean isRootNode() { + return isRootNode; + } + + public void setRootNode(boolean isRootNode) { + this.isRootNode = isRootNode; + } + + /** + * Change state. + * + * @param newState the new state + * @param action the action + */ + public void changeState(NodeProcessingState newState, NodeProcessingAction action) { + /* + * NodeId may be null depending on the current node life-cycle state + */ + + if (getNodeId() != null) { + LOG.info(AaiUiMsgs.ACTIVE_INV_NODE_CHANGE_STATE, state.toString(), newState.toString(), action.toString()); + } else { + LOG.info(AaiUiMsgs.ACTIVE_INV_NODE_CHANGE_STATE_NO_NODE_ID, state.toString(), newState.toString(), action.toString()); + } + this.state = newState; + } + + public boolean isSelfLinkPendingResolve() { + return selfLinkPendingResolve; + } + + public void setSelfLinkPendingResolve(boolean selfLinkPendingResolve) { + this.selfLinkPendingResolve = selfLinkPendingResolve; + } + + public boolean isSelflinkRetrievalFailure() { + return selflinkRetrievalFailure; + } + + public void setSelflinkRetrievalFailure(boolean selflinkRetrievalFailure) { + this.selflinkRetrievalFailure = selflinkRetrievalFailure; + } + + public void setOxmModelLoader(OxmModelLoader loader) { + this.oxmModelLoader = loader; + } + + public boolean getSelfLinkProcessed() { + return selfLinkProcessed.get(); + } + + public void setSelfLinkProcessed(boolean selfLinkProcessed) { + this.selfLinkProcessed.set(selfLinkProcessed); + } + + public boolean getNodeIntegrityProcessed() { + return nodeIntegrityProcessed.get(); + } + + public void setNodeIntegrityProcessed(boolean nodeIntegrityProcessed) { + this.nodeIntegrityProcessed.set(nodeIntegrityProcessed); + } + + public boolean isDirectSelfLink() { + return isDirectSelfLink(this.selfLink); + } + + /** + * Checks if is direct self link. + * + * @param link the link + * @return true, if is direct self link + */ + public static boolean isDirectSelfLink(String link) { + + if (link == null) { + return false; + } + + return link.contains("/resources/id/"); + + } + + public Map<String, String> getProperties() { + return properties; + } + + /** + * Adds the error cause. + * + * @param error the error + */ + public void addErrorCause(String error) { + if (!errorCauses.contains(error)) { + errorCauses.add(error); + } + } + + /** + * Adds the property. 
+ * + * @param key the key + * @param value the value + */ + public void addProperty(String key, String value) { + properties.put(key, value); + } + + public boolean isProcessingErrorOccurred() { + return processingErrorOccurred; + } + + public void setProcessingErrorOccurred(boolean processingErrorOccurred) { + this.processingErrorOccurred = processingErrorOccurred; + } + + public String getNodeId() { + return nodeId; + } + + public void setNodeId(String nodeId) { + this.nodeId = nodeId; + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public String getSelfLink() { + return selfLink; + } + + /** + * Calculate edit attribute uri. + * + * @param link the link + * @return the string + */ + public String calculateEditAttributeUri(String link) { + String uri = null; + Pattern pattern = Pattern.compile(URIRegexPattern); + Matcher matcher = pattern.matcher(link); + if (matcher.find()) { + uri = link.substring(matcher.end()); + } + return uri; + } + + /** + * Analyze self link relationship list. + * + * @param jsonResult the json result + * @return the relationship list + */ + private RelationshipList analyzeSelfLinkRelationshipList(String jsonResult) { + + + RelationshipList relationshipList = null; + + try { + relationshipList = mapper.readValue(jsonResult, RelationshipList.class); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.SELF_LINK_RELATIONSHIP_LIST_ERROR, exc.toString()); + } + + return relationshipList; + } + + /** + * Adds the relationship list. + * + * @param relationshipList the relationship list + */ + public void addRelationshipList(RelationshipList relationshipList) { + + if (!relationshipLists.contains(relationshipList)) { + relationshipLists.add(relationshipList); + } + + } + + /** + * Process pathed self link response. + * + * @param selfLinkJsonResponse the self link json response + * @param startNodeType the start node type + * @param startNodeResourceKey the start node resource key + */ + public void processPathedSelfLinkResponse(String selfLinkJsonResponse, String startNodeType, + String startNodeResourceKey) { + + if (selfLinkJsonResponse == null || selfLinkJsonResponse.length() == 0) { + LOG.error(AaiUiMsgs.SELF_LINK_NULL_EMPTY_RESPONSE); + return; + } + + try { + JsonNode jsonNode = mapper.readValue(selfLinkJsonResponse, JsonNode.class); + + Iterator<Entry<String, JsonNode>> fieldNames = jsonNode.fields(); + Entry<String, JsonNode> field = null; + + while (fieldNames.hasNext()) { + + field = fieldNames.next(); + + /* + * Is there a way to tell if the field is an aggregate or an atomic value? This is where our + * flattening code needs to live + */ + + String fieldName = field.getKey(); + + if ("relationship-list".equals(fieldName)) { + + /* + * Parse the relationship list like we were doing before, so we can determine whether or + * not to keep it or traverse it after we have performed the evaluative node depth logic. + */ + RelationshipList relationshipList = + analyzeSelfLinkRelationshipList(field.getValue().toString()); + + if (relationshipList != null) { + this.relationshipLists.add(relationshipList); + } else { + LOG.info(AaiUiMsgs.NO_RELATIONSHIP_DISCOVERED, nodeId); + } + } else { + JsonNode nodeValue = field.getValue(); + + if (nodeValue != null && nodeValue.isValueNode()) { + + /* + * before we blindly add the fieldName and value to our property set, let's do one more + * check to see if the field name is an entity type. 
If it is, then our complex + * attribute processing code will pick it up and process it instead; this is + * probably more relevant for array node types, but we'll see. + */ + + if (oxmEntityLookup.getEntityDescriptors().get(fieldName) == null) { + /* + * this is not an entity type as far as we can tell, so we can add it to our property + * set. + */ + + addProperty(fieldName, nodeValue.asText()); + + } + + } else { + + if (nodeValue.isArray()) { + + /* + * make sure the array node is not itself an entity type before adding it to the + * property set. The expectation is that it will be added to the visualization through a + * complex group or relationship. + */ + + if (oxmEntityLookup.getEntityDescriptors().get(field.getKey()) == null) { + /* + * this is not an entity type as far as we can tell, so we can add it to our property + * set. + */ + + addProperty(field.getKey(), nodeValue.toString()); + + } + + } else { + + complexGroups.add(nodeValue); + + } + + } + + } + + } + + } catch (IOException exc) { + LOG.error(AaiUiMsgs.JSON_CONVERSION_ERROR, "POJO", exc.getLocalizedMessage()); + this.setProcessingErrorOccurred(true); + this.addErrorCause( + "An error occurred while converting JSON into POJO = " + exc.getLocalizedMessage()); + } + + } + + public void setSelfLink(String selfLink) { + this.selfLink = selfLink; + } + + /** + * Adds the complex group. + * + * @param complexGroup the complex group + */ + public void addComplexGroup(JsonNode complexGroup) { + + if (!complexGroups.contains(complexGroup)) { + complexGroups.add(complexGroup); + } + + } + + /** + * Gets the padding. + * + * @param level the level + * @param paddingString the padding string + * @return the padding + */ + private static String getPadding(int level, String paddingString) { + StringBuilder sb = new StringBuilder(32); + for (int x = 0; x < level; x++) { + sb.append(paddingString); + } + return sb.toString(); + } + + /** + * Dump node tree. + * + * @param showProperties the show properties + * @return the string + */ + public String dumpNodeTree(boolean showProperties) { + return dumpNodeTree(0, showProperties); + } + + /** + * Dump node tree. 
+ * + * @param level the level + * @param showProperties the show properties + * @return the string + */ + private String dumpNodeTree(int level, boolean showProperties) { + StringBuilder sb = new StringBuilder(128); + String padding = getPadding(level, " "); + + sb.append(padding + " -> " + getNodeId() + "]").append("\n"); + sb.append(padding + " -> primaryKeyName = " + primaryKeyName + "]").append("\n"); + sb.append(padding + " -> primaryKeyValue = " + primaryKeyValue + "]").append("\n"); + sb.append(padding + " -> entityType = " + entityType + "]").append("\n"); + + if (showProperties) { + Set<Entry<String, String>> entries = properties.entrySet(); + for (Entry<String, String> entry : entries) { + sb.append( + padding + " ----> " + String.format("[ %s => %s ]", entry.getKey(), entry.getValue())) + .append("\n"); + } + } + + sb.append(padding + " ----> " + String.format("[ selfLink => %s ]", getSelfLink())) + .append("\n"); + + sb.append("\n").append(padding + " ----> Inbound Neighbors:").append("\n"); + + for (String inboundNeighbor : inboundNeighbors) { + sb.append("\n").append(inboundNeighbor.toString()); + } + + sb.append("\n").append(padding + " ----> Outbound Neighbors:").append("\n"); + + for (String outboundNeighbor : outboundNeighbors) { + sb.append("\n").append(outboundNeighbor.toString()); + } + + return sb.toString(); + + } + + public String getProcessingErrorCauses() { + + StringBuilder sb = new StringBuilder(128); + + for (String c : this.errorCauses) { + sb.append(c).append("\n"); + } + + return sb.toString(); + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/D3VisualizationOutput.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/D3VisualizationOutput.java new file mode 100644 index 0000000..5da9c20 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/D3VisualizationOutput.java @@ -0,0 +1,93 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +import java.util.ArrayList; +import java.util.List; + +/** + * The Class D3VisualizationOutput. 
+ */ +public class D3VisualizationOutput { + + public GraphMeta graphMeta; + public List<SparkyGraphNode> nodes; + public List<SparkyGraphLink> links; + public InlineMessage inlineMessage; + + /** + * Instantiates a new d 3 visualization output. + */ + public D3VisualizationOutput() { + nodes = new ArrayList<SparkyGraphNode>(); + links = new ArrayList<SparkyGraphLink>(); + inlineMessage = null; + } + + public GraphMeta getGraphMeta() { + return graphMeta; + } + + /** + * Peg counter. + * + * @param counterName the counter name + */ + public void pegCounter(String counterName) { + graphMeta.pegCounter(counterName); + } + + public void setGraphMeta(GraphMeta graphMeta) { + this.graphMeta = graphMeta; + } + + /** + * Adds the nodes. + * + * @param nodes the nodes + */ + public void addNodes(List<SparkyGraphNode> nodes) { + this.nodes.addAll(nodes); + } + + /** + * Adds the links. + * + * @param links the links + */ + public void addLinks(List<SparkyGraphLink> links) { + this.links.addAll(links); + } + + public InlineMessage getInlineMessage() { + return inlineMessage; + } + + public void setInlineMessage(InlineMessage inlineMessage) { + this.inlineMessage = inlineMessage; + } + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/EntityEntry.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/EntityEntry.java new file mode 100644 index 0000000..91c615e --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/EntityEntry.java @@ -0,0 +1,81 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +/** + * The Class EntityEntry. 
+ */ +public class EntityEntry { + + private String entityType; + + private String entityPrimaryKeyValue; + + private String searchTags; + + private String entityId; + + public String getEntityId() { + return entityId; + } + + public void setEntityId(String entityId) { + this.entityId = entityId; + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public String getEntityPrimaryKeyValue() { + return entityPrimaryKeyValue; + } + + public void setEntityPrimaryKeyValue(String entityPrimaryKeyValue) { + this.entityPrimaryKeyValue = entityPrimaryKeyValue; + } + + public String getSearchTags() { + return searchTags; + } + + public void setSearchTags(String searchTags) { + this.searchTags = searchTags; + } + + @Override + public String toString() { + return "EntityEntry [" + (entityType != null ? "entityType=" + entityType + ", " : "") + + (entityPrimaryKeyValue != null ? "entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", " + : "") + + (searchTags != null ? "searchTags=" + searchTags + ", " : "") + + (entityId != null ? "entityId=" + entityId : "") + "]"; + } + +}
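The EntityEntry bean above is a plain holder for search-oriented fields (entity type, primary key value, search tags, entity id) with a null-guarded toString(). As a quick orientation aid, here is a minimal usage sketch that is not part of the patch; the EntityEntryExample class and all field values are invented, and only the accessors visible in the diff are assumed.

import com.fasterxml.jackson.databind.ObjectMapper;

import org.onap.aai.sparky.viewandinspect.entity.EntityEntry;

public class EntityEntryExample {

  public static void main(String[] args) throws Exception {

    EntityEntry entry = new EntityEntry();
    entry.setEntityId("example-entity-id");               // hypothetical value
    entry.setEntityType("generic-vnf");                   // hypothetical AAI entity type
    entry.setEntityPrimaryKeyValue("example-vnf-name");   // hypothetical value
    entry.setSearchTags("example-vnf-name;generic-vnf");  // hypothetical value

    // The toString() above skips null fields, so this prints only what was set.
    System.out.println(entry);

    // The class is a conventional bean (default constructor plus getters/setters),
    // so Jackson, which this service already uses elsewhere, can serialize it directly.
    System.out.println(new ObjectMapper().writeValueAsString(entry));
  }
}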
\ No newline at end of file diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoEntity.java new file mode 100644 index 0000000..39106d2 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoEntity.java @@ -0,0 +1,98 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +import java.util.Arrays; +import java.util.Map; + +public class GizmoEntity { + + private String id; + private String type; + private String url; + private Map<String, String> properties; + private GizmoRelationshipHint[] in; + private GizmoRelationshipHint[] out; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public Map<String, String> getProperties() { + return properties; + } + + public void setProperties(Map<String, String> properties) { + this.properties = properties; + } + + public GizmoRelationshipHint[] getIn() { + return in; + } + + public void setIn(GizmoRelationshipHint[] in) { + this.in = in; + } + + public GizmoRelationshipHint[] getOut() { + return out; + } + + public void setOut(GizmoRelationshipHint[] out) { + this.out = out; + } + + @Override + public String toString() { + return "GizmoEntity [" + (id != null ? "id=" + id + ", " : "") + + (type != null ? "type=" + type + ", " : "") + (url != null ? "url=" + url + ", " : "") + + (properties != null ? "properties=" + properties + ", " : "") + + (in != null ? "in=" + Arrays.toString(in) + ", " : "") + + (out != null ? 
"out=" + Arrays.toString(out) : "") + "]"; + } + + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoRelationshipEntity.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoRelationshipEntity.java new file mode 100644 index 0000000..31ea78a --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoRelationshipEntity.java @@ -0,0 +1,103 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +import java.util.Map; + +public class GizmoRelationshipEntity { + + /* + * {"id":"oe4ur-3a0-27th-fu8","type":"has","url": + * "services/inventory/relationships/v8/has/oe4ur-3a0-27th-fu8","source": + * "services/inventory/v8/generic-vnf/4248","target": + * "services/inventory/v8/vserver/20528", + * "properties":{"is-parent":"true","multiplicity":"many","has-del-target": + * "true","uses-resource": "true"}} + */ + + private String id; + private String type; + private String url; + private String source; + private String target; + private Map<String, String> properties; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public String getSource() { + return source; + } + + public void setSource(String source) { + this.source = source; + } + + public String getTarget() { + return target; + } + + public void setTarget(String target) { + this.target = target; + } + + public Map<String, String> getProperties() { + return properties; + } + + public void setProperties(Map<String, String> properties) { + this.properties = properties; + } + + @Override + public String toString() { + return "GizmoRelationshipEntity [" + (id != null ? "id=" + id + ", " : "") + + (type != null ? "type=" + type + ", " : "") + (url != null ? "url=" + url + ", " : "") + + (source != null ? "source=" + source + ", " : "") + (target != null ? "target=" + target + ", " : "") + + (properties != null ? 
"properties=" + properties : "") + "]"; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoRelationshipHint.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoRelationshipHint.java new file mode 100644 index 0000000..5e22164 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GizmoRelationshipHint.java @@ -0,0 +1,77 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +public class GizmoRelationshipHint { + + private String id; + private String type; + private String url; + private String source; + private String target; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public String getSource() { + return source; + } + + public void setSource(String source) { + this.source = source; + } + + public String getTarget() { + return target; + } + + public void setTarget(String target) { + this.target = target; + } + + + + } diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GraphMeta.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GraphMeta.java new file mode 100644 index 0000000..7e53665 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GraphMeta.java @@ -0,0 +1,147 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +import org.onap.aai.sparky.viewandinspect.EntityTypeAggregation; + +/** + * The Class GraphMeta. + */ +public class GraphMeta { + + private com.fasterxml.jackson.databind.JsonNode aaiEntityNodeDescriptors; + + private int numNodes; + + private int numLinks; + + private long renderTimeInMs; + + private int numLinksResolvedSuccessfullyFromCache; + + private int numLinksResolvedSuccessfullyFromServer; + + private int numLinkResolveFailed; + + private EntityTypeAggregation entitySummary; + + /** + * Instantiates a new graph meta. + */ + public GraphMeta() { + entitySummary = new EntityTypeAggregation(); + } + + public EntityTypeAggregation getEntitySummary() { + return entitySummary; + } + + public void setEntitySummary(EntityTypeAggregation entitySummary) { + this.entitySummary = entitySummary; + } + + public com.fasterxml.jackson.databind.JsonNode getAaiEntityNodeDescriptors() { + return aaiEntityNodeDescriptors; + } + + public void setAaiEntityNodeDescriptors( + com.fasterxml.jackson.databind.JsonNode aaiEntityNodeDefinitions) { + this.aaiEntityNodeDescriptors = aaiEntityNodeDefinitions; + } + + public int getNumLinksResolvedSuccessfullyFromCache() { + return numLinksResolvedSuccessfullyFromCache; + } + + public void setNumLinksResolvedSuccessfullyFromCache(int numLinksResolvedSuccessfullyFromCache) { + this.numLinksResolvedSuccessfullyFromCache = numLinksResolvedSuccessfullyFromCache; + } + + public int getNumLinksResolvedSuccessfullyFromServer() { + return numLinksResolvedSuccessfullyFromServer; + } + + public void setNumLinksResolvedSuccessfullyFromServer( + int numLinksResolvedSuccessfullyFromServer) { + this.numLinksResolvedSuccessfullyFromServer = numLinksResolvedSuccessfullyFromServer; + } + + public int getNumLinkResolveFailed() { + return numLinkResolveFailed; + } + + public void setNumLinkResolveFailed(int numLinkResolveFailed) { + this.numLinkResolveFailed = numLinkResolveFailed; + } + + public int getNumNodes() { + return numNodes; + } + + public void setNumNodes(int numNodes) { + this.numNodes = numNodes; + } + + public int getNumLinks() { + return numLinks; + } + + public void setNumLinks(int numLinks) { + this.numLinks = numLinks; + } + + public long getRenderTimeInMs() { + return renderTimeInMs; + } + + public void setRenderTimeInMs(long renderTimeInMs) { + this.renderTimeInMs = renderTimeInMs; + } + + /** + * Peg counter. + * + * @param counterName the counter name + */ + public void pegCounter(String counterName) { + entitySummary.pegCounter(counterName); + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "GraphMeta [" + + (aaiEntityNodeDescriptors != null + ? 
"aaiEntityNodeDescriptors=" + aaiEntityNodeDescriptors + ", " : "") + + "numNodes=" + numNodes + ", numLinks=" + numLinks + ", renderTimeInMs=" + renderTimeInMs + + ", numLinksResolvedSuccessfullyFromCache=" + numLinksResolvedSuccessfullyFromCache + + ", numLinksResolvedSuccessfullyFromServer=" + numLinksResolvedSuccessfullyFromServer + + ", numLinkResolveFailed=" + numLinkResolveFailed + ", " + + (entitySummary != null ? "entitySummary=" + entitySummary : "") + "]"; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GraphRequest.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GraphRequest.java new file mode 100644 index 0000000..23e50a9 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/GraphRequest.java @@ -0,0 +1,58 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +public class GraphRequest { + + private String hashId; + private boolean includeGraphMeta; + + public GraphRequest() { + + } + + public String getHashId() { + return hashId; + } + + public void setHashId(String hashId) { + this.hashId = hashId; + } + + public boolean isIncludeGraphMeta() { + return includeGraphMeta; + } + + public void setIncludeGraphMeta(boolean includeGraphMeta) { + this.includeGraphMeta = includeGraphMeta; + } + + @Override + public String toString() { + return "QueryRequest [" + (hashId != null ? "hashId=" + hashId + ", " : "") + + "includeGraphMeta=" + includeGraphMeta + "]"; + } + +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/InlineMessage.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/InlineMessage.java new file mode 100644 index 0000000..f6f85bb --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/InlineMessage.java @@ -0,0 +1,70 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +/** + * The Class InlineMessage. + */ +public class InlineMessage { + + private String level; + private String message; + + /** + * Instantiates a new inline message. + * + * @param level the level + * @param message the message + */ + public InlineMessage(String level, String message) { + this.level = level; + this.message = message; + } + + public String getLevel() { + return level; + } + + public void setLevel(String level) { + this.level = level; + } + + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return level + " : " + message; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/JsonNode.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/JsonNode.java new file mode 100644 index 0000000..09e5956 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/JsonNode.java @@ -0,0 +1,207 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +import org.apache.log4j.Logger; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; + +import com.fasterxml.jackson.annotation.JsonIgnore; + +/* + * We can use annotations to differentiate between intermediate data we use to build the node, and + * the data that we actually want to appear in the exported JSON. + */ + +/* + * This is our current ( 14-June-2016 ) working schema that will remain organic until we get it just + * right. + * + * { "item-type": "customer", "item-name-key": "subscriber-name", “item-name-value” : + * “subscriber-name-123456789-aai847-data-01”, "item-properties": [{ "property-name": + * "subscriber-name", "property-value": "subscriber-name-123456789-aai847-data-01" }, { + * "property-name": "global-customer-id", "property-value": + * "global-customer-id-123456789-aai847-data-01" } ], "node-meta": { “color” : “#f2d2d2”, + * "isSearchTarget" : false, "nodeGroups" : "1,2,3,4" }, } + * + */ + + +/** + * The Class JsonNode. + */ +public class JsonNode { + + private String id; + private String itemType; + private String itemNameKey; + private String itemNameValue; + private Map<String, String> itemProperties; + private NodeMeta nodeMeta; + + @JsonIgnore + private boolean isRootNode; + + + @JsonIgnore + private String resourceKey; + @JsonIgnore + private Collection<String> inboundNeighbors; + + @JsonIgnore + private Collection<String> outboundNeighbors; + + + @JsonIgnore + private static final Logger LOG = Logger.getLogger(JsonNode.class); + + private VisualizationConfigs visualizationConfigs; + + + /** + * Instantiates a new json node. 
+ * + * @param ain the ain + */ + public JsonNode(ActiveInventoryNode ain, VisualizationConfigs visualizationConfigs) { + this.resourceKey = ain.getNodeId(); + this.itemProperties = ain.getProperties(); + this.setItemType(ain.getEntityType()); + this.setItemNameKey(ain.getPrimaryKeyName()); + this.setItemNameValue(ain.getPrimaryKeyValue()); + this.setId(ain.getNodeId()); + this.isRootNode = ain.isRootNode(); + this.visualizationConfigs = visualizationConfigs; + + if (LOG.isDebugEnabled()) { + LOG.debug("---"); + LOG.debug("JsonNode constructor using AIN = " + ain.dumpNodeTree(true)); + LOG.debug("---"); + } + + inboundNeighbors = ain.getInboundNeighbors(); + outboundNeighbors = ain.getOutboundNeighbors(); + + nodeMeta = new NodeMeta(this.visualizationConfigs); + + nodeMeta.setNodeIssue(ain.isNodeIssue()); + nodeMeta.setNodeValidated(ain.isNodeValidated()); + nodeMeta.setNodeDepth(ain.getNodeDepth()); + + nodeMeta.setNumInboundNeighbors(ain.getInboundNeighbors().size()); + nodeMeta.setNumOutboundNeighbors(ain.getOutboundNeighbors().size()); + + nodeMeta.setAtMaxDepth(ain.isAtMaxDepth()); + nodeMeta.setSelfLinkResolved(!ain.isSelflinkRetrievalFailure()); + nodeMeta.setProcessingErrorOccurred(ain.isProcessingErrorOccurred()); + nodeMeta.setHasNeighbors( + ain.getOutboundNeighbors().size() > 0 || ain.getInboundNeighbors().size() > 0); + nodeMeta.setProcessingState(ain.getState()); + + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getItemNameKey() { + return itemNameKey; + } + + public String getItemNameValue() { + return itemNameValue; + } + + public Map<String, String> getItemProperties() { + return itemProperties; + } + + public String getItemType() { + return itemType; + } + + public String getResourceKey() { + return resourceKey; + } + + public void setItemNameKey(String itemNameKey) { + this.itemNameKey = itemNameKey; + } + + public void setItemNameValue(String itemNameValue) { + this.itemNameValue = itemNameValue; + } + + public void setItemProperties(HashMap<String, String> itemProperties) { + this.itemProperties = itemProperties; + } + + public void setItemType(String itemType) { + this.itemType = itemType; + } + + public void setResourceKey(String resourceKey) { + this.resourceKey = resourceKey; + } + + public NodeMeta getNodeMeta() { + return nodeMeta; + } + + public void setNodeMeta(NodeMeta nodeMeta) { + this.nodeMeta = nodeMeta; + } + + public boolean isRootNode() { + return isRootNode; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "JsonNode [" + (id != null ? "id=" + id + ", " : "") + + (itemType != null ? "itemType=" + itemType + ", " : "") + + (itemNameKey != null ? "itemNameKey=" + itemNameKey + ", " : "") + + (itemNameValue != null ? "itemNameValue=" + itemNameValue + ", " : "") + + (itemProperties != null ? "itemProperties=" + itemProperties + ", " : "") + + (nodeMeta != null ? "nodeMeta=" + nodeMeta + ", " : "") + + (resourceKey != null ? "resourceKey=" + resourceKey + ", " : "") + + (inboundNeighbors != null ? "inboundNeighbors=" + inboundNeighbors + ", " : "") + + (outboundNeighbors != null ? 
"outboundNeighbors=" + outboundNeighbors : "") + "]"; + } + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/JsonNodeLink.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/JsonNodeLink.java new file mode 100644 index 0000000..5891d51 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/JsonNodeLink.java @@ -0,0 +1,77 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +/* + * Expected JSON Output: + * + * { JsonNodeLink : { id : <value>, source : <value>, target : <value> } } + * + */ + +/** + * The Class JsonNodeLink. + */ +public class JsonNodeLink { + + protected String id; + protected String source; + protected String target; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getSource() { + return source; + } + + public void setSource(String source) { + this.source = source; + } + + public String getTarget() { + return target; + } + + public void setTarget(String target) { + this.target = target; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "JsonNodeLink [id=" + id + ", source=" + source + ", target=" + target + "]"; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeDebug.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeDebug.java new file mode 100644 index 0000000..0cc0746 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeDebug.java @@ -0,0 +1,59 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +/** + * The Class NodeDebug. + */ +public class NodeDebug { + private boolean maxTraversalDepthReached; + private boolean processingError; + private String processingErrorCauses; + + public boolean isMaxTraversalDepthReached() { + return maxTraversalDepthReached; + } + + public void setMaxTraversalDepthReached(boolean maxTraversalDepthReached) { + this.maxTraversalDepthReached = maxTraversalDepthReached; + } + + public boolean isProcessingError() { + return processingError; + } + + public void setProcessingError(boolean processingError) { + this.processingError = processingError; + } + + public String getProcessingErrorCauses() { + return processingErrorCauses; + } + + public void setProcessingErrorCauses(String processingErrorCauses) { + this.processingErrorCauses = processingErrorCauses; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeMeta.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeMeta.java new file mode 100644 index 0000000..bc21941 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeMeta.java @@ -0,0 +1,207 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; +import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingState; + +/** + * The Class NodeMeta. 
+ */ +public class NodeMeta { + + private String className; + + private boolean isSearchTarget; + + private NodeDebug nodeDebug; + private boolean nodeIssue; + private boolean nodeValidated; + private long selfLinkResponseTimeInMs; + private long numInboundNeighbors; + private long numOutboundNeighbors; + + private boolean atMaxDepth; + private boolean selfLinkResolved; + private boolean processingErrorOccurred; + private boolean neighborsProcessed; + private int nodeDepth; + private boolean hasNeighbors; + + private NodeProcessingState processingState; + + private VisualizationConfigs visualizationConfigs; + + + + /** + * Instantiates a new node meta. + */ + public NodeMeta(VisualizationConfigs visualizationConfigs) { + this.isSearchTarget = false; + this.visualizationConfigs = visualizationConfigs; + + + if (this.visualizationConfigs.isVisualizationDebugEnabled()) { + nodeDebug = new NodeDebug(); + } + this.numInboundNeighbors = 0; + this.numOutboundNeighbors = 0; + + this.selfLinkResponseTimeInMs = 0; + + this.atMaxDepth = false; + this.selfLinkResolved = false; + this.processingErrorOccurred = false; + this.hasNeighbors = false; + this.neighborsProcessed = false; + this.nodeDepth = ActiveInventoryNode.DEFAULT_INIT_NODE_DEPTH; + this.processingState = NodeProcessingState.INIT; + + } + + public boolean isAtMaxDepth() { + return atMaxDepth; + } + + public void setAtMaxDepth(boolean atMaxDepth) { + this.atMaxDepth = atMaxDepth; + } + + public boolean isSelfLinkResolved() { + return selfLinkResolved; + } + + + + public NodeProcessingState getProcessingState() { + return processingState; + } + + public void setProcessingState(NodeProcessingState processingState) { + this.processingState = processingState; + } + + public void setSelfLinkResolved(boolean selfLinkResolved) { + this.selfLinkResolved = selfLinkResolved; + } + + public boolean isProcessingErrorOccurred() { + return processingErrorOccurred; + } + + public void setProcessingErrorOccurred(boolean processingErrorOccurred) { + this.processingErrorOccurred = processingErrorOccurred; + } + + public boolean isHasNeighbors() { + return hasNeighbors; + } + + public void setHasNeighbors(boolean hasNeighbors) { + this.hasNeighbors = hasNeighbors; + } + + public boolean isNeighborsProcessed() { + return neighborsProcessed; + } + + public void setNeighborsProcessed(boolean neighborsProcessed) { + this.neighborsProcessed = neighborsProcessed; + } + + public int getNodeDepth() { + return nodeDepth; + } + + public void setNodeDepth(int nodeDepth) { + this.nodeDepth = nodeDepth; + } + + public void setNodeDebug(NodeDebug nodeDebug) { + this.nodeDebug = nodeDebug; + } + + public String getClassName() { + return className; + } + + public long getNumInboundNeighbors() { + return numInboundNeighbors; + } + + public void setNumInboundNeighbors(long numInboundNeighbors) { + this.numInboundNeighbors = numInboundNeighbors; + } + + public long getNumOutboundNeighbors() { + return numOutboundNeighbors; + } + + public void setNumOutboundNeighbors(long numOutboundNeighbors) { + this.numOutboundNeighbors = numOutboundNeighbors; + } + + public NodeDebug getNodeDebug() { + return nodeDebug; + } + + public long getSelfLinkResponseTimeInMs() { + return selfLinkResponseTimeInMs; + } + + public boolean isNodeIssue() { + return nodeIssue; + } + + public boolean isNodeValidated() { + return nodeValidated; + } + + public boolean isSearchTarget() { + return isSearchTarget; + } + + public void setClassName(String className) { + this.className = className; + } + + public 
void setNodeIssue(boolean nodeIssue) { + this.nodeIssue = nodeIssue; + } + + public void setNodeValidated(boolean nodeValidated) { + this.nodeValidated = nodeValidated; + } + + public void setSearchTarget(boolean isSearchTarget) { + this.isSearchTarget = isSearchTarget; + } + + public void setSelfLinkResponseTimeInMs(long selfLinkResponseTimeInMs) { + this.selfLinkResponseTimeInMs = selfLinkResponseTimeInMs; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeProcessingTransaction.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeProcessingTransaction.java new file mode 100644 index 0000000..8b1cb8d --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/NodeProcessingTransaction.java @@ -0,0 +1,109 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +import org.onap.aai.restclient.client.OperationResult; + +/** + * The Class NodeProcessingTransaction. + */ +public class NodeProcessingTransaction { + + private ActiveInventoryNode processingNode; + private OperationResult opResult; + private String selfLinkWithModifiers; + private String requestParameters; + + /** + * Instantiates a new node processing transaction. + */ + public NodeProcessingTransaction() {} + + public String getRequestParameters() { + return requestParameters; + } + + public void setRequestParameters(String requestParameters) { + this.requestParameters = requestParameters; + } + + public String getSelfLink() { + if (processingNode == null) { + return null; + } + + return processingNode.getSelfLink(); + } + + public String getSelfLinkWithModifiers() { + if (processingNode == null) { + return null; + } + + return processingNode.getSelfLink() + requestParameters; + } + + public ActiveInventoryNode getProcessingNode() { + return processingNode; + } + + public void setProcessingNode(ActiveInventoryNode processingNode) { + this.processingNode = processingNode; + } + + public OperationResult getOpResult() { + return opResult; + } + + public void setOpResult(OperationResult opResult) { + this.opResult = opResult; + } + + /** + * Processing error occurred. 
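+   * Treats a missing (null) or unsuccessful operation result as a processing error.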
+ * + * @return true, if successful + */ + public boolean processingErrorOccurred() { + if (opResult == null) { + return true; + } + + return !opResult.wasSuccessful(); + + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "NodeProcessingTransaction [" + + (processingNode != null ? "processingNode=" + processingNode + ", " : "") + + (opResult != null ? "opResult=" + opResult + ", " : "") + "processorErrorOccurred=" + + processingErrorOccurred() + "]"; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/QueryParams.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/QueryParams.java new file mode 100644 index 0000000..f1a8e4e --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/QueryParams.java @@ -0,0 +1,57 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +/** + * The Class QueryParams. + */ +public class QueryParams { + + private String searchTargetPrimaryKeyValues; + private String searchTargetNodeId; + + /** + * Instantiates a new query params. + */ + public QueryParams() { + + } + + public String getSearchTargetPrimaryKeyValues() { + return searchTargetPrimaryKeyValues; + } + + public void setSearchTargetPrimaryKeyValues(String searchTargetPrimaryKeyValues) { + this.searchTargetPrimaryKeyValues = searchTargetPrimaryKeyValues; + } + + public String getSearchTargetNodeId() { + return searchTargetNodeId; + } + + public void setSearchTargetNodeId(String searchTargetNodeId) { + this.searchTargetNodeId = searchTargetNodeId; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/QueryRequest.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/QueryRequest.java new file mode 100644 index 0000000..a542efd --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/QueryRequest.java @@ -0,0 +1,47 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +/** + * The Class QueryRequest. + */ +public class QueryRequest { + + private String hashId; + + public String getHashId() { + return hashId; + } + + public void setHashId(String hashId) { + this.hashId = hashId; + } + + @Override + public String toString() { + return "QueryRequest [hashId=" + hashId + "]"; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelatedToProperty.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelatedToProperty.java new file mode 100644 index 0000000..5d0f8c3 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelatedToProperty.java @@ -0,0 +1,64 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * The Class RelatedToProperty. 
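+ * Maps the related-to-property element of an AAI relationship; the @JsonProperty annotations
+ * bind the hyphenated JSON keys (property-key, property-value).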
+ */ +public class RelatedToProperty { + protected String propertyKey; + protected String propertyValue; + + @JsonProperty("property-key") + public String getPropertyKey() { + return propertyKey; + } + + public void setPropertyKey(String propertyKey) { + this.propertyKey = propertyKey; + } + + @JsonProperty("property-value") + public String getPropertyValue() { + return propertyValue; + } + + public void setPropertyValue(String propertyValue) { + this.propertyValue = propertyValue; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "RelatedToProperty [propertyKey=" + propertyKey + ", propertyValue=" + propertyValue + + "]"; + } + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/Relationship.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/Relationship.java new file mode 100644 index 0000000..813dec6 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/Relationship.java @@ -0,0 +1,96 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +import java.util.Arrays; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * The Class Relationship. 
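+ * Models a single AAI relationship entry: related-to, relationship-label, related-link, plus the
+ * relationship-data and related-to-property arrays.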
+ */ +public class Relationship { + + protected String relatedTo; + protected String relationshipLabel; + protected String relatedLink; + protected RelationshipData[] relationshipData; + protected RelatedToProperty[] relatedToProperty; + + public String getRelatedTo() { + return relatedTo; + } + + @JsonProperty("related-to") + public void setRelatedTo(String relatedTo) { + this.relatedTo = relatedTo; + } + + public String getRelationshipLabel() { + return relationshipLabel; + } + + @JsonProperty("relationship-label") + public void setRelationshipLabel(String relationshipLabel) { + this.relationshipLabel = relationshipLabel; + } + + public String getRelatedLink() { + return relatedLink; + } + + @JsonProperty("related-link") + public void setRelatedLink(String relatedLink) { + this.relatedLink = relatedLink; + } + + public RelationshipData[] getRelationshipData() { + return relationshipData; + } + + @JsonProperty("relationship-data") + public void setRelationshipData(RelationshipData[] relationshipData) { + this.relationshipData = relationshipData; + } + + public RelatedToProperty[] getRelatedToProperty() { + return relatedToProperty; + } + + @JsonProperty("related-to-property") + public void setRelatedToProperty(RelatedToProperty[] relatedToProperty) { + this.relatedToProperty = relatedToProperty; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "Relationship [relatedTo=" + relatedTo + ", relationshipLabel=" + relationshipLabel + + ", relatedLink=" + relatedLink + ", relationshipData=" + Arrays.toString(relationshipData) + + ", relatedToProperty=" + Arrays.toString(relatedToProperty) + "]"; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipData.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipData.java new file mode 100644 index 0000000..c8dfefe --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipData.java @@ -0,0 +1,63 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * The Class RelationshipData. 
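+ * Holds one relationship-key / relationship-value pair from an AAI relationship.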
+ */ +public class RelationshipData { + protected String relationshipKey; + protected String relationshipValue; + + @JsonProperty("relationship-key") + public String getRelationshipKey() { + return relationshipKey; + } + + public void setRelationshipKey(String relationshipKey) { + this.relationshipKey = relationshipKey; + } + + @JsonProperty("relationship-value") + public String getRelationshipValue() { + return relationshipValue; + } + + public void setRelationshipValue(String relationshipValue) { + this.relationshipValue = relationshipValue; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "RelationshipData [relationshipKey=" + relationshipKey + ", relationshipValue=" + + relationshipValue + "]"; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipDirectionality.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipDirectionality.java new file mode 100644 index 0000000..13d0537 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipDirectionality.java @@ -0,0 +1,42 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +/** + * This enumeration is intended to be used to help us discriminate neighbor relationships for the + * purpose of visualization and conceptualization to model in/out relationships between + * ActiveInventoryNodes. + * Possible visualization behaviors could be the following: - IN ( draw a line with 1 arrow ) - OUT + * ( draw a line with 1 arrow ) - BOTH ( draw a line with 2 arrows, or 2 lines with 1 arrow each ) - + * UNKNOWN ( draw a line with no arrows ) + * The UNKNOWN case is what we have at the moment where we have a collection neighbors with no + * knowledge of relationship directionality. 
+ * + * @author davea + * + */ +public enum RelationshipDirectionality { + IN, OUT, BOTH, UNKNOWN +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipList.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipList.java new file mode 100644 index 0000000..9c81a3d --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipList.java @@ -0,0 +1,57 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +import java.util.Arrays; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * The Class RelationshipList. + */ +public class RelationshipList { + + protected Relationship[] relationship; + + public Relationship[] getRelationshipList() { + return relationship; + } + + @JsonProperty("relationship") + public void setRelationshipList(Relationship[] relationship) { + this.relationship = relationship; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "RelationshipList [relationshipList=" + Arrays.toString(relationship) + "]"; + } + + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SearchableEntityList.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SearchableEntityList.java new file mode 100644 index 0000000..d853673 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SearchableEntityList.java @@ -0,0 +1,116 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.onap.aai.sparky.sync.entity.SearchableEntity; + +import java.util.Set; + +public class SearchableEntityList { + + private List<SearchableEntity> entities; + + public SearchableEntityList() { + entities = new ArrayList<SearchableEntity>(); + } + + public List<SearchableEntity> getEntities() { + return entities; + } + + public void setEntities(List<SearchableEntity> entities) { + this.entities = entities; + } + + public void addEntity(SearchableEntity entity) { + + if ( !entities.contains(entity)) { + entities.add(entity); + } + + } + + protected static SearchableEntity buildEntity(String entityType, String pkeyValue, String link, Map<String,String> searchTags ) { + + SearchableEntity se = new SearchableEntity(); + + se.setEntityType(entityType); + se.setEntityPrimaryKeyValue(pkeyValue); + se.setLink(link); + + if ( searchTags != null) { + + Set<Entry<String, String>> entrySet = searchTags.entrySet(); + + for ( Entry<String, String> entry : entrySet ) { + se.addSearchTagWithKey(entry.getKey(), entry.getValue()); + } + } + + se.deriveFields(); + + return se; + + } + + protected static Map<String,String> getSearchTagMap(String... tags) { + + HashMap<String,String> dataMap = new HashMap<String,String>(); + + if ( tags != null && tags.length >= 2 ) { + + int numTags = tags.length; + int index = 0; + + while ( index < numTags ) { + + if ( index + 1 < numTags ) { + // we have enough parameters for the current set + dataMap.put(tags[index], tags[index+1]); + index += 2; + } else { + break; + } + } + + } + + return dataMap; + + + } + + @Override + public String toString() { + return "SearchableEntityList [" + (entities != null ? "entities=" + entities : "") + "]"; + } + +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SelfLinkDeterminationTransaction.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SelfLinkDeterminationTransaction.java new file mode 100644 index 0000000..d69994b --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SelfLinkDeterminationTransaction.java @@ -0,0 +1,80 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +import org.onap.aai.restclient.client.OperationResult; + +public class SelfLinkDeterminationTransaction { + + private String parentNodeId; + private ActiveInventoryNode newNode; + private String queryString; + private String entityUrl; + private OperationResult opResult; + + + public String getParentNodeId() { + return parentNodeId; + } + + public void setParentNodeId(String parentNodeId) { + this.parentNodeId = parentNodeId; + } + + public ActiveInventoryNode getNewNode() { + return newNode; + } + + public void setNewNode(ActiveInventoryNode newNode) { + this.newNode = newNode; + } + + public OperationResult getOpResult() { + return opResult; + } + + public void setOpResult(OperationResult opResult) { + this.opResult = opResult; + } + + public String getQueryString() { + return queryString; + } + + public void setQueryString(String queryString) { + this.queryString = queryString; + } + + public String getEntityUrl() { + return entityUrl; + } + + public void setEntityUrl(String entityUrl) { + this.entityUrl = entityUrl; + } + + + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphLink.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphLink.java new file mode 100644 index 0000000..9b6e4e9 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphLink.java @@ -0,0 +1,75 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.entity; + +/* + * Expected JSON Output: + * + * { JsonNodeLink : { id : <value>, source : <value>, target : <value> } } + * + */ + +/** + * The Class JsonNodeLink. + */ +public class SparkyGraphLink { + + protected String id; + protected String source; + protected String target; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getSource() { + return source; + } + + public void setSource(String source) { + this.source = source; + } + + public String getTarget() { + return target; + } + + public void setTarget(String target) { + this.target = target; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "JsonNodeLink [id=" + id + ", source=" + source + ", target=" + target + "]"; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphNode.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphNode.java new file mode 100644 index 0000000..5171eaf --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphNode.java @@ -0,0 +1,248 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.viewandinspect.entity; + +import java.io.IOException; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.aggregatevnf.search.AggregateSummaryProcessor; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.subscription.config.SubscriptionConfig; +import org.onap.aai.sparky.subscription.payload.entity.ObjectInspectorPayload; +import org.onap.aai.sparky.viewandinspect.config.SparkyConstants; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; + +import com.fasterxml.jackson.annotation.JsonIgnore; + +/* + * We can use annotations to differentiate between intermediate data we use to build the node, and + * the data that we actually want to appear in the exported JSON. + */ + +/* + * This is our current ( 14-June-2016 ) working schema that will remain organic until we get it just + * right. + * + * { "item-type": "customer", "item-name-key": "subscriber-name", “item-name-value” : + * “subscriber-name-123456789-aai847-data-01”, "item-properties": [{ "property-name": + * "subscriber-name", "property-value": "subscriber-name-123456789-aai847-data-01" }, { + * "property-name": "global-customer-id", "property-value": + * "global-customer-id-123456789-aai847-data-01" } ], "node-meta": { “color” : “#f2d2d2”, + * "isSearchTarget" : false, "nodeGroups" : "1,2,3,4" }, } + * + */ + + +/** + * The Class JsonNode. + */ +public class SparkyGraphNode { + + private String id; + private String itemType; + private String itemNameKey; + private String itemNameValue; + private Map<String, String> itemProperties; + private NodeMeta nodeMeta; + private ObjectInspectorPayload externalResourcePayload; + + @JsonIgnore + private boolean isRootNode; + + + @JsonIgnore + private String resourceKey; + @JsonIgnore + private Collection<String> inboundNeighbors; + + @JsonIgnore + private Collection<String> outboundNeighbors; + + + @JsonIgnore + private static final Logger LOG = LoggerFactory.getInstance().getLogger(SparkyGraphNode.class); + + private VisualizationConfigs visualizationConfigs; + private SubscriptionConfig subConfig; + + /** + * Instantiates a new SparkyGraphNode. 
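+   * Copies the node id, entity type, primary key and neighbor collections from the
+   * ActiveInventoryNode, and derives the NodeMeta flags (issue, validated, depth, neighbor
+   * counts, processing state) that drive the visualization.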
+ * + * @param ain the ain + */ + public SparkyGraphNode(ActiveInventoryNode ain, VisualizationConfigs visualizationConfigs, SubscriptionConfig subConfig) { + this.resourceKey = ain.getNodeId(); + this.itemProperties = ain.getProperties(); + this.setItemType(ain.getEntityType()); + this.setItemNameKey(ain.getPrimaryKeyName()); + this.setItemNameValue(ain.getPrimaryKeyValue()); + this.setId(ain.getNodeId()); + this.isRootNode = ain.isRootNode(); + this.visualizationConfigs = visualizationConfigs; + this.setSubConfig(subConfig); + + if (LOG.isDebugEnabled()) { + LOG.debug("---"); + LOG.debug("JsonNode constructor using AIN = " + ain.dumpNodeTree(true)); + LOG.debug("---"); + } + + inboundNeighbors = ain.getInboundNeighbors(); + outboundNeighbors = ain.getOutboundNeighbors(); + + nodeMeta = new NodeMeta(this.visualizationConfigs); + + nodeMeta.setNodeIssue(ain.isNodeIssue()); + nodeMeta.setNodeValidated(ain.isNodeValidated()); + nodeMeta.setNodeDepth(ain.getNodeDepth()); + + nodeMeta.setNumInboundNeighbors(ain.getInboundNeighbors().size()); + nodeMeta.setNumOutboundNeighbors(ain.getOutboundNeighbors().size()); + + nodeMeta.setAtMaxDepth(ain.isAtMaxDepth()); + nodeMeta.setSelfLinkResolved(!ain.isSelflinkRetrievalFailure()); + nodeMeta.setProcessingErrorOccurred(ain.isProcessingErrorOccurred()); + nodeMeta.setHasNeighbors( + ain.getOutboundNeighbors().size() > 0 || ain.getInboundNeighbors().size() > 0); + + if (subConfig.getIsLaunchOIEnabled()) { + try { + Collection<String> entityTypes = subConfig.getAnnEntitiyTypes(); + for (String entityType : entityTypes) { + if (entityType.equals(this.getItemType())) { + ObjectInspectorPayload lic = ObjectInspectorPayload.getOIPayload(subConfig); + lic.getMessage().getPayload().getParams().setObjectName(this.getItemNameValue()); + this.setExternalResourcePayload(lic); + break; + } + } + } catch (IOException e) { + String message = "Could not map JSON to object " + "Attempted to convert: " + + SparkyConstants.SUBSCRIPTION_OI_MAPPING + ". 
Error: " + e.getLocalizedMessage(); + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message); + } + } + nodeMeta.setProcessingState(ain.getState()); + + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getItemNameKey() { + return itemNameKey; + } + + public String getItemNameValue() { + return itemNameValue; + } + + public Map<String, String> getItemProperties() { + return itemProperties; + } + + public String getItemType() { + return itemType; + } + + public String getResourceKey() { + return resourceKey; + } + + public void setItemNameKey(String itemNameKey) { + this.itemNameKey = itemNameKey; + } + + public void setItemNameValue(String itemNameValue) { + this.itemNameValue = itemNameValue; + } + + public void setItemProperties(HashMap<String, String> itemProperties) { + this.itemProperties = itemProperties; + } + + public void setItemType(String itemType) { + this.itemType = itemType; + } + + public void setResourceKey(String resourceKey) { + this.resourceKey = resourceKey; + } + + public NodeMeta getNodeMeta() { + return nodeMeta; + } + + public void setNodeMeta(NodeMeta nodeMeta) { + this.nodeMeta = nodeMeta; + } + + public boolean isRootNode() { + return isRootNode; + } + + public ObjectInspectorPayload getExternalResourcePayload() { + return externalResourcePayload; + } + + public void setExternalResourcePayload(ObjectInspectorPayload externalResourcePayload) { + this.externalResourcePayload = externalResourcePayload; + } + + public SubscriptionConfig getSubConfig() { + return subConfig; + } + + public void setSubConfig(SubscriptionConfig subConfig) { + this.subConfig = subConfig; + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "JsonNode [" + (id != null ? "id=" + id + ", " : "") + + (itemType != null ? "itemType=" + itemType + ", " : "") + + (itemNameKey != null ? "itemNameKey=" + itemNameKey + ", " : "") + + (itemNameValue != null ? "itemNameValue=" + itemNameValue + ", " : "") + + (itemProperties != null ? "itemProperties=" + itemProperties + ", " : "") + + (nodeMeta != null ? "nodeMeta=" + nodeMeta + ", " : "") + + (resourceKey != null ? "resourceKey=" + resourceKey + ", " : "") + + (inboundNeighbors != null ? "inboundNeighbors=" + inboundNeighbors + ", " : "") + + (outboundNeighbors != null ? "outboundNeighbors=" + outboundNeighbors : "") + "]"; + } +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/enumeration/NodeProcessingAction.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/enumeration/NodeProcessingAction.java new file mode 100644 index 0000000..5c6cdd8 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/enumeration/NodeProcessingAction.java @@ -0,0 +1,36 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.enumeration; + +/** + * The Enum NodeProcessingAction. + */ +public enum NodeProcessingAction { + SELF_LINK_SET, NEW_NODE_PROCESSED, SELF_LINK_RESOLVE_ERROR, SELF_LINK_DETERMINATION_ERROR, + SELF_LINK_RESOLVE_OK, SELF_LINK_RESPONSE_PARSE_ERROR, SELF_LINK_RESPONSE_PARSE_OK, + NEIGHBORS_PROCESSED_ERROR, NEIGHBORS_PROCESSED_OK, COMPLEX_ATTRIBUTE_GROUP_PARSE_ERROR, + COMPLEX_ATTRIBUTE_GROUP_PARSE_OK, NODE_IDENTITY_ERROR,UNEXPECTED_STATE_TRANSITION +} + diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/enumeration/NodeProcessingState.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/enumeration/NodeProcessingState.java new file mode 100644 index 0000000..18673ef --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/enumeration/NodeProcessingState.java @@ -0,0 +1,31 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.enumeration; + +/** + * The Enum NodeProcessingState. + */ +public enum NodeProcessingState { + INIT, SELF_LINK_UNRESOLVED, SELF_LINK_RESPONSE_UNPROCESSED, NEIGHBORS_UNPROCESSED, READY, ERROR} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/search/ViewInspectSearchProvider.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/search/ViewInspectSearchProvider.java new file mode 100644 index 0000000..c0a7711 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/search/ViewInspectSearchProvider.java @@ -0,0 +1,426 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. 
+ * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.search; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.common.search.CommonSearchSuggestion; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.search.SearchServiceAdapter; +import org.onap.aai.sparky.search.api.SearchProvider; +import org.onap.aai.sparky.search.config.SuggestionConfig; +import org.onap.aai.sparky.search.entity.QuerySearchEntity; +import org.onap.aai.sparky.search.entity.SearchSuggestion; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.viewandinspect.config.SparkyConstants; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; + +public class ViewInspectSearchProvider implements SearchProvider { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(ViewInspectSearchProvider.class); + + private SearchServiceAdapter searchServiceAdapter = null; + private SuggestionConfig suggestionConfig; + private String additionalSearchSuggestionText; + + private static final String KEY_SEARCH_RESULT = "searchResult"; + private static final String KEY_HITS = "hits"; + private static final String KEY_DOCUMENT = "document"; + private static final String KEY_CONTENT = "content"; + + private static final String KEY_SEARCH_TAG_IDS = "searchTagIDs"; + private static final String KEY_SEARCH_TAGS = "searchTags"; + private static final String KEY_LINK = "link"; + private static final String KEY_ENTITY_TYPE = "entityType"; + + private final String viewInspectIndexName; + private final String viewInspectSuggestionRoute; + private OxmEntityLookup oxmEntityLookup; + + public ViewInspectSearchProvider(SearchServiceAdapter searchServiceAdapter, + SuggestionConfig suggestionConfig, String viewInspectIndexName, + String viewInspectSuggestionRoute, OxmEntityLookup oxmEntityLookup) throws Exception { + + this.searchServiceAdapter = searchServiceAdapter; + this.oxmEntityLookup = oxmEntityLookup; + this.suggestionConfig = suggestionConfig; + additionalSearchSuggestionText = null; + this.viewInspectIndexName = viewInspectIndexName; + this.viewInspectSuggestionRoute = viewInspectSuggestionRoute; + + } + + @Override + 
public List<SearchSuggestion> search(QuerySearchEntity queryRequest) { + + List<SearchSuggestion> suggestionEntityList = new ArrayList<SearchSuggestion>(); + + /* + * Based on the configured stop words, we need to strip any matched stop-words ( case + * insensitively ) from the query string, before hitting elastic to prevent the words from being + * used against the elastic view-and-inspect index. Another alternative to this approach would + * be to define stop words on the elastic search index configuration for the + * entity-search-index, but but that may be more complicated / more risky than just a simple bug + * fix, but it's something we should think about for the future. + */ + + try { + final String queryStringWithoutStopWords = + stripStopWordsFromQuery(queryRequest.getQueryStr()); + + final String fullUrlStr = searchServiceAdapter.buildSearchServiceQueryUrl(viewInspectIndexName); + + String postBody = String.format(VIUI_SEARCH_TEMPLATE, Integer.parseInt(queryRequest.getMaxResults()), + queryStringWithoutStopWords); + + OperationResult opResult = searchServiceAdapter.doPost(fullUrlStr, postBody, "application/json"); + if (opResult.getResultCode() == 200) { + suggestionEntityList = + generateSuggestionsForSearchResponse(opResult.getResult(), queryRequest.getQueryStr()); + } + } catch (Exception exc) { + LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, + "View and inspect query failed with error = " + exc.getMessage()); + } + return suggestionEntityList; + + + } + + public String getAdditionalSearchSuggestionText() { + return additionalSearchSuggestionText; + } + + public void setAdditionalSearchSuggestionText(String additionalSearchSuggestionText) { + this.additionalSearchSuggestionText = additionalSearchSuggestionText; + } + + + + + /** + * Builds the search response. + * + * @param operationResult The Elasticsearch query result + * @param queryStr The string the user typed into the search bar + * @return A list of search suggestions and corresponding UI filter values + */ + private List<SearchSuggestion> generateSuggestionsForSearchResponse(String operationResult, + String queryStr) { + + + if (operationResult == null || operationResult.length() == 0) { + return null; + } + + ObjectMapper mapper = new ObjectMapper(); + JsonNode rootNode = null; + List<SearchSuggestion> suggestionEntityList = new ArrayList<SearchSuggestion>(); + try { + rootNode = mapper.readTree(operationResult); + + JsonNode hitsNode = rootNode.get(KEY_SEARCH_RESULT); + + + + // Check if there are hits that are coming back + if (hitsNode.has(KEY_HITS)) { + ArrayNode hitsArray = (ArrayNode) hitsNode.get(KEY_HITS); + + /* + * next we iterate over the values in the hit array elements + */ + + Iterator<JsonNode> nodeIterator = hitsArray.elements(); + JsonNode entityNode = null; + CommonSearchSuggestion suggestionEntity = null; + JsonNode sourceNode = null; + while (nodeIterator.hasNext()) { + entityNode = nodeIterator.next(); + sourceNode = entityNode.get(KEY_DOCUMENT).get(KEY_CONTENT); + + // do the point transformation as we build the response? 
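+          // build one suggestion per hit: route it to view-and-inspect, hash the self-link into a hashId,
+          // and annotate the search tags for display in the UI search bar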
+ suggestionEntity = new CommonSearchSuggestion(); + suggestionEntity.setRoute(viewInspectSuggestionRoute); + + /* + * This is where we probably want to annotate the search tags because we also have access + * to the seachTagIds + */ + + String searchTagIds = getValueFromNode(sourceNode, KEY_SEARCH_TAG_IDS); + String searchTags = getValueFromNode(sourceNode, KEY_SEARCH_TAGS); + String entityType = getValueFromNode(sourceNode, KEY_ENTITY_TYPE); + String link = getValueFromNode(sourceNode, KEY_LINK); + + if (link != null) { + suggestionEntity.setHashId(NodeUtils.generateUniqueShaDigest(link)); + } + + try { + suggestionEntity + .setText(annotateSearchTags(searchTags, searchTagIds, entityType, queryStr)); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, searchTags.toString(), + exc.getLocalizedMessage()); + // at least send back the un-annotated search tags + suggestionEntity.setText(searchTags); + } + + if ( getAdditionalSearchSuggestionText() != null ) { + String suggestionText = suggestionEntity.getText() ; + suggestionText += SparkyConstants.SUGGESTION_TEXT_SEPARATOR + + getAdditionalSearchSuggestionText(); + suggestionEntity.setText(suggestionText); + } + + if (searchTags != null) { + suggestionEntityList.add(suggestionEntity); + } + + } + } + } catch (IOException exc) { + LOG.warn(AaiUiMsgs.SEARCH_RESPONSE_BUILDING_EXCEPTION, exc.getLocalizedMessage()); + } + return suggestionEntityList; + } + + + + /** + * The current format of an UI-dropdown-item is like: "search-terms entityType att1=attr1_val". + * Example, for pserver: search-terms pserver hostname=djmAG-72060, + * pserver-name2=example-pserver-name2-val-17254, pserver-id=example-pserver-id-val-17254, + * ipv4-oam-address=example-ipv4-oam-address-val-17254 SearchController.js parses the above + * format. So if you are modifying the parsing below, please update SearchController.js as well. 
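+   * Falls back to returning the original (un-annotated) searchTags value whenever a required
+   * input is missing or the entity type is not found in the OXM lookup.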
+   *
+   * @param searchTags the search tags
+   * @param searchTagIds the search tag ids
+   * @param entityType the entity type
+   * @param queryStr the query str
+   * @return the annotated search tags string
+   */
+
+  private String annotateSearchTags(String searchTags, String searchTagIds, String entityType,
+      String queryStr) {
+
+    if (searchTags == null || searchTagIds == null) {
+      String valueOfSearchTags = String.valueOf(searchTags);
+      String valueOfSearchTagIds = String.valueOf(searchTagIds);
+
+      LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, "See error",
+          "Search tags = " + valueOfSearchTags + " and Search tag IDs = " + valueOfSearchTagIds);
+      return searchTags;
+    }
+
+    if (entityType == null) {
+      LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, searchTags.toString(), "EntityType is null");
+      return searchTags;
+    }
+
+    if (queryStr == null) {
+      LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, searchTags.toString(),
+          "Query string is null");
+      return searchTags;
+    }
+
+    /*
+     * The ElasticSearch analyzer has already applied the lowercase filter, so we don't have to
+     * convert them again
+     */
+    String[] searchTagsArray = searchTags.split(";");
+    String[] searchTagIdsArray = searchTagIds.split(";");
+
+    // specifically apply lower case to the query terms to make matching
+    // simpler
+    String[] queryTerms = queryStr.toLowerCase().split(" ");
+
+    OxmEntityDescriptor desc = oxmEntityLookup.getEntityDescriptors().get(entityType);
+
+    if (desc == null) {
+      LOG.error(AaiUiMsgs.ENTITY_NOT_FOUND_IN_OXM, entityType.toString());
+      return searchTags;
+    }
+
+    String primaryKeyName = NodeUtils.concatArray(desc.getPrimaryKeyAttributeNames(), "/");
+    String primaryKeyValue = null;
+
+    /*
+     * For each used attribute, get the fieldName for the attribute index and transform the search
+     * tag into t1,t2,t3 => h1=t1, h2=t2, h3=t3;
+     */
+    StringBuilder searchTagsBuilder = new StringBuilder(128);
+    searchTagsBuilder.append(entityType);
+
+    String primaryKeyConjunctionValue = null;
+    boolean queryTermsMatchedSearchTags = false;
+
+    if (searchTagsArray.length == searchTagIdsArray.length) {
+      for (int i = 0; i < searchTagsArray.length; i++) {
+        String searchTagAttributeId = searchTagIdsArray[i];
+        String searchTagAttributeValue = searchTagsArray[i];
+
+        // Find the concat conjunction
+        Map<String, String> pairConjunctionList = suggestionConfig.getPairingList();
+
+        String suggConjunction = null;
+        if (pairConjunctionList.get(searchTagAttributeId) != null) {
+          suggConjunction = pairConjunctionList.get(searchTagAttributeId);
+        } else {
+          suggConjunction = suggestionConfig.getDefaultPairingValue();
+        }
+
+        if (primaryKeyName.equals(searchTagAttributeId)) {
+          primaryKeyValue = searchTagAttributeValue;
+          primaryKeyConjunctionValue = suggConjunction;
+        }
+
+        if (queryTermsMatchSearchTag(queryTerms, searchTagAttributeValue)) {
+          searchTagsBuilder.append(" " + suggConjunction + " " + searchTagAttributeValue);
+          queryTermsMatchedSearchTags = true;
+        }
+      }
+    } else {
+      String errorMessage =
+          "Search tags length did not match search tag ID length for entity type " + entityType;
+      LOG.error(AaiUiMsgs.ENTITY_SYNC_SEARCH_TAG_ANNOTATION_FAILED, errorMessage);
+    }
+
+
+
+    /*
+     * if none of the user query terms matched the index entity search tags then we should still tag
+     * the matched entity with a conjunction set to at least its entity primary key value to
+     * discriminate between the entities of the same type in the search results displayed in the UI
+     * search bar results
+     */
+
+    if (!queryTermsMatchedSearchTags) {
+
+      if (primaryKeyValue
!= null && primaryKeyConjunctionValue != null) { + searchTagsBuilder.append(" " + primaryKeyConjunctionValue + " " + primaryKeyValue); + } else { + LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, "See error", + "Could not annotate user query terms " + queryStr + + " from available entity search tags = " + searchTags); + return searchTags; + } + + } + + return searchTagsBuilder.toString(); + + } + + /** + * Query terms match search tag. + * + * @param queryTerms the query terms + * @param searchTag the search tag + * @return true, if successful @return. + */ + private boolean queryTermsMatchSearchTag(String[] queryTerms, String searchTag) { + + if (queryTerms == null || queryTerms.length == 0 || searchTag == null) { + return false; + } + + for (String queryTerm : queryTerms) { + if (searchTag.toLowerCase().contains(queryTerm.toLowerCase())) { + return true; + } + } + + return false; + + } + + /** + * Gets the value from node. + * + * @param node the node + * @param fieldName the field name + * @return the value from node + */ + private String getValueFromNode(JsonNode node, String fieldName) { + + if (node == null || fieldName == null) { + return null; + } + + JsonNode valueNode = node.get(fieldName); + + if (valueNode != null) { + return valueNode.asText(); + } + + return null; + + } + + private static final String VIUI_SEARCH_TEMPLATE = + "{ " + "\"results-start\": 0," + "\"results-size\": %d," + "\"queries\": [{" + "\"must\": {" + + "\"match\": {" + "\"field\": \"entityType searchTags crossEntityReferenceValues\"," + + "\"value\": \"%s\"," + "\"operator\": \"and\", " + + "\"analyzer\": \"whitespace_analyzer\"" + "}" + "}" + "}]" + "}"; + + //private SuggestionConfig suggestionConfig = null; + + /** + * @param queryStr - space separate query search terms + * @return - query string with stop-words removed + */ + private String stripStopWordsFromQuery(String queryStr) { + + if (queryStr == null) { + return queryStr; + } + + Collection<String> stopWords = suggestionConfig.getStopWords(); + ArrayList<String> queryTerms = + new ArrayList<String>(Arrays.asList(queryStr.toLowerCase().split(" "))); + + queryTerms.removeAll(stopWords); + + return String.join(" ", queryTerms); + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseGizmoVisualizationContext.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseGizmoVisualizationContext.java new file mode 100644 index 0000000..d0cabfe --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseGizmoVisualizationContext.java @@ -0,0 +1,990 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.services; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.atomic.AtomicInteger; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.dal.GizmoAdapter; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.entity.SearchableEntity; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.viewandinspect.config.SparkyConstants; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; +import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode; +import org.onap.aai.sparky.viewandinspect.entity.GizmoEntity; +import org.onap.aai.sparky.viewandinspect.entity.GizmoRelationshipEntity; +import org.onap.aai.sparky.viewandinspect.entity.GizmoRelationshipHint; +import org.onap.aai.sparky.viewandinspect.entity.InlineMessage; +import org.onap.aai.sparky.viewandinspect.entity.NodeProcessingTransaction; +import org.onap.aai.sparky.viewandinspect.entity.QueryParams; +import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingAction; +import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingState; +import org.onap.aai.sparky.viewandinspect.task.PerformGizmoNodeSelfLinkProcessingTask; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.PropertyNamingStrategy; + +/** + * The Class SelfLinkNodeCollector. + */ +public class BaseGizmoVisualizationContext implements VisualizationContext { + + private static final int MAX_DEPTH_EVALUATION_ATTEMPTS = 100; + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(BaseGizmoVisualizationContext.class); + + private final GizmoAdapter gizmoAdapter; + + private AtomicInteger numLinksDiscovered; + private AtomicInteger numSuccessfulLinkResolveFromCache; + private AtomicInteger numSuccessfulLinkResolveFromFromServer; + private AtomicInteger numFailedLinkResolve; + private AtomicInteger aaiWorkOnHand; + + private VisualizationConfigs visualizationConfigs; + + private AtomicInteger totalLinksRetrieved; + + private final long contextId; + private final String contextIdStr; + private long lastProcessStatesSummaryLogInMs = -1; + + + private ObjectMapper mapper; + private InlineMessage inlineMessage = null; + + private ExecutorService graphExecutorService; + private OxmEntityLookup oxmEntityLookup; + private boolean rootNodeFound; + + /* + * The node cache is intended to be a flat structure indexed by a primary key to avoid needlessly + * re-requesting the same self-links over-and-over again, to speed up the overall render time and + * more importantly to reduce the network cost of determining information we already have. 
+ */ + private ConcurrentHashMap<String, ActiveInventoryNode> nodeCache; + + /** + * Instantiates a new self link node collector. + * + * @param loader the loader + * @throws Exception the exception + */ + public BaseGizmoVisualizationContext(long contextId, GizmoAdapter gizmoAdapter, + ExecutorService graphExecutorService, VisualizationConfigs visualizationConfigs, + OxmEntityLookup oxmEntityLookup) throws Exception { + + this.contextId = contextId; + this.contextIdStr = "[Context-Id=" + contextId + "]"; + this.gizmoAdapter = gizmoAdapter; + this.graphExecutorService = graphExecutorService; + this.visualizationConfigs = visualizationConfigs; + this.oxmEntityLookup = oxmEntityLookup; + + this.nodeCache = new ConcurrentHashMap<String, ActiveInventoryNode>(); + this.numLinksDiscovered = new AtomicInteger(0); + this.totalLinksRetrieved = new AtomicInteger(0); + this.numSuccessfulLinkResolveFromCache = new AtomicInteger(0); + this.numSuccessfulLinkResolveFromFromServer = new AtomicInteger(0); + this.numFailedLinkResolve = new AtomicInteger(0); + this.aaiWorkOnHand = new AtomicInteger(0); + + this.mapper = new ObjectMapper(); + mapper.setSerializationInclusion(Include.NON_EMPTY); + mapper.setPropertyNamingStrategy(new PropertyNamingStrategy.KebabCaseStrategy()); + this.rootNodeFound = false; + } + + protected boolean isRootNodeFound() { + return rootNodeFound; + } + + protected void setRootNodeFound(boolean rootNodeFound) { + this.rootNodeFound = rootNodeFound; + } + + public long getContextId() { + return contextId; + } + + public GizmoAdapter getGizmoAdapter() { + return gizmoAdapter; + } + + /** + * Process self link response. + * + * @param nodeId the node id + */ + private void processSelfLinkResponse(String nodeId) { + + if (nodeId == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, + "Cannot process self link" + " response because nodeId is null"); + return; + } + + ActiveInventoryNode ain = nodeCache.get(nodeId); + + if (ain == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, + "Cannot process self link response" + " because can't find node for id = " + nodeId); + return; + } + + GizmoEntity gizmoEntity = null; + + try { + gizmoEntity = mapper.readValue(ain.getOpResult().getResult(), GizmoEntity.class); + } catch (Exception exc) { + exc.printStackTrace(); + LOG.error(AaiUiMsgs.SELF_LINK_JSON_PARSE_ERROR, "Failed to marshal json" + + " response str into JsonNode with error, " + exc.getLocalizedMessage()); + ain.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_ERROR); + return; + } + + if (gizmoEntity == null) { + + LOG.error(AaiUiMsgs.SELF_LINK_JSON_PARSE_ERROR, + "Failed to parse json node str." 
+ " Parse resulted a null value."); + ain.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_ERROR); + return; + } + + /* + * Now that we have the gizmo entity we can populate the AIN node with it, as well as the + * relationships + */ + + ain.setEntityType(gizmoEntity.getType()); + + ain.setPrimaryKeyName(getEntityTypePrimaryKeyName(gizmoEntity.getType())); + + OxmEntityDescriptor descriptor = oxmEntityLookup.getEntityDescriptors().get(gizmoEntity); + + if (descriptor != null) { + ain.setPrimaryKeyValue(getPrimaryKeyValues(gizmoEntity.getProperties(), + descriptor.getPrimaryKeyAttributeNames())); + } else { + LOG.error(AaiUiMsgs.ERROR_GENERIC, "Could not determine oxm descriptor for entity type = " + gizmoEntity.getType()); + } + + gizmoEntity.getProperties().forEach((key, value) -> { + ain.getProperties().put(key, value); + }); + + // add edit attributes link + if (ain.getSelfLink() != null) { + ain.addProperty(SparkyConstants.URI_ATTR_NAME, ain.getSelfLink()); + } + + + + /* + * Only discover neighbors if our depth is less than the Max-Traversal-Depth + */ + + if (ain.getNodeDepth() < this.visualizationConfigs.getMaxSelfLinkTraversalDepth()) { + + /* + * I think the next thing to do is: + * + * 1. Calculate the source / target node id 2. Add the nodeId to the incoming / outgoing links + * collection 3. Add the node to the node cache for processing + */ + + String resourceLink = null; + String relationshipNodeId = null; + ActiveInventoryNode relationshipNode = null; + + for (GizmoRelationshipHint inRelationship : gizmoEntity.getIn()) { + + if (inRelationship.getSource() != null) { + + resourceLink = NodeUtils.extractRawGizmoPathWithoutVersion(inRelationship.getSource()); + relationshipNodeId = NodeUtils.generateUniqueShaDigest(resourceLink); + + if (!nodeCache.containsKey(relationshipNodeId)) { + + relationshipNode = new ActiveInventoryNode(visualizationConfigs, oxmEntityLookup); + relationshipNode.setNodeId(relationshipNodeId); + relationshipNode.setSelfLink(resourceLink); + relationshipNode.changeState(NodeProcessingState.SELF_LINK_UNRESOLVED, + NodeProcessingAction.NEW_NODE_PROCESSED); + + ain.addInboundNeighbor(relationshipNodeId); + + addNode(relationshipNode); + + } + } + + } + + for (GizmoRelationshipHint outRelationship : gizmoEntity.getOut()) { + + if (outRelationship.getTarget() != null) { + + resourceLink = NodeUtils.extractRawGizmoPathWithoutVersion(outRelationship.getTarget()); + relationshipNodeId = NodeUtils.generateUniqueShaDigest(resourceLink); + + if (!nodeCache.containsKey(relationshipNodeId)) { + + relationshipNode = new ActiveInventoryNode(visualizationConfigs, oxmEntityLookup); + relationshipNode.setNodeId(relationshipNodeId); + relationshipNode.setSelfLink(resourceLink); + relationshipNode.changeState(NodeProcessingState.SELF_LINK_UNRESOLVED, + NodeProcessingAction.NEW_NODE_PROCESSED); + + ain.addOutboundNeighbor(relationshipNodeId); + + addNode(relationshipNode); + + } + } + + } + } + + ain.changeState(NodeProcessingState.READY, NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_OK); + + } + + /** + * Perform self link resolve. 
+ * + * @param nodeId the node id + */ + private void performSelfLinkResolve(String nodeId) { + + if (nodeId == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, + "Resolve of self-link" + " has been skipped because provided nodeId is null"); + return; + } + + ActiveInventoryNode ain = nodeCache.get(nodeId); + + if (ain == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Failed to find node with id, " + nodeId + + ", from node cache. Resolve self-link method has been skipped."); + return; + } + + if (!ain.isSelfLinkPendingResolve()) { + + ain.setSelfLinkPendingResolve(true); + + // kick off async self-link resolution + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "About to process node in SELF_LINK_UNPROCESSED State, link = " + ain.getSelfLink()); + } + + numLinksDiscovered.incrementAndGet(); + + /* + * If the current node is the search target, we want to see everything the node has to offer + * from the self-link and not filter it to a single node. + */ + + NodeProcessingTransaction txn = new NodeProcessingTransaction(); + txn.setProcessingNode(ain); + txn.setRequestParameters(null); + aaiWorkOnHand.incrementAndGet(); + supplyAsync(new PerformGizmoNodeSelfLinkProcessingTask(txn, null, gizmoAdapter), + graphExecutorService).whenComplete((nodeTxn, error) -> { + + if (error != null) { + + /* + * an error processing the self link should probably result in the node processing + * state shifting to ERROR + */ + + nodeTxn.getProcessingNode().setSelflinkRetrievalFailure(true); + + nodeTxn.getProcessingNode().changeState(NodeProcessingState.ERROR, + NodeProcessingAction.SELF_LINK_RESOLVE_ERROR); + + nodeTxn.getProcessingNode().setSelfLinkPendingResolve(false); + + } else { + + totalLinksRetrieved.incrementAndGet(); + + OperationResult opResult = nodeTxn.getOpResult(); + + if (opResult != null && opResult.wasSuccessful()) { + + if (!opResult.wasSuccessful()) { + numFailedLinkResolve.incrementAndGet(); + } + + if (opResult.isFromCache()) { + numSuccessfulLinkResolveFromCache.incrementAndGet(); + } else { + numSuccessfulLinkResolveFromFromServer.incrementAndGet(); + } + + // success path + nodeTxn.getProcessingNode().setOpResult(opResult); + nodeTxn.getProcessingNode().changeState( + NodeProcessingState.SELF_LINK_RESPONSE_UNPROCESSED, + NodeProcessingAction.SELF_LINK_RESOLVE_OK); + + nodeTxn.getProcessingNode().setSelfLinkProcessed(true); + nodeTxn.getProcessingNode().setSelfLinkPendingResolve(false); + + } else { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, + "Self Link retrieval for link," + txn.getSelfLinkWithModifiers() + + ", failed with error code," + nodeTxn.getOpResult().getResultCode() + + ", and message," + nodeTxn.getOpResult().getResult()); + + nodeTxn.getProcessingNode().setSelflinkRetrievalFailure(true); + nodeTxn.getProcessingNode().setSelfLinkProcessed(true); + + nodeTxn.getProcessingNode().changeState(NodeProcessingState.ERROR, + NodeProcessingAction.SELF_LINK_RESOLVE_ERROR); + + nodeTxn.getProcessingNode().setSelfLinkPendingResolve(false); + + } + } + + aaiWorkOnHand.decrementAndGet(); + + }); + + } + + } + + public GizmoRelationshipEntity getGizmoRelationshipEntity(String gizmoJsonResponse) { + + GizmoRelationshipEntity gizmoRelationship = null; + try { + gizmoRelationship = mapper.readValue(gizmoJsonResponse, GizmoRelationshipEntity.class); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, "Failed to map json to GizmoRelationshipEntity. 
Error: " + exc.getMessage()); + } + + return gizmoRelationship; + + } + + public String getPrimaryKeyValues(Map<String, String> properties, List<String> pkeyNames) { + + StringBuilder sb = new StringBuilder(64); + + if (pkeyNames.size() > 0) { + String primaryKey = properties.get(pkeyNames.get(0)); + if (primaryKey != null) { + sb.append(primaryKey); + } else { + // this should be a fatal error because unless we can + // successfully retrieve all the expected keys we'll end up + // with a garbage node + LOG.error(AaiUiMsgs.EXTRACTION_ERROR, "ERROR: Failed to extract" + " keyName, " + + pkeyNames.get(0) + ", from properties , " + properties); + return null; + } + + for (int i = 1; i < pkeyNames.size(); i++) { + + String kv = properties.get(pkeyNames.get(i)); + if (kv != null) { + sb.append("/").append(kv); + } else { + // this should be a fatal error because unless we can + // successfully retrieve all the expected keys we'll end up + // with a garbage node + LOG.error(AaiUiMsgs.EXTRACTION_ERROR, "ERROR: failed to extract keyName, " + + pkeyNames.get(i) + ", from properties, " + properties); + return null; + } + } + + return sb.toString(); + + } + + return null; + + } + + + + /** + * Find and mark root node. + * + * @param queryParams the query params + * @return true, if successful + */ + private void findAndMarkRootNode(QueryParams queryParams) { + + if (isRootNodeFound()) { + return; + } + + for (ActiveInventoryNode cacheNode : nodeCache.values()) { + + if (queryParams.getSearchTargetNodeId().equals(cacheNode.getNodeId())) { + cacheNode.setNodeDepth(0); + cacheNode.setRootNode(true); + LOG.info(AaiUiMsgs.ROOT_NODE_DISCOVERED, queryParams.getSearchTargetNodeId()); + setRootNodeFound(true); + } + } + + } + + public void addNode(ActiveInventoryNode node) { + + if (node == null) { + return; + } + + nodeCache.putIfAbsent(node.getNodeId(), node); + } + + public VisualizationConfigs getVisualizationConfigs() { + return visualizationConfigs; + } + + public void setVisualizationConfigs(VisualizationConfigs visualizationConfigs) { + this.visualizationConfigs = visualizationConfigs; + } + + public OxmEntityLookup getOxmEntityLookup() { + return oxmEntityLookup; + } + + public void setOxmEntityLookup(OxmEntityLookup oxmEntityLookup) { + this.oxmEntityLookup = oxmEntityLookup; + } + + public ObjectMapper getMapper() { + return mapper; + } + + public void setMapper(ObjectMapper mapper) { + this.mapper = mapper; + } + + private void dumpThrottledWorkOnHandLog() { + dumpThrottledWorkOnHandLog(false); + } + + private void dumpThrottledWorkOnHandLog(boolean override) { + + if ((lastProcessStatesSummaryLogInMs < 0) + || ((System.currentTimeMillis() > (lastProcessStatesSummaryLogInMs + 5000))) || override) { + + lastProcessStatesSummaryLogInMs = System.currentTimeMillis(); + + int numInit = 0; + int numReady = 0; + int numError = 0; + int numSelfLinkUnresolved = 0; + int numSelfLinkResponseUnprocessed = 0; + + for (ActiveInventoryNode cacheNode : nodeCache.values()) { + + switch (cacheNode.getState()) { + + case INIT: { + numInit++; + break; + } + + case READY: { + numReady++; + break; + } + case ERROR: { + numError++; + break; + } + + case SELF_LINK_UNRESOLVED: { + numSelfLinkUnresolved++; + break; + } + + case SELF_LINK_RESPONSE_UNPROCESSED: { + numSelfLinkResponseUnprocessed++; + break; + } + + default: + break; + } + + } + + LOG.info(AaiUiMsgs.INFO_GENERIC, + String.format( + "ProcessCurrentStates for ContextId=%s, [PendingTxns=%d, numInit=%d, numSelfLinkUnresolved=%d, numSelfLinkResponseUnProcessed=%d, 
numReady=%d, numError=%d]", + contextIdStr, aaiWorkOnHand.get(), numInit, numSelfLinkUnresolved, numSelfLinkResponseUnprocessed, + numReady, numError)); + } + + } + + /** + * Process current node states. + * + * @param rootNodeDiscovered the root node discovered + */ + private void processCurrentNodeStates(QueryParams queryParams) { + /* + * Force an evaluation of node depths before determining if we should limit state-based + * traversal or processing. + */ + + findAndMarkRootNode(queryParams); + + verifyOutboundNeighbors(); + + for (ActiveInventoryNode cacheNode : nodeCache.values()) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "processCurrentNodeState(), nid = " + + cacheNode.getNodeId() + " , nodeDepth = " + cacheNode.getNodeDepth()); + } + + switch (cacheNode.getState()) { + + case INIT: { + processInitialState(cacheNode.getNodeId()); + break; + } + + case READY: + case ERROR: { + break; + } + + case SELF_LINK_UNRESOLVED: { + performSelfLinkResolve(cacheNode.getNodeId()); + break; + } + + case SELF_LINK_RESPONSE_UNPROCESSED: { + processSelfLinkResponse(cacheNode.getNodeId()); + break; + } + + default: + break; + } + + } + + dumpThrottledWorkOnHandLog(); + + } + + + + public int getNumSuccessfulLinkResolveFromCache() { + return numSuccessfulLinkResolveFromCache.get(); + } + + public int getNumSuccessfulLinkResolveFromFromServer() { + return numSuccessfulLinkResolveFromFromServer.get(); + } + + public int getNumFailedLinkResolve() { + return numFailedLinkResolve.get(); + } + + public InlineMessage getInlineMessage() { + return inlineMessage; + } + + public void setInlineMessage(InlineMessage inlineMessage) { + this.inlineMessage = inlineMessage; + } + + public ConcurrentHashMap<String, ActiveInventoryNode> getNodeCache() { + return nodeCache; + } + + + + /** + * Process initial state. + * + * @param nodeId the node id + */ + private void processInitialState(String nodeId) { + + if (nodeId == null) { + LOG.error(AaiUiMsgs.FAILED_TO_PROCESS_INITIAL_STATE, "Node id is null"); + return; + } + + ActiveInventoryNode cachedNode = nodeCache.get(nodeId); + + if (cachedNode == null) { + LOG.error(AaiUiMsgs.FAILED_TO_PROCESS_INITIAL_STATE, + "Node cannot be" + " found for nodeId, " + nodeId); + return; + } + + if (cachedNode.getSelfLink() == null) { + + if (cachedNode.getNodeId() == null) { + + /* + * if the self link is null at the INIT state, which could be valid if this node is a + * complex attribute group which didn't originate from a self-link, but in that situation + * both the node id and node key should already be set. + */ + + cachedNode.changeState(NodeProcessingState.ERROR, NodeProcessingAction.NODE_IDENTITY_ERROR); + + } + + if (cachedNode.getNodeId() != null) { + + /* + * This should be the success path branch if the self-link is not set + */ + + cachedNode.changeState(NodeProcessingState.SELF_LINK_RESPONSE_UNPROCESSED, + NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_OK); + + } + + } else { + + if (cachedNode.hasResolvedSelfLink()) { + LOG.error(AaiUiMsgs.INVALID_RESOLVE_STATE_DURING_INIT); + cachedNode.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.UNEXPECTED_STATE_TRANSITION); + } else { + cachedNode.changeState(NodeProcessingState.SELF_LINK_UNRESOLVED, + NodeProcessingAction.SELF_LINK_SET); + } + } + } + + /** + * Process skeleton node. 
+ * + * @param skeletonNode the skeleton node + * @param queryParams the query params + */ + private void processSearchableEntity(SearchableEntity searchTargetEntity, + QueryParams queryParams) { + + if (searchTargetEntity == null) { + return; + } + + if (searchTargetEntity.getId() == null) { + LOG.error(AaiUiMsgs.FAILED_TO_PROCESS_SKELETON_NODE, "Failed to process skeleton" + + " node because nodeId is null for node, " + searchTargetEntity.getLink()); + return; + } + + ActiveInventoryNode newNode = + new ActiveInventoryNode(this.visualizationConfigs, oxmEntityLookup); + + newNode.setNodeId(searchTargetEntity.getId()); + + newNode.setNodeDepth(0); + newNode.setRootNode(true); + LOG.info(AaiUiMsgs.ROOT_NODE_DISCOVERED, queryParams.getSearchTargetNodeId()); + setRootNodeFound(true); + + newNode.setSelfLink(searchTargetEntity.getLink()); + + nodeCache.putIfAbsent(newNode.getNodeId(), newNode); + } + + private int getTotalWorkOnHand() { + + int numNodesWithPendingStates = 0; + + if (isRootNodeFound()) { + evaluateNodeDepths(); + } + + for (ActiveInventoryNode n : nodeCache.values()) { + + switch (n.getState()) { + + case READY: + case ERROR: { + // do nothing, these are our normal + // exit states + break; + } + + default: { + + /* + * for all other states, there is work to be done + */ + numNodesWithPendingStates++; + } + + } + + } + + return (aaiWorkOnHand.get() + numNodesWithPendingStates); + + } + + /** + * Checks for out standing work. + * + * @return true, if successful + */ + private void processOutstandingWork(QueryParams queryParams) { + + while (getTotalWorkOnHand() > 0) { + + /* + * Force an evaluation of node depths before determining if we should limit state-based + * traversal or processing. + */ + + processCurrentNodeStates(queryParams); + + try { + Thread.sleep(10); + } catch (InterruptedException exc) { + LOG.error(AaiUiMsgs.PROCESSING_LOOP_INTERUPTED, exc.getMessage()); + return; + } + + } + + dumpThrottledWorkOnHandLog(true); + + } + + /* + * (non-Javadoc) + * + * @see + * org.onap.aai.sparky.viewandinspect.services.VisualizationContext#processSelfLinks(org.onap.aai. + * sparky.sync.entity.SearchableEntity, org.onap.aai.sparky.viewandinspect.entity.QueryParams) + */ + @Override + public void processSelfLinks(SearchableEntity searchtargetEntity, QueryParams queryParams) { + + try { + + + if (searchtargetEntity == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, + contextIdStr + " - Failed to" + " processSelfLinks, searchtargetEntity is null"); + return; + } + + long startTimeInMs = System.currentTimeMillis(); + + processSearchableEntity(searchtargetEntity, queryParams); + + /* + * This method is blocking until we decouple it with a CountDownLatch await condition, and + * make the internal graph processing more event-y. + */ + + processOutstandingWork(queryParams); + + long totalResolveTime = (System.currentTimeMillis() - startTimeInMs); + + long opTime = System.currentTimeMillis() - startTimeInMs; + + LOG.info(AaiUiMsgs.ALL_TRANSACTIONS_RESOLVED, String.valueOf(totalResolveTime), + String.valueOf(totalLinksRetrieved.get()), String.valueOf(opTime)); + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.VISUALIZATION_OUTPUT_ERROR, exc.getMessage()); + } + + } + + /** + * Verify outbound neighbors. 
+ */ + private void verifyOutboundNeighbors() { + + for (ActiveInventoryNode srcNode : nodeCache.values()) { + + for (String targetNodeId : srcNode.getOutboundNeighbors()) { + + ActiveInventoryNode targetNode = nodeCache.get(targetNodeId); + + if (targetNode != null && srcNode.getNodeId() != null) { + + targetNode.addInboundNeighbor(srcNode.getNodeId()); + + if (this.visualizationConfigs.makeAllNeighborsBidirectional()) { + targetNode.addOutboundNeighbor(srcNode.getNodeId()); + } + + } + + } + + } + + } + + /** + * Evaluate node depths. + */ + private void evaluateNodeDepths() { + + int numChanged = -1; + int numAttempts = 0; + + while (numChanged != 0) { + + numChanged = 0; + numAttempts++; + + for (ActiveInventoryNode srcNode : nodeCache.values()) { + + if (srcNode.getState() == NodeProcessingState.INIT) { + + /* + * this maybe the only state that we don't want to to process the node depth on, because + * typically it won't have any valid fields set, and it may remain in a partial state + * until we have processed the self-link. + */ + + continue; + + } + + for (String targetNodeId : srcNode.getOutboundNeighbors()) { + ActiveInventoryNode targetNode = nodeCache.get(targetNodeId); + + if (targetNode != null) { + + if (targetNode.changeDepth(srcNode.getNodeDepth() + 1)) { + numChanged++; + } + } + } + + for (String targetNodeId : srcNode.getInboundNeighbors()) { + ActiveInventoryNode targetNode = nodeCache.get(targetNodeId); + + if (targetNode != null) { + + if (targetNode.changeDepth(srcNode.getNodeDepth() + 1)) { + numChanged++; + } + } + } + } + + if (numAttempts >= MAX_DEPTH_EVALUATION_ATTEMPTS) { + LOG.info(AaiUiMsgs.MAX_EVALUATION_ATTEMPTS_EXCEEDED); + return; + } + + } + + if (LOG.isDebugEnabled()) { + if (numAttempts > 0) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Evaluate node depths completed in " + numAttempts + " attempts"); + } else { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Evaluate node depths completed in 0 attempts because all nodes at correct depth"); + } + } + + } + + + /** + * Gets the entity type primary key name. 
+ * + * @param entityType the entity type + * @return the entity type primary key name + */ + + + private String getEntityTypePrimaryKeyName(String entityType) { + + if (entityType == null) { + LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE, + "node primary key" + " name because entity type is null"); + return null; + } + + OxmEntityDescriptor descriptor = oxmEntityLookup.getEntityDescriptors().get(entityType); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE, + "oxm entity" + " descriptor for entityType = " + entityType); + return null; + } + + List<String> pkeyNames = descriptor.getPrimaryKeyAttributeNames(); + + if (pkeyNames == null || pkeyNames.size() == 0) { + LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE, + "node primary" + " key because descriptor primary key names is empty"); + return null; + } + + return NodeUtils.concatArray(pkeyNames, "/"); + + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseVisualizationContext.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseVisualizationContext.java new file mode 100644 index 0000000..b2bdb43 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseVisualizationContext.java @@ -0,0 +1,1631 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.viewandinspect.services; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.http.client.utils.URIBuilder; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.entity.SearchableEntity; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.viewandinspect.config.SparkyConstants; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; +import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode; +import org.onap.aai.sparky.viewandinspect.entity.InlineMessage; +import org.onap.aai.sparky.viewandinspect.entity.NodeProcessingTransaction; +import org.onap.aai.sparky.viewandinspect.entity.QueryParams; +import org.onap.aai.sparky.viewandinspect.entity.Relationship; +import org.onap.aai.sparky.viewandinspect.entity.RelationshipData; +import org.onap.aai.sparky.viewandinspect.entity.RelationshipList; +import org.onap.aai.sparky.viewandinspect.entity.SelfLinkDeterminationTransaction; +import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingAction; +import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingState; +import org.onap.aai.sparky.viewandinspect.task.PerformNodeSelfLinkProcessingTask; +import org.onap.aai.sparky.viewandinspect.task.PerformSelfLinkDeterminationTask; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.PropertyNamingStrategy; + +/** + * The Class SelfLinkNodeCollector. 
+ */ +public class BaseVisualizationContext implements VisualizationContext { + + private static final int MAX_DEPTH_EVALUATION_ATTEMPTS = 100; + private static final String DEPTH_ALL_MODIFIER = "?depth=all"; + private static final String NODES_ONLY_MODIFIER = "?nodes-only"; + private static final String SERVICE_INSTANCE = "service-instance"; + + private static final Logger LOG = LoggerFactory.getInstance().getLogger( + BaseVisualizationContext.class); + private final ActiveInventoryAdapter aaiAdapter; + + private int maxSelfLinkTraversalDepth; + private AtomicInteger numLinksDiscovered; + private AtomicInteger numSuccessfulLinkResolveFromCache; + private AtomicInteger numSuccessfulLinkResolveFromFromServer; + private AtomicInteger numFailedLinkResolve; + private AtomicInteger aaiWorkOnHand; + + private VisualizationConfigs visualizationConfigs; + + private AtomicInteger totalLinksRetrieved; + + private final long contextId; + private final String contextIdStr; + + private ObjectMapper mapper; + private InlineMessage inlineMessage = null; + + private ExecutorService aaiExecutorService; + private OxmEntityLookup oxmEntityLookup; + private boolean rootNodeFound; + + /* + * The node cache is intended to be a flat structure indexed by a primary key to avoid needlessly + * re-requesting the same self-links over-and-over again, to speed up the overall render time and + * more importantly to reduce the network cost of determining information we already have. + */ + private ConcurrentHashMap<String, ActiveInventoryNode> nodeCache; + + /** + * Instantiates a new self link node collector. + * + * @param loader the loader + * @throws Exception the exception + */ + public BaseVisualizationContext(long contextId, ActiveInventoryAdapter aaiAdapter, + ExecutorService aaiExecutorService, VisualizationConfigs visualizationConfigs, + OxmEntityLookup oxmEntityLookup) + throws Exception { + + this.contextId = contextId; + this.contextIdStr = "[Context-Id=" + contextId + "]"; + this.aaiAdapter = aaiAdapter; + this.aaiExecutorService = aaiExecutorService; + this.visualizationConfigs = visualizationConfigs; + this.oxmEntityLookup = oxmEntityLookup; + + this.nodeCache = new ConcurrentHashMap<String, ActiveInventoryNode>(); + this.numLinksDiscovered = new AtomicInteger(0); + this.totalLinksRetrieved = new AtomicInteger(0); + this.numSuccessfulLinkResolveFromCache = new AtomicInteger(0); + this.numSuccessfulLinkResolveFromFromServer = new AtomicInteger(0); + this.numFailedLinkResolve = new AtomicInteger(0); + this.aaiWorkOnHand = new AtomicInteger(0); + + this.maxSelfLinkTraversalDepth = this.visualizationConfigs.getMaxSelfLinkTraversalDepth(); + + this.mapper = new ObjectMapper(); + mapper.setSerializationInclusion(Include.NON_EMPTY); + mapper.setPropertyNamingStrategy(new PropertyNamingStrategy.KebabCaseStrategy()); + this.rootNodeFound = false; + } + + protected boolean isRootNodeFound() { + return rootNodeFound; + } + + protected void setRootNodeFound(boolean rootNodeFound) { + this.rootNodeFound = rootNodeFound; + } + + public long getContextId() { + return contextId; + } + + /** + * A utility method for extracting all entity-type primary key values from a provided self-link + * and return a set of generic-query API keys. 
+ * + * @param parentEntityType + * @param link + * @return a list of key values that can be used for this entity with the AAI generic-query API + */ + protected List<String> extractQueryParamsFromSelfLink(String link) { + + List<String> queryParams = new ArrayList<String>(); + + if (link == null) { + LOG.error(AaiUiMsgs.QUERY_PARAM_EXTRACTION_ERROR, "self link is null"); + return queryParams; + } + + Map<String, OxmEntityDescriptor> entityDescriptors = oxmEntityLookup.getEntityDescriptors(); + + try { + + URIBuilder urlBuilder = new URIBuilder(link); + String urlPath = urlBuilder.getPath(); + + OxmEntityDescriptor descriptor = null; + String[] urlPathElements = urlPath.split("/"); + List<String> primaryKeyNames = null; + int index = 0; + String entityType = null; + + while (index < urlPathElements.length) { + + descriptor = entityDescriptors.get(urlPathElements[index]); + + if (descriptor != null) { + entityType = urlPathElements[index]; + primaryKeyNames = descriptor.getPrimaryKeyAttributeNames(); + + /* + * Make sure from what ever index we matched the parent entity-type on that we can extract + * additional path elements for the primary key values. + */ + + if (index + primaryKeyNames.size() < urlPathElements.length) { + + for (String primaryKeyName : primaryKeyNames) { + index++; + queryParams.add(entityType + "." + primaryKeyName + ":" + urlPathElements[index]); + } + } else { + LOG.error(AaiUiMsgs.QUERY_PARAM_EXTRACTION_ERROR, + "Could not extract query parametrs for entity-type = '" + entityType + + "' from self-link = " + link); + } + } + + index++; + } + + } catch (URISyntaxException exc) { + + LOG.error(AaiUiMsgs.QUERY_PARAM_EXTRACTION_ERROR, + "Error extracting query parameters from self-link = " + link + ". Error = " + + exc.getMessage()); + } + + return queryParams; + + } + + /** + * Decode complex attribute group. + * + * @param ain the ain + * @param attributeGroup the attribute group + * @return boolean indicating whether operation was successful (true), / failure(false). + */ + public boolean decodeComplexAttributeGroup(ActiveInventoryNode ain, JsonNode attributeGroup) { + + try { + + Iterator<Entry<String, JsonNode>> entityArrays = attributeGroup.fields(); + Entry<String, JsonNode> entityArray = null; + + if (entityArrays == null) { + LOG.error(AaiUiMsgs.ATTRIBUTE_GROUP_FAILURE, attributeGroup.toString()); + ain.changeState(NodeProcessingState.ERROR, NodeProcessingAction.NEIGHBORS_PROCESSED_ERROR); + return false; + } + + while (entityArrays.hasNext()) { + + entityArray = entityArrays.next(); + + String entityType = entityArray.getKey(); + JsonNode entityArrayObject = entityArray.getValue(); + + if (entityArrayObject.isArray()) { + + Iterator<JsonNode> entityCollection = entityArrayObject.elements(); + JsonNode entity = null; + while (entityCollection.hasNext()) { + entity = entityCollection.next(); + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "decodeComplexAttributeGroup()," + + " entity = " + entity.toString()); + } + + /** + * Here's what we are going to do: + * + * <li>In the ActiveInventoryNode, on construction maintain a collection of queryParams + * that is added to for the purpose of discovering parent->child hierarchies. + * + * <li>When we hit this block of the code then we'll use the queryParams to feed the + * generic query to resolve the self-link asynchronously. 
+ * + * <li>Upon successful link determination, then and only then will we create a new node + * in the nodeCache and process the child + * + */ + + ActiveInventoryNode newNode = new ActiveInventoryNode(this.visualizationConfigs, oxmEntityLookup); + newNode.setEntityType(entityType); + + /* + * This is partially a lie because we actually don't have a self-link for complex nodes + * discovered in this way. + */ + newNode.setSelfLinkProcessed(true); + newNode.changeState(NodeProcessingState.SELF_LINK_RESPONSE_UNPROCESSED, + NodeProcessingAction.COMPLEX_ATTRIBUTE_GROUP_PARSE_OK); + + /* + * copy parent query params into new child + */ + + if (SERVICE_INSTANCE.equals(entityType)) { + + /* + * 1707 AAI has an issue being tracked with AAI-8932 where the generic-query cannot be + * resolved if all the service-instance path keys are provided. The query only works + * if only the service-instance key and valude are passed due to a historical reason. + * A fix is being worked on for 1707, and when it becomes available we can revert this + * small change. + */ + + newNode.clearQueryParams(); + + } else { + + /* + * For all other entity-types we want to copy the parent query parameters into the new node + * query parameters. + */ + + for (String queryParam : ain.getQueryParams()) { + newNode.addQueryParam(queryParam); + } + + } + + + if (!addComplexGroupToNode(newNode, entity)) { + LOG.error(AaiUiMsgs.ATTRIBUTE_GROUP_FAILURE, "Failed to add child to parent for child = " + entity.toString()); + } + + if (!addNodeQueryParams(newNode)) { + LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE_NODE_ID, "Error determining node id and key for node = " + newNode.dumpNodeTree(true) + + " skipping relationship processing"); + newNode.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.NODE_IDENTITY_ERROR); + return false; + } else { + + newNode.changeState(NodeProcessingState.NEIGHBORS_UNPROCESSED, + NodeProcessingAction.COMPLEX_ATTRIBUTE_GROUP_PARSE_OK); + + } + + + /* + * Order matters for the query params. We need to set the parent ones before the child + * node + */ + + String selfLinkQuery = + aaiAdapter.getGenericQueryForSelfLink(entityType, newNode.getQueryParams()); + + /** + * <li>get the self-link + * <li>add it to the new node + * <li>generate node id + * <li>add node to node cache + * <li>add node id to parent outbound links list + * <li>process node children (should be automatic) (but don't query and resolve + * self-link as we already have all the data) + */ + + SelfLinkDeterminationTransaction txn = new SelfLinkDeterminationTransaction(); + + txn.setQueryString(selfLinkQuery); + txn.setNewNode(newNode); + txn.setParentNodeId(ain.getNodeId()); + aaiWorkOnHand.incrementAndGet(); + supplyAsync(new PerformSelfLinkDeterminationTask(txn, null, aaiAdapter), + aaiExecutorService).whenComplete((nodeTxn, error) -> { + + if (error != null) { + LOG.error(AaiUiMsgs.SELF_LINK_DETERMINATION_FAILED_GENERIC, selfLinkQuery); + } else { + + OperationResult opResult = nodeTxn.getOpResult(); + + ActiveInventoryNode newChildNode = txn.getNewNode(); + + if (opResult != null && opResult.wasSuccessful()) { + + if (!opResult.wasSuccessful()) { + numFailedLinkResolve.incrementAndGet(); + } + + if (opResult.isFromCache()) { + numSuccessfulLinkResolveFromCache.incrementAndGet(); + } else { + numSuccessfulLinkResolveFromFromServer.incrementAndGet(); + } + + /* + * extract the self-link from the operational result. 
+ */ + + Collection<JsonNode> entityLinks = new ArrayList<JsonNode>(); + JsonNode genericQueryResult = null; + try { + genericQueryResult = + NodeUtils.convertJsonStrToJsonNode(nodeTxn.getOpResult().getResult()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.JSON_CONVERSION_ERROR, JsonNode.class.toString(), exc.getMessage()); + } + + NodeUtils.extractObjectsByKey(genericQueryResult, "resource-link", + entityLinks); + + String selfLink = null; + + if (entityLinks.size() != 1) { + + LOG.error(AaiUiMsgs.SELF_LINK_DETERMINATION_FAILED_UNEXPECTED_LINKS, String.valueOf(entityLinks.size())); + + } else { + selfLink = ((JsonNode) entityLinks.toArray()[0]).asText(); + selfLink = ActiveInventoryAdapter.extractResourcePath(selfLink); + + newChildNode.setSelfLink(selfLink); + newChildNode.setNodeId(NodeUtils.generateUniqueShaDigest(selfLink)); + + String uri = NodeUtils.calculateEditAttributeUri(selfLink); + if (uri != null) { + newChildNode.addProperty(SparkyConstants.URI_ATTR_NAME, uri); + } + + ActiveInventoryNode parent = nodeCache.get(txn.getParentNodeId()); + + if (parent != null) { + parent.addOutboundNeighbor(newChildNode.getNodeId()); + newChildNode.addInboundNeighbor(parent.getNodeId()); + } + + newChildNode.setSelfLinkPendingResolve(false); + newChildNode.setSelfLinkProcessed(true); + newChildNode.changeState(NodeProcessingState.NEIGHBORS_UNPROCESSED, + NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_OK); + + nodeCache.putIfAbsent(newChildNode.getNodeId(), newChildNode); + + } + + } else { + LOG.error(AaiUiMsgs.SELF_LINK_RETRIEVAL_FAILED, txn.getQueryString(), + String.valueOf(nodeTxn.getOpResult().getResultCode()), nodeTxn.getOpResult().getResult()); + newChildNode.setSelflinkRetrievalFailure(true); + newChildNode.setSelfLinkProcessed(true); + newChildNode.setSelfLinkPendingResolve(false); + + newChildNode.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.SELF_LINK_DETERMINATION_ERROR); + + } + + } + + aaiWorkOnHand.decrementAndGet(); + + }); + + } + + return true; + + } else { + LOG.error(AaiUiMsgs.UNHANDLED_OBJ_TYPE_FOR_ENTITY_TYPE, entityType); + } + + } + } catch (Exception exc) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Exception caught while" + + " decoding complex attribute group - " + exc.getMessage()); + } + + return false; + + } + + /** + * Process self link response. + * + * @param nodeId the node id + */ + private void processSelfLinkResponse(String nodeId) { + + if (nodeId == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Cannot process self link" + + " response because nodeId is null"); + return; + } + + ActiveInventoryNode ain = nodeCache.get(nodeId); + + if (ain == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Cannot process self link response" + + " because can't find node for id = " + nodeId); + return; + } + + JsonNode jsonNode = null; + + try { + jsonNode = mapper.readValue(ain.getOpResult().getResult(), JsonNode.class); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.SELF_LINK_JSON_PARSE_ERROR, "Failed to marshal json" + + " response str into JsonNode with error, " + exc.getLocalizedMessage()); + ain.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_ERROR); + return; + } + + if (jsonNode == null) { + LOG.error(AaiUiMsgs.SELF_LINK_JSON_PARSE_ERROR, "Failed to parse json node str." 
+ + " Parse resulted a null value."); + ain.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_ERROR); + return; + } + + Iterator<Entry<String, JsonNode>> fieldNames = jsonNode.fields(); + Entry<String, JsonNode> field = null; + + RelationshipList relationshipList = null; + + while (fieldNames.hasNext()) { + + field = fieldNames.next(); + String fieldName = field.getKey(); + + if ("relationship-list".equals(fieldName)) { + + try { + relationshipList = mapper.readValue(field.getValue().toString(), RelationshipList.class); + + if (relationshipList != null) { + ain.addRelationshipList(relationshipList); + } + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.SELF_LINK_JSON_PARSE_ERROR, "Failed to parse relationship-list" + + " attribute. Parse resulted in error, " + exc.getLocalizedMessage()); + ain.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_ERROR); + return; + } + + } else { + + JsonNode nodeValue = field.getValue(); + + if (nodeValue != null && nodeValue.isValueNode()) { + + if (oxmEntityLookup.getEntityDescriptors().get(fieldName) == null) { + + /* + * entity property name is not an entity, thus we can add this property name and value + * to our property set + */ + + ain.addProperty(fieldName, nodeValue.asText()); + + } + + } else { + + if (nodeValue.isArray()) { + + if (oxmEntityLookup.getEntityDescriptors().get(fieldName) == null) { + + /* + * entity property name is not an entity, thus we can add this property name and value + * to our property set + */ + + ain.addProperty(field.getKey(), nodeValue.toString()); + + } + + } else { + + ain.addComplexGroup(nodeValue); + + } + + } + } + + } + + String uri = NodeUtils.calculateEditAttributeUri(ain.getSelfLink()); + if (uri != null) { + ain.addProperty(SparkyConstants.URI_ATTR_NAME, uri); + } + + /* + * We need a special behavior for intermediate entities from the REST model + * + * Tenants are not top level entities, and when we want to visualization + * their children, we need to construct keys that include the parent entity query + * keys, the current entity type keys, and the child keys. We'll always have the + * current entity and children, but never the parent entity in the current (1707) REST + * data model. + * + * We have two possible solutions: + * + * 1) Try to use the custom-query approach to learn about the entity keys + * - this could be done, but it could be very expensive for large objects. When we do the first + * query to get a tenant, it will list all the in and out edges related to this entity, + * there is presently no way to filter this. But the approach could be made to work and it would be + * somewhat data-model driven, other than the fact that we have to first realize that the entity + * that is being searched for is not top-level entity. Once we have globally unique ids for resources + * this logic will not be needed and everything will be simpler. The only reason we are in this logic + * at all is to be able to calculate a url for the child entities so we can hash it to generate + * a globally unique id that can be safely used for the node. + * + * *2* Extract the keys from the pathed self-link. + * This is a bad solution and I don't like it but it will be fast for all resource types, as the + * information is already encoded in the URI. 
When we get to a point where we switch to a better + * globally unique entity identity model, then a lot of the code being used to calculate an entity url + * to in-turn generate a deterministic globally unique id will disappear. + * + * + * right now we have the following: + * + * - cloud-regions/cloud-region/{cloud-region-id}/{cloud-owner-id}/tenants/tenant/{tenant-id} + * + */ + + /* + * For all entity types use the self-link extraction method to be consistent. Once we have a + * globally unique identity mechanism for entities, this logic can be revisited. + */ + ain.clearQueryParams(); + ain.addQueryParams(extractQueryParamsFromSelfLink(ain.getSelfLink())); + ain.changeState(NodeProcessingState.NEIGHBORS_UNPROCESSED, + NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_OK); + + + } + + /** + * Perform self link resolve. + * + * @param nodeId the node id + */ + private void performSelfLinkResolve(String nodeId) { + + if (nodeId == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Resolve of self-link" + + " has been skipped because provided nodeId is null"); + return; + } + + ActiveInventoryNode ain = nodeCache.get(nodeId); + + if (ain == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Failed to find node with id, " + nodeId + + ", from node cache. Resolve self-link method has been skipped."); + return; + } + + if (!ain.isSelfLinkPendingResolve()) { + + ain.setSelfLinkPendingResolve(true); + + // kick off async self-link resolution + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "About to process node in SELF_LINK_UNPROCESSED State, link = " + ain.getSelfLink()); + } + + numLinksDiscovered.incrementAndGet(); + + String depthModifier = DEPTH_ALL_MODIFIER; + + /* + * If the current node is the search target, we want to see everything the node has to offer + * from the self-link and not filter it to a single node. 
+ */ + + if (visualizationConfigs.getShallowEntities().contains(ain.getEntityType()) + && !ain.isRootNode()) { + depthModifier = NODES_ONLY_MODIFIER; + } + + NodeProcessingTransaction txn = new NodeProcessingTransaction(); + txn.setProcessingNode(ain); + txn.setRequestParameters(depthModifier); + aaiWorkOnHand.incrementAndGet(); + supplyAsync( + new PerformNodeSelfLinkProcessingTask(txn, depthModifier, aaiAdapter), + aaiExecutorService).whenComplete((nodeTxn, error) -> { + + if (error != null) { + + /* + * an error processing the self link should probably result in the node processing + * state shifting to ERROR + */ + + nodeTxn.getProcessingNode().setSelflinkRetrievalFailure(true); + + nodeTxn.getProcessingNode().changeState(NodeProcessingState.ERROR, + NodeProcessingAction.SELF_LINK_RESOLVE_ERROR); + + nodeTxn.getProcessingNode().setSelfLinkPendingResolve(false); + + } else { + + totalLinksRetrieved.incrementAndGet(); + + OperationResult opResult = nodeTxn.getOpResult(); + + if (opResult != null && opResult.wasSuccessful()) { + + if (!opResult.wasSuccessful()) { + numFailedLinkResolve.incrementAndGet(); + } + + if (opResult.isFromCache()) { + numSuccessfulLinkResolveFromCache.incrementAndGet(); + } else { + numSuccessfulLinkResolveFromFromServer.incrementAndGet(); + } + + // success path + nodeTxn.getProcessingNode().setOpResult(opResult); + nodeTxn.getProcessingNode().changeState( + NodeProcessingState.SELF_LINK_RESPONSE_UNPROCESSED, + NodeProcessingAction.SELF_LINK_RESOLVE_OK); + + nodeTxn.getProcessingNode().setSelfLinkProcessed(true); + nodeTxn.getProcessingNode().setSelfLinkPendingResolve(false); + + } else { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Self Link retrieval for link," + + txn.getSelfLinkWithModifiers() + ", failed with error code," + + nodeTxn.getOpResult().getResultCode() + ", and message," + + nodeTxn.getOpResult().getResult()); + + nodeTxn.getProcessingNode().setSelflinkRetrievalFailure(true); + nodeTxn.getProcessingNode().setSelfLinkProcessed(true); + + nodeTxn.getProcessingNode().changeState(NodeProcessingState.ERROR, + NodeProcessingAction.SELF_LINK_RESOLVE_ERROR); + + nodeTxn.getProcessingNode().setSelfLinkPendingResolve(false); + + } + } + + aaiWorkOnHand.decrementAndGet(); + + }); + + } + + } + + + /** + * Process neighbors. 
+ * + * @param nodeId the node id + */ + private void processNeighbors(String nodeId) { + + if (nodeId == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESS_NEIGHBORS_ERROR, "Failed to process" + + " neighbors because nodeId is null."); + return; + } + + ActiveInventoryNode ain = nodeCache.get(nodeId); + + if (ain == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESS_NEIGHBORS_ERROR, "Failed to process" + + " neighbors because node could not be found in nodeCache with id, " + nodeId); + return; + } + + /* + * process complex attribute and relationships + */ + + boolean neighborsProcessedSuccessfully = true; + + for (JsonNode n : ain.getComplexGroups()) { + neighborsProcessedSuccessfully &= decodeComplexAttributeGroup(ain, n); + } + + for (RelationshipList relationshipList : ain.getRelationshipLists()) { + neighborsProcessedSuccessfully &= addSelfLinkRelationshipChildren(ain, relationshipList); + } + + + if (neighborsProcessedSuccessfully) { + ain.changeState(NodeProcessingState.READY, NodeProcessingAction.NEIGHBORS_PROCESSED_OK); + } else { + ain.changeState(NodeProcessingState.ERROR, NodeProcessingAction.NEIGHBORS_PROCESSED_ERROR); + } + + + /* + * If neighbors fail to process, there is already a call to change the state within the + * relationship and neighbor processing functions. + */ + + } + + /** + * Find and mark root node. + * + * @param queryParams the query params + * @return true, if successful + */ + private void findAndMarkRootNode(QueryParams queryParams) { + + if (isRootNodeFound()) { + return; + } + + for (ActiveInventoryNode cacheNode : nodeCache.values()) { + + if (queryParams.getSearchTargetNodeId().equals(cacheNode.getNodeId())) { + cacheNode.setNodeDepth(0); + cacheNode.setRootNode(true); + LOG.info(AaiUiMsgs.ROOT_NODE_DISCOVERED, queryParams.getSearchTargetNodeId()); + setRootNodeFound(true); + } + } + + } + + /** + * Process current node states. + * + * @param rootNodeDiscovered the root node discovered + */ + private void processCurrentNodeStates(QueryParams queryParams) { + /* + * Force an evaluation of node depths before determining if we should limit state-based + * traversal or processing. + */ + + findAndMarkRootNode(queryParams); + + verifyOutboundNeighbors(); + + for (ActiveInventoryNode cacheNode : nodeCache.values()) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "processCurrentNodeState(), nid = " + + cacheNode.getNodeId() + " , nodeDepth = " + cacheNode.getNodeDepth()); + } + + switch (cacheNode.getState()) { + + case INIT: { + processInitialState(cacheNode.getNodeId()); + break; + } + + case READY: + case ERROR: { + break; + } + + case SELF_LINK_UNRESOLVED: { + performSelfLinkResolve(cacheNode.getNodeId()); + break; + } + + case SELF_LINK_RESPONSE_UNPROCESSED: { + processSelfLinkResponse(cacheNode.getNodeId()); + break; + } + + case NEIGHBORS_UNPROCESSED: { + + /* + * We use the rootNodeDiscovered flag to ignore depth retrieval thresholds until the root + * node is identified. Then the evaluative depth calculations should re-balance the graph + * around the root node. 
+ */ + + if (!isRootNodeFound() || cacheNode.getNodeDepth() < this.visualizationConfigs + .getMaxSelfLinkTraversalDepth()) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "processCurrentNodeState() -- Node at max depth," + + " halting processing at current state = -- " + + cacheNode.getState() + " nodeId = " + cacheNode.getNodeId()); + } + + processNeighbors(cacheNode.getNodeId()); + + } + + break; + } + default: + break; + } + + } + + } + + /** + * Adds the complex group to node. + * + * @param targetNode the target node + * @param attributeGroup the attribute group + * @return true, if successful + */ + private boolean addComplexGroupToNode(ActiveInventoryNode targetNode, JsonNode attributeGroup) { + + if (attributeGroup == null) { + targetNode.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.COMPLEX_ATTRIBUTE_GROUP_PARSE_OK); + return false; + } + + RelationshipList relationshipList = null; + + if (attributeGroup.isObject()) { + + Iterator<Entry<String, JsonNode>> fields = attributeGroup.fields(); + Entry<String, JsonNode> field = null; + String fieldName; + JsonNode fieldValue; + + while (fields.hasNext()) { + field = fields.next(); + fieldName = field.getKey(); + fieldValue = field.getValue(); + + if (fieldValue.isObject()) { + + if (fieldName.equals("relationship-list")) { + + try { + relationshipList = + mapper.readValue(field.getValue().toString(), RelationshipList.class); + + if (relationshipList != null) { + targetNode.addRelationshipList(relationshipList); + } + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.SELF_LINK_JSON_PARSE_ERROR, "Failed to parse" + + " relationship-list attribute. Parse resulted in error, " + + exc.getLocalizedMessage()); + targetNode.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.COMPLEX_ATTRIBUTE_GROUP_PARSE_ERROR); + return false; + } + + } else { + targetNode.addComplexGroup(fieldValue); + } + + } else if (fieldValue.isArray()) { + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Unexpected array type with a key = " + fieldName); + } + } else if (fieldValue.isValueNode()) { + if (oxmEntityLookup.getEntityDescriptors().get(field.getKey()) == null) { + /* + * property key is not an entity type, add it to our property set. 
+ */ + targetNode.addProperty(field.getKey(), fieldValue.asText()); + } + + } + } + + } else if (attributeGroup.isArray()) { + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Unexpected array type for attributeGroup = " + attributeGroup); + } + } else if (attributeGroup.isValueNode()) { + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Unexpected value type for attributeGroup = " + attributeGroup); + } + } + + return true; + } + + public int getNumSuccessfulLinkResolveFromCache() { + return numSuccessfulLinkResolveFromCache.get(); + } + + public int getNumSuccessfulLinkResolveFromFromServer() { + return numSuccessfulLinkResolveFromFromServer.get(); + } + + public int getNumFailedLinkResolve() { + return numFailedLinkResolve.get(); + } + + public InlineMessage getInlineMessage() { + return inlineMessage; + } + + public void setInlineMessage(InlineMessage inlineMessage) { + this.inlineMessage = inlineMessage; + } + + public void setMaxSelfLinkTraversalDepth(int depth) { + this.maxSelfLinkTraversalDepth = depth; + } + + public int getMaxSelfLinkTraversalDepth() { + return this.maxSelfLinkTraversalDepth; + } + + public ConcurrentHashMap<String, ActiveInventoryNode> getNodeCache() { + return nodeCache; + } + + /** + * Gets the relationship primary key values. + * + * @param r the r + * @param entityType the entity type + * @param pkeyNames the pkey names + * @return the relationship primary key values + */ + private String getRelationshipPrimaryKeyValues(Relationship r, String entityType, + List<String> pkeyNames) { + + StringBuilder sb = new StringBuilder(64); + + if (pkeyNames.size() > 0) { + String primaryKey = extractKeyValueFromRelationData(r, entityType + "." + pkeyNames.get(0)); + if (primaryKey != null) { + + sb.append(primaryKey); + + } else { + // this should be a fatal error because unless we can + // successfully retrieve all the expected keys we'll end up + // with a garbage node + LOG.error(AaiUiMsgs.EXTRACTION_ERROR, "ERROR: Failed to extract" + + " keyName, " + entityType + "." + pkeyNames.get(0) + + ", from relationship data, " + r.toString()); + return null; + } + + for (int i = 1; i < pkeyNames.size(); i++) { + + String kv = extractKeyValueFromRelationData(r, entityType + "." + pkeyNames.get(i)); + if (kv != null) { + sb.append("/").append(kv); + } else { + // this should be a fatal error because unless we can + // successfully retrieve all the expected keys we'll end up + // with a garbage node + LOG.error(AaiUiMsgs.EXTRACTION_ERROR, "ERROR: failed to extract keyName, " + + entityType + "." + pkeyNames.get(i) + + ", from relationship data, " + r.toString()); + return null; + } + } + + return sb.toString(); + + } + + return null; + + } + + /** + * Extract key value from relation data. + * + * @param r the r + * @param keyName the key name + * @return the string + */ + private String extractKeyValueFromRelationData(Relationship r, String keyName) { + + RelationshipData[] rdList = r.getRelationshipData(); + + for (RelationshipData relData : rdList) { + + if (relData.getRelationshipKey().equals(keyName)) { + return relData.getRelationshipValue(); + } + } + + return null; + } + + /** + * Determine node id and key. 
+ * + * @param ain the ain + * @return true, if successful + */ + private boolean addNodeQueryParams(ActiveInventoryNode ain) { + + if (ain == null) { + LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE_NODE_ID, "ActiveInventoryNode is null"); + return false; + } + + List<String> pkeyNames = + oxmEntityLookup.getEntityDescriptors().get(ain.getEntityType()).getPrimaryKeyAttributeNames(); + + if (pkeyNames == null || pkeyNames.size() == 0) { + LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE_NODE_ID, "Primary key names is empty"); + return false; + } + + StringBuilder sb = new StringBuilder(64); + + if (pkeyNames.size() > 0) { + String primaryKey = ain.getProperties().get(pkeyNames.get(0)); + if (primaryKey != null) { + sb.append(primaryKey); + } else { + // this should be a fatal error because unless we can + // successfully retrieve all the expected keys we'll end up + // with a garbage node + LOG.error(AaiUiMsgs.EXTRACTION_ERROR, "ERROR: Failed to extract keyName, " + + pkeyNames.get(0) + ", from entity properties"); + return false; + } + + for (int i = 1; i < pkeyNames.size(); i++) { + + String kv = ain.getProperties().get(pkeyNames.get(i)); + if (kv != null) { + sb.append("/").append(kv); + } else { + // this should be a fatal error because unless we can + // successfully retrieve all the expected keys we'll end up + // with a garbage node + LOG.error(AaiUiMsgs.EXTRACTION_ERROR, "ERROR: Failed to extract keyName, " + + pkeyNames.get(i) + ", from entity properties"); + return false; + } + } + + /*final String nodeId = NodeUtils.generateUniqueShaDigest(ain.getEntityType(), + NodeUtils.concatArray(pkeyNames, "/"), sb.toString());*/ + + //ain.setNodeId(nodeId); + ain.setPrimaryKeyName(NodeUtils.concatArray(pkeyNames, "/")); + ain.setPrimaryKeyValue(sb.toString()); + + if (ain.getEntityType() != null && ain.getPrimaryKeyName() != null + && ain.getPrimaryKeyValue() != null) { + ain.addQueryParam( + ain.getEntityType() + "." + ain.getPrimaryKeyName() + ":" + ain.getPrimaryKeyValue()); + } + return true; + + } + + return false; + + } + + /** + * Adds the self link relationship children. 
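The two key-handling methods above (getRelationshipPrimaryKeyValues and addNodeQueryParams) join primary-key names with "/" and their values with "/", then register an A&AI query parameter of the form entityType.keyNames:keyValues. A small sketch of that convention under an invented compound-key example (the entity type, key names, and values below are illustrative only, not taken from this patch):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.stream.Collectors;

    public class QueryParamSketch {

      // mirrors the convention above: entityType + "." + "name1/name2" + ":" + "value1/value2"
      static String buildQueryParam(String entityType, List<String> pkeyNames,
          Map<String, String> properties) {
        String names = String.join("/", pkeyNames);
        String values = pkeyNames.stream().map(properties::get).collect(Collectors.joining("/"));
        return entityType + "." + names + ":" + values;
      }

      public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        props.put("cloud-owner", "example-owner");
        props.put("cloud-region-id", "example-region");

        // hypothetical compound-key entity
        System.out.println(buildQueryParam("cloud-region",
            Arrays.asList("cloud-owner", "cloud-region-id"), props));
        // prints: cloud-region.cloud-owner/cloud-region-id:example-owner/example-region
      }
    }
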
+ * + * @param processingNode the processing node + * @param relationshipList the relationship list + * @return true, if successful + */ + private boolean addSelfLinkRelationshipChildren(ActiveInventoryNode processingNode, + RelationshipList relationshipList) { + + if (relationshipList == null) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "No relationships added to parent node = " + + processingNode.getNodeId() + " because relationshipList is empty"); + processingNode.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.NEIGHBORS_PROCESSED_ERROR); + return false; + } + + Relationship[] relationshipArray = relationshipList.getRelationshipList(); + OxmEntityDescriptor descriptor = null; + String repairedSelfLink = null; + + if (relationshipArray != null) { + + ActiveInventoryNode newNode = null; + String resourcePath = null; + + for (Relationship r : relationshipArray) { + + resourcePath = ActiveInventoryAdapter.extractResourcePath(r.getRelatedLink()); + + String nodeId = NodeUtils.generateUniqueShaDigest(resourcePath); + + if (nodeId == null) { + + LOG.error(AaiUiMsgs.SKIPPING_RELATIONSHIP, r.toString()); + processingNode.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.NODE_IDENTITY_ERROR); + return false; + } + + newNode = new ActiveInventoryNode(this.visualizationConfigs, oxmEntityLookup); + + String entityType = r.getRelatedTo(); + + if (r.getRelationshipData() != null) { + for (RelationshipData rd : r.getRelationshipData()) { + newNode.addQueryParam(rd.getRelationshipKey() + ":" + rd.getRelationshipValue()); + } + } + + descriptor = oxmEntityLookup.getEntityDescriptors().get(r.getRelatedTo()); + + newNode.setNodeId(nodeId); + newNode.setEntityType(entityType); + newNode.setSelfLink(resourcePath); + + processingNode.addOutboundNeighbor(nodeId); + + if (descriptor != null) { + + List<String> pkeyNames = descriptor.getPrimaryKeyAttributeNames(); + + newNode.changeState(NodeProcessingState.SELF_LINK_UNRESOLVED, + NodeProcessingAction.SELF_LINK_SET); + + newNode.setPrimaryKeyName(NodeUtils.concatArray(pkeyNames, "/")); + + String primaryKeyValues = getRelationshipPrimaryKeyValues(r, entityType, pkeyNames); + newNode.setPrimaryKeyValue(primaryKeyValues); + + } else { + + LOG.error(AaiUiMsgs.VISUALIZATION_OUTPUT_ERROR, + "Failed to parse entity because OXM descriptor could not be found for type = " + + r.getRelatedTo()); + + newNode.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.NEIGHBORS_PROCESSED_ERROR); + + } + + if (nodeCache.putIfAbsent(nodeId, newNode) != null) { + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Failed to add node to nodeCache because it already exists. Node id = " + + newNode.getNodeId()); + } + } + + } + + } + + return true; + + } + + /** + * Process initial state. + * + * @param nodeId the node id + */ + private void processInitialState(String nodeId) { + + if (nodeId == null) { + LOG.error(AaiUiMsgs.FAILED_TO_PROCESS_INITIAL_STATE, "Node id is null"); + return; + } + + ActiveInventoryNode cachedNode = nodeCache.get(nodeId); + + if (cachedNode == null) { + LOG.error(AaiUiMsgs.FAILED_TO_PROCESS_INITIAL_STATE, "Node cannot be" + + " found for nodeId, " + nodeId); + return; + } + + if (cachedNode.getSelfLink() == null) { + + if (cachedNode.getNodeId() == null ) { + + /* + * if the self link is null at the INIT state, which could be valid if this node is a + * complex attribute group which didn't originate from a self-link, but in that situation + * both the node id and node key should already be set. 
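In addSelfLinkRelationshipChildren above, each neighbor's node id comes from NodeUtils.generateUniqueShaDigest applied to the relationship's resource path. That helper is not shown in this hunk; the sketch below only illustrates the general idea, assuming it reduces to a hex-encoded SHA-256 of the path (the path itself is invented):

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    public class NodeIdSketch {

      // a stable id: hex-encoded SHA-256 over the self-link resource path
      static String digest(String resourcePath) throws NoSuchAlgorithmException {
        MessageDigest md = MessageDigest.getInstance("SHA-256");
        byte[] hash = md.digest(resourcePath.getBytes(StandardCharsets.UTF_8));
        StringBuilder hex = new StringBuilder();
        for (byte b : hash) {
          hex.append(String.format("%02x", b));
        }
        return hex.toString();
      }

      public static void main(String[] args) throws NoSuchAlgorithmException {
        // hypothetical related-link path; the real path comes from relationship.getRelatedLink()
        System.out.println(digest("/aai/v13/cloud-infrastructure/pservers/pserver/example-hostname"));
      }
    }
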
+ */ + + cachedNode.changeState(NodeProcessingState.ERROR, NodeProcessingAction.NODE_IDENTITY_ERROR); + + } + + if (cachedNode.getNodeId() != null) { + + /* + * This should be the success path branch if the self-link is not set + */ + + cachedNode.changeState(NodeProcessingState.SELF_LINK_RESPONSE_UNPROCESSED, + NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_OK); + + } + + } else { + + if (cachedNode.hasResolvedSelfLink()) { + LOG.error(AaiUiMsgs.INVALID_RESOLVE_STATE_DURING_INIT); + cachedNode.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.UNEXPECTED_STATE_TRANSITION); + } else { + cachedNode.changeState(NodeProcessingState.SELF_LINK_UNRESOLVED, + NodeProcessingAction.SELF_LINK_SET); + } + } + } + + /** + * Process skeleton node. + * + * @param skeletonNode the skeleton node + * @param queryParams the query params + */ + private void processSearchableEntity(SearchableEntity searchTargetEntity, QueryParams queryParams) { + + if (searchTargetEntity == null) { + return; + } + + if (searchTargetEntity.getId() == null) { + LOG.error(AaiUiMsgs.FAILED_TO_PROCESS_SKELETON_NODE, "Failed to process skeleton" + + " node because nodeId is null for node, " + searchTargetEntity.getLink()); + return; + } + + ActiveInventoryNode newNode = new ActiveInventoryNode(this.visualizationConfigs, oxmEntityLookup); + + newNode.setNodeId(searchTargetEntity.getId()); + newNode.setEntityType(searchTargetEntity.getEntityType()); + newNode.setPrimaryKeyName(getEntityTypePrimaryKeyName(searchTargetEntity.getEntityType())); + newNode.setPrimaryKeyValue(searchTargetEntity.getEntityPrimaryKeyValue()); + + if (newNode.getEntityType() != null && newNode.getPrimaryKeyName() != null + && newNode.getPrimaryKeyValue() != null) { + newNode.addQueryParam( + newNode.getEntityType() + "." + newNode.getPrimaryKeyName() + ":" + newNode.getPrimaryKeyValue()); + } + /* + * This code may need some explanation. In any graph there will be a single root node. The root + * node is really the center of the universe, and for now, we are tagging the search target as + * the root node. Everything else in the visualization of the graph will be centered around this + * node as the focal point of interest. + * + * Due to it's special nature, there will only ever be one root node, and it's node depth will + * always be equal to zero. 
+ */ + + if (!isRootNodeFound()) { + if (queryParams.getSearchTargetNodeId().equals(newNode.getNodeId())) { + newNode.setNodeDepth(0); + newNode.setRootNode(true); + LOG.info(AaiUiMsgs.ROOT_NODE_DISCOVERED, queryParams.getSearchTargetNodeId()); + setRootNodeFound(true); + } + } + + newNode.setSelfLink(searchTargetEntity.getLink()); + + nodeCache.putIfAbsent(newNode.getNodeId(), newNode); + } + + private int getTotalWorkOnHand() { + + int numNodesWithPendingStates = 0; + + if( isRootNodeFound()) { + evaluateNodeDepths(); + } + + for (ActiveInventoryNode n : nodeCache.values()) { + + switch (n.getState()) { + + case READY: + case ERROR: { + // do nothing, these are our normal + // exit states + break; + } + + case NEIGHBORS_UNPROCESSED: { + + if (n.getNodeDepth() < this.visualizationConfigs.getMaxSelfLinkTraversalDepth()) { + /* + * Only process our neighbors relationships if our current depth is less than the max + * depth + */ + numNodesWithPendingStates++; + } + + break; + } + + default: { + + /* + * for all other states, there is work to be done + */ + numNodesWithPendingStates++; + } + + } + + } + + LOG.debug(AaiUiMsgs.OUTSTANDING_WORK_PENDING_NODES, + String.valueOf(numNodesWithPendingStates)); + + int totalWorkOnHand = aaiWorkOnHand.get() + numNodesWithPendingStates; + + return totalWorkOnHand; + + } + + /** + * Checks for out standing work. + * + * @return true, if successful + */ + private void processOutstandingWork(QueryParams queryParams) { + + while (getTotalWorkOnHand() > 0) { + + /* + * Force an evaluation of node depths before determining if we should limit state-based + * traversal or processing. + */ + + processCurrentNodeStates(queryParams); + + try { + Thread.sleep(10); + } catch (InterruptedException exc) { + LOG.error(AaiUiMsgs.PROCESSING_LOOP_INTERUPTED, exc.getMessage()); + return; + } + + } + + } + + /* (non-Javadoc) + * @see org.onap.aai.sparky.viewandinspect.services.VisualizationContext#processSelfLinks(org.onap.aai.sparky.sync.entity.SearchableEntity, org.onap.aai.sparky.viewandinspect.entity.QueryParams) + */ + @Override + public void processSelfLinks(SearchableEntity searchtargetEntity, QueryParams queryParams) { + + try { + + + if (searchtargetEntity == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, contextIdStr + " - Failed to" + + " processSelfLinks, searchtargetEntity is null"); + return; + } + + long startTimeInMs = System.currentTimeMillis(); + + processSearchableEntity(searchtargetEntity, queryParams); + + /* + * This method is blocking until we decouple it with a CountDownLatch await condition, + * and make the internal graph processing more event-y. + */ + + processOutstandingWork(queryParams); + + long totalResolveTime = (System.currentTimeMillis() - startTimeInMs); + + long opTime = System.currentTimeMillis() - startTimeInMs; + + LOG.info(AaiUiMsgs.ALL_TRANSACTIONS_RESOLVED, String.valueOf(totalResolveTime), + String.valueOf(totalLinksRetrieved.get()), String.valueOf(opTime)); + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.VISUALIZATION_OUTPUT_ERROR, exc.getMessage()); + } + + } + + /** + * Verify outbound neighbors. 
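processOutstandingWork above polls getTotalWorkOnHand() every 10 ms, and the comment in processSelfLinks notes the intent to replace this with a CountDownLatch await. A minimal sketch of that await pattern with a fixed amount of work (worker bodies and counts are invented; a java.util.concurrent.Phaser would suit the dynamically growing work set here better, since the total is not known up front):

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.TimeUnit;

    public class LatchDrainSketch {

      public static void main(String[] args) throws InterruptedException {
        // one count per outstanding unit of work; workers call countDown() as they finish
        CountDownLatch outstandingWork = new CountDownLatch(3);

        for (int i = 0; i < 3; i++) {
          final int unit = i;
          new Thread(() -> {
            try {
              Thread.sleep(50L * (unit + 1)); // simulate a self-link resolve
            } catch (InterruptedException ignored) {
              Thread.currentThread().interrupt();
            } finally {
              outstandingWork.countDown();
            }
          }).start();
        }

        // replaces the poll-and-sleep loop: block until all work is accounted for, or time out
        if (outstandingWork.await(5, TimeUnit.SECONDS)) {
          System.out.println("all outstanding work drained");
        } else {
          System.out.println("timed out waiting for work to drain");
        }
      }
    }
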
+ */ + private void verifyOutboundNeighbors() { + + for (ActiveInventoryNode srcNode : nodeCache.values()) { + + for (String targetNodeId : srcNode.getOutboundNeighbors()) { + + ActiveInventoryNode targetNode = nodeCache.get(targetNodeId); + + if (targetNode != null && srcNode.getNodeId() != null) { + + targetNode.addInboundNeighbor(srcNode.getNodeId()); + + if (this.visualizationConfigs.makeAllNeighborsBidirectional()) { + targetNode.addOutboundNeighbor(srcNode.getNodeId()); + } + + } + + } + + } + + } + + /** + * Evaluate node depths. + */ + private void evaluateNodeDepths() { + + int numChanged = -1; + int numAttempts = 0; + + while (numChanged != 0) { + + numChanged = 0; + numAttempts++; + + for (ActiveInventoryNode srcNode : nodeCache.values()) { + + if (srcNode.getState() == NodeProcessingState.INIT) { + + /* + * this maybe the only state that we don't want to to process the node depth on, because + * typically it won't have any valid fields set, and it may remain in a partial state + * until we have processed the self-link. + */ + + continue; + + } + + for (String targetNodeId : srcNode.getOutboundNeighbors()) { + ActiveInventoryNode targetNode = nodeCache.get(targetNodeId); + + if (targetNode != null) { + + if (targetNode.changeDepth(srcNode.getNodeDepth() + 1)) { + numChanged++; + } + } + } + + for (String targetNodeId : srcNode.getInboundNeighbors()) { + ActiveInventoryNode targetNode = nodeCache.get(targetNodeId); + + if (targetNode != null) { + + if (targetNode.changeDepth(srcNode.getNodeDepth() + 1)) { + numChanged++; + } + } + } + } + + if (numAttempts >= MAX_DEPTH_EVALUATION_ATTEMPTS) { + LOG.info(AaiUiMsgs.MAX_EVALUATION_ATTEMPTS_EXCEEDED); + return; + } + + } + + if (LOG.isDebugEnabled()) { + if (numAttempts > 0) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Evaluate node depths completed in " + numAttempts + " attempts"); + } else { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Evaluate node depths completed in 0 attempts because all nodes at correct depth"); + } + } + + } + + + /** + * Gets the entity type primary key name. 
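evaluateNodeDepths above is an iterative relaxation: each pass pushes depth + 1 to inbound and outbound neighbors until a pass changes nothing or the attempt cap is hit. A self-contained sketch of the same fixed-point idea on a toy graph (node ids are invented):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class DepthRelaxationSketch {

      public static void main(String[] args) {
        // tiny graph keyed by node id; the root starts at depth 0, everyone else is unknown
        Map<String, List<String>> neighbors = new HashMap<>();
        neighbors.put("root", Arrays.asList("a", "b"));
        neighbors.put("a", Arrays.asList("root", "c"));
        neighbors.put("b", Arrays.asList("root"));
        neighbors.put("c", Arrays.asList("a"));

        Map<String, Integer> depth = new HashMap<>();
        neighbors.keySet().forEach(id -> depth.put(id, Integer.MAX_VALUE));
        depth.put("root", 0);

        // relax until a pass makes no changes, bounded like MAX_DEPTH_EVALUATION_ATTEMPTS
        boolean changed = true;
        int attempts = 0;
        while (changed && attempts++ < 10) {
          changed = false;
          for (Map.Entry<String, List<String>> e : neighbors.entrySet()) {
            int current = depth.get(e.getKey());
            int candidate = (current == Integer.MAX_VALUE) ? Integer.MAX_VALUE : current + 1;
            for (String n : e.getValue()) {
              if (candidate < depth.get(n)) {
                depth.put(n, candidate);
                changed = true;
              }
            }
          }
        }
        System.out.println(depth); // depths settle at root=0, a=1, b=1, c=2 (print order may vary)
      }
    }
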
+ * + * @param entityType the entity type + * @return the entity type primary key name + */ + + + private String getEntityTypePrimaryKeyName(String entityType) { + + if (entityType == null) { + LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE, "node primary key" + + " name because entity type is null"); + return null; + } + + OxmEntityDescriptor descriptor = oxmEntityLookup.getEntityDescriptors().get(entityType); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE, "oxm entity" + + " descriptor for entityType = " + entityType); + return null; + } + + List<String> pkeyNames = descriptor.getPrimaryKeyAttributeNames(); + + if (pkeyNames == null || pkeyNames.size() == 0) { + LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE, "node primary" + + " key because descriptor primary key names is empty"); + return null; + } + + return NodeUtils.concatArray(pkeyNames, "/"); + + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseVisualizationService.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseVisualizationService.java new file mode 100644 index 0000000..b0b8b9e --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/BaseVisualizationService.java @@ -0,0 +1,382 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.viewandinspect.services; + +import java.io.IOException; +import java.security.SecureRandom; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; + +import javax.servlet.ServletException; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.dal.GizmoAdapter; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.subscription.config.SubscriptionConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.entity.SearchableEntity; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; +import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode; +import org.onap.aai.sparky.viewandinspect.entity.D3VisualizationOutput; +import org.onap.aai.sparky.viewandinspect.entity.GraphMeta; +import org.onap.aai.sparky.viewandinspect.entity.QueryParams; +import org.onap.aai.sparky.viewandinspect.entity.QueryRequest; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +public class BaseVisualizationService implements VisualizationService { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(BaseVisualizationService.class); + + private ObjectMapper mapper = new ObjectMapper(); + + private final ActiveInventoryAdapter aaiAdapter; + private final GizmoAdapter gizmoAdapter; + private final ElasticSearchAdapter esAdapter; + private final ExecutorService aaiExecutorService; + + private ConcurrentHashMap<Long, VisualizationContext> contextMap; + private final SecureRandom secureRandom; + + private VisualizationConfigs visualizationConfigs; + private SubscriptionConfig subConfig; + private ElasticSearchEndpointConfig endpointEConfig; + private ElasticSearchSchemaConfig schemaEConfig; + private OxmEntityLookup oxmEntityLookup; + + public BaseVisualizationService(OxmModelLoader loader, VisualizationConfigs visualizationConfigs, + ActiveInventoryAdapter aaiAdapter, GizmoAdapter gizmoAdapter, ElasticSearchAdapter esAdapter, + ElasticSearchEndpointConfig endpointConfig, ElasticSearchSchemaConfig schemaConfig, + int numActiveInventoryWorkers, OxmEntityLookup oxmEntityLookup, SubscriptionConfig subscriptionConfig) + throws Exception { + + this.visualizationConfigs = visualizationConfigs; + this.endpointEConfig = endpointConfig; + this.schemaEConfig = schemaConfig; + this.oxmEntityLookup = oxmEntityLookup; + this.subConfig = subscriptionConfig; + + + secureRandom = new SecureRandom(); + + /* + * Fix constructor with properly wired in properties + */ + + this.aaiAdapter = aaiAdapter; + this.gizmoAdapter = gizmoAdapter; + this.esAdapter = esAdapter; + + this.mapper = new ObjectMapper(); + mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + + this.contextMap = new ConcurrentHashMap<Long, VisualizationContext>(); + + 
this.aaiExecutorService = NodeUtils.createNamedExecutor("SLNC-WORKER", + numActiveInventoryWorkers, LOG); + + } + + /** + * Analyze query request body. + * + * @param queryRequestJson the query request json + * @return the query request + */ + + public QueryRequest analyzeQueryRequestBody(String queryRequestJson) { + + + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "analyzeQueryRequestBody()," + " queryRequestJson = " + queryRequestJson); + + ObjectMapper nonEmptyMapper = new ObjectMapper(); + nonEmptyMapper.setSerializationInclusion(Include.NON_EMPTY); + + QueryRequest queryBody = null; + + try { + queryBody = nonEmptyMapper.readValue(queryRequestJson, QueryRequest.class); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.EXCEPTION_CAUGHT, "Analyzing query request body.", + exc.getLocalizedMessage()); + } + + return queryBody; + + } + + /** + * Log optime. + * + * @param method the method + * @param opStartTimeInMs the op start time in ms + */ + private void logOptime(String method, long opStartTimeInMs) { + LOG.info(AaiUiMsgs.OPERATION_TIME, method, + String.valueOf(System.currentTimeMillis() - opStartTimeInMs)); + } + + private SearchableEntity extractSearchableEntityFromElasticEntity(OperationResult operationResult) { + if (operationResult == null || !operationResult.wasSuccessful()) { + // error, return empty collection + return null; + } + + SearchableEntity sourceEntity = null; + if (operationResult.wasSuccessful()) { + + try { + JsonNode elasticValue = mapper.readValue(operationResult.getResult(), JsonNode.class); + + if (elasticValue != null) { + JsonNode sourceField = elasticValue.get("_source"); + + if (sourceField != null) { + sourceEntity = new SearchableEntity(); + + String entityType = NodeUtils.extractFieldValueFromObject(sourceField, "entityType"); + sourceEntity.setEntityType(entityType); + String entityPrimaryKeyValue = NodeUtils.extractFieldValueFromObject(sourceField, "entityPrimaryKeyValue"); + sourceEntity.setEntityPrimaryKeyValue(entityPrimaryKeyValue); + String link = NodeUtils.extractFieldValueFromObject(sourceField, "link"); + sourceEntity.setLink(link); + String lastmodTimestamp = NodeUtils.extractFieldValueFromObject(sourceField, "lastmodTimestamp"); + sourceEntity.setEntityTimeStamp(lastmodTimestamp); + } + } + } catch (IOException ioe) { + LOG.error(AaiUiMsgs.JSON_CONVERSION_ERROR, "a json node ", ioe.getLocalizedMessage()); + } + } + return sourceEntity; + } + + /** + * Builds the visualization using generic query. + * + * @param queryRequest the query request + * @return the operation result + */ + public OperationResult buildVisualizationUsingGenericQuery(QueryRequest queryRequest) { + + OperationResult returnValue = new OperationResult(); + OperationResult dataCollectionResult = null; + QueryParams queryParams = null; + SearchableEntity sourceEntity = null; + + try { + + /* + * Here is where we need to make a dip to elastic-search for the self-link by entity-id (link + * hash). 
+ */ + dataCollectionResult = esAdapter.retrieveEntityById(endpointEConfig.getEsIpAddress(), + endpointEConfig.getEsServerPort(),schemaEConfig.getIndexName(), + schemaEConfig.getIndexDocType(), queryRequest.getHashId()); + sourceEntity = extractSearchableEntityFromElasticEntity(dataCollectionResult); + + if (sourceEntity != null) { + sourceEntity.generateId(); + } + + queryParams = new QueryParams(); + queryParams.setSearchTargetNodeId(queryRequest.getHashId()); + + } catch (Exception e1) { + LOG.error(AaiUiMsgs.FAILED_TO_GET_NODES_QUERY_RESULT, e1.getLocalizedMessage()); + dataCollectionResult = new OperationResult(500, "Failed to get nodes-query result from AAI"); + } + + if (dataCollectionResult.getResultCode() == 200) { + + String d3OutputJsonOutput = null; + + try { + + d3OutputJsonOutput = getVisualizationOutputBasedonGenericQuery( sourceEntity, queryParams, queryRequest); + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Generated D3" + " output as json = " + d3OutputJsonOutput); + } + + if (d3OutputJsonOutput != null) { + returnValue.setResultCode(200); + returnValue.setResult(d3OutputJsonOutput); + } else { + returnValue.setResult(500, "Failed to generate D3 graph visualization"); + } + + } catch (Exception exc) { + returnValue.setResult(500, + "Failed to generate D3 graph visualization, due to a servlet exception."); + LOG.error(AaiUiMsgs.ERROR_D3_GRAPH_VISUALIZATION, exc.getLocalizedMessage()); + } + } else { + returnValue.setResult(dataCollectionResult.getResultCode(), dataCollectionResult.getResult()); + } + + return returnValue; + + } + + + /** + * Gets the visualization output basedon generic query. + * + * @param searchtargetEntity entity that will be used to start visualization flow + * @param queryParams the query params + * @return the visualization output basedon generic query + * @throws ServletException the servlet exception + * @throws + */ + private String getVisualizationOutputBasedonGenericQuery(SearchableEntity searchtargetEntity, + QueryParams queryParams, QueryRequest request) throws ServletException { + + long opStartTimeInMs = System.currentTimeMillis(); + + VisualizationTransformer transformer = null; + try { + transformer = new VisualizationTransformer(visualizationConfigs, subConfig); + } catch (Exception exc) { + throw new ServletException( + "Failed to create VisualizationTransformer instance because of execption", exc); + } + + VisualizationContext visContext = null; + long contextId = secureRandom.nextLong(); + try { + if ( visualizationConfigs.isGizmoEnabled()) { + visContext = new BaseGizmoVisualizationContext(contextId, this.gizmoAdapter, aaiExecutorService, + this.visualizationConfigs, oxmEntityLookup); + } else { + visContext = new BaseVisualizationContext(contextId, this.aaiAdapter, aaiExecutorService, + this.visualizationConfigs, oxmEntityLookup); + } + + contextMap.putIfAbsent(contextId, visContext); + } catch (Exception e1) { + LOG.error(AaiUiMsgs.EXCEPTION_CAUGHT, + "While building Visualization Context, " + e1.getLocalizedMessage()); + throw new ServletException(e1); + } + + String jsonResponse = null; + + long startTimeInMs = System.currentTimeMillis(); + + visContext.processSelfLinks(searchtargetEntity, queryParams); + contextMap.remove(contextId); + + logOptime("collectSelfLinkNodes()", startTimeInMs); + + /* + * Flatten the graphs into a set of Graph and Link nodes. In this method I want the node graph + * resulting from the edge-tag-query to be represented first, and then we'll layer in + * relationship data. 
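buildVisualizationUsingGenericQuery starts by fetching the entity document from Elasticsearch by its hash id, and extractSearchableEntityFromElasticEntity then pulls entityType, entityPrimaryKeyValue, link, and lastmodTimestamp out of the document's _source. A sketch of that extraction against a hypothetical response body (the field values below are invented; only the field names come from the code above):

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class ElasticSourceSketch {

      public static void main(String[] args) throws Exception {
        // hypothetical shape of the entity document fetched by id
        String getResponse = "{"
            + "\"_id\": \"1a2b3c\","
            + "\"_source\": {"
            + "  \"entityType\": \"generic-vnf\","
            + "  \"entityPrimaryKeyValue\": \"example-vnf-id\","
            + "  \"link\": \"/aai/v13/network/generic-vnfs/generic-vnf/example-vnf-id\","
            + "  \"lastmodTimestamp\": \"2018-03-22T00:00:00Z\""
            + "}}";

        JsonNode root = new ObjectMapper().readTree(getResponse);
        JsonNode source = root.get("_source");

        // the same four fields the extraction above copies into a SearchableEntity
        System.out.println(source.get("entityType").asText());
        System.out.println(source.get("entityPrimaryKeyValue").asText());
        System.out.println(source.get("link").asText());
        System.out.println(source.get("lastmodTimestamp").asText());
      }
    }
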
+ */ + long overlayDataStartTimeInMs = System.currentTimeMillis(); + + Map<String, ActiveInventoryNode> cachedNodeMap = visContext.getNodeCache(); + + if (LOG.isDebugEnabled()) { + + StringBuilder sb = new StringBuilder(128); + + sb.append("\nCached Node Map:\n"); + for (String k : cachedNodeMap.keySet()) { + sb.append("\n----"); + sb.append("\n").append(cachedNodeMap.get(k).dumpNodeTree(true)); + } + + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, sb.toString()); + } + + transformer.buildFlatNodeArrayFromGraphCollection(cachedNodeMap); + transformer.buildLinksFromGraphCollection(cachedNodeMap); + + /* + * - Apply configuration-driven styling + * - Build the final transformation response object + * - Use information we have to populate the GraphMeta object + */ + + transformer.addSearchTargetAttributesToRootNode(); + + GraphMeta graphMeta = new GraphMeta(); + + D3VisualizationOutput output = null; + try { + output = transformer + .generateVisualizationOutput((System.currentTimeMillis() - opStartTimeInMs), graphMeta); + } catch (JsonProcessingException exc) { + throw new ServletException("Caught an exception while generation visualization output", exc); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.FAILURE_TO_PROCESS_REQUEST, exc.getLocalizedMessage()); + } + + output.setInlineMessage(visContext.getInlineMessage()); + output.getGraphMeta().setNumLinkResolveFailed(visContext.getNumFailedLinkResolve()); + output.getGraphMeta().setNumLinksResolvedSuccessfullyFromCache( + visContext.getNumSuccessfulLinkResolveFromCache()); + output.getGraphMeta().setNumLinksResolvedSuccessfullyFromServer( + visContext.getNumSuccessfulLinkResolveFromFromServer()); + + try { + jsonResponse = transformer.convertVisualizationOutputToJson(output); + } catch (JsonProcessingException jpe) { + throw new ServletException( + "Caught an exception while converting visualization output to json", jpe); + } + + logOptime("[build flat node array, add relationship data, search target," + + " color scheme, and generate visualization output]", overlayDataStartTimeInMs); + + logOptime("doFilter()", opStartTimeInMs); + + return jsonResponse; + + } + + public void shutdown() { + aaiExecutorService.shutdown(); + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationContext.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationContext.java new file mode 100644 index 0000000..6d4ed88 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationContext.java @@ -0,0 +1,55 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.services; + +import java.util.concurrent.ConcurrentHashMap; + +import org.onap.aai.sparky.sync.entity.SearchableEntity; +import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode; +import org.onap.aai.sparky.viewandinspect.entity.InlineMessage; +import org.onap.aai.sparky.viewandinspect.entity.QueryParams; + + +public interface VisualizationContext { + + /** + * Process self links. + * + * @param skeletonNode the skeleton node + * @param queryParams the query params + */ + void processSelfLinks(SearchableEntity searchtargetEntity, QueryParams queryParams); + + ConcurrentHashMap<String, ActiveInventoryNode> getNodeCache(); + + InlineMessage getInlineMessage(); + + int getNumFailedLinkResolve(); + + int getNumSuccessfulLinkResolveFromCache(); + + int getNumSuccessfulLinkResolveFromFromServer(); + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationService.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationService.java new file mode 100644 index 0000000..c8f252c --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationService.java @@ -0,0 +1,52 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.services; + + +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.viewandinspect.entity.QueryRequest; + +public interface VisualizationService { + + /** + * Analyze query request body. + * + * @param queryRequestJson the query request json + * @return the query request + */ + + QueryRequest analyzeQueryRequestBody(String queryRequestJson); + + /** + * Builds the visualization using generic query. 
+ * + * @param queryRequest the query request + * @return the operation result + */ + OperationResult buildVisualizationUsingGenericQuery(QueryRequest queryRequest); + + void shutdown(); + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationTransformer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationTransformer.java new file mode 100644 index 0000000..46a70c3 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/services/VisualizationTransformer.java @@ -0,0 +1,305 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.services; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.UUID; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.sparky.config.SparkyResourceLoader; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.subscription.config.SubscriptionConfig; +import org.onap.aai.sparky.util.ConfigHelper; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; +import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode; +import org.onap.aai.sparky.viewandinspect.entity.D3VisualizationOutput; +import org.onap.aai.sparky.viewandinspect.entity.GraphMeta; +import org.onap.aai.sparky.viewandinspect.entity.NodeDebug; +import org.onap.aai.sparky.viewandinspect.entity.SparkyGraphLink; +import org.onap.aai.sparky.viewandinspect.entity.SparkyGraphNode; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectWriter; + +/** + * The idea here is to receive a collection of graphs and then fold them together (or not) based on + * configuration. The first goal will be to fold all like-resources together, but the choice of + * folding could/should be configurable, and will simply change the degree of link based nodes when + * we generate the Node-Array and Link-Array output. 
+ * + * @author DAVEA + * + */ + +public class VisualizationTransformer { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger( + VisualizationTransformer.class); + + List<SparkyGraphNode> flatNodeArray = new ArrayList<SparkyGraphNode>(); + + /* + * Maybe this isn't a string but Json-Model objects that we will convert to final string + * representation when we dump the node-array and link-array collections the post-data blob in the + * HttpServletResponse. + */ + + List<SparkyGraphLink> linkArrayOutput = new ArrayList<SparkyGraphLink>(); + + private VisualizationConfigs visualizationConfigs; + private SubscriptionConfig subConfig; + + /** + * Instantiates a new visualization transformer. + * + * @throws Exception the exception + */ + public VisualizationTransformer(VisualizationConfigs visualizationConfigs, + SubscriptionConfig subConfig) throws Exception { + this.visualizationConfigs = visualizationConfigs; + this.subConfig = subConfig; + } + + + /** + * Log optime. + * + * @param method the method + * @param startTimeInMs the start time in ms + */ + private void logOptime(String method, long startTimeInMs) { + LOG.info(AaiUiMsgs.OPERATION_TIME, method, + String.valueOf((System.currentTimeMillis() - startTimeInMs))); + } + + /** + * Adds the search target attributes to root node. + */ + public void addSearchTargetAttributesToRootNode() { + + for (SparkyGraphNode n : flatNodeArray) { + if (n.isRootNode()) { + n.getNodeMeta().setSearchTarget(true); + n.getNodeMeta().setClassName(this.visualizationConfigs.getSelectedSearchedNodeClassName()); + } + + } + + } + + /** + * Generate visualization output. + * + * @param preProcessingOpTimeInMs the pre processing op time in ms + * @param graphMeta the graph meta + * @return the d 3 visualization output + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + + public D3VisualizationOutput generateVisualizationOutput(long preProcessingOpTimeInMs, + GraphMeta graphMeta) throws JsonProcessingException, IOException { + + long opStartTimeInMs = System.currentTimeMillis(); + + /* + * iterate over the flat collection, and only add the graph nodes to the graph node collection + */ + + D3VisualizationOutput output = new D3VisualizationOutput(); + + output.setGraphMeta(graphMeta); + + for (SparkyGraphNode n : flatNodeArray) { + if ( n.getItemType()!= null) { + output.pegCounter(n.getItemType()); + } + } + + output.addNodes(flatNodeArray); + output.addLinks(linkArrayOutput); + + int numNodes = flatNodeArray.size(); + int numLinks = linkArrayOutput.size(); + + LOG.info(AaiUiMsgs.VISUALIZATION_GRAPH_OUTPUT, String.valueOf(numNodes), + String.valueOf(numLinks)); + + if (numLinks < (numNodes - 1)) { + LOG.warn(AaiUiMsgs.DANGLING_NODE_WARNING, String.valueOf(numLinks), + String.valueOf(numNodes)); + } + + ObjectMapper mapper = new ObjectMapper(); + + SparkyResourceLoader resourceLoader = visualizationConfigs.getResourceLoader(); + File aaiEntityDescriptorsFile = resourceLoader.getResourceAsFile(visualizationConfigs.getAaiEntityNodeDescriptors(), true); + + if (aaiEntityDescriptorsFile != null) { + com.fasterxml.jackson.databind.JsonNode aaiEntityNodeDefinitions = + mapper.readTree(aaiEntityDescriptorsFile); + graphMeta.setAaiEntityNodeDescriptors(aaiEntityNodeDefinitions); + } else { + LOG.error(AaiUiMsgs.ERROR_GENERIC, "Failed to find " + visualizationConfigs.getAaiEntityNodeDescriptors()); + graphMeta.setAaiEntityNodeDescriptors(null); + } + + graphMeta.setNumLinks(linkArrayOutput.size()); + graphMeta.setNumNodes(flatNodeArray.size()); + graphMeta.setRenderTimeInMs(preProcessingOpTimeInMs); + + output.setGraphMeta(graphMeta); + + logOptime("generateVisualizationOutput()", opStartTimeInMs); + + return output; + } + + /** + * Convert visualization output to json. + * + * @param output the output + * @return the string + * @throws JsonProcessingException the json processing exception + */ + public String convertVisualizationOutputToJson(D3VisualizationOutput output) + throws JsonProcessingException { + + if (output == null) { + return null; + } + + ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); + + return ow.writeValueAsString(output); + + } + + /** + * Builds the links from graph collection. + * + * @param nodeMap the node map + */ + public void buildLinksFromGraphCollection(Map<String, ActiveInventoryNode> nodeMap) { + + for (ActiveInventoryNode ain : nodeMap.values()) { + + /* + * This one is a little bit different, when we iterate over the collection we only want to + * draw the links for node that are less than the max traversal depth. We want to only draw + * links at a depth of n-1 because we are basing the links on the outbound neighbors from the + * current node. 
+ */ + + if (ain.getNodeDepth() < this.visualizationConfigs.getMaxSelfLinkTraversalDepth()) { + + Collection<String> outboundNeighbors = ain.getOutboundNeighbors(); + + for (String outboundNeighbor : outboundNeighbors) { + + SparkyGraphLink nodeLink = new SparkyGraphLink(); + + nodeLink.setId(UUID.randomUUID().toString()); + nodeLink.setSource(ain.getNodeId()); + nodeLink.setTarget(outboundNeighbor); + + linkArrayOutput.add(nodeLink); + + } + + Collection<String> inboundNeighbors = ain.getInboundNeighbors(); + + for (String inboundNeighbor : inboundNeighbors) { + + SparkyGraphLink nodeLink = new SparkyGraphLink(); + + nodeLink.setId(UUID.randomUUID().toString()); + nodeLink.setSource(ain.getNodeId()); + nodeLink.setTarget(inboundNeighbor); + + linkArrayOutput.add(nodeLink); + + } + + + } else { + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "buildLinks()," + + " Filtering node = " + ain.getNodeId() + " @ depth = " + + ain.getNodeDepth()); + } + + } + } + + } + + /** + * Builds the flat node array from graph collection. + * + * @param nodeMap the node map + */ + /* + * Recursive function to walk multi-graph nodes and children to build a folded resource target + * graph. + */ + public void buildFlatNodeArrayFromGraphCollection(Map<String, ActiveInventoryNode> nodeMap) { + + for (ActiveInventoryNode n : nodeMap.values()) { + + if (n.getNodeDepth() <= this.visualizationConfigs.getMaxSelfLinkTraversalDepth()) { + + SparkyGraphNode jsonNode = new SparkyGraphNode(n, this.visualizationConfigs, this.subConfig); + + jsonNode.getNodeMeta().setClassName(this.visualizationConfigs.getGeneralNodeClassName()); + + if (this.visualizationConfigs.isVisualizationDebugEnabled()) { + + NodeDebug nodeDebug = jsonNode.getNodeMeta().getNodeDebug(); + + if (nodeDebug != null) { + nodeDebug.setProcessingError(n.isProcessingErrorOccurred()); + nodeDebug.setProcessingErrorCauses(n.getProcessingErrorCauses()); + } + } + flatNodeArray.add(jsonNode); + } else { + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Filtering node from visualization: " + n.getNodeId() + " @ depth = " + + n.getNodeDepth()); + } + } + } + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformGizmoNodeSelfLinkProcessingTask.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformGizmoNodeSelfLinkProcessingTask.java new file mode 100644 index 0000000..820b749 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformGizmoNodeSelfLinkProcessingTask.java @@ -0,0 +1,128 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
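buildLinksFromGraphCollection above turns each cached node's neighbor ids into SparkyGraphLink entries carrying a random UUID id, a source node id, and a target node id, which the D3 layout then consumes alongside the flat node array. A stripped-down sketch of that link-array construction (the Link stand-in type and node ids are illustrative, not Sparky classes):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.UUID;

    public class LinkArraySketch {

      // minimal stand-in for a graph link: just id / source / target
      static final class Link {
        final String id = UUID.randomUUID().toString();
        final String source;
        final String target;

        Link(String source, String target) {
          this.source = source;
          this.target = target;
        }

        @Override
        public String toString() {
          return "{id=" + id + ", source=" + source + ", target=" + target + "}";
        }
      }

      public static void main(String[] args) {
        // adjacency of node id -> outbound neighbor ids, as the node cache would hold it
        Map<String, List<String>> outbound = new HashMap<>();
        outbound.put("node-1", Arrays.asList("node-2", "node-3"));
        outbound.put("node-2", Arrays.asList("node-3"));

        List<Link> linkArray = new ArrayList<>();
        outbound.forEach((source, targets) ->
            targets.forEach(target -> linkArray.add(new Link(source, target))));

        // three links feed the layout: 1->2, 1->3, 2->3
        linkArray.forEach(System.out::println);
      }
    }
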
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.task; + +import java.util.Map; +import java.util.function.Supplier; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.GizmoAdapter; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.viewandinspect.entity.NodeProcessingTransaction; +import org.slf4j.MDC; + +/** + * The Class PerformNodeSelfLinkProcessingTask. + */ +public class PerformGizmoNodeSelfLinkProcessingTask implements Supplier<NodeProcessingTransaction> { + + private static final Logger logger = + LoggerFactory.getInstance().getLogger(PerformGizmoNodeSelfLinkProcessingTask.class); + + private NodeProcessingTransaction txn; + private GizmoAdapter gizmoAdapter; + private Map<String, String> contextMap; + + /** + * Instantiates a new perform node self link processing task. + * + * @param txn the txn + * @param aaiProvider the aai provider + * @param aaiConfig the aai config + */ + /** + * + * @param txn + * @param requestParameters + * @param aaiProvider + * @param aaiConfig + */ + public PerformGizmoNodeSelfLinkProcessingTask(NodeProcessingTransaction txn, String requestParameters, + GizmoAdapter gizmoAdapter) { + this.gizmoAdapter = gizmoAdapter; + this.txn = txn; + this.contextMap = MDC.getCopyOfContextMap(); + } + + /* + * (non-Javadoc) + * + * @see java.util.function.Supplier#get() + */ + @Override + public NodeProcessingTransaction get() { + MDC.setContextMap(contextMap); + OperationResult opResult = new OperationResult(); + String link = txn.getSelfLink(); + + if (link == null) { + opResult.setResult(500, "Aborting self-link processing because self link is null"); + txn.setOpResult(opResult); + return txn; + } + + /** + * Rebuild the self link: + * + * <li>build the base url with the configured scheme + authority (server:port) + * <li>recombine baseUrl + originalEncodedLink + queryStringParameters + * + */ + + final String urlSchemeAndAuthority = gizmoAdapter.repairInventorySelfLink("", null); + + String parameters = txn.getRequestParameters(); + link = urlSchemeAndAuthority + link; + + if (parameters != null) { + link += parameters; + } + + if (logger.isDebugEnabled()) { + logger.debug(AaiUiMsgs.DEBUG_GENERIC, "Collecting " + link); + } + + try { + + opResult = gizmoAdapter.queryGizmoWithRetries(link, "application/json", + gizmoAdapter.getEndpointConfig().getNumRequestRetries()); + } catch (Exception exc) { + opResult = new OperationResult(); + opResult.setResult(500, "Querying AAI with retry failed due to an exception."); + logger.error(AaiUiMsgs.ERROR_AAI_QUERY_WITH_RETRY, exc.getMessage()); + } + + if (logger.isDebugEnabled()) { + logger.debug(AaiUiMsgs.DEBUG_GENERIC, "Operation result = " + opResult.toString()); + } + + txn.setOpResult(opResult); + return txn; + + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformNodeSelfLinkProcessingTask.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformNodeSelfLinkProcessingTask.java new file mode 100644 index 0000000..c686443 --- /dev/null +++ 
b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformNodeSelfLinkProcessingTask.java @@ -0,0 +1,129 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.task; + +import java.util.Map; +import java.util.function.Supplier; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.viewandinspect.entity.NodeProcessingTransaction; +import org.slf4j.MDC; + +/** + * The Class PerformNodeSelfLinkProcessingTask. + */ +public class PerformNodeSelfLinkProcessingTask implements Supplier<NodeProcessingTransaction> { + + private static final Logger logger = + LoggerFactory.getInstance().getLogger(PerformNodeSelfLinkProcessingTask.class); + + private NodeProcessingTransaction txn; + private ActiveInventoryAdapter aaiAdapter; + private Map<String, String> contextMap; + + /** + * Instantiates a new perform node self link processing task. 
+ * + * @param txn the txn + * @param aaiProvider the aai provider + * @param aaiConfig the aai config + */ + /** + * + * @param txn + * @param requestParameters + * @param aaiProvider + * @param aaiConfig + */ + public PerformNodeSelfLinkProcessingTask(NodeProcessingTransaction txn, String requestParameters, + ActiveInventoryAdapter aaiAdapter) { + this.aaiAdapter = aaiAdapter; + this.txn = txn; + this.contextMap = MDC.getCopyOfContextMap(); + } + + /* + * (non-Javadoc) + * + * @see java.util.function.Supplier#get() + */ + @Override + public NodeProcessingTransaction get() { + MDC.setContextMap(contextMap); + OperationResult opResult = new OperationResult(); + String link = txn.getSelfLink(); + + if (link == null) { + opResult.setResult(500, "Aborting self-link processing because self link is null"); + txn.setOpResult(opResult); + return txn; + } + + /** + * Rebuild the self link: + * + * <li>build the base url with the configured scheme + authority (server:port) + * <li>recombine baseUrl + originalEncodedLink + queryStringParameters + * + */ + + final String urlSchemeAndAuthority = aaiAdapter.repairSelfLink(""); + + String parameters = txn.getRequestParameters(); + link = urlSchemeAndAuthority + link; + + if (parameters != null) { + link += parameters; + } + + + + if (logger.isDebugEnabled()) { + logger.debug(AaiUiMsgs.DEBUG_GENERIC, "Collecting " + link); + } + + try { + opResult = aaiAdapter.queryActiveInventoryWithRetries(link, "application/json", + aaiAdapter.getEndpointConfig().getNumRequestRetries()); + } catch (Exception exc) { + opResult = new OperationResult(); + opResult.setResult(500, "Querying AAI with retry failed due to an exception."); + logger.error(AaiUiMsgs.ERROR_AAI_QUERY_WITH_RETRY, exc.getMessage()); + } + + if (logger.isDebugEnabled()) { + logger.debug(AaiUiMsgs.DEBUG_GENERIC, "Operation result = " + opResult.toString()); + } + + txn.setOpResult(opResult); + return txn; + + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformSelfLinkDeterminationTask.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformSelfLinkDeterminationTask.java new file mode 100644 index 0000000..6d39849 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewandinspect/task/PerformSelfLinkDeterminationTask.java @@ -0,0 +1,95 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */
+package org.onap.aai.sparky.viewandinspect.task;
+
+import java.util.Map;
+import java.util.function.Supplier;
+
+import org.onap.aai.cl.api.Logger;
+import org.onap.aai.cl.eelf.LoggerFactory;
+import org.onap.aai.restclient.client.OperationResult;
+import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
+import org.onap.aai.sparky.logging.AaiUiMsgs;
+import org.onap.aai.sparky.viewandinspect.entity.SelfLinkDeterminationTransaction;
+import org.slf4j.MDC;
+
+public class PerformSelfLinkDeterminationTask implements Supplier<SelfLinkDeterminationTransaction> {
+
+  private static final Logger logger =
+      LoggerFactory.getInstance().getLogger(PerformSelfLinkDeterminationTask.class);
+
+  private SelfLinkDeterminationTransaction txn;
+  private ActiveInventoryAdapter aaiAdapter;
+  private Map<String, String> contextMap;
+
+  /**
+   * Instantiates a new perform self link determination task.
+   *
+   * @param txn the txn
+   * @param requestParameters the request parameters
+   * @param aaiAdapter the aai adapter
+   */
+  public PerformSelfLinkDeterminationTask(SelfLinkDeterminationTransaction txn, String requestParameters,
+      ActiveInventoryAdapter aaiAdapter) {
+
+    this.aaiAdapter = aaiAdapter;
+    this.txn = txn;
+    this.contextMap = MDC.getCopyOfContextMap();
+  }
+
+  /* (non-Javadoc)
+   * @see java.util.function.Supplier#get()
+   */
+  @Override
+  public SelfLinkDeterminationTransaction get() {
+    MDC.setContextMap(contextMap);
+    if (txn.getQueryString() == null) {
+      OperationResult opResult = new OperationResult();
+      opResult.setResult(500, "Aborting self-link determination because self link query is null.");
+      txn.setOpResult(opResult);
+      return txn;
+    }
+
+    OperationResult opResult = null;
+    try {
+      opResult = aaiAdapter.queryActiveInventoryWithRetries(txn.getQueryString(), "application/json",
+          aaiAdapter.getEndpointConfig().getNumRequestRetries());
+    } catch (Exception exc) {
+      opResult = new OperationResult();
+      opResult.setResult(500, "Querying AAI with retry failed due to an exception.");
+      logger.error(AaiUiMsgs.ERROR_AAI_QUERY_WITH_RETRY, exc.getMessage());
+    }
+
+    if (logger.isDebugEnabled()) {
+      logger.debug("Operation result = " + opResult.toString());
+    }
+
+    txn.setOpResult(opResult);
+    return txn;
+  }
+
+}
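Both task classes above are plain java.util.function.Supplier implementations: they capture the caller's MDC context at construction time, perform the blocking AAI query inside get(), and hand the enriched transaction back to whoever completes the future. The minimal, self-contained sketch below shows how such a Supplier-style task is typically driven, mirroring the supplyAsync(...).whenComplete(...) pattern used by the synchronizers later in this change-set; the class name, executor sizing, and String payload are illustrative stand-ins rather than code from this commit.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.function.Supplier;

public class SelfLinkTaskUsageSketch {

  public static void main(String[] args) {
    // Stand-in for the synchronizer's dedicated AAI executor.
    ExecutorService aaiExecutor = Executors.newFixedThreadPool(2);

    // A Supplier-style task: all of the (potentially slow) work happens in get(), and the
    // enriched result object is returned to the completion stage.
    Supplier<String> task = () -> "resolved self-link payload";

    // The same composition style the synchronizers use: supplyAsync(...) on an explicit
    // executor, with whenComplete(...) splitting the success and failure paths.
    CompletableFuture.supplyAsync(task, aaiExecutor)
        .whenComplete((result, error) -> {
          if (error != null) {
            System.err.println("task failed: " + error.getMessage());
          } else {
            System.out.println("task completed with: " + result);
          }
        })
        .join();

    aaiExecutor.shutdown();
  }
}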
\ No newline at end of file diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectEntitySynchronizer.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectEntitySynchronizer.java new file mode 100644 index 0000000..8f29519 --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectEntitySynchronizer.java @@ -0,0 +1,779 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewinspect.sync; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Deque; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.ExecutorService; +import java.util.function.Supplier; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.SearchableEntityLookup; +import org.onap.aai.sparky.config.oxm.SearchableOxmEntityDescriptor; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.dal.rest.HttpMethod; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.AbstractEntitySynchronizer; +import org.onap.aai.sparky.sync.IndexSynchronizer; +import org.onap.aai.sparky.sync.SynchronizerConstants; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.entity.MergableEntity; +import org.onap.aai.sparky.sync.entity.SearchableEntity; +import org.onap.aai.sparky.sync.entity.SelfLinkDescriptor; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.onap.aai.sparky.sync.task.PerformActiveInventoryRetrieval; +import org.onap.aai.sparky.sync.task.PerformElasticSearchPut; +import org.onap.aai.sparky.sync.task.PerformElasticSearchRetrieval; +import 
org.onap.aai.sparky.sync.task.PerformElasticSearchUpdate;
+import org.onap.aai.sparky.util.NodeUtils;
+import org.slf4j.MDC;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+
+/**
+ * The Class ViewInspectEntitySynchronizer.
+ */
+public class ViewInspectEntitySynchronizer extends AbstractEntitySynchronizer
+    implements IndexSynchronizer {
+
+  /**
+   * The Class RetrySearchableEntitySyncContainer.
+   */
+  private class RetrySearchableEntitySyncContainer {
+    NetworkTransaction txn;
+    SearchableEntity se;
+
+    /**
+     * Instantiates a new retry searchable entity sync container.
+     *
+     * @param txn the txn
+     * @param se the se
+     */
+    public RetrySearchableEntitySyncContainer(NetworkTransaction txn, SearchableEntity se) {
+      this.txn = txn;
+      this.se = se;
+    }
+
+    public NetworkTransaction getNetworkTransaction() {
+      return txn;
+    }
+
+    public SearchableEntity getSearchableEntity() {
+      return se;
+    }
+  }
+
+  private static final Logger LOG =
+      LoggerFactory.getInstance().getLogger(ViewInspectEntitySynchronizer.class);
+
+  private boolean allWorkEnumerated;
+  private Deque<SelfLinkDescriptor> selflinks;
+  private Deque<RetrySearchableEntitySyncContainer> retryQueue;
+  private Map<String, Integer> retryLimitTracker;
+  protected ExecutorService esPutExecutor;
+  private OxmEntityLookup oxmEntityLookup;
+  private SearchableEntityLookup searchableEntityLookup;
+
+  /**
+   * Instantiates a new view inspect entity synchronizer.
+   *
+   * @param schemaConfig the elastic search schema config
+   * @param internalSyncWorkers the internal sync workers
+   * @param aaiWorkers the aai workers
+   * @param esWorkers the es workers
+   * @param aaiStatConfig the aai stat config
+   * @param esStatConfig the es stat config
+   * @param oxmEntityLookup the oxm entity lookup
+   * @param searchableEntityLookup the searchable entity lookup
+   * @throws Exception the exception
+   */
+  public ViewInspectEntitySynchronizer(ElasticSearchSchemaConfig schemaConfig,
+      int internalSyncWorkers, int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig,
+      NetworkStatisticsConfig esStatConfig, OxmEntityLookup oxmEntityLookup,
+      SearchableEntityLookup searchableEntityLookup) throws Exception {
+    super(LOG, "SES", internalSyncWorkers, aaiWorkers, esWorkers, schemaConfig.getIndexName(),
+        aaiStatConfig, esStatConfig);
+
+    this.oxmEntityLookup = oxmEntityLookup;
+    this.searchableEntityLookup = searchableEntityLookup;
+    this.allWorkEnumerated = false;
+    this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>();
+    this.retryQueue = new ConcurrentLinkedDeque<RetrySearchableEntitySyncContainer>();
+    this.retryLimitTracker = new ConcurrentHashMap<String, Integer>();
+    this.synchronizerName = "Searchable Entity Synchronizer";
+    this.esPutExecutor = NodeUtils.createNamedExecutor("SES-ES-PUT", 5, LOG);
+    this.aaiEntityStats.intializeEntityCounters(
+        searchableEntityLookup.getSearchableEntityDescriptors().keySet());
+    this.esEntityStats.intializeEntityCounters(
+        searchableEntityLookup.getSearchableEntityDescriptors().keySet());
+    this.syncDurationInMs = -1;
+  }
+
+  /**
+   * Collect all the work.
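+   * <p>
+   * Enumerates the searchable entity types exposed by the OXM lookup, launches one asynchronous
+   * self-link retrieval per entity type on the AAI executor, waits for all of the self-link lists
+   * to be collected, and then drives the per-entity synchronization (including the retry pass)
+   * until no work remains.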
+   *
+   * @return the operation state
+   */
+  private OperationState collectAllTheWork() {
+    final Map<String, String> contextMap = MDC.getCopyOfContextMap();
+    Map<String, SearchableOxmEntityDescriptor> descriptorMap =
+        searchableEntityLookup.getSearchableEntityDescriptors();
+
+    if (descriptorMap.isEmpty()) {
+      LOG.error(AaiUiMsgs.ERROR_LOADING_OXM_SEARCHABLE_ENTITIES);
+      return OperationState.ERROR;
+    }
+
+    Collection<String> syncTypes = descriptorMap.keySet();
+
+    /*Collection<String> syncTypes = new ArrayList<String>();
+    syncTypes.add("service-instance");*/
+
+    try {
+
+      /*
+       * Launch a parallel async task to process the documents for each entity-type (to make
+       * the most of the configured executor).
+       */
+
+      aaiWorkOnHand.set(syncTypes.size());
+
+      for (String key : syncTypes) {
+
+        supplyAsync(new Supplier<Void>() {
+
+          @Override
+          public Void get() {
+            MDC.setContextMap(contextMap);
+            OperationResult typeLinksResult = null;
+            try {
+              typeLinksResult = aaiAdapter.getSelfLinksByEntityType(key);
+              aaiWorkOnHand.decrementAndGet();
+              processEntityTypeSelfLinks(typeLinksResult);
+            } catch (Exception exc) {
+              // TODO -> LOG, what should be logged here?
+            }
+
+            return null;
+          }
+
+        }, aaiExecutor).whenComplete((result, error) -> {
+
+          if (error != null) {
+            LOG.error(AaiUiMsgs.ERROR_GENERIC,
+                "An error occurred getting data from AAI. Error = " + error.getMessage());
+          }
+        });
+
+      }
+
+      while (aaiWorkOnHand.get() != 0) {
+
+        if (LOG.isDebugEnabled()) {
+          LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED);
+        }
+
+        Thread.sleep(1000);
+      }
+
+      aaiWorkOnHand.set(selflinks.size());
+      allWorkEnumerated = true;
+      syncEntityTypes();
+
+      while (!isSyncDone()) {
+        performRetrySync();
+        Thread.sleep(1000);
+      }
+
+      /*
+       * Make sure we don't hang on to retries that failed which could cause issues during future
+       * syncs
+       */
+      retryLimitTracker.clear();
+
+    } catch (Exception exc) {
+      // TODO -> LOG, what should be logged here?
+    }
+
+    return OperationState.OK;
+  }
+
+  /* (non-Javadoc)
+   * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync()
+   */
+  @Override
+  public OperationState doSync() {
+    this.syncDurationInMs = -1;
+    String txnID = NodeUtils.getRandomTxnId();
+    MdcContext.initialize(txnID, "SearchableEntitySynchronizer", "", "Sync", "");
+
+    resetCounters();
+    this.allWorkEnumerated = false;
+    syncStartedTimeStampInMs = System.currentTimeMillis();
+    collectAllTheWork();
+
+    return OperationState.OK;
+  }
+
+  /**
+   * Process entity type self links.
+   *
+   * @param operationResult the operation result
+   */
+  private void processEntityTypeSelfLinks(OperationResult operationResult) {
+
+    JsonNode rootNode = null;
+
+    final String jsonResult = operationResult.getResult();
+
+    if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) {
+
+      try {
+        rootNode = mapper.readTree(jsonResult);
+      } catch (IOException exc) {
+        String message =
+            "Could not deserialize JSON (representing operation result) as node tree. "
+                + "Operation result = " + jsonResult + ". 
" + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message); + } + + JsonNode resultData = rootNode.get("result-data"); + ArrayNode resultDataArrayNode = null; + + if (resultData.isArray()) { + resultDataArrayNode = (ArrayNode) resultData; + + Iterator<JsonNode> elementIterator = resultDataArrayNode.elements(); + JsonNode element = null; + + while (elementIterator.hasNext()) { + element = elementIterator.next(); + + final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); + final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); + + SearchableOxmEntityDescriptor descriptor = null; + + if (resourceType != null && resourceLink != null) { + + descriptor = searchableEntityLookup.getSearchableEntityDescriptors().get(resourceType); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); + // go to next element in iterator + continue; + } + + if (descriptor.hasSearchableAttributes()) { + selflinks.add(new SelfLinkDescriptor(resourceLink, SynchronizerConstants.NODES_ONLY_MODIFIER, resourceType)); + } + + } + } + } + } + + } + + /** + * Sync entity types. + */ + private void syncEntityTypes() { + + while (selflinks.peek() != null) { + + SelfLinkDescriptor linkDescriptor = selflinks.poll(); + aaiWorkOnHand.decrementAndGet(); + + OxmEntityDescriptor descriptor = null; + + if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { + + descriptor = oxmEntityLookup.getEntityDescriptors().get(linkDescriptor.getEntityType()); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); + // go to next element in iterator + continue; + } + + NetworkTransaction txn = new NetworkTransaction(); + txn.setDescriptor(descriptor); + txn.setLink(linkDescriptor.getSelfLink()); + txn.setOperationType(HttpMethod.GET); + txn.setEntityType(linkDescriptor.getEntityType()); + + aaiWorkOnHand.incrementAndGet(); + + supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiAdapter), aaiExecutor) + .whenComplete((result, error) -> { + + aaiWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage()); + } else { + if (result == null) { + LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK, + linkDescriptor.getSelfLink()); + } else { + updateActiveInventoryCounters(result); + fetchDocumentForUpsert(result); + } + } + }); + } + + } + + } + + /** + * Perform document upsert. + * + * @param esGetTxn the es get txn + * @param se the se + */ + protected void performDocumentUpsert(NetworkTransaction esGetTxn, SearchableEntity se) { + /** + * <p> + * <ul> + * As part of the response processing we need to do the following: + * <li>1. Extract the version (if present), it will be the ETAG when we use the + * Search-Abstraction-Service + * <li>2. 
Spawn next task which is to do the PUT operation into elastic with or with the version + * tag + * <li>a) if version is null or RC=404, then standard put, no _update with version tag + * <li>b) if version != null, do PUT with _update?version= versionNumber in the URI to elastic + * </ul> + * </p> + */ + String link = null; + try { + link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), se.getId()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); + return; + } + + String versionNumber = null; + boolean wasEntryDiscovered = false; + if (esGetTxn.getOperationResult().getResultCode() == 404) { + LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, se.getEntityPrimaryKeyValue()); + } else if (esGetTxn.getOperationResult().getResultCode() == 200) { + wasEntryDiscovered = true; + try { + versionNumber = NodeUtils.extractFieldValueFromObject( + NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()), + "_version"); + } catch (IOException exc) { + String message = + "Error extracting version number from response, aborting searchable entity sync of " + + se.getEntityPrimaryKeyValue() + ". Error - " + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); + return; + } + } else { + /* + * Not being a 200 does not mean a failure. eg 201 is returned for created. TODO -> Should we + * return. + */ + LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE, + String.valueOf(esGetTxn.getOperationResult().getResultCode())); + return; + } + + try { + String jsonPayload = null; + if (wasEntryDiscovered) { + try { + ArrayList<JsonNode> sourceObject = new ArrayList<JsonNode>(); + NodeUtils.extractObjectsByKey( + NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()), + "_source", sourceObject); + + if (!sourceObject.isEmpty()) { + String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false); + MergableEntity me = mapper.readValue(responseSource, MergableEntity.class); + ObjectReader updater = mapper.readerForUpdating(me); + MergableEntity merged = updater.readValue(NodeUtils.convertObjectToJson(se,false)); + jsonPayload = mapper.writeValueAsString(merged); + } + } catch (IOException exc) { + String message = + "Error extracting source value from response, aborting searchable entity sync of " + + se.getEntityPrimaryKeyValue() + ". 
Error - " + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); + return; + } + } else { + jsonPayload = se.getAsJson(); + } + + if (wasEntryDiscovered) { + if (versionNumber != null && jsonPayload != null) { + + String requestPayload = elasticSearchAdapter.buildBulkImportOperationRequest(getIndexName(), + "default", se.getId(), versionNumber, jsonPayload); + + NetworkTransaction transactionTracker = new NetworkTransaction(); + transactionTracker.setEntityType(esGetTxn.getEntityType()); + transactionTracker.setDescriptor(esGetTxn.getDescriptor()); + transactionTracker.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync(new PerformElasticSearchUpdate(elasticSearchAdapter.getBulkUrl(), + requestPayload, elasticSearchAdapter, transactionTracker), esPutExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = "Searchable entity sync UPDATE PUT error - " + + error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetTxn, se); + } + }); + } + + } else { + + if (link != null && jsonPayload != null) { + + NetworkTransaction updateElasticTxn = new NetworkTransaction(); + updateElasticTxn.setLink(link); + updateElasticTxn.setEntityType(esGetTxn.getEntityType()); + updateElasticTxn.setDescriptor(esGetTxn.getDescriptor()); + updateElasticTxn.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, elasticSearchAdapter), + esPutExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = + "Searchable entity sync UPDATE PUT error - " + error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetTxn, se); + } + }); + } + } + } catch (Exception exc) { + String message = "Exception caught during searchable entity sync PUT operation. Message - " + + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } + } + + /** + * Populate searchable entity document. + * + * @param doc the doc + * @param result the result + * @param resultDescriptor the result descriptor + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + protected void populateSearchableEntityDocument(SearchableEntity doc, String result, + OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException { + + doc.setEntityType(resultDescriptor.getEntityName()); + + JsonNode entityNode = mapper.readTree(result); + + List<String> primaryKeyValues = new ArrayList<String>(); + String pkeyValue = null; + + SearchableOxmEntityDescriptor searchableDescriptor = searchableEntityLookup.getSearchableEntityDescriptors().get(resultDescriptor.getEntityName()); + + for (String keyName : searchableDescriptor.getPrimaryKeyAttributeNames()) { + pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName); + if (pkeyValue != null) { + primaryKeyValues.add(pkeyValue); + } else { + String message = "populateSearchableEntityDocument(), pKeyValue is null for entityType = " + + resultDescriptor.getEntityName(); + LOG.warn(AaiUiMsgs.WARN_GENERIC, message); + } + } + + final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/"); + doc.setEntityPrimaryKeyValue(primaryCompositeKeyValue); + + final List<String> searchTagFields = searchableDescriptor.getSearchableAttributes(); + + /* + * Based on configuration, use the configured field names for this entity-Type to build a + * multi-value collection of search tags for elastic search entity search criteria. + */ + for (String searchTagField : searchTagFields) { + String searchTagValue = NodeUtils.getNodeFieldAsText(entityNode, searchTagField); + if (searchTagValue != null && !searchTagValue.isEmpty()) { + doc.addSearchTagWithKey(searchTagValue, searchTagField); + } + } + } + + /** + * Fetch document for upsert. + * + * @param txn the txn + */ + private void fetchDocumentForUpsert(NetworkTransaction txn) { + if (!txn.getOperationResult().wasSuccessful()) { + String message = "Self link failure. Result - " + txn.getOperationResult().getResult(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + return; + } + + SearchableOxmEntityDescriptor searchableDescriptor = searchableEntityLookup + .getSearchableEntityDescriptors().get(txn.getDescriptor().getEntityName()); + + try { + if (searchableDescriptor.hasSearchableAttributes()) { + + final String jsonResult = txn.getOperationResult().getResult(); + if (jsonResult != null && jsonResult.length() > 0) { + + SearchableEntity se = new SearchableEntity(); + se.setLink(ActiveInventoryAdapter.extractResourcePath(txn.getLink())); + populateSearchableEntityDocument(se, jsonResult, txn.getDescriptor()); + se.deriveFields(); + + String link = null; + try { + link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), se.getId()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction n2 = new NetworkTransaction(); + n2.setLink(link); + n2.setEntityType(txn.getEntityType()); + n2.setDescriptor(txn.getDescriptor()); + n2.setOperationType(HttpMethod.GET); + + esWorkOnHand.incrementAndGet(); + + supplyAsync(new PerformElasticSearchRetrieval(n2, elasticSearchAdapter), esExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, se); + } + }); + } + } + + } + } catch (JsonProcessingException exc) { + // TODO -> LOG, waht should be logged here? + } catch (IOException exc) { + // TODO -> LOG, waht should be logged here? 
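+      // A failure while building or parsing the retrieved entity JSON means this self link is
+      // simply skipped: no elastic document is fetched or upserted for it.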
+ } + } + + /** + * Process store document result. + * + * @param esPutResult the es put result + * @param esGetResult the es get result + * @param se the se + */ + private void processStoreDocumentResult(NetworkTransaction esPutResult, + NetworkTransaction esGetResult, SearchableEntity se) { + + OperationResult or = esPutResult.getOperationResult(); + + if (!or.wasSuccessful()) { + if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { + + if (shouldAllowRetry(se.getId())) { + esWorkOnHand.incrementAndGet(); + + RetrySearchableEntitySyncContainer rsc = + new RetrySearchableEntitySyncContainer(esGetResult, se); + retryQueue.push(rsc); + + String message = "Store document failed during searchable entity synchronization" + + " due to version conflict. Entity will be re-synced."; + LOG.warn(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } + } else { + String message = + "Store document failed during searchable entity synchronization with result code " + + or.getResultCode() + " and result message " + or.getResult(); + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } + } + } + + /** + * Perform retry sync. + */ + private void performRetrySync() { + while (retryQueue.peek() != null) { + + RetrySearchableEntitySyncContainer rsc = retryQueue.poll(); + if (rsc != null) { + + SearchableEntity se = rsc.getSearchableEntity(); + NetworkTransaction txn = rsc.getNetworkTransaction(); + + String link = null; + try { + /* + * In this retry flow the se object has already derived its fields + */ + link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), se.getId()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction retryTransaction = new NetworkTransaction(); + retryTransaction.setLink(link); + retryTransaction.setEntityType(txn.getEntityType()); + retryTransaction.setDescriptor(txn.getDescriptor()); + retryTransaction.setOperationType(HttpMethod.GET); + + /* + * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already + * called incrementAndGet when queuing the failed PUT! + */ + + supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, elasticSearchAdapter), + esExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, se); + } + }); + } + + } + } + } + + /** + * Should allow retry. 
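+   * <p>
+   * Re-sync attempts are tracked per document id in the retry limit tracker; once
+   * RETRY_COUNT_PER_ENTITY_LIMIT attempts have been recorded for an id, further retries for that
+   * entity are refused and an error is logged.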
+ * + * @param id the id + * @return true, if successful + */ + private boolean shouldAllowRetry(String id) { + boolean isRetryAllowed = true; + if (retryLimitTracker.get(id) != null) { + Integer currentCount = retryLimitTracker.get(id); + if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { + isRetryAllowed = false; + String message = "Searchable entity re-sync limit reached for " + id + + ", re-sync will no longer be attempted for this entity"; + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } else { + Integer newCount = new Integer(currentCount.intValue() + 1); + retryLimitTracker.put(id, newCount); + } + } else { + Integer firstRetryCount = new Integer(1); + retryLimitTracker.put(id, firstRetryCount); + } + + return isRetryAllowed; + } + + @Override + public SynchronizerState getState() { + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) + */ + @Override + public String getStatReport(boolean showFinalReport) { + syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; + return this.getStatReport(syncDurationInMs, showFinalReport); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() + */ + @Override + public void shutdown() { + this.shutdownExecutors(); + } + + @Override + protected boolean isSyncDone() { + int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); + + if (totalWorkOnHand > 0 || !allWorkEnumerated) { + return false; + } + + return true; + } + +} diff --git a/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectSyncController.java b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectSyncController.java new file mode 100644 index 0000000..bd15e1f --- /dev/null +++ b/sparkybe-onap-service/src/main/java/org/onap/aai/sparky/viewinspect/sync/ViewInspectSyncController.java @@ -0,0 +1,122 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.viewinspect.sync; + +import org.onap.aai.sparky.config.oxm.CrossEntityReferenceLookup; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.SearchableEntityLookup; +import org.onap.aai.sparky.crossentityreference.sync.CrossEntityReferenceSynchronizer; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.sync.ElasticSearchIndexCleaner; +import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory; +import org.onap.aai.sparky.sync.IndexCleaner; +import org.onap.aai.sparky.sync.IndexIntegrityValidator; +import org.onap.aai.sparky.sync.SyncControllerImpl; +import org.onap.aai.sparky.sync.SyncControllerRegistrar; +import org.onap.aai.sparky.sync.SyncControllerRegistry; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.config.SyncControllerConfig; + +public class ViewInspectSyncController extends SyncControllerImpl + implements SyncControllerRegistrar { + + private SyncControllerRegistry syncControllerRegistry; + private ActiveInventoryAdapter aaiAdapter; + private ElasticSearchAdapter esAdapter; + private ElasticSearchSchemaConfig schemaConfig; + private ElasticSearchEndpointConfig endpointConfig; + + public ViewInspectSyncController(SyncControllerConfig syncControllerConfig, + ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter, + ElasticSearchSchemaConfig schemaConfig, ElasticSearchEndpointConfig endpointConfig, + NetworkStatisticsConfig aaiStatConfig, NetworkStatisticsConfig esStatConfig, + CrossEntityReferenceLookup crossEntityReferenceLookup, OxmEntityLookup oxmEntityLookup, + SearchableEntityLookup searchableEntityLookup, + ElasticSearchSchemaFactory elasticSearchSchemaFactory) throws Exception { + super(syncControllerConfig); + + + // final String controllerName = "View and Inspect Entity Synchronizer"; + + this.aaiAdapter = aaiAdapter; + this.esAdapter = esAdapter; + this.schemaConfig = schemaConfig; + this.endpointConfig = endpointConfig; + IndexIntegrityValidator indexValidator = new IndexIntegrityValidator(esAdapter, schemaConfig, + endpointConfig, elasticSearchSchemaFactory.getIndexSchema(schemaConfig)); + + registerIndexValidator(indexValidator); + + + ViewInspectEntitySynchronizer ses = new ViewInspectEntitySynchronizer(schemaConfig, + syncControllerConfig.getNumInternalSyncWorkers(), + syncControllerConfig.getNumSyncActiveInventoryWorkers(), + syncControllerConfig.getNumSyncElasticWorkers(), aaiStatConfig, esStatConfig, + oxmEntityLookup, searchableEntityLookup); + + ses.setAaiAdapter(aaiAdapter); + ses.setElasticSearchAdapter(esAdapter); + + registerEntitySynchronizer(ses); + + CrossEntityReferenceSynchronizer cers = new CrossEntityReferenceSynchronizer(schemaConfig, + syncControllerConfig.getNumInternalSyncWorkers(), + syncControllerConfig.getNumSyncActiveInventoryWorkers(), + syncControllerConfig.getNumSyncElasticWorkers(),aaiStatConfig,esStatConfig, + crossEntityReferenceLookup, oxmEntityLookup, searchableEntityLookup); + + cers.setAaiAdapter(aaiAdapter); + cers.setElasticSearchAdapter(esAdapter); + + registerEntitySynchronizer(cers); + + IndexCleaner indexCleaner = + new ElasticSearchIndexCleaner(esAdapter, endpointConfig, schemaConfig); + + registerIndexCleaner(indexCleaner); + + } + + public SyncControllerRegistry 
getSyncControllerRegistry() { + return syncControllerRegistry; + } + + public void setSyncControllerRegistry(SyncControllerRegistry syncControllerRegistry) { + this.syncControllerRegistry = syncControllerRegistry; + } + + @Override + public void registerController() { + if ( syncControllerRegistry != null ) { + if ( syncControllerConfig.isEnabled()) { + syncControllerRegistry.registerSyncController(this); + } + } + + } +} diff --git a/sparkybe-onap-service/src/main/resources/logging/AAIUIMsgs.properties b/sparkybe-onap-service/src/main/resources/logging/AAIUIMsgs.properties new file mode 100644 index 0000000..1843604 --- /dev/null +++ b/sparkybe-onap-service/src/main/resources/logging/AAIUIMsgs.properties @@ -0,0 +1,901 @@ +#Resource key=Error Code|Message text|Resolution text |Description text +####### +#Newlines can be utilized to add some clarity ensuring continuing line +#has at least one leading space +#ResourceKey=\ +# ERR0000E\ +# Sample error msg txt\ +# Sample resolution msg\ +# Sample description txt +# +###### +#Error code classification category +#000 Info/Debug +#100 Permission errors +#200 Availability errors/Timeouts +#300 Data errors +#400 Schema Interface type/validation errors +#500 Business process errors +#900 Unknown errors +# +######################################################################## + +#-------------------- 000 Series Info/Warning/Debug --------------------# + +DANGLING_NODE_WARNING=\ + AAIUI0001W|\ + Dangling node issue detected: {0} + +FILE_READ_IN_PROGRESS=\ + AAIUI0002W|\ + Attempting getFileContents() for file: {0} + +VISUALIZATION_GRAPH_OUTPUT=\ + AAIUI0003I|\ + Generated graph output has {0} node(s) and {1} link(s) + +NODE_INTEGRITY_ALREADY_PROCESSED=\ + AAIUI0004I|\ + Node integrity for nodeId, {0}, has already been processed + +SKIPPING_PROCESS_NODE_INTEGRITY=\ + AAIUI0005I|\ + Skipping processNodeIntegrity() for node, {0}, because node integrity overlay is disabled + +MAX_EVALUATION_ATTEMPTS_EXCEEDED=\ + AAIUI0006I|\ + Evaluate node depths exceeded max evaluation attempts + +SYNC_DURATION=\ + AAIUI0007I|\ + {0} + +SYNC_TO_BEGIN=\ + AAIUI0008I|\ + [{0}] next synchronization operation will begin at {1} + +WILL_RETRIEVE_TXN=\ + AAIUI0009I|\ + About to retrieve the txn {0} + +ALL_TRANSACTIONS_RESOLVED=\ + AAIUI00010I|\ + All transactions are resolved, total resolve time was, {0}, total links retrieved, {1}, with an opTime of, {2} ms + +OUTSTANDING_WORK_PENDING_NODES=\ + AAIUI00011I|\ + Method hasOutstandingWork: Number of pending nodes, {0} + +OPERATION_TIME=\ + AAIUI00012I|\ + Operation: {0} - Time taken: {1} + +NO_RELATIONSHIP_DISCOVERED=\ + AAIUI00013I|\ + No relationships discovered for entity: {0} + +ACTIVE_INV_NODE_CHANGE_DEPTH=\ + AAIUI00014I|\ + AIN - {0} - changing depth from {1} to {2} + +ACTIVE_INV_NODE_CHANGE_STATE=\ + AAIUI00015I|\ + [{0}], State change from {1} to {2}, caused by action {3} + +ACTIVE_INV_NODE_CHANGE_STATE_NO_NODE_ID=\ + AAIUI00016I|\ + Node state change from {0} => {1} caused by action = {2} + +INITIALIZE_OXM_MODEL_LOADER=\ + AAIUI00017I|\ + Initializing OXM Model Loader + +OXM_READ_ERROR_NONVERBOSE=\ + AAIUI00018I|\ + Unable to Read OXM File + +OXM_LOAD_SUCCESS=\ + AAIUI00019I|\ + OXM file version v{0} loaded successfully + +OXM_PARSE_ERROR_NONVERBOSE=\ + AAIUI00020I|\ + Unable to Parse OXM File + +ETAG_RETRY_SEQ=\ + AAIUI00021D|\ + doEdgeTagQueryWithRetries: attempt number = {0} + +QUERY_AAI_RETRY_SEQ=\ + AAIUI00022D|\ + queryActiveInventory: {0} attempt number = {1} + +QUERY_AAI_RETRY_DONE_SEQ=\ + AAIUI00023D|\ + 
queryActiveInventory: {0} after = {1} attempt(s). + +QUERY_AAI_RETRY_MAXED_OUT=\ + AAIUI00024I|\ + Failed to queryActiveInventory: {0} after max attempt(s). + +DATA_CACHE_SUCCESS=\ + AAIUI00025D|\ + InMemoryEntityCache cached data with key = {0} + +ATTRIBUTES_UPDATE_METHOD_CALLED=\ + AAIUI00026I|\ + updateObjectAttribute called for : {0} ATTUID : {1} Attributes : {2} + +ATTRIBUTES_HANDLING_EDIT=\ + AAIUI00027I|\ + Handling Edit Attributes: requestUri = {0} Body : {1} + +RESTFULL_OP_COMPLETE=\ + AAIUI00028I|\ + doRestfulOperation() operation for {0} execution time = {1} ms for link = {2}, ResultCode = {3} + +DI_MS_TIME_FOR_DATA_FETCH=\ + AAIUI00029I|\ + TabularService data fetch time: {0} ms. Status: {1}. + +COOKIE_FOUND=\ + AAIUI00030I|\ + attESHr cookie found in the request <{0}> + +INDEX_ALREADY_EXISTS=\ + AAIUI00031I|\ + [{0}] - Index Already Exists + +INDEX_RECREATED=\ + AAIUI00032I|\ + [{0}] - Index successfully re-created + +INDEX_EXISTS=\ + AAIUI00033I|\ + [{0}] - Index exists + +INDEX_INTEGRITY_CHECK_FAILED=\ + AAIUI00034W|\ + [{0}] - Index Integrity check failed, a failure occurred re-creating index. Aborting sync operation. Index Creation error = {1} + +INDEX_NOT_EXIST=\ + AAIUI00035I|\ + [{0}] - Index Does not Exist + +SYNC_INTERNAL_STATE_CHANGED=\ + AAIUI00036I|\ + [{0}] Changing from state = {1} -> {2} caused by {3} + +SYNC_START_TIME=\ + AAIUI00037I|\ + Scheduled synchronization will happen on default time '05:00:00 UTC'. Check value for 'synchronizer.syncTask.startTimestamp' parameter + +SKIP_PERIODIC_SYNC_AS_SYNC_DIDNT_FINISH=\ + AAIUI00038I|\ + Synchronization did not finish yet. Skipping periodic synchronization at {0} + +SEARCH_ENGINE_SYNC_STARTED=\ + AAIUI00039I|\ + Search Engine synchronization starting at {0} + +FAILED_TO_RESTORE_TXN_FILE_MISSING=\ + AAIUI00040D|\ + Failed to restore txn because {0} does not exist. + +ERROR_BUILDING_RESPONSE_FOR_TABLE_QUERY=\ + AAIUI00041W|\ + Caught an exception while building a search response for table query. Error: {0} + +ERROR_BUILDING_SEARCH_RESPONSE=\ + AAIUI00042W|\ + Caught an exception while building a search response. Error: {0} + +WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED=\ + AAIUI00043D|\ + Waiting for all self-link lists to be collected + +ES_SIMPLE_PUT=\ + AAIUI00044I|\ + Element {0} not discovered for merge. Simple put will be used. + +ES_OPERATION_RETURN_CODE=\ + AAIUI00045I|\ + Operation did not return 200, instead returned code : {0} + +ES_CROSS_REF_SYNC_VERSION_CONFLICT=\ + AAIUI00046W|\ + Store document failed during cross reference entity synchronization due to version conflict. Entity will be resynced. 
+ +ES_PKEYVALUE_NULL=\ + AAIUI00047W|\ + getPopulatedDocument() pKeyValue is null for entityType : {1} + +ES_SYNC_CLEAN_UP=\ + AAIUI00048I|\ + ElasticSearchEntityPurger.performCleanup() for indexName : {0} + +ES_SYNC_CLEAN_UP_SIZE=\ + AAIUI00049I|\ + [ {0} ], performCleanup(), Pre-Sync Collection Size : {1} and Post-Sync Collection Size : {2} + +ES_SYNC_SELECTIVE_DELETE=\ + AAIUI00050I|\ + About to perform selective delete with indexName={0}, indexType {1}, numrecords= {2} + +ES_BULK_DELETE=\ + AAIUI00051I|\ + [ {0} ] - Sending bulk delete request with a total of {1} records + +COLLECT_TIME_WITH_SUCCESS=\ + AAIUI00052I|\ + retrieve {0}AllDocumentIdentifiers operation completed in {0} ms successfully + +SYNC_NUMBER_REQ_FETCHES=\ + AAIUI00053D|\ + numRequiredFetches : {0} + +SYNC_NUMBER_REQ_FETCHES=\ + AAIUI00054D|\ + Total fetched {0} of total available {1} + +COLLECT_TOTAL=\ + AAIUI00055I|\ + retrieve {0}: Total returned : {1} + +COLLECT_TOTAL_TIME=\ + AAIUI00056I|\ + retrieve {0}, took = {0} + +ES_SCROLL_CONTEXT_ERROR=\ + AAIUI00057W|\ + Failed to get results from elastic search scroll context. Error cause : {0} + +ES_BULK_DELETE_SKIP=\ + AAIUI00058I|\ + Skipping bulkDelete(); operation because docs to delete list is empty + +ES_BULK_DELETE_START=\ + AAIUI00059I|\ + bulkDelete: about to delete {0} docs + +GEO_SYNC_IGNORING_ENTITY=\ + AAIUI00060I|\ + GeoSynchronizer ignoring an entity of type {0} because of missing / invalid long/lat coordinates. Entity : {1} + +HISTORICAL_ENTITY_COUNT_SUMMARIZER_STARTING=\ + AAIUI00061I|\ + Historical Entity Count Summarizer starting at {0} + +HISTORICAL_SYNC_PENDING=\ + AAIUI00062I|\ + History Entity Summarizer is already running, skipping request for another doSync + +HISTORICAL_SYNC_TO_BEGIN=\ + AAIUI00063I|\ + Next historical entity summary will begin at {0} + +HISTORICAL_SYNC_DURATION=\ + AAIUI00064I|\ + {0} synchronization took {1} ms. + +DEBUG_GENERIC=\ + AAIUI00065D|\ + {0} + +INFO_GENERIC=\ + AAIUI00066I|\ + {0} + +WARN_GENERIC=\ + AAIUI00067W|\ + {0} + +VALID_REDIRECT_URL=\ + AAIUI00070D|\ + Redirecting to login URL: {0} + +LOGIN_FILTER_INFO=\ + AAIUI00071I|\ + {0} + +LOGIN_FILTER_DEBUG=\ + AAIUI00072D|\ + {0} + +DR_PROCESSING_FAILURE=\ + AAIUI00073I|\ + Failure to resolve proxied request. Response code: {0} for proxy payload: {1} + +DR_PROCESSING_TIME=\ + AAIUI00074I|\ + Time taken to resolve proxied request: {0} ms + +DR_PROXY_FROM_TO=\ + AAIUI00075I|\ + Proxying request from url: {0} to: {1} + + +#-------------------- 300 Series Errors --------------------# + +ETAG_WAIT_INTERRUPTION=\ + AAIUI3001E|\ + doEdgeTagQueryWithRetries: interrupted while sleeping with cause = {0} + +QUERY_AAI_WAIT_INTERRUPTION=\ + AAIUI3002E|\ + queryActiveInventoryWithRetries: interrupted while sleeping with cause = {0} + +EXECUTOR_SERV_EXCEPTION=\ + AAIUI3003E|\ + Thread: {0}. The following exception has occurred: {1} + +ATTRIBUTES_NOT_UPDATED_EXCEPTION=\ + AAIUI3004E|\ + Attributes not updated. {0} + +ATTRIBUTES_NOT_UPDATED_MESSAGE=\ + AAIUI3005E|\ + {0} + +SYNC_NOT_VALID_STATE_DURING_REQUEST=\ + AAIUI3006E|\ + Sync requested while synchronizer not in valid state. Current internal state: {0} + +SYNC_SKIPPED_SYNCCONTROLLER_NOT_INITIALIZED=\ + AAIUI3007E|\ + SyncController has not been initialized. 
Synchronization skipped + +ENTITY_SYNC_FAILED_DESCRIPTOR_NOT_FOUND=\ + AAIUI3008E|\ + Entity sync failed because entity descriptor could not be located for entityType = {0} + +ENTITY_SYNC_FAILED_DURING_AAI_RESPONSE_CONVERSION=\ + AAIUI3009E|\ + Sync Entity Failure caused by error in converting AAI response into an object. + +ENTITY_SYNC_FAILED_QUERY_ERROR=\ + AAIUI30010E|\ + {0} + +ENTITY_SYNC_FAILED_SELFLINK_AMBIGUITY=\ + AAIUI30011E|\ + Entity sync failed due to self-link determination ambiguity. Unexpected number of links = {0} + +AGGREGATION_KEY_ERROR=\ + AAIUI30012E|\ + Failed to derive {0} for aggregation by {1} + +INTERRUPTED=\ + AAIUI30013E|\ + Interrupted {0} while waiting for elastic search tasks to be processed with error : {1} + +JSON_PROCESSING_ERROR=\ + AAIUI30014E|\ + Failed to process json with error : {0} + +HISTORICAL_COLLECT_ERROR=\ + AAIUI30015E|\ + Caught an error while collecting results for historical entity summary. Error : {0} + +HISTORICAL_ENTITY_COUNT_SUMMARIZER_NOT_STARTED=\ + AAIUI30016E|\ + HistoricalEntityCountSummaryTask has not been initialized. Synchronization skipped + +OXM_FAILED_RETRIEVAL=\ + AAIUI30017E|\ + Failed to load searchable entities for {0} in OXM file. Synchronizer stopped. + +SELF_LINK_GET_NO_RESPONSE=\ + AAIUI30018E|\ + AAI did not provide a response for self-link: {0} + +ES_BULK_DELETE=\ + AAIUI30019E|\ + [ {0} ] - An error occurred while attempting to perform selective delete to elastic search index with an error cause : {1} + +COLLECT_TIME_WITH_ERROR=\ + AAIUI30020E|\ + retrieve {0} operation completed in {1} ms with some errors + +ES_SEARCHABLE_ENTITY_SYNC_ERROR=\ + AAIUI30021E|\ + {0} + +ES_STORE_FAILURE=\ + AAIUI30022E|\ + There was an error storing the document into elastic search. Error : {0} + +ES_PRE_SYNC_FAILURE=\ + AAIUI30023E|\ + {0} An error occured while collecting the pre-sync object id collection. Error : {1} + +ES_CROSS_REF_SYNC_FAILURE=\ + AAIUI30024E|\ + Store document failed during cross reference entity synchronization with result code {0} and result message {1} + +ES_FAILED_TO_CONSTRUCT_URI=\ + AAIUI30025E|\ + Failed to construct an elastic search uri during re-sync, with error : {0} + +ES_RETRIEVAL_FAILED_RESYNC=\ + AAIUI30026E|\ + Elasticsearch retrieval failed for re-sync. Error : {0} + +ES_CROSS_ENTITY_RESYNC_LIMIT=\ + AAIUI30027E|\ + Cross entity re-sync limit reached for {0}, re-sync will no longer be attempted for this entity + +ES_CROSS_ENTITY_REF_PUT=\ + AAIUI30028E|\ + Cross entity reference sync UPDATE PUT error: {0} + +ES_ABORT_CROSS_ENTITY_REF_SYNC=\ + AAIUI30029E|\ + Error extracting {0} from response, aborting cross entity ref sync of {1}. Error : {2} + +MISSING_ENTITY_DESCRIPTOR=\ + AAIUI30030E + Missing entity descriptor for type : {0} + +SELF_LINK_GET=\ + AAIUI30031E|\ + Failure during self link GET. Error : {0} + +SELF_LINK_CROSS_REF_SYNC=\ + AAIUI30032E|\ + Self link GET has returned null during cross entity reference sync + +ES_FAILED_TO_CONSTRUCT_QUERY=\ + AAIUI30033E|\ + Failed to construct an elastic search uri with error : {0} + +ES_RETRIEVAL_FAILED=\ + AAIUI30034E|\ + Elasticsearch retrieval failed. Error : {0} + +ES_LINK_UPSERT=\ + AAIUI30035E|\ + Error creating link for upsert. 
Error : {0} + +ERROR_GENERIC=\ + AAIUI30036E|\ + {0} + +ERROR_PROCESSING_REQUEST=\ + AAIUI30037E\ + Failure to process request with error: {1} + +ERROR_CSP_CONFIG_FILE=\ + AAIUI30038E|\ + Failed to load CSP filter configuration properties + +ERROR_SHUTDOWN_EXECUTORS=\ + AAIUI30039E|\ + Failure during shutdown of executors. Error : {0} + +ERROR_LOADING_OXM=\ + AAIUI30040E|\ + Failed to load searchable entities in OXM file. Synchronizer stopped. + +ERROR_GETTING_DATA_FROM_AAI=\ + AAIUI30041E|\ + An error occurred getting data from AAI. Error : {0} + +SOT_FILE_NOT_FOUND=\ + AAIUI30042E|\ + Error in reading source-of-truth configuration + +INVALID_REQUEST_PARAMS=\ + AAIUI30043E|\ + Invalid request parameters + +PEGGING_ERROR=\ + AAIUI30044E|\ + Pegging UNKNOWN_EXCEPTION due to unexpected exception = {0} + +ATTRIBUTES_ERROR_LOADING_MODEL_VERSION=\ + AAIUI30045E|\ + Model Version Error. {0} Not Found or not loaded successfully. + +INVALID_REQUEST=\ + AAIUI30046E|\ + {0} + +INVALID_URL_VERBOSE=\ + AAIUI30047E|\ + Invalid URL: {0}. Reason: {1}. + +DI_DATA_NOT_FOUND_NONVERBOSE=\ + AAIUI30048E|\ + No data integrity data found for rowID: {0}. + +DI_DATA_NOT_FOUND_VERBOSE=\ + AAIUI30049E|\ + No data integrity data found for rowID: {0} after {1} attempts. + +OXM_FILE_NOT_FOUND=\ + AAIUI30050E|\ + Unable to find any OXM file. + +OXM_READ_ERROR_VERBOSE=\ + AAIUI30051E|\ + Unable to read OXM file: {0} + +ERROR_PARSING_JSON_PAYLOAD_NONVERBOSE=\ + AAIUI30052E|\ + Error in parsing JSON payload for {0} + +ERROR_PARSING_JSON_PAYLOAD_VERBOSE=\ + AAIUI30053E|\ + Error in parsing JSON payload: {0} + +ERROR_FETCHING_JSON_VALUE=\ + AAIUI30054E|\ + Error in getting value for key: {0}. Data: {1} + +OXM_READ_PARSE_VERBOSE=\ + AAIUI30055E|\ + Unable to parse OXM file: {0}. The following exception has occurred: {1} + +OXM_PROP_DEF_ERR_CROSS_ENTITY_REF=\ + AAIUI30056E|\ + Invalid OXM definition of xml-property 'crossEntityReference' for entity : {0} with a value of : {1} + +SYNC_INVALID_CONFIG_PARAM=\ + AAIUI30057E|\ + {0} + +ERROR_PARSING_PARAMS=\ + AAIUI30058E|\ + Error parsing parameters. Error: {0} + +ERROR_SORTING_VIOLATION_DATA=\ + AAIUI30059E|\ + Error in sorting violation data based on key: {0} + +CONFIGURATION_ERROR=\ + AAIUI30060E|\ + Failed to load {0} configurations + +QUERY_AAI_RETRY_FAILURE_WITH_SEQ=\ + AAIUI30062E|\ + Failed to queryActiveInventory {0} attempt number = {1} + +DISK_CACHE_READ_IO_ERROR=\ + AAIUI30063E|\ + Failed to read from disk cache. Exception: {0} + +DISK_CREATE_DIR_IO_ERROR=\ + AAIUI30064E|\ + Failed to create directory in disk. Exception: {0} + +DISK_DATA_WRITE_IO_ERROR=\ + AAIUI30065E|\ + Failed to persist data in disk. Exception: {0} + +DISK_NAMED_DATA_WRITE_IO_ERROR=\ + AAIUI30066E|\ + Failed to persist data for {0} in disk. Exception: {1} + +DISK_NAMED_DATA_READ_IO_ERROR=\ + AAIUI30067E|\ + Failed to retrieve data for {0} from disk. Exception: {1} + +OFFLINE_STORAGE_PATH_ERROR=\ + AAIUI30068E|\ + Error in determining offline storage path for link: {0}. Exception: {1} + +RESTFULL_OP_ERROR_VERBOSE=\ + AAIUI30069E|\ + Error retrieving link: {0} from restful endpoint due to error: {1} + +ATTRIBUTES_ERROR_GETTING_AAI_CONFIG_OR_ADAPTER=\ + AAIUI30070E|\ + Error in getting AAI configuration or Adaptor: {0} + +USER_AUTHORIZATION_FILE_UNAVAILABLE=\ + AAIUI30071E|\ + User authorization file unavailable. User {0} cannot be authorized. 
+ +COOKIE_NOT_FOUND=\ + AAIUI30072E|\ + No cookies found in the request + +CONFIG_NOT_FOUND_VERBOSE=\ + AAIUI30073E|\ + Error in loading configuration from file: {0}. Cause: {1} + +FILE_NOT_FOUND=\ + AAIUI30074E|\ + Failed to find file: {0} + +ATTRIBUTES_USER_NOT_AUTHORIZED_TO_UPDATE=\ + AAIUI30075E|\ + User {0} is not authorized for Attributes update + +SELF_LINK_NULL_EMPTY_RESPONSE=\ + AAIUI30076E|\ + AIN - Failed to process null or empty pathed self link response + +SELF_LINK_RELATIONSHIP_LIST_ERROR=\ + AAIUI30077E|\ + AIN - Caught an error processing the self-link relationship-list: {0} + +SEARCH_SERVLET_ERROR=\ + AAIUI30078E|\ + Search Servlet Error: {0} + +SEARCH_RESPONSE_BUILDING_EXCEPTION=\ + AAIUI30079E|\ + Caught an exception while building a search response. Error: {0} + +SEARCH_TAG_ANNOTATION_ERROR=\ + AAIUI30080E|\ + An error occurred annotating search tags. Search tags: {0} Error: {1} + +QUERY_FAILED_UNHANDLED_APP_TYPE=\ + AAIUI30081E|\ + Do-Query failed because of an unhandled application type: {0} + +ENTITY_NOT_FOUND_IN_OXM=\ + AAIUI30082E|\ + No {0} descriptors found in OXM file + +JSON_CONVERSION_ERROR=\ + AAIUI30083E|\ + An error occurred while converting JSON into {0}. Error: {1} + +ERROR_LOADING_OXM_SEARCHABLE_ENTITIES=\ + AAIUI30084E|\ + Failed to load searchable entities in OXM file. Synchronizer stopped. + +AAI_RETRIEVAL_FAILED_GENERIC=\ + AAIUI30085E|\ + Retrieving data from AAI failed with error = {0} + +AAI_RETRIEVAL_FAILED_FOR_SELF_LINK=\ + AAIUI30086E|\ + Failed to get result from AAI for link = {0} + +FAILED_TO_REGISTER_DUE_TO_NULL=\ + AAIUI30087E|\ + {0} + +FAILED_TO_ADD_SKELETON_NODE=\ + AAIUI30088E|\ + Failed to add skeleton node: {0} + +FAILED_TO_PROCESS_SKELETON_NODE=\ + AAIUI30089E|\ + Failed to process skeleton node: {0} + +INVALID_RESOLVE_STATE_DURING_INIT=\ + AAIUI30090E|\ + An error has occurred because Node in INIT state should not already have its self link resolved + +FAILED_TO_PROCESS_INITIAL_STATE=\ + AAIUI30091E|\ + Failed to process initial state: {0} + +SKIPPING_RELATIONSHIP=\ + AAIUI30092E|\ + Skipping relationship because failed to generate nodeId for relationship, {0} + +FAILED_TO_DETERMINE_NODE_ID=\ + AAIUI30093E|\ + Failed to determine node id: {0} + +EXTRACTION_ERROR=\ + AAIUI30094E|\ + Extraction failed: {0} + +SELF_LINK_NODE_PARSE_ERROR=\ + AAIUI30095E|\ + Self link node parsing error: {0} + +SELF_LINK_RETRIEVAL_FAILED=\ + AAIUI30096E|\ + Complex Entity Self link retrieval for link = {0} failed with error code = {1} and message = {2} + +SELF_LINK_DETERMINATION_FAILED_GENERIC=\ + AAIUI30097E|\ + Self link determination failed for entity with link = {0} + +SELF_LINK_DETERMINATION_FAILED_UNEXPECTED_LINKS=\ + AAIUI30098E|\ + Self link determination failed with an ambiguous result with an unexpected number of links = {0} + +ROOT_NODE_DISCOVERED=\ + AAIUI30099E|\ + Root node discovered for search target node ID = {0} + +SELF_LINK_PROCESS_NEIGHBORS_ERROR=\ + AAIUI300100E|\ + Self link node process neighbors error: {0} + +SELF_LINK_JSON_PARSE_ERROR=\ + AAIUI300101E|\ + Self link JSON parsing error: {0} + +SELF_LINK_PROCESSING_ERROR=\ + AAIUI300102E|\ + Self link processing error: {0} + +UNHANDLED_OBJ_TYPE_FOR_ENTITY_TYPE=\ + AAIUI300103E|\ + Error: Unhandled object type for entityType, {0}, which is not an array + +ATTRIBUTE_GROUP_FAILURE=\ + AAIUI300104E|\ + Failure to process attribute group field, fields is null for attribute group {0} + +EXCEPTION_CAUGHT=\ + AAIUI300105E|\ + Exception caught. 
{0} Exception: {1} + +ERROR_EXTRACTING_FROM_RESPONSE=\ + AAIUI300106E|\ + {0} + +PROCESSING_LOOP_INTERUPTED=\ + AAIUI300107E|\ + Processing loop interrupted: {0} + +IGNORING_SKELETON_NODE=\ + AAIUI300108E|\ + Ignoring skeleton node with unique ID, {0}, because of processing error + +VISUALIZATION_OUTPUT_ERROR=\ + AAIUI300109E|\ + An error occurred while preparing D3 visualization output: {0} + +FAILED_TO_PROCESS_NODE_INTEGRITY=\ + AAIUI300110E|\ + Failed to process node integrity: {0} + +FAILURE_TO_PROCESS_REQUEST=\ + AAIUI300111E\ + Failure to process request. {0} + +FAILED_TO_DETERMINE=\ + AAIUI300112E\ + Failed to determine {0} + +FAILED_TO_ANALYZE=\ + AAIUI300113E|\ + Failed to analyze {0} + +FAILED_TO_GET_NODES_QUERY_RESULT=\ + AAIUI300114E|\ + Failed to get nodes-query result from AAI with error {0} + +UNEXPECTED_NUMBER_OF_LINKS=\ + AAIUI300115E|\ + Unexpected number of links found. Expected {0}, but found {1} + +ITEM_TYPE_NULL=\ + AAIUI300116E|\ + Item type null for node, {0} + +UNEXPECTED_TOKEN_COUNT=\ + AAIUI300117E|\ + Unexpected number of tokens returned from splitting typeAndField by period delimiter. Field value: {0} + +ADD_SEARCH_TARGET_ATTRIBUTES_FAILED=\ + AAIUI300118E|\ + Add SearchTargetAttributes failure: {0} + +NODE_INTEGRITY_OVERLAY_ERROR=\ + AAIUI300119E|\ + Error processing node integrity overlay: {0} + +ERROR_LOADING_OXM_SUGGESTIBLE_ENTITIES=\ + AAIUI300120E|\ + Failed to load suggestible entities in OXM file. Synchronizer stopped. + +ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR=\ + AAIUI300121E|\ + {0} + +ES_AGGREGATION_SUGGESTION_ENTITY_SYNC_ERROR=\ + AAIUI300122E|\ + {0} + +ENTITY_SYNC_SEARCH_TAG_ANNOTATION_FAILED=\ + AAIUI300123E|\ + {0} + +UNSUPPORTED_URL_ENCODING=\ + AAIUI300124E|\ + Unsupported URL encoding: {0} + +INVALID_REDIRECT_URL=\ + AAIUI300125E|\ + Cannot redirect to invalid URL: {0} + +ERROR_REMOVING_URL_PARAM=\ + AAIUI300127E|\ + Failed to remove query param from URL: {0} + +ERROR_INVALID_HASH=\ + AAIUI300128E|\ + Invalid hash value: {0} + +ERROR_HASH_NOT_FOUND=\ + AAIUI300129E|\ + Could not find hash value. + +ERROR_READING_HTTP_REQ_PARAMS=\ + AAIUI300130E|\ + Could not read HTTP header parameters. + +ERROR_D3_GRAPH_VISUALIZATION=\ + AAIUI300129E|\ + Failed to generate D3 graph visualization, due to a servlet exception with a cause: {0} + +ERROR_AAI_QUERY_WITH_RETRY=\ + AAIUI300130E|\ + Querying AAI with retry failed due to exception: {0} + +ERROR_READING_JSON_SCHEMA=\ + AAIUI300131E|\ + Error reading JSON schema from: {0} + +VIEW_NAME_NOT_SUPPORTED=\ + AAIUI300132E|\ + View name not supported: {0} + +ERROR_FETCHING_FILTER_VALUES=\ + AAIUI300133E|\ + Result {0} while fetching filter values for filter {1}. + +ERROR_PROCESSING_WIDGET_REQUEST=\ + AAIUI300134E|\ + Error processing for query: {0} in view: {1} + + +ERROR_FILTERS_NOT_FOUND=\ + AAIUI300135E|\ + No filters were provided as part of request. + +DR_REQUEST_URI_FOR_PROXY_UNKNOWN=\ + AAIUI300136E|\ + Failure to generate routing service URL from: {0} + +OXM_LOADING_ERROR=\ + AAIUI300137E|\ + OXM loading error. Reason: {0} + +URI_DECODING_EXCEPTION=\ + AAIUI300138E|\ + Error decoding exception. {0} + +ENCRYPTION_ERROR=\ + AAIUI300139E|\ + Encryption error for value: {0}. Cause: {1} + +DECRYPTION_ERROR=\ + AAIUI300140E|\ + Decryption error for encrypted value: {0}. Cause: {1} + +RESOURCE_NOT_FOUND=\ + AAIUI300141E|\ + Unsupported request. 
Resource not found: {0} + +#-------------------- 900 Series Errors --------------------# + +UNKNOWN_SERVER_ERROR=\ + AAIUI9001E|\ + Unknown Server Error: {0} + +SEARCH_ADAPTER_ERROR=\ + AAIUI9002E|\ + Search Adapter Error: {0} + +QUERY_PARAM_EXTRACTION_ERROR=\ + AAIUI9003E|\ + Query Parameter Self-Link Extraction Error: {0} + +ERROR_EXTRACTING_RESOURCE_PATH_FROM_LINK=\ + AAIUI9004E|\ + Error extracting resource path from self-link. Error: {0} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/analytics/AveragingRingBufferTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/analytics/AveragingRingBufferTest.java new file mode 100644 index 0000000..f64a0f5 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/analytics/AveragingRingBufferTest.java @@ -0,0 +1,134 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.analytics; + +import static org.junit.Assert.assertEquals; + +import java.security.SecureRandom; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.runners.MockitoJUnitRunner; +import org.onap.aai.sparky.analytics.AveragingRingBuffer; + +/** + * The Class AveragingRingBufferTest. + */ +@RunWith(MockitoJUnitRunner.class) +public class AveragingRingBufferTest { + + protected SecureRandom random = new SecureRandom(); + + /** + * Inits the. + * + * @throws Exception the exception + */ + @Before + public void init() throws Exception { + // nothing at the moment + } + + /** + * Validate pre index roll averaging. + */ + @Test + public void validatePreIndexRollAveraging() { + + AveragingRingBuffer arb = new AveragingRingBuffer(5); + assertEquals(0, arb.getAvg()); + + /* + * On initial buffer fill, the average will be re-calculated on the fly for the first nth data + * points until the data buffer has been filled the first time, and then the buffer + * automatically recalculates the average every time the buffer index rolls over, to the keep + * the average relative to the last "nth" data points. 
+ */ + + // [ 1, 0, 0, 0, 0 ], sum = 1, avg = 1/1 =1 + arb.addSample(1); + assertEquals(1, arb.getAvg()); + + // [ 1, 2, 0, 0, 0 ], sum = 3, avg = 3/2 = 1 + arb.addSample(2); + assertEquals(1, arb.getAvg()); + + // [ 1, 2, 3, 0, 0 ], sum = 6, avg = 6/3 = 2 + arb.addSample(3); + assertEquals(2, arb.getAvg()); + + // [ 1, 2, 3, 4, 0 ], sum = 10, avg = 10/4 = 2 + arb.addSample(4); + assertEquals(2, arb.getAvg()); + + // [ 1, 2, 3, 4, 5 ], sum = 15, avg = 15/5 = 3 + arb.addSample(5); + assertEquals(3, arb.getAvg()); + + } + + /** + * Validate post index roll averaging. + */ + @Test + public void validatePostIndexRollAveraging() { + + AveragingRingBuffer arb = new AveragingRingBuffer(5); + arb.addSample(1); + arb.addSample(2); + arb.addSample(3); + arb.addSample(4); + arb.addSample(5); + + /* + * The behavior switches, and now doesn't re-calculate the average until each nth data point, to + * reduce the computational over-head of re-calculating on each value. + */ + + // [ 10, 2, 3, 4, 5 ], + arb.addSample(10); + assertEquals(3, arb.getAvg()); + + // [ 10, 20, 3, 4, 5 ], + arb.addSample(20); + assertEquals(3, arb.getAvg()); + + // [ 10, 20, 30, 4, 5 ], + arb.addSample(30); + assertEquals(3, arb.getAvg()); + + // [ 10, 20, 30, 40, 5 ], + arb.addSample(40); + assertEquals(3, arb.getAvg()); + + // [ 10, 20, 30, 40, 50 ], s=150, avg=150/5=30 + arb.addSample(50); + assertEquals(30, arb.getAvg()); + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/analytics/HistogramSamplerTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/analytics/HistogramSamplerTest.java new file mode 100644 index 0000000..bba52a7 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/analytics/HistogramSamplerTest.java @@ -0,0 +1,91 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.analytics; + +import java.security.SecureRandom; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.runners.MockitoJUnitRunner; +import org.onap.aai.sparky.analytics.HistogramSampler; + +/** + * The Class HistogramSamplerTest. + */ +@RunWith(MockitoJUnitRunner.class) +public class HistogramSamplerTest { + + protected SecureRandom random = new SecureRandom(); + + /** + * Inits the. 
+ * + * @throws Exception the exception + */ + @Before + public void init() throws Exception { + // nothing at the moment + } + + /** + * Validate basic construction and delimited reporting. + */ + @Test + public void validateBasicConstructionAndDelimitedReporting() { + + HistogramSampler histoSampler = new HistogramSampler("[File byte size]", 500000, 22, 3); + + SecureRandom random = new SecureRandom(); + + for (int x = 0; x < 100000; x++) { + histoSampler.track(random.nextInt(9999999)); + } + + System.out.println(histoSampler.getStats(false, " ")); + + } + + + /** + * Validate basic construction and formatted reporting. + */ + @Test + public void validateBasicConstructionAndFormattedReporting() { + + HistogramSampler histoSampler = new HistogramSampler("[Queue Length Samples]", 100000, 15, 3); + + SecureRandom random = new SecureRandom(); + + for (int x = 0; x < 100000; x++) { + histoSampler.track(random.nextInt(9999999)); + } + + System.out.println(histoSampler.getStats(true, " ")); + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/analytics/HistoricalCounterTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/analytics/HistoricalCounterTest.java new file mode 100644 index 0000000..21899a9 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/analytics/HistoricalCounterTest.java @@ -0,0 +1,70 @@ +package org.onap.aai.sparky.analytics; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.analytics.HistoricalCounter; + +public class HistoricalCounterTest { + + + private HistoricalCounter historicalCounter; + private HistoricalCounter historicalCount; + @Before + public void init() throws Exception { + historicalCounter = new HistoricalCounter(true); + historicalCount = new HistoricalCounter(false); + } + + @Test + public void successfulInitialization() { + assertEquals(-1, historicalCounter.getMin(),0); + assertEquals(0, historicalCounter.getMax(),0); + assertEquals(0, historicalCounter.getNumSamples(),0); + assertEquals(0, historicalCounter.getNumSamples(),0); + assertEquals(0.0, historicalCounter.getValue(),0); + assertEquals(0, historicalCounter.getAvg(),0); + assertTrue(historicalCounter.isSingleValue()); + + } + + @Test + public void updateValuesAndReset() { + historicalCounter.update(-1); + assertEquals(0, historicalCounter.getValue(),0); + historicalCounter.update(10); + assertEquals(10, historicalCounter.getValue(),0); + historicalCounter.reset(); + assertEquals(-1, historicalCounter.getMin(),0); + assertEquals(0, historicalCounter.getMax(),0); + assertEquals(0, historicalCounter.getNumSamples(),0); + assertEquals(0, historicalCounter.getNumSamples(),0); + assertEquals(0.0, historicalCounter.getValue(),0); + + } + + @Test + public void updateValues() { + historicalCount.update(2); + assertEquals(2, historicalCount.getMin(),0); + historicalCount.setMin(10); + historicalCount.update(3); + assertEquals(3, historicalCount.getMin(),0); + historicalCount.setMax(1); + historicalCount.update(4); + assertEquals(4, historicalCount.getMax(),0); + historicalCount.setTotalOfSamples(10); + historicalCount.setNumSamples(2); + assertEquals(5, historicalCount.getAvg(),0); + historicalCount.setTotalOfSamples(10); + assertEquals(10, historicalCount.getTotalOfSamples(),0); + historicalCount.setMaintainSingleValue(true); + assertTrue(historicalCount.isSingleValue()); + + } + + + +} diff --git 
a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/analytics/TransactionRateControllerTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/analytics/TransactionRateControllerTest.java new file mode 100644 index 0000000..881c9ab --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/analytics/TransactionRateControllerTest.java @@ -0,0 +1,217 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.analytics; + +import org.junit.Before; + + +/** + * The Class TransactionRateControllerTest. + */ +public class TransactionRateControllerTest { + + /** + * Inits the. + * + * @throws Exception the exception + */ + @Before + public void init() throws Exception { + // nothing at the moment + } + /* + * @Test public void tenTPS_oneThread_validateRateEnforcementWhenAvgResposneTimeIsUnderBudget() { + * + * TransactionRateController trc = new TransactionRateController(10.0, 1, 5); + * + * trc.trackResponseTime(25); trc.trackResponseTime(35); trc.trackResponseTime(45); + * trc.trackResponseTime(55); trc.trackResponseTime(70); + * + * // avg should be 46 ms + * + * assertEquals(54, trc.getFixedDelayInMs()); + * + * } + * + * @Test public void tenTPS_oneThread_validateRateEnforcementWhenAvgResposneTimeIsOverBudget() { + * + * TransactionRateController trc = new TransactionRateController(10.0, 1, 5); + * + * trc.trackResponseTime(75); trc.trackResponseTime(125); trc.trackResponseTime(250); + * trc.trackResponseTime(105); trc.trackResponseTime(23); + * + * // avg should be 115 ms + * + * assertEquals(0, trc.getFixedDelayInMs()); + * + * } + * + * @Test public void oneTPS_oneThread_validateRateEnforcementWhenAvgResposneTimeIsUnderBudget() { + * + * TransactionRateController trc = new TransactionRateController(1.0, 1, 5); + * + * trc.trackResponseTime(25); trc.trackResponseTime(35); trc.trackResponseTime(45); + * trc.trackResponseTime(55); trc.trackResponseTime(70); + * + * // avg should be 46 ms + * + * assertEquals(954, trc.getFixedDelayInMs()); + * + * } + * + * @Test public void oneTPS_oneThread_validateRateEnforcementWhenAvgResposneTimeIsOverBudget() { + * + * TransactionRateController trc = new TransactionRateController(1.0, 1, 5); + * + * trc.trackResponseTime(75); trc.trackResponseTime(125); trc.trackResponseTime(250); + * trc.trackResponseTime(105); trc.trackResponseTime(23); + * + * // avg should be 115 ms + * + * assertEquals(885, trc.getFixedDelayInMs()); + * + * } + * + * @Test 
public void halfTPS_oneThread_validateRateEnforcementWhenAvgResposneTimeIsUnderBudget() { + * + * TransactionRateController trc = new TransactionRateController(0.5, 1, 5); + * + * trc.trackResponseTime(25); trc.trackResponseTime(35); trc.trackResponseTime(45); + * trc.trackResponseTime(55); trc.trackResponseTime(70); + * + * // avg should be 46 ms + * + * assertEquals(1954, trc.getFixedDelayInMs()); + * + * } + * + * @Test public void halfTPS_oneThread_validateRateEnforcementWhenAvgResposneTimeIsOverBudget() { + * + * TransactionRateController trc = new TransactionRateController(0.5, 1, 5); + * + * trc.trackResponseTime(75); trc.trackResponseTime(125); trc.trackResponseTime(250); + * trc.trackResponseTime(105); trc.trackResponseTime(23); + * + * // avg should be 115 ms + * + * assertEquals(1885, trc.getFixedDelayInMs()); + * + * } + * + * @Test public void tenTPS_tenThreads_validateRateEnforcementWhenAvgResposneTimeIsUnderBudget() { + * + * TransactionRateController trc = new TransactionRateController(10.0, 10, 5); + * + * trc.trackResponseTime(25); trc.trackResponseTime(35); trc.trackResponseTime(45); + * trc.trackResponseTime(55); trc.trackResponseTime(70); + * + * // avg should be 46 ms + * + * assertEquals(540, trc.getFixedDelayInMs()); + * + * } + * + * @Test public void tenTPS_tenThreads_validateRateEnforcementWhenAvgResposneTimeIsOverBudget() { + * + * TransactionRateController trc = new TransactionRateController(10.0, 10, 5); + * + * trc.trackResponseTime(75); trc.trackResponseTime(125); trc.trackResponseTime(250); + * trc.trackResponseTime(105); trc.trackResponseTime(23); + * + * // avg should be 115 ms + * + * assertEquals(0, trc.getFixedDelayInMs()); + * + * } + * + * @Test public void oneTPS_tenThreads_validateRateEnforcementWhenAvgResposneTimeIsUnderBudget() { + * + * TransactionRateController trc = new TransactionRateController(1.0, 10, 5); + * + * trc.trackResponseTime(25); trc.trackResponseTime(35); trc.trackResponseTime(45); + * trc.trackResponseTime(55); trc.trackResponseTime(70); + * + * // avg should be 46 ms + * + * assertEquals(9540, trc.getFixedDelayInMs()); + * + * } + * + * @Test public void oneTPS_tenThreads_validateRateEnforcementWhenAvgResposneTimeIsOverBudget() { + * + * TransactionRateController trc = new TransactionRateController(1.0, 10, 5); + * + * trc.trackResponseTime(75); trc.trackResponseTime(125); trc.trackResponseTime(250); + * trc.trackResponseTime(105); trc.trackResponseTime(23); + * + * // avg should be 115 ms + * + * assertEquals(8850, trc.getFixedDelayInMs()); + * + * } + * + * @Test public void halfTPS_tenThreads_validateRateEnforcementWhenAvgResposneTimeIsUnderBudget() + * { + * + * TransactionRateController trc = new TransactionRateController(0.5, 10, 5); + * + * trc.trackResponseTime(25); trc.trackResponseTime(35); trc.trackResponseTime(45); + * trc.trackResponseTime(55); trc.trackResponseTime(70); + * + * // avg should be 46 ms + * + * assertEquals(19540, trc.getFixedDelayInMs()); + * + * } + * + * @Test public void halfTPS_tenThreads_validateRateEnforcementWhenAvgResposneTimeIsOverBudget() { + * + * TransactionRateController trc = new TransactionRateController(0.5, 10, 5); + * + * trc.trackResponseTime(75); trc.trackResponseTime(125); trc.trackResponseTime(250); + * trc.trackResponseTime(105); trc.trackResponseTime(23); + * + * // avg should be 115 ms + * + * assertEquals(18850, trc.getFixedDelayInMs()); + * + * } + * + * @Test public void oneTPS_fiveThreads_validateRateEnforcementWhenAvgResposneTimeIsOverBudget() { + * + * 
TransactionRateController trc = new TransactionRateController(1, 5, 5); + * + * trc.trackResponseTime(0); trc.trackResponseTime(0); trc.trackResponseTime(0); + * trc.trackResponseTime(0); trc.trackResponseTime(0); + * + * // avg should be 0 ms + * + * assertEquals(5000, trc.getFixedDelayInMs()); + * + * } + */ + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/autosuggestion/sync/AutosuggestionSynchronizerTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/autosuggestion/sync/AutosuggestionSynchronizerTest.java new file mode 100644 index 0000000..dd6f1c7 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/autosuggestion/sync/AutosuggestionSynchronizerTest.java @@ -0,0 +1,387 @@ +package org.onap.aai.sparky.autosuggestion.sync; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.Matchers; +import org.mockito.Mockito; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.sparky.config.oxm.OxmModelProcessor; +import org.onap.aai.sparky.config.oxm.SuggestionEntityDescriptor; +import org.onap.aai.sparky.config.oxm.SuggestionEntityLookup; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.search.filters.config.FiltersConfig; +import org.onap.aai.sparky.search.filters.config.FiltersDetailsConfig; +import org.onap.aai.sparky.search.filters.config.FiltersForViewsConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.util.TestResourceLoader; + +import com.fasterxml.jackson.databind.ObjectMapper; + +public class AutosuggestionSynchronizerTest { + + private static ObjectMapper mapper = new ObjectMapper(); + + private AutosuggestionSynchronizer suggestionSynchronizer; + + private ElasticSearchSchemaConfig esSchemaConfig; + private NetworkStatisticsConfig aaiStatConfig; + private NetworkStatisticsConfig esStatConfig; + private OxmEntityLookup oxmEntityLookup; + private SuggestionEntityLookup suggestionEntityLookup; + private ElasticSearchAdapter esAdapter; + private ActiveInventoryAdapter aaiAdapter; + + + private FiltersConfig filtersConfig; + + + + @Before + public void init() throws Exception { + + esSchemaConfig = new ElasticSearchSchemaConfig(); + esSchemaConfig.setIndexDocType("default"); + esSchemaConfig.setIndexMappingsFileName(null); + esSchemaConfig.setIndexName("aggregation-index-name"); + esSchemaConfig.setIndexSettingsFileName(null); + + + aaiStatConfig = new NetworkStatisticsConfig(); + + aaiStatConfig.setNumSamplesPerThreadForRunningAverage(100); + + aaiStatConfig.setBytesHistogramLabel("[Response Size In Bytes]"); + aaiStatConfig.setBytesHistogramMaxYAxis(1000000L); + aaiStatConfig.setBytesHistogramNumBins(20); + aaiStatConfig.setBytesHistogramNumDecimalPoints(2); + + aaiStatConfig.setQueueLengthHistogramLabel("[Queue Item Length]"); + aaiStatConfig.setQueueLengthHistogramMaxYAxis(20000); + 
aaiStatConfig.setQueueLengthHistogramNumBins(20); + aaiStatConfig.setQueueLengthHistogramNumDecimalPoints(2); + + aaiStatConfig.setTaskAgeHistogramLabel("[Task Age In Ms]"); + aaiStatConfig.setTaskAgeHistogramMaxYAxis(600000L); + aaiStatConfig.setTaskAgeHistogramNumBins(20); + aaiStatConfig.setTaskAgeHistogramNumDecimalPoints(2); + + aaiStatConfig.setResponseTimeHistogramLabel("[Response Time In Ms]"); + aaiStatConfig.setResponseTimeHistogramMaxYAxis(1000L); + aaiStatConfig.setResponseTimeHistogramNumBins(20); + aaiStatConfig.setResponseTimeHistogramNumDecimalPoints(2); + + aaiStatConfig.setTpsHistogramLabel("[Transactions Per Second]"); + aaiStatConfig.setTpsHistogramMaxYAxis(100); + aaiStatConfig.setTpsHistogramNumBins(20); + aaiStatConfig.setTpsHistogramNumDecimalPoints(2); + + esStatConfig = new NetworkStatisticsConfig(); + + esStatConfig.setNumSamplesPerThreadForRunningAverage(100); + + esStatConfig.setBytesHistogramLabel("[Response Size In Bytes]"); + esStatConfig.setBytesHistogramMaxYAxis(1000000L); + esStatConfig.setBytesHistogramNumBins(20); + esStatConfig.setBytesHistogramNumDecimalPoints(2); + + esStatConfig.setQueueLengthHistogramLabel("[Queue Item Length]"); + esStatConfig.setQueueLengthHistogramMaxYAxis(20000); + esStatConfig.setQueueLengthHistogramNumBins(20); + esStatConfig.setQueueLengthHistogramNumDecimalPoints(2); + + esStatConfig.setTaskAgeHistogramLabel("[Task Age In Ms]"); + esStatConfig.setTaskAgeHistogramMaxYAxis(600000L); + esStatConfig.setTaskAgeHistogramNumBins(20); + esStatConfig.setTaskAgeHistogramNumDecimalPoints(2); + + esStatConfig.setResponseTimeHistogramLabel("[Response Time In Ms]"); + esStatConfig.setResponseTimeHistogramMaxYAxis(10000L); + esStatConfig.setResponseTimeHistogramNumBins(20); + esStatConfig.setResponseTimeHistogramNumDecimalPoints(2); + + esStatConfig.setTpsHistogramLabel("[Transactions Per Second]"); + esStatConfig.setTpsHistogramMaxYAxis(100); + esStatConfig.setTpsHistogramNumBins(20); + esStatConfig.setTpsHistogramNumDecimalPoints(2); + + oxmEntityLookup = new OxmEntityLookup(); + + esAdapter = Mockito.mock(ElasticSearchAdapter.class); + aaiAdapter = Mockito.mock(ActiveInventoryAdapter.class); + + + Set<OxmModelProcessor> processors = new HashSet<OxmModelProcessor>(); + + processors.add(oxmEntityLookup); + + + + Map<String, OxmEntityDescriptor> oxmEntityDescriptors = + new HashMap<String, OxmEntityDescriptor>(); + + OxmEntityDescriptor genericVnfDescriptor = new OxmEntityDescriptor(); + genericVnfDescriptor.setEntityName("generic-vnf"); + List<String> pkeyNames = new ArrayList<String>(); + pkeyNames.add("vnf-name"); + + genericVnfDescriptor.setPrimaryKeyAttributeNames(pkeyNames); + + oxmEntityDescriptors.put("generic-vnf", genericVnfDescriptor); + + + oxmEntityLookup.setEntityDescriptors(oxmEntityDescriptors); + + + Map<String, SuggestionEntityDescriptor> suggestionEntityDescriptors = + new HashMap<String, SuggestionEntityDescriptor>(); + + SuggestionEntityDescriptor genericVnfSuggestionDescriptor = new SuggestionEntityDescriptor(); + genericVnfSuggestionDescriptor.setEntityName("generic-vnf"); + genericVnfSuggestionDescriptor.setPrimaryKeyAttributeNames(pkeyNames); + + filtersConfig = new FiltersConfig(null, null, null); + + FiltersDetailsConfig filtersDetailsConfig = mapper.readValue( + TestResourceLoader.getTestResourceDataJson("/filters/aaiui_filters_testConfig.json"), + FiltersDetailsConfig.class); + FiltersForViewsConfig filtersForViewsConfig = mapper.readValue( + 
TestResourceLoader.getTestResourceDataJson("/filters/aaiui_views_testConfig.json"), + FiltersForViewsConfig.class); + + filtersConfig.setFiltersConfig(filtersDetailsConfig); + filtersConfig.setViewsConfig(filtersForViewsConfig); + + /* + * SuggestionSearchEntity sse = new SuggestionSearchEntity(filtersConfig); + * + * sse.setEntityType("generic-vnf"); sse.setSuggestionPropertyTypes( Arrays.asList("vnf-name")); + * + * genericVnfSuggestionDescriptor.setSuggestionSearchEntity(sse); + * + * suggestionEntityDescriptors.put("generic-vnf", genericVnfSuggestionDescriptor); + */ + + suggestionEntityLookup = new SuggestionEntityLookup(filtersConfig); + + processors.add(suggestionEntityLookup); + + OxmModelLoader oxmModelLoader = new OxmModelLoader(-1, processors); + oxmModelLoader.loadLatestOxmModel(); + + // suggestionEntityLookup.setSuggestionSearchEntityDescriptors(suggestionEntityDescriptors); + } + + @Test + public void validateBasicConstruction() throws Exception { + + suggestionSynchronizer = new AutosuggestionSynchronizer(esSchemaConfig, 5, 5, 5, aaiStatConfig, + esStatConfig, oxmEntityLookup, suggestionEntityLookup, filtersConfig); + + suggestionSynchronizer.setAaiAdapter(aaiAdapter); + suggestionSynchronizer.setElasticSearchAdapter(esAdapter); + + assertNotNull(suggestionSynchronizer.getAaiAdapter()); + assertNotNull(suggestionSynchronizer.getElasticSearchAdapter()); + + } + + @Test + public void validateSmallSync() throws Exception { + + suggestionSynchronizer = new AutosuggestionSynchronizer(esSchemaConfig, 5, 5, 5, aaiStatConfig, + esStatConfig, oxmEntityLookup, suggestionEntityLookup, filtersConfig); + + + suggestionSynchronizer.setAaiAdapter(aaiAdapter); + suggestionSynchronizer.setElasticSearchAdapter(esAdapter); + + String nodesQueryResponse = TestResourceLoader + .getTestResourceDataJson("/sync/aai/activeInventory_generic-vnf_nodesQuery_response.json"); + + OperationResult genericVnfSelfLinks = new OperationResult(); + + genericVnfSelfLinks.setResultCode(200); + genericVnfSelfLinks.setResult(nodesQueryResponse); + + Mockito.when(aaiAdapter.getSelfLinksByEntityType("generic-vnf")) + .thenReturn(genericVnfSelfLinks); + + Mockito + .when(aaiAdapter.repairSelfLink(Matchers.contains("generic-vnf-1"), + Mockito.anyString())) + .thenReturn( + "https://server.proxy:8443/aai/v11/network/generic-vnfs/generic-vnf/generic-vnf-1"); + + Mockito + .when(aaiAdapter.repairSelfLink(Matchers.contains("generic-vnf-2"), + Mockito.anyString())) + .thenReturn( + "https://server.proxy:8443/aai/v11/network/generic-vnfs/generic-vnf/generic-vnf-2"); + + Mockito + .when( + aaiAdapter.repairSelfLink(Matchers.contains("generic-vnf-3"), Mockito.anyString())) + .thenReturn( + "https://server.proxy:8443/aai/v11/network/generic-vnfs/generic-vnf/generic-vnf-3"); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-vnf-1"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/generic-vnf-generic-vnf-1_full_depth.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-vnf-2"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/generic-vnf-generic-vnf-2_full_depth.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries(Matchers.contains("generic-vnf-3"), + Mockito.anyString(), Mockito.anyInt())) + .thenReturn(new OperationResult(200, 
TestResourceLoader + .getTestResourceDataJson("/sync/aai/generic-vnf-generic-vnf-3_full_depth.json"))); + + Mockito.when(esAdapter.buildElasticSearchGetDocUrl(Mockito.anyString(), Mockito.anyString())) + .thenReturn("http://localhost:9200/myindex/mytype/doc1", + "http://localhost:9200/myindex/mytype/doc2", + "http://localhost:9200/myindex/mytype/doc3"); + + /* + * Our initial gets from elastic search should be record-not-found + */ + Mockito.when(esAdapter.doGet(Matchers.contains("doc1"), Mockito.any())) + .thenReturn(new OperationResult(404, null)); + Mockito.when(esAdapter.doGet(Matchers.contains("doc2"), Mockito.any())) + .thenReturn(new OperationResult(404, null)); + Mockito.when(esAdapter.doGet(Matchers.contains("doc3"), Mockito.any())) + .thenReturn(new OperationResult(404, null)); + + + Mockito.when(esAdapter.doPut(Matchers.contains("doc"), Mockito.any(), Mockito.any())) + .thenReturn(new OperationResult(200, null)); + + OperationState syncState = suggestionSynchronizer.doSync(); + assertEquals(OperationState.OK, syncState); + + assertNotNull(suggestionSynchronizer.getStatReport(false)); + assertNotNull(suggestionSynchronizer.getStatReport(true)); + + suggestionSynchronizer.clearCache(); + suggestionSynchronizer.shutdown(); + + + } + + @Test + public void validateSmallSyncWithRetries() throws Exception { + + suggestionSynchronizer = new AutosuggestionSynchronizer(esSchemaConfig, 5, 5, 5, aaiStatConfig, + esStatConfig, oxmEntityLookup, suggestionEntityLookup, filtersConfig); + + + suggestionSynchronizer.setAaiAdapter(aaiAdapter); + suggestionSynchronizer.setElasticSearchAdapter(esAdapter); + + String nodesQueryResponse = TestResourceLoader + .getTestResourceDataJson("/sync/aai/activeInventory_generic-vnf_nodesQuery_response.json"); + + OperationResult genericVnfSelfLinks = new OperationResult(); + + genericVnfSelfLinks.setResultCode(200); + genericVnfSelfLinks.setResult(nodesQueryResponse); + + Mockito.when(aaiAdapter.getSelfLinksByEntityType("generic-vnf")) + .thenReturn(genericVnfSelfLinks); + + Mockito + .when(aaiAdapter.repairSelfLink(Matchers.contains("generic-vnf-1"), + Mockito.anyString())) + .thenReturn( + "https://server.proxy:8443/aai/v11/network/generic-vnfs/generic-vnf/generic-vnf-1"); + + Mockito + .when(aaiAdapter.repairSelfLink(Matchers.contains("generic-vnf-2"), + Mockito.anyString())) + .thenReturn( + "https://server.proxy:8443/aai/v11/network/generic-vnfs/generic-vnf/generic-vnf-2"); + + Mockito + .when( + aaiAdapter.repairSelfLink(Matchers.contains("generic-vnf-3"), Mockito.anyString())) + .thenReturn( + "https://server.proxy:8443/aai/v11/network/generic-vnfs/generic-vnf/generic-vnf-3"); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-vnf-1"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/generic-vnf-generic-vnf-1_full_depth.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-vnf-2"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/generic-vnf-generic-vnf-2_full_depth.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries(Matchers.contains("generic-vnf-3"), + Mockito.anyString(), Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader + .getTestResourceDataJson("/sync/aai/generic-vnf-generic-vnf-3_full_depth.json"))); + + 
Mockito.when(esAdapter.buildElasticSearchGetDocUrl(Mockito.anyString(), Mockito.anyString())) + .thenReturn("http://localhost:9200/myindex/mytype/doc1", + "http://localhost:9200/myindex/mytype/doc2", + "http://localhost:9200/myindex/mytype/doc3"); + + /* + * Our initial gets from elastic search should be record-not-found + */ + Mockito.when(esAdapter.doGet(Matchers.contains("doc1"), Mockito.any())) + .thenReturn(new OperationResult(404, null)); + Mockito.when(esAdapter.doGet(Matchers.contains("doc2"), Mockito.any())) + .thenReturn(new OperationResult(404, null)); + Mockito.when(esAdapter.doGet(Matchers.contains("doc3"), Mockito.any())) + .thenReturn(new OperationResult(404, null)); + + + /* + * Elastic Search puts always fail with a version conflict = 409 + */ + + Mockito.when(esAdapter.doPut(Matchers.contains("doc"), Mockito.any(), Mockito.any())) + .thenReturn(new OperationResult(409, null)); + + OperationState syncState = suggestionSynchronizer.doSync(); + assertEquals(OperationState.OK, syncState); + + assertNotNull(suggestionSynchronizer.getStatReport(false)); + assertNotNull(suggestionSynchronizer.getStatReport(true)); + + suggestionSynchronizer.clearCache(); + suggestionSynchronizer.shutdown(); + + + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/common/search/CommonSearchSuggestionTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/common/search/CommonSearchSuggestionTest.java new file mode 100644 index 0000000..abe31a3 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/common/search/CommonSearchSuggestionTest.java @@ -0,0 +1,55 @@ +package org.onap.aai.sparky.common.search; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.search.filters.entity.UiFilterValueEntity; + + +public class CommonSearchSuggestionTest { + + private CommonSearchSuggestion commonSearchSuggestions; + private CommonSearchSuggestion commonSearchSuggestion; + private ArrayList<UiFilterValueEntity> filterValues; + + @Before + public void init() throws Exception { + + filterValues = new ArrayList<UiFilterValueEntity>(); + commonSearchSuggestions = new CommonSearchSuggestion("627dfa28b","schema","generic-vnf called sdwanfwdemo_vnf1_under_fw-si1","VNF-Stack",filterValues); + commonSearchSuggestion = new CommonSearchSuggestion("627dfa28b","schema","generic-vnf called sdwanfwdemo_vnf1_under_fw-si1","VNF-Stack",null); + + } + + + @Test + public void updateValues() { + + commonSearchSuggestions.setHashId("4a83c197ffa19ec4a1"); + assertNotNull(commonSearchSuggestions.getHashId()); + commonSearchSuggestions.setRoute("structuredView"); + assertNotNull(commonSearchSuggestions.getRoute()); + commonSearchSuggestions.setText("generic-vnf called fp_vnf1_under_fw-si1 called fp_vnf1_under_fw-si1 -- Schema"); + assertNotNull(commonSearchSuggestions.getText()); + assertNotNull(commonSearchSuggestions.getFilterValues()); + assertNotNull(commonSearchSuggestions.toString()); + + + commonSearchSuggestion.setHashId(null); + assertNull(commonSearchSuggestion.getHashId()); + commonSearchSuggestion.setRoute(null); + assertNull(commonSearchSuggestion.getRoute()); + commonSearchSuggestion.setText(null); + assertNull(commonSearchSuggestion.getText()); + assertNull(commonSearchSuggestion.getFilterValues()); + assertNotNull(commonSearchSuggestion.toString()); + + } + + +} diff 
--git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceDescriptorTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceDescriptorTest.java new file mode 100644 index 0000000..0b506df --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceDescriptorTest.java @@ -0,0 +1,34 @@ +package org.onap.aai.sparky.config.oxm; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; + +import org.junit.Before; +import org.junit.Test; + +public class CrossEntityReferenceDescriptorTest { + + private CrossEntityReferenceDescriptor crossEntityReferenceDescriptor; + private CrossEntityReference crossEntityReference; + + @Before + public void init() throws Exception { + + crossEntityReference = new CrossEntityReference(); + crossEntityReferenceDescriptor = new CrossEntityReferenceDescriptor(); + + } + + @Test + public void updateValues() { + + crossEntityReferenceDescriptor.setCrossEntityReference(crossEntityReference); + assertNotNull(crossEntityReferenceDescriptor.getCrossEntityReference()); + crossEntityReferenceDescriptor.hasCrossEntityReferences(); + assertFalse(crossEntityReferenceDescriptor.hasCrossEntityReferences()); + assertNotNull(crossEntityReferenceDescriptor.toString()); + + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceTest.java new file mode 100644 index 0000000..c403213 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/config/oxm/CrossEntityReferenceTest.java @@ -0,0 +1,36 @@ +package org.onap.aai.sparky.config.oxm; + +import static org.junit.Assert.assertNotNull; + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; + +public class CrossEntityReferenceTest { + + private CrossEntityReference crossEntityReference; + private ArrayList<String> refAttribute; + + @Before + public void init() throws Exception { + + crossEntityReference = new CrossEntityReference(); + refAttribute = new ArrayList<String>(); + + } + + @Test + public void updateValues() { + + crossEntityReference.setTargetEntityType(""); + assertNotNull(crossEntityReference.getTargetEntityType()); + crossEntityReference.setReferenceAttributes(refAttribute); + assertNotNull(crossEntityReference.getReferenceAttributes()); + crossEntityReference.addReferenceAttribute(""); + assertNotNull(crossEntityReference.toString()); + + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/config/oxm/GeoEntityDescriptorTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/config/oxm/GeoEntityDescriptorTest.java new file mode 100644 index 0000000..dffda5c --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/config/oxm/GeoEntityDescriptorTest.java @@ -0,0 +1,37 @@ +package org.onap.aai.sparky.config.oxm; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + + +import org.junit.Before; +import org.junit.Test; + +public class GeoEntityDescriptorTest { + + private GeoEntityDescriptor geoEntityDescriptor; + + @Before + public void init() throws Exception { + geoEntityDescriptor = new GeoEntityDescriptor(); + + } + + @Test + public void updateValues() { + + geoEntityDescriptor.setGeoLatName(""); + assertNotNull(geoEntityDescriptor.getGeoLatName()); + 
geoEntityDescriptor.setGeoLongName(""); + assertNotNull(geoEntityDescriptor.getGeoLongName()); + assertNotNull(geoEntityDescriptor.toString()); + geoEntityDescriptor.setGeoLatName(null); + assertNull(geoEntityDescriptor.getGeoLatName()); + geoEntityDescriptor.setGeoLongName(null); + assertNull(geoEntityDescriptor.getGeoLongName()); + assertNotNull(geoEntityDescriptor.toString()); + + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/config/oxm/GeoOxmEntityDescriptorTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/config/oxm/GeoOxmEntityDescriptorTest.java new file mode 100644 index 0000000..2324194 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/config/oxm/GeoOxmEntityDescriptorTest.java @@ -0,0 +1,40 @@ +package org.onap.aai.sparky.config.oxm; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import org.junit.Before; +import org.junit.Test; + +public class GeoOxmEntityDescriptorTest { + + private GeoOxmEntityDescriptor geoOxmEntityDescriptor; + + @Before + public void init() throws Exception { + geoOxmEntityDescriptor = new GeoOxmEntityDescriptor(); + + } + + @Test + public void updateValues() { + + geoOxmEntityDescriptor.setGeoLatName(""); + assertNotNull(geoOxmEntityDescriptor.getGeoLatName()); + geoOxmEntityDescriptor.setGeoLongName(""); + assertNotNull(geoOxmEntityDescriptor.getGeoLongName()); + assertTrue(geoOxmEntityDescriptor.hasGeoEntity()); + assertNotNull(geoOxmEntityDescriptor.toString()); + geoOxmEntityDescriptor.setGeoLatName(null); + assertNull(geoOxmEntityDescriptor.getGeoLatName()); + geoOxmEntityDescriptor.setGeoLongName(null); + assertNull(geoOxmEntityDescriptor.getGeoLongName()); + assertFalse(geoOxmEntityDescriptor.hasGeoEntity()); + assertNotNull(geoOxmEntityDescriptor.toString()); + + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/config/oxm/OxmEntityContainerLookup.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/config/oxm/OxmEntityContainerLookup.java new file mode 100644 index 0000000..7d55e4d --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/config/oxm/OxmEntityContainerLookup.java @@ -0,0 +1,99 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.config.oxm; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; + +import org.eclipse.persistence.dynamic.DynamicType; +import org.eclipse.persistence.internal.oxm.mappings.Descriptor; +import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext; + +public class OxmEntityContainerLookup implements OxmModelProcessor { + + private Collection<String> searchableEntityGroups; + private Collection<String> entityContainers; + + public OxmEntityContainerLookup() { + searchableEntityGroups = new ArrayList<String>(); + entityContainers = new ArrayList<String>(); + } + + @Override + public void processOxmModel(DynamicJAXBContext jaxbContext) { + + @SuppressWarnings("rawtypes") + List<Descriptor> descriptorsList = jaxbContext.getXMLContext().getDescriptors(); + + for (@SuppressWarnings("rawtypes") + Descriptor desc : descriptorsList) { + + DynamicType entity = jaxbContext.getDynamicType(desc.getAlias()); + + @SuppressWarnings("unchecked") + Map<String, String> properties = entity.getDescriptor().getProperties(); + + if (properties != null) { + + String container = properties.get("container"); + + if (container != null && !entityContainers.contains(container)) { + + entityContainers.add(container); + + if (properties.containsKey("searchable")) { + if (!searchableEntityGroups.contains(container)) { + searchableEntityGroups.add(container); + } + } + } + + } + + } + + } + + public Collection<String> getSearchableEntityGroups() { + return searchableEntityGroups; + } + + public void setSearchableEntityGroups(Collection<String> searchableEntityGroups) { + this.searchableEntityGroups = searchableEntityGroups; + } + + public Collection<String> getEntityContainers() { + return entityContainers; + } + + public void setEntityContainers(Collection<String> entityContainers) { + this.entityContainers = entityContainers; + } + + public boolean isEntityContainer(String entityType) { + return entityContainers.contains(entityType); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/config/oxm/OxmEntityDescriptorTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/config/oxm/OxmEntityDescriptorTest.java new file mode 100644 index 0000000..dea5d79 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/config/oxm/OxmEntityDescriptorTest.java @@ -0,0 +1,41 @@ +package org.onap.aai.sparky.config.oxm; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; + +public class OxmEntityDescriptorTest { + + private OxmEntityDescriptor oxmEntityDescriptor; + private ArrayList<String> primaryKeyAttributeNames; + + @Before + public void init() throws Exception { + oxmEntityDescriptor = new OxmEntityDescriptor(); + primaryKeyAttributeNames = new ArrayList<String>(); + + } + + @Test + public void updateValues() { + + oxmEntityDescriptor.setEntityName(""); + assertNotNull(oxmEntityDescriptor.getEntityName()); + oxmEntityDescriptor.setPrimaryKeyAttributeNames(primaryKeyAttributeNames); + assertNotNull(oxmEntityDescriptor.getPrimaryKeyAttributeNames()); + oxmEntityDescriptor.addPrimaryKeyName(""); + assertNotNull(oxmEntityDescriptor.toString()); + oxmEntityDescriptor.setEntityName(null); + assertNull(oxmEntityDescriptor.getEntityName()); + oxmEntityDescriptor.setPrimaryKeyAttributeNames(null); + assertNull(oxmEntityDescriptor.getPrimaryKeyAttributeNames()); + 
assertNotNull(oxmEntityDescriptor.toString()); + + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/crossentityreference/sync/CrossEntityReferenceSynchronizerTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/crossentityreference/sync/CrossEntityReferenceSynchronizerTest.java new file mode 100644 index 0000000..c6c999a --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/crossentityreference/sync/CrossEntityReferenceSynchronizerTest.java @@ -0,0 +1,1035 @@ +package org.onap.aai.sparky.crossentityreference.sync; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.hamcrest.Matcher; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Matchers; +import org.mockito.Mockito; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.CrossEntityReferenceLookup; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.sparky.config.oxm.OxmModelProcessor; +import org.onap.aai.sparky.config.oxm.SearchableEntityLookup; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.util.StringCollectionContainsMatcher; +import org.onap.aai.sparky.util.TestResourceLoader; + +import com.fasterxml.jackson.databind.ObjectMapper; + +public class CrossEntityReferenceSynchronizerTest { + + private static ObjectMapper mapper = new ObjectMapper(); + + private CrossEntityReferenceSynchronizer cerSynchronizer; + + private ElasticSearchSchemaConfig esSchemaConfig; + private NetworkStatisticsConfig aaiStatConfig; + private NetworkStatisticsConfig esStatConfig; + private OxmEntityLookup oxmEntityLookup; + private SearchableEntityLookup searchableEntityLookup; + private ElasticSearchAdapter esAdapter; + private ActiveInventoryAdapter aaiAdapter; + private CrossEntityReferenceLookup cerLookup; + private RestEndpointConfig aaiRestEndPointConfig; + + @Before + public void init() throws Exception { + + esSchemaConfig = new ElasticSearchSchemaConfig(); + esSchemaConfig.setIndexDocType("default"); + esSchemaConfig.setIndexMappingsFileName(null); + esSchemaConfig.setIndexName("aggregation-index-name"); + esSchemaConfig.setIndexSettingsFileName(null); + + + aaiStatConfig = new NetworkStatisticsConfig(); + + aaiStatConfig.setNumSamplesPerThreadForRunningAverage(100); + + aaiStatConfig.setBytesHistogramLabel("[Response Size In Bytes]"); + aaiStatConfig.setBytesHistogramMaxYAxis(1000000L); + aaiStatConfig.setBytesHistogramNumBins(20); + aaiStatConfig.setBytesHistogramNumDecimalPoints(2); + + aaiStatConfig.setQueueLengthHistogramLabel("[Queue Item Length]"); + aaiStatConfig.setQueueLengthHistogramMaxYAxis(20000); + aaiStatConfig.setQueueLengthHistogramNumBins(20); + aaiStatConfig.setQueueLengthHistogramNumDecimalPoints(2); + + aaiStatConfig.setTaskAgeHistogramLabel("[Task Age In Ms]"); + aaiStatConfig.setTaskAgeHistogramMaxYAxis(600000L); + 
aaiStatConfig.setTaskAgeHistogramNumBins(20); + aaiStatConfig.setTaskAgeHistogramNumDecimalPoints(2); + + aaiStatConfig.setResponseTimeHistogramLabel("[Response Time In Ms]"); + aaiStatConfig.setResponseTimeHistogramMaxYAxis(1000L); + aaiStatConfig.setResponseTimeHistogramNumBins(20); + aaiStatConfig.setResponseTimeHistogramNumDecimalPoints(2); + + aaiStatConfig.setTpsHistogramLabel("[Transactions Per Second]"); + aaiStatConfig.setTpsHistogramMaxYAxis(100); + aaiStatConfig.setTpsHistogramNumBins(20); + aaiStatConfig.setTpsHistogramNumDecimalPoints(2); + + esStatConfig = new NetworkStatisticsConfig(); + + esStatConfig.setNumSamplesPerThreadForRunningAverage(100); + + esStatConfig.setBytesHistogramLabel("[Response Size In Bytes]"); + esStatConfig.setBytesHistogramMaxYAxis(1000000L); + esStatConfig.setBytesHistogramNumBins(20); + esStatConfig.setBytesHistogramNumDecimalPoints(2); + + esStatConfig.setQueueLengthHistogramLabel("[Queue Item Length]"); + esStatConfig.setQueueLengthHistogramMaxYAxis(20000); + esStatConfig.setQueueLengthHistogramNumBins(20); + esStatConfig.setQueueLengthHistogramNumDecimalPoints(2); + + esStatConfig.setTaskAgeHistogramLabel("[Task Age In Ms]"); + esStatConfig.setTaskAgeHistogramMaxYAxis(600000L); + esStatConfig.setTaskAgeHistogramNumBins(20); + esStatConfig.setTaskAgeHistogramNumDecimalPoints(2); + + esStatConfig.setResponseTimeHistogramLabel("[Response Time In Ms]"); + esStatConfig.setResponseTimeHistogramMaxYAxis(10000L); + esStatConfig.setResponseTimeHistogramNumBins(20); + esStatConfig.setResponseTimeHistogramNumDecimalPoints(2); + + esStatConfig.setTpsHistogramLabel("[Transactions Per Second]"); + esStatConfig.setTpsHistogramMaxYAxis(100); + esStatConfig.setTpsHistogramNumBins(20); + esStatConfig.setTpsHistogramNumDecimalPoints(2); + + oxmEntityLookup = new OxmEntityLookup(); + + esAdapter = Mockito.mock(ElasticSearchAdapter.class); + aaiAdapter = Mockito.mock(ActiveInventoryAdapter.class); + + + Set<OxmModelProcessor> processors = new HashSet<OxmModelProcessor>(); + + processors.add(oxmEntityLookup); + + + Map<String, OxmEntityDescriptor> oxmEntityDescriptors = + new HashMap<String, OxmEntityDescriptor>(); + + OxmEntityDescriptor genericVnfDescriptor = new OxmEntityDescriptor(); + genericVnfDescriptor.setEntityName("generic-vnf"); + List<String> pkeyNames = new ArrayList<String>(); + pkeyNames.add("vnf-name"); + + genericVnfDescriptor.setPrimaryKeyAttributeNames(pkeyNames); + + oxmEntityDescriptors.put("generic-vnf", genericVnfDescriptor); + + + oxmEntityLookup.setEntityDescriptors(oxmEntityDescriptors); + + cerLookup = new CrossEntityReferenceLookup(); + processors.add(cerLookup); + + searchableEntityLookup = new SearchableEntityLookup(); + processors.add(searchableEntityLookup); + + OxmModelLoader oxmModelLoader = new OxmModelLoader(-1, processors); + oxmModelLoader.loadLatestOxmModel(); + + aaiRestEndPointConfig = new RestEndpointConfig(); + aaiRestEndPointConfig.setNumRequestRetries(5); + + Mockito.when(aaiAdapter.getEndpointConfig()).thenReturn(aaiRestEndPointConfig); + + } + + @Test + public void validateBasicConstruction() throws Exception { + + cerSynchronizer = new CrossEntityReferenceSynchronizer(esSchemaConfig, 5, 5, 5, aaiStatConfig, + esStatConfig, cerLookup, oxmEntityLookup, searchableEntityLookup); + + cerSynchronizer.setAaiAdapter(aaiAdapter); + cerSynchronizer.setElasticSearchAdapter(esAdapter); + + assertNotNull(cerSynchronizer.getAaiAdapter()); + assertNotNull(cerSynchronizer.getElasticSearchAdapter()); + + } + + private 
Matcher<List<String>> listContainsValue(String expectedValue) { + return new StringCollectionContainsMatcher(expectedValue); + } + + @Test + public void validateSmallSync() throws Exception { + + cerSynchronizer = new CrossEntityReferenceSynchronizer(esSchemaConfig, 5, 5, 5, aaiStatConfig, + esStatConfig, cerLookup, oxmEntityLookup, searchableEntityLookup); + + cerSynchronizer.setAaiAdapter(aaiAdapter); + cerSynchronizer.setElasticSearchAdapter(esAdapter); + + String nodesQueryResponse = TestResourceLoader + .getTestResourceDataJson("/sync/aai/activeInventory_service-subscription_nodesQuery_response.json"); + + OperationResult entitySelfLinks = new OperationResult(); + + entitySelfLinks.setResultCode(200); + entitySelfLinks.setResult(nodesQueryResponse); + + Mockito.when(aaiAdapter.getSelfLinksByEntityType("service-subscription")) + .thenReturn(entitySelfLinks); + + Mockito + .when(aaiAdapter.repairSelfLink(Matchers.contains("service-subscription-1"), + Mockito.anyString())) + .thenReturn( + "https://server.proxy:8443/aai/v11/business/customers/customer/customer-1/service-subscriptions/service-subscription/service-subscription-1"); + + Mockito + .when(aaiAdapter.repairSelfLink(Matchers.contains("service-subscription-2"), + Mockito.anyString())) + .thenReturn( + "https://server.proxy:8443/aai/v11/business/customers/customer/customer-2/service-subscriptions/service-subscription/service-subscription-2"); + + Mockito + .when( + aaiAdapter.repairSelfLink(Matchers.contains("service-subscription-3"), Mockito.anyString())) + .thenReturn( + "https://server.proxy:8443/aai/v11/business/customers/customer/customer-3/service-subscriptions/service-subscription/service-subscription-3"); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-subscription-2"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/service-subscription-service-subscription-2.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-subscription-1"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/service-subscription-service-subscription-1.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries(Matchers.contains("service-subscription-3"), + Mockito.anyString(), Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader + .getTestResourceDataJson("/sync/aai/service-subscription-service-subscription-3.json"))); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-59")))) + .thenReturn("https://server.proxy:8443/aai/v11/search/generic-query/service-instance-59"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-54")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-54"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-55")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-55"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-50")))) + .thenReturn( + 
"https://server.proxy:8443/aai/v11/search/generic-query/service-instance-50"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-52")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-52"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-57")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-57"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-53")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-53"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-58")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-58"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-51")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-51"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-56")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-56"); + + + + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-59"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-59.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-54"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-54.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-55"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-55.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-50"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-50.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-52"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-52.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-57"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-57.json"))); + + Mockito + 
.when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-53"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-53.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-58"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-58.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-51"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-51.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-56"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-56.json"))); + + + + + + + /* + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-59"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-59.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-54"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-54.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-55"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-55.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-50"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-50.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-52"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-52.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-57"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-57.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-53"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-53.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-58"), Mockito.anyString(), + 
Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-58.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-51"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-51.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-56"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-56.json"))); + + + */ + + + + + Mockito.when(esAdapter.buildElasticSearchGetDocUrl(Mockito.anyString(), Mockito.anyString())) + .thenReturn("http://localhost:9200/myindex/mytype/doc1", + "http://localhost:9200/myindex/mytype/doc2", + "http://localhost:9200/myindex/mytype/doc3"); + + /* + * Our initial gets from elastic search should be record-not-found + */ + Mockito.when(esAdapter.doGet(Matchers.contains("doc1"), Mockito.any())) + .thenReturn(new OperationResult(404, null)); + Mockito.when(esAdapter.doGet(Matchers.contains("doc2"), Mockito.any())) + .thenReturn(new OperationResult(404, null)); + Mockito.when(esAdapter.doGet(Matchers.contains("doc3"), Mockito.any())) + .thenReturn(new OperationResult(404, null)); + + + Mockito.when(esAdapter.doPut(Matchers.contains("doc"), Mockito.any(), Mockito.any())) + .thenReturn(new OperationResult(200, null)); + + OperationState syncState = cerSynchronizer.doSync(); + assertEquals(OperationState.OK, syncState); + + assertNotNull(cerSynchronizer.getStatReport(false)); + assertNotNull(cerSynchronizer.getStatReport(true)); + + cerSynchronizer.clearCache(); + cerSynchronizer.shutdown(); + + + } + + @Test + public void validateSmallSyncWithRetries() throws Exception { + + cerSynchronizer = new CrossEntityReferenceSynchronizer(esSchemaConfig, 5, 5, 5, aaiStatConfig, + esStatConfig, cerLookup, oxmEntityLookup, searchableEntityLookup); + + cerSynchronizer.setAaiAdapter(aaiAdapter); + cerSynchronizer.setElasticSearchAdapter(esAdapter); + + String nodesQueryResponse = TestResourceLoader + .getTestResourceDataJson("/sync/aai/activeInventory_service-subscription_nodesQuery_response.json"); + + OperationResult entitySelfLinks = new OperationResult(); + + entitySelfLinks.setResultCode(200); + entitySelfLinks.setResult(nodesQueryResponse); + + Mockito.when(aaiAdapter.getSelfLinksByEntityType("service-subscription")) + .thenReturn(entitySelfLinks); + + Mockito + .when(aaiAdapter.repairSelfLink(Matchers.contains("service-subscription-1"), + Mockito.anyString())) + .thenReturn( + "https://server.proxy:8443/aai/v11/business/customers/customer/customer-1/service-subscriptions/service-subscription/service-subscription-1"); + + Mockito + .when(aaiAdapter.repairSelfLink(Matchers.contains("service-subscription-2"), + Mockito.anyString())) + .thenReturn( + "https://server.proxy:8443/aai/v11/business/customers/customer/customer-2/service-subscriptions/service-subscription/service-subscription-2"); + + Mockito + .when( + aaiAdapter.repairSelfLink(Matchers.contains("service-subscription-3"), Mockito.anyString())) + .thenReturn( + "https://server.proxy:8443/aai/v11/business/customers/customer/customer-3/service-subscriptions/service-subscription/service-subscription-3"); + + Mockito + 
.when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-subscription-2"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/service-subscription-service-subscription-2.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-subscription-1"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/service-subscription-service-subscription-1.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries(Matchers.contains("service-subscription-3"), + Mockito.anyString(), Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader + .getTestResourceDataJson("/sync/aai/service-subscription-service-subscription-3.json"))); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-59")))) + .thenReturn("https://server.proxy:8443/aai/v11/search/generic-query/service-instance-59"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-54")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-54"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-55")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-55"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-50")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-50"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-52")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-52"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-57")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-57"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-53")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-53"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-58")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-58"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-51")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-51"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-56")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-56"); + + + + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + 
Matchers.contains("generic-query/service-instance-59"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-59.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-54"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-54.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-55"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-55.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-50"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-50.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-52"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-52.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-57"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-57.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-53"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-53.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-58"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-58.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-51"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-51.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-56"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-56.json"))); + + + + + + + /* + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-59"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-59.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-54"), 
Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-54.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-55"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-55.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-50"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-50.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-52"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-52.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-57"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-57.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-53"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-53.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-58"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-58.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-51"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-51.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-instance-56"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/service-instance/service-instance-56.json"))); + + + */ + + + + + Mockito.when(esAdapter.buildElasticSearchGetDocUrl(Mockito.anyString(), Mockito.anyString())) + .thenReturn("http://localhost:9200/myindex/mytype/doc1", + "http://localhost:9200/myindex/mytype/doc2", + "http://localhost:9200/myindex/mytype/doc3"); + + /* + * Our initial gets from elastic search should be record-not-found + */ + Mockito.when(esAdapter.doGet(Matchers.contains("doc1"), Mockito.any())) + .thenReturn(new OperationResult(404, null)); + Mockito.when(esAdapter.doGet(Matchers.contains("doc2"), Mockito.any())) + .thenReturn(new OperationResult(404, null)); + Mockito.when(esAdapter.doGet(Matchers.contains("doc3"), Mockito.any())) + .thenReturn(new OperationResult(404, null)); + + + /* + * Cause version conflict errors on every put to test retry flow + */ + + Mockito.when(esAdapter.doPut(Matchers.contains("doc"), Mockito.any(), Mockito.any())) + .thenReturn(new 
OperationResult(409, null)); + + OperationState syncState = cerSynchronizer.doSync(); + assertEquals(OperationState.OK, syncState); + + assertNotNull(cerSynchronizer.getStatReport(false)); + assertNotNull(cerSynchronizer.getStatReport(true)); + + cerSynchronizer.clearCache(); + cerSynchronizer.shutdown(); + + + + + } + + + + @Test + public void validateSmallSyncWithEntityMerges() throws Exception { + + cerSynchronizer = new CrossEntityReferenceSynchronizer(esSchemaConfig, 5, 5, 5, aaiStatConfig, + esStatConfig, cerLookup, oxmEntityLookup, searchableEntityLookup); + + cerSynchronizer.setAaiAdapter(aaiAdapter); + cerSynchronizer.setElasticSearchAdapter(esAdapter); + + String nodesQueryResponse = TestResourceLoader + .getTestResourceDataJson("/sync/aai/activeInventory_service-subscription_nodesQuery_response.json"); + + OperationResult entitySelfLinks = new OperationResult(); + + entitySelfLinks.setResultCode(200); + entitySelfLinks.setResult(nodesQueryResponse); + + Mockito.when(aaiAdapter.getSelfLinksByEntityType("service-subscription")) + .thenReturn(entitySelfLinks); + + Mockito + .when(aaiAdapter.repairSelfLink(Matchers.contains("service-subscription-1"), + Mockito.anyString())) + .thenReturn( + "https://server.proxy:8443/aai/v11/business/customers/customer/customer-1/service-subscriptions/service-subscription/service-subscription-1"); + + Mockito + .when(aaiAdapter.repairSelfLink(Matchers.contains("service-subscription-2"), + Mockito.anyString())) + .thenReturn( + "https://server.proxy:8443/aai/v11/business/customers/customer/customer-2/service-subscriptions/service-subscription/service-subscription-2"); + + Mockito + .when( + aaiAdapter.repairSelfLink(Matchers.contains("service-subscription-3"), Mockito.anyString())) + .thenReturn( + "https://server.proxy:8443/aai/v11/business/customers/customer/customer-3/service-subscriptions/service-subscription/service-subscription-3"); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-subscription-2"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/service-subscription-service-subscription-2.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("service-subscription-1"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/service-subscription-service-subscription-1.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries(Matchers.contains("service-subscription-3"), + Mockito.anyString(), Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader + .getTestResourceDataJson("/sync/aai/service-subscription-service-subscription-3.json"))); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-59")))) + .thenReturn("https://server.proxy:8443/aai/v11/search/generic-query/service-instance-59"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-54")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-54"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-55")))) + .thenReturn( + 
"https://server.proxy:8443/aai/v11/search/generic-query/service-instance-55"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-50")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-50"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-52")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-52"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-57")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-57"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-53")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-53"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-58")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-58"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-51")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-51"); + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat(listContainsValue("service-instance-56")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-56"); + + + + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-59"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-59.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-54"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-54.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-55"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-55.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-50"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-50.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-52"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-52.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + 
Matchers.contains("generic-query/service-instance-57"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-57.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-53"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-53.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-58"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-58.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-51"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-51.json"))); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-instance-56"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance-56.json"))); + + + Mockito.when(esAdapter.buildElasticSearchGetDocUrl(Mockito.anyString(), Mockito.anyString())) + .thenReturn("http://localhost:9200/myindex/mytype/doc1", + "http://localhost:9200/myindex/mytype/doc2", + "http://localhost:9200/myindex/mytype/doc3"); + + /* + * Our initial gets from elastic search return 200 ok with a found entity document requiring a doc update + */ + Mockito.when(esAdapter.doGet(Matchers.contains("doc1"), Mockito.any())).thenReturn(new OperationResult(200, + TestResourceLoader.getTestResourceDataJson("/sync/ElasticSearch/docEntityFromElasticSearch1.json"))); + + Mockito.when(esAdapter.doGet(Matchers.contains("doc2"), Mockito.any())).thenReturn(new OperationResult(200, + TestResourceLoader.getTestResourceDataJson("/sync/ElasticSearch/docEntityFromElasticSearch2.json"))); + + Mockito.when(esAdapter.doGet(Matchers.contains("doc3"), Mockito.any())).thenReturn(new OperationResult(200, + TestResourceLoader.getTestResourceDataJson("/sync/ElasticSearch/docEntityFromElasticSearch3.json"))); + + Mockito.when(esAdapter.doGet(Matchers.contains("doc4"), Mockito.any())).thenReturn(new OperationResult(200, + TestResourceLoader.getTestResourceDataJson("/sync/ElasticSearch/docEntityFromElasticSearch4.json"))); + + Mockito.when(esAdapter.doGet(Matchers.contains("doc5"), Mockito.any())).thenReturn(new OperationResult(200, + TestResourceLoader.getTestResourceDataJson("/sync/ElasticSearch/docEntityFromElasticSearch5.json"))); + + Mockito.when(esAdapter.doPut(Matchers.contains("doc"), Mockito.any(), Mockito.any())) + .thenReturn(new OperationResult(200, null)); + + + + Mockito.when(esAdapter.doPut(Matchers.contains("doc"), Mockito.any(), Mockito.any())) + .thenReturn(new OperationResult(200, null)); + + OperationState syncState = cerSynchronizer.doSync(); + assertEquals(OperationState.OK, syncState); + + assertNotNull(cerSynchronizer.getStatReport(false)); + assertNotNull(cerSynchronizer.getStatReport(true)); + + cerSynchronizer.clearCache(); + cerSynchronizer.shutdown(); + + + + + } + + + + +} 
diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/ActiveInventoryAdapterTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/ActiveInventoryAdapterTest.java new file mode 100644 index 0000000..1273e80 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/ActiveInventoryAdapterTest.java @@ -0,0 +1,63 @@ +package org.onap.aai.sparky.dal; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +import java.util.ArrayList; + + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.restclient.enums.RestAuthenticationMode; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig; + +public class ActiveInventoryAdapterTest { + + private OxmModelLoader oxmModelLoader; + private OxmEntityLookup oxmEntityLookup; + private RestEndpointConfig endpointConfig; + private ActiveInventoryAdapter aaiAdapter; + private ArrayList<String> queryParams; + + @Before + public void init() throws Exception { + + oxmModelLoader = new OxmModelLoader(); + oxmEntityLookup = new OxmEntityLookup(); + endpointConfig = new RestEndpointConfig(); + queryParams = new ArrayList<String>(); + endpointConfig.setRestAuthenticationMode(RestAuthenticationMode.SSL_BASIC); + aaiAdapter = new ActiveInventoryAdapter(oxmModelLoader,oxmEntityLookup,endpointConfig); + } + + + @SuppressWarnings("static-access") + @Test + public void updateValues()throws Exception { + + assertNotNull(aaiAdapter.getMessageHeaders()); + assertNotNull(aaiAdapter.getBasicAuthenticationCredentials()); + assertNotNull(aaiAdapter.getResourceBasePath()); + assertNotNull(aaiAdapter.extractResourcePath("")); + assertNotNull(aaiAdapter.getGenericQueryForSelfLink("",queryParams)); + assertNull(aaiAdapter.getSelfLinkForEntity("","","")); + assertNotNull(aaiAdapter.queryActiveInventory("","")); + assertNotNull(aaiAdapter.queryActiveInventoryWithRetries("","",4)); + aaiAdapter.setOxmEntityLookup(oxmEntityLookup); + assertNotNull(aaiAdapter.getOxmEntityLookup()); + aaiAdapter.setEndpointConfig(endpointConfig); + assertNotNull(aaiAdapter.getEndpointConfig()); + + //assertNull(aaiAdapter.repairSelfLink("","")); + //assertNotNull(aaiAdapter.repairSelfLink(null,"")); + //assertNotNull(aaiAdapter.getSelfLinkForEntity(null,"","")); + //assertNull(aaiAdapter.getSelfLinkForEntity("",null,"")); + //assertNotNull(aaiAdapter.getSelfLinkForEntity("","",null)); + //assertNotNull(aaiAdapter.getSelfLinksByEntityType("")); + //assertNotNull(aaiAdapter.getSelfLinksByEntityType(null)); + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/ElasticSearchAdapterTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/ElasticSearchAdapterTest.java new file mode 100644 index 0000000..84fc515 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/ElasticSearchAdapterTest.java @@ -0,0 +1,50 @@ +package org.onap.aai.sparky.dal; + +import static org.junit.Assert.assertNotNull; + +import javax.ws.rs.core.MediaType; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.restclient.enums.RestAuthenticationMode; +import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; + +public class ElasticSearchAdapterTest { + + + private ElasticSearchAdapter elasticSearchAdapter; + private RestEndpointConfig endpointConfig; + + @Before + 
public void init() throws Exception { + + endpointConfig = new RestEndpointConfig(); + endpointConfig.setRestAuthenticationMode(RestAuthenticationMode.SSL_BASIC); + elasticSearchAdapter = new ElasticSearchAdapter(endpointConfig); + } + + + @Test + public void updateValues() { + + assertNotNull(elasticSearchAdapter.doPost("https://10.247.40.25:8000","{maxResults:10, queryStr: f}",MediaType.APPLICATION_JSON_TYPE)); + assertNotNull(elasticSearchAdapter.doGet("https://10.247.40.25:8000",MediaType.APPLICATION_JSON_TYPE)); + assertNotNull(elasticSearchAdapter.doHead("https://10.247.40.25:8000",MediaType.APPLICATION_JSON_TYPE)); + assertNotNull(elasticSearchAdapter.doPut("https://10.247.40.25:8000","{maxResults:10, queryStr: f}",MediaType.APPLICATION_JSON_TYPE)); + assertNotNull(elasticSearchAdapter.doPatch("https://10.247.40.25:8000","{maxResults:10, queryStr: f}",MediaType.APPLICATION_JSON_TYPE)); + assertNotNull(elasticSearchAdapter.doDelete("https://10.247.40.25:8000",MediaType.APPLICATION_JSON_TYPE)); + assertNotNull(elasticSearchAdapter.doBulkOperation("https://10.247.40.25:8000","{maxResults:10, queryStr: f}")); + assertNotNull(elasticSearchAdapter.buildBulkImportOperationRequest("","","","","")); + assertNotNull(elasticSearchAdapter.retrieveEntityById("","","","","")); + assertNotNull(elasticSearchAdapter.buildElasticSearchUrlForApi("","")); + assertNotNull(elasticSearchAdapter.buildElasticSearchUrl("","")); + assertNotNull(elasticSearchAdapter.buildElasticSearchGetDocUrl("","","")); + assertNotNull(elasticSearchAdapter.buildElasticSearchGetDocUrl("","")); + assertNotNull(elasticSearchAdapter.buildElasticSearchPostUrl("")); + assertNotNull(elasticSearchAdapter.getBulkUrl()); + + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/NetworkTransactionTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/NetworkTransactionTest.java new file mode 100644 index 0000000..3852a8f --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/NetworkTransactionTest.java @@ -0,0 +1,59 @@ +package org.onap.aai.sparky.dal; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.dal.rest.HttpMethod; + +public class NetworkTransactionTest { + + + private NetworkTransaction networkTransaction; + private NetworkTransaction networkTransactions; + private OxmEntityDescriptor descriptor; + private OperationResult opResult; + + @Before + public void init() throws Exception { + opResult = new OperationResult(); + descriptor = new OxmEntityDescriptor(); + networkTransaction = new NetworkTransaction(); + networkTransactions = new NetworkTransaction(HttpMethod.POST,"",opResult); + + + + } + + + @Test + public void updateValues() { + + networkTransaction.setOperationResult(opResult); + assertNotNull(networkTransaction.getOperationResult()); + networkTransaction.setOperationType(HttpMethod.PUT); + assertNotNull(networkTransaction.getOperationType()); + networkTransaction.setTaskAgeInMs(); + assertNotNull(networkTransaction.getTaskAgeInMs()); + networkTransaction.setEntityType("searchedNodeClass"); + assertNotNull(networkTransaction.getEntityType()); + 
networkTransaction.setLink("/etc/aaiEntityNodeDescriptors.json"); + assertNotNull(networkTransaction.getLink()); + networkTransaction.setQueryParameters("/etc/aaiEntityNodeDescriptors.json"); + assertNotNull(networkTransaction.getQueryParameters()); + networkTransaction.setDescriptor(descriptor); + assertNotNull(networkTransaction.getDescriptor()); + networkTransaction.setOpTimeInMs(3); + assertEquals(3,networkTransaction.getOpTimeInMs()); + assertNotNull(networkTransaction.toString()); + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/AutoSuggestDocumentEntity.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/AutoSuggestDocumentEntity.java new file mode 100644 index 0000000..d01a98e --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/AutoSuggestDocumentEntity.java @@ -0,0 +1,44 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.elasticsearch.entity; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class AutoSuggestDocumentEntity { + + @JsonProperty("entity_suggest") + AutoSuggestDocumentEntityFields fields; + + public AutoSuggestDocumentEntityFields getFields() { + return fields; + } + + public void setFields(AutoSuggestDocumentEntityFields fields) { + this.fields = fields; + } + + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/AutoSuggestDocumentEntityFields.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/AutoSuggestDocumentEntityFields.java new file mode 100644 index 0000000..a4b7370 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/AutoSuggestDocumentEntityFields.java @@ -0,0 +1,81 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.elasticsearch.entity; + +import java.util.ArrayList; +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; + +@JsonInclude(Include.NON_NULL) +public class AutoSuggestDocumentEntityFields { + + private String output; + private List<String> input; + private PayloadEntity payload; + private int weight; + + public AutoSuggestDocumentEntityFields() { + input = new ArrayList<String>(); + } + + public String getOutput() { + return output; + } + + public void setOutput(String output) { + this.output = output; + } + + public List<String> getInput() { + return input; + } + + public void setInput(List<String> input) { + this.input = input; + } + + public PayloadEntity getPayload() { + return payload; + } + + public void setPayload(PayloadEntity payload) { + this.payload = payload; + } + + public int getWeight() { + return weight; + } + + public void setWeight(int weight) { + this.weight = weight; + } + + public void addInput(String input) { + this.input.add(input); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/AutoSuggestElasticHitEntity.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/AutoSuggestElasticHitEntity.java new file mode 100644 index 0000000..33ad604 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/AutoSuggestElasticHitEntity.java @@ -0,0 +1,87 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.dal.elasticsearch.entity; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class AutoSuggestElasticHitEntity { + + @JsonProperty("_index") + private String index; + + @JsonProperty("_type") + private String type; + + @JsonProperty("_id") + private String id; + + @JsonProperty("_score") + private String score; + + @JsonProperty("_source") + private AutoSuggestDocumentEntity source; + + public String getIndex() { + return index; + } + + public void setIndex(String index) { + this.index = index; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getScore() { + return score; + } + + public void setScore(String score) { + this.score = score; + } + + public AutoSuggestDocumentEntity getSource() { + return source; + } + + public void setSource(AutoSuggestDocumentEntity source) { + this.source = source; + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/AutoSuggestElasticHitsEntity.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/AutoSuggestElasticHitsEntity.java new file mode 100644 index 0000000..7efe4a4 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/AutoSuggestElasticHitsEntity.java @@ -0,0 +1,50 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.dal.elasticsearch.entity; + +import java.util.ArrayList; +import java.util.List; + +public class AutoSuggestElasticHitsEntity { + + private List<AutoSuggestElasticHitEntity> hits; + + public AutoSuggestElasticHitsEntity() { + hits = new ArrayList<AutoSuggestElasticHitEntity>(); + } + + public List<AutoSuggestElasticHitEntity> getHits() { + return hits; + } + + public void setHits(List<AutoSuggestElasticHitEntity> hits) { + this.hits = hits; + } + + public void addHit(AutoSuggestElasticHitEntity hit) { + this.hits.add(hit); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/AutoSuggestElasticSearchResponse.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/AutoSuggestElasticSearchResponse.java new file mode 100644 index 0000000..1b096cd --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/AutoSuggestElasticSearchResponse.java @@ -0,0 +1,85 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.dal.elasticsearch.entity; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class AutoSuggestElasticSearchResponse { + + private int took; + + @JsonProperty("timed_out") + private boolean timedOut; + + @JsonProperty("_shards") + private Map<String,String> shards; + + private AutoSuggestElasticHitsEntity hits; + + public AutoSuggestElasticSearchResponse(){ + this.shards = new HashMap<String,String>(); + } + + public int getTook() { + return took; + } + + public void setTook(int took) { + this.took = took; + } + + public boolean isTimedOut() { + return timedOut; + } + + public void setTimedOut(boolean timedOut) { + this.timedOut = timedOut; + } + + public Map<String, String> getShards() { + return shards; + } + + public void setShards(Map<String, String> shards) { + this.shards = shards; + } + + public void addShard(String name, String value) { + shards.put(name, value); + } + + public AutoSuggestElasticHitsEntity getHits() { + return hits; + } + + public void setHits(AutoSuggestElasticHitsEntity hits) { + this.hits = hits; + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/BucketEntity.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/BucketEntity.java new file mode 100644 index 0000000..69285a2 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/BucketEntity.java @@ -0,0 +1,61 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.dal.elasticsearch.entity; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class BucketEntity { + private String key; + + @JsonProperty("doc_count") + private int docCount; + + public BucketEntity() { + + } + + public BucketEntity(String name, int value) { + this.key = name; + this.docCount = value; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public int getDocCount() { + return docCount; + } + + public void setDocCount(int docCount) { + this.docCount = docCount; + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/ElasticHit.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/ElasticHit.java new file mode 100644 index 0000000..e9409af --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/ElasticHit.java @@ -0,0 +1,29 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.elasticsearch.entity; + +public class ElasticHit { + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/ElasticHitsEntity.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/ElasticHitsEntity.java new file mode 100644 index 0000000..a5a71b8 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/ElasticHitsEntity.java @@ -0,0 +1,74 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.elasticsearch.entity; + +import java.util.ArrayList; +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class ElasticHitsEntity { + + private int total; + @JsonProperty("max_score") + private int maxScore; + + private List<ElasticHit> hits; + + public ElasticHitsEntity() { + this.hits = new ArrayList<ElasticHit>(); + } + + public void addHit(ElasticHit hit) { + this.hits.add(hit); + } + + public int getTotal() { + return total; + } + + public void setTotal(int total) { + this.total = total; + } + + public int getMaxScore() { + return maxScore; + } + + public void setMaxScore(int maxScore) { + this.maxScore = maxScore; + } + + public List<ElasticHit> getHits() { + return hits; + } + + public void setHits(List<ElasticHit> hits) { + this.hits = hits; + } + + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/ElasticSearchAggegrationResponse.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/ElasticSearchAggegrationResponse.java new file mode 100644 index 0000000..8e1b5f1 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/ElasticSearchAggegrationResponse.java @@ -0,0 +1,109 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.dal.elasticsearch.entity; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class ElasticSearchAggegrationResponse { + + private int took; + + @JsonProperty("timed_out") + private boolean timedOut; + + @JsonProperty("_shards") + private Map<String, String> shards; + + private ElasticHitsEntity hits; + + private Map<String, ElasticSearchAggregation> aggregations; + + public ElasticSearchAggegrationResponse() { + this.shards = new HashMap<String, String>(); + this.aggregations = new HashMap<String,ElasticSearchAggregation>(); + } + + + public int getTook() { + return took; + } + + + public void setTook(int took) { + this.took = took; + } + + + public boolean isTimedOut() { + return timedOut; + } + + + public void setTimedOut(boolean timedOut) { + this.timedOut = timedOut; + } + + + public Map<String, String> getShards() { + return shards; + } + + + public void setShards(Map<String, String> shards) { + this.shards = shards; + } + + + public ElasticHitsEntity getHits() { + return hits; + } + + + public void setHits(ElasticHitsEntity hits) { + this.hits = hits; + } + + public void addShard(String key, String value) { + this.shards.put(key,value); + } + + + public Map<String, ElasticSearchAggregation> getAggregations() { + return aggregations; + } + + + public void setAggregations(Map<String, ElasticSearchAggregation> aggregations) { + this.aggregations = aggregations; + } + + public void addAggregation(String key, ElasticSearchAggregation agg) { + this.aggregations.put(key, agg); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/ElasticSearchAggregation.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/ElasticSearchAggregation.java new file mode 100644 index 0000000..a7d9ce7 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/ElasticSearchAggregation.java @@ -0,0 +1,74 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.dal.elasticsearch.entity; + +import java.util.ArrayList; +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class ElasticSearchAggregation { + + @JsonProperty("doc_count_error_upper_bound") + private int docCountErrorUpperBound; + + @JsonProperty("sum_other_doc_count") + private int sumOtherDocCount; + + private List<BucketEntity> buckets; + + public ElasticSearchAggregation() { + buckets = new ArrayList<BucketEntity>(); + } + + public int getDocCountErrorUpperBound() { + return docCountErrorUpperBound; + } + + public void setDocCountErrorUpperBound(int docCountErrorUpperBound) { + this.docCountErrorUpperBound = docCountErrorUpperBound; + } + + public int getSumOtherDocCount() { + return sumOtherDocCount; + } + + public void setSumOtherDocCount(int sumOtherDocCount) { + this.sumOtherDocCount = sumOtherDocCount; + } + + public List<BucketEntity> getBuckets() { + return buckets; + } + + public void setBuckets(List<BucketEntity> buckets) { + this.buckets = buckets; + } + + public void addBucket(BucketEntity bucket) { + buckets.add(bucket); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/ElasticSearchCountResponse.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/ElasticSearchCountResponse.java new file mode 100644 index 0000000..0d165f3 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/ElasticSearchCountResponse.java @@ -0,0 +1,60 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.dal.elasticsearch.entity; + +import java.util.HashMap; +import java.util.Map; + +public class ElasticSearchCountResponse { + + private int count; + private Map<String, String> shards; + + public ElasticSearchCountResponse() { + this.shards = new HashMap<String,String>(); + } + + public int getCount() { + return count; + } + + public void setCount(int count) { + this.count = count; + } + + public Map<String, String> getShards() { + return shards; + } + + public void setShards(Map<String, String> shards) { + this.shards = shards; + } + + public void addShard(String key, String value) { + this.shards.put(key, value); + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/PayloadEntity.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/PayloadEntity.java new file mode 100644 index 0000000..a74ac0f --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/elasticsearch/entity/PayloadEntity.java @@ -0,0 +1,32 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.elasticsearch.entity; + +public class PayloadEntity { + + public PayloadEntity() { + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/proxy/processor/AaiUiProxyProcessorTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/proxy/processor/AaiUiProxyProcessorTest.java new file mode 100644 index 0000000..e710338 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/proxy/processor/AaiUiProxyProcessorTest.java @@ -0,0 +1,145 @@ +/** + * ============LICENSE_START======================================================= + * SPARKY (AAI UI service) + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.proxy.processor; + +import static org.junit.Assert.assertEquals; + +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.core.MediaType; + +import org.apache.camel.Exchange; +import org.apache.camel.Message; +import org.json.JSONObject; +import org.junit.Before; +import org.junit.Test; +import org.mockito.AdditionalMatchers; +import org.mockito.Matchers; +import org.mockito.Mockito; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.restclient.client.RestClient; +import org.onap.aai.restclient.enums.RestAuthenticationMode; +import org.onap.aai.sparky.dal.rest.RestClientConstructionException; +import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig; +import org.restlet.data.Status; + +public class AaiUiProxyProcessorTest { + + private RestClient client = null; + private OperationResult successResult = null; + OperationResult failureResult = null; + private Exchange mockExchange; + private Message mockRequestMessage; + private Message mockResponseMessage; + + private HttpServletRequest mockHttpServletRequest; + + private AaiUiProxyProcessor aaiUiProxyProcessor; + + private String goodBeTargetUrl = "https://0.0.0.0:8000/services/routerService/servicegraph"; + private String badBeTargetUrl = "https://0.0.0.0:8000/aservicegraph"; + private String goodDrTargetUrl = "https://0.0.0.0:9502/ui-request/servicegraph"; + + String successResponsePayload = "good-payload"; + String failureResponsePayload = "Server Error"; + + @Before + public void init()throws RestClientConstructionException { + client = Mockito.mock(RestClient.class); + mockExchange = Mockito.mock(Exchange.class); + mockRequestMessage = Mockito.mock(Message.class); + mockResponseMessage = Mockito.mock(Message.class); + mockHttpServletRequest = Mockito.mock(HttpServletRequest.class); + + + RestEndpointConfig config = new RestEndpointConfig(); + config.setRestAuthenticationMode(RestAuthenticationMode.SSL_BASIC); + aaiUiProxyProcessor = new AaiUiProxyProcessor(config,"ui-request"); + + initializeMocks(getProxyRequestJson("someHashValue")); + aaiUiProxyProcessor.setClient(client); + } + + @Test + public void testProxyMessage_successPath() { + OperationResult successResultSpy = Mockito.spy(successResult); + Mockito.when(client.post(Mockito.eq(goodDrTargetUrl), Mockito.anyString(), Mockito.anyMap(), + Mockito.eq(MediaType.APPLICATION_JSON_TYPE), Mockito.eq(MediaType.APPLICATION_JSON_TYPE))) + .thenReturn(successResultSpy); + + Mockito.when(mockExchange.getIn().getHeader(Exchange.HTTP_URI)).thenReturn(goodBeTargetUrl); + Mockito.when(mockExchange.getIn().getBody(HttpServletRequest.class)).thenReturn(mockHttpServletRequest); + aaiUiProxyProcessor.proxyMessage(mockExchange); + + //Mockito.verify(successResultSpy).getResult(); + //assertEquals(Status.SUCCESS_OK.getCode(), aaiUiProxyProcessor.getOperationResult().getResultCode()); + } + + @Test + public void 
testProxyMessage_failurePath() { + OperationResult failureResultSpy = Mockito.spy(failureResult); + Mockito.when(client.post(AdditionalMatchers.not(Matchers.eq(goodDrTargetUrl)), + Mockito.anyString(), Mockito.anyMap(), Mockito.eq(MediaType.APPLICATION_JSON_TYPE), + Mockito.eq(MediaType.APPLICATION_JSON_TYPE))).thenReturn(failureResultSpy); + + Mockito.when(mockExchange.getIn().getHeader(Exchange.HTTP_URI)).thenReturn(badBeTargetUrl); + Mockito.when(mockExchange.getIn().getBody(HttpServletRequest.class)).thenReturn(mockHttpServletRequest); + aaiUiProxyProcessor.proxyMessage(mockExchange); + + Mockito.verify(failureResultSpy).getFailureCause(); + assertEquals(Status.SERVER_ERROR_INTERNAL.getCode(), aaiUiProxyProcessor.getOperationResult().getResultCode()); + } + + private String getProxyRequestJson(String hashId) { + JSONObject root = new JSONObject(); + root.put("hashId", hashId); + return root.toString(); + + } + + @SuppressWarnings("unchecked") + private void initializeMocks(String requestPayload) { + + client = Mockito.mock(RestClient.class); + successResult = new OperationResult(200, successResponsePayload); + failureResult = new OperationResult(500, failureResponsePayload); + failureResult.setFailureCause(failureResponsePayload); + + Mockito.when(client.post(Mockito.eq(goodDrTargetUrl), Mockito.anyString(), Mockito.anyMap(), + Mockito.eq(MediaType.APPLICATION_JSON_TYPE), Mockito.eq(MediaType.APPLICATION_JSON_TYPE))) + .thenReturn(successResult); + + Mockito.when(client.post(AdditionalMatchers.not(Matchers.eq(goodDrTargetUrl)), + Mockito.anyString(), Mockito.anyMap(), Mockito.eq(MediaType.APPLICATION_JSON_TYPE), + Mockito.eq(MediaType.APPLICATION_JSON_TYPE))).thenReturn(failureResult); + + Mockito.when(mockHttpServletRequest.getRequestURI()).thenReturn("fakeUri"); + Mockito.when(mockHttpServletRequest.getLocalPort()).thenReturn(8001); + + Mockito.when(mockExchange.getIn()).thenReturn(mockRequestMessage); + Mockito.when(mockExchange.getOut()).thenReturn(mockResponseMessage); + } + +} + diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/rest/config/RestEndpointConfigTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/rest/config/RestEndpointConfigTest.java new file mode 100644 index 0000000..eb1e3f8 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/rest/config/RestEndpointConfigTest.java @@ -0,0 +1,76 @@ +package org.onap.aai.sparky.dal.rest.config; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import org.junit.Before; +import org.junit.Test; + +import org.onap.aai.restclient.enums.RestAuthenticationMode; + +public class RestEndpointConfigTest { + + private RestEndpointConfig restEndpointConfig; + @Before + public void init() throws Exception{ + + restEndpointConfig = new RestEndpointConfig(); + + + } + + @Test + public void updateValues(){ + restEndpointConfig.setValidateServerCertChain(true); + assertTrue(restEndpointConfig.isValidateServerCertChain()); + restEndpointConfig.setValidateServerHostname(true); + assertTrue(restEndpointConfig.isValidateServerHostname()); + restEndpointConfig.setEndpointIpAddress("10.147.110.199"); + assertNotNull(restEndpointConfig.getEndpointIpAddress()); + restEndpointConfig.setEndpointServerPort("9885"); + assertNotNull(restEndpointConfig.getEndpointServerPort()); + restEndpointConfig.setNumRequestRetries(5); + 
assertEquals(restEndpointConfig.getNumRequestRetries(),5); + restEndpointConfig.setBasicAuthUserName("sparky"); + assertNotNull(restEndpointConfig.getBasicAuthUserName()); + restEndpointConfig.setBasicAuthPassword("sparky"); + assertNotNull(restEndpointConfig.getBasicAuthPassword()); + restEndpointConfig.setRestAuthenticationMode(RestAuthenticationMode.SSL_BASIC); + assertEquals(restEndpointConfig.getRestAuthenticationMode(),RestAuthenticationMode.SSL_BASIC); + restEndpointConfig.setConnectTimeoutInMs(6000); + assertEquals(restEndpointConfig.getConnectTimeoutInMs(),6000); + restEndpointConfig.setCertFileName("aai-client-cert.p12"); + assertNotNull(restEndpointConfig.getCertFileName()); + restEndpointConfig.setReadTimeoutInMs(4000); + assertEquals(restEndpointConfig.getReadTimeoutInMs(),4000); + restEndpointConfig.setCertPassword("1i9a1u2a1unz1lr61wn51wn11lss1unz1u301i6o"); + assertNotNull(restEndpointConfig.getCertPassword()); + restEndpointConfig.setTruststoreFileName("synchronizer.jks"); + assertNotNull(restEndpointConfig.getTruststoreFileName()); + assertNotNull(restEndpointConfig.toString()); + + restEndpointConfig.setEndpointIpAddress(null); + assertNull(restEndpointConfig.getEndpointIpAddress()); + restEndpointConfig.setEndpointServerPort(null); + assertNull(restEndpointConfig.getEndpointServerPort()); + restEndpointConfig.setCertFileName(null); + assertNull(restEndpointConfig.getCertFileName()); + restEndpointConfig.setTruststoreFileName(null); + assertNull(restEndpointConfig.getTruststoreFileName()); + restEndpointConfig.setRestAuthenticationMode(null); + assertNull(restEndpointConfig.getRestAuthenticationMode()); + restEndpointConfig.setCertPassword(null); + assertNull(restEndpointConfig.getCertPassword()); + restEndpointConfig.setBasicAuthUserName(null); + assertNull(restEndpointConfig.getBasicAuthUserName()); + restEndpointConfig.setBasicAuthPassword(null); + assertNull(restEndpointConfig.getBasicAuthPassword()); + assertNotNull(restEndpointConfig.toString()); + + + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/config/SearchServiceConfigTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/config/SearchServiceConfigTest.java new file mode 100644 index 0000000..712ae30 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/config/SearchServiceConfigTest.java @@ -0,0 +1,21 @@ +package org.onap.aai.sparky.dal.sas.config; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + + +import org.junit.Before; +import org.junit.Test; + +public class SearchServiceConfigTest { + + @Before + public void init() throws Exception{ + + } + + @Test + public void updateValues(){ + + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/DocumentEntity.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/DocumentEntity.java new file mode 100644 index 0000000..b9349a9 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/DocumentEntity.java @@ -0,0 +1,68 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.sas.entity; + +import java.util.HashMap; +import java.util.Map; + +public class DocumentEntity { + private String etag; + private String url; + private Map<String,String> content; + + public DocumentEntity() { + content = new HashMap<String,String>(); + } + + + public String getEtag() { + return etag; + } + + public void setEtag(String etag) { + this.etag = etag; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public Map<String, String> getContent() { + return content; + } + + public void setContent(Map<String, String> content) { + this.content = content; + } + + public void addContent(String key, String value) { + content.put(key, value); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/EntityCountResponse.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/EntityCountResponse.java new file mode 100644 index 0000000..20002d2 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/EntityCountResponse.java @@ -0,0 +1,55 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.dal.sas.entity; + +import java.util.HashMap; +import java.util.Map; + +public class EntityCountResponse { + + private Map<String,String> shards; + private int count; + + public EntityCountResponse() { + this.shards = new HashMap<String,String>(); + } + + public Map<String, String> getShards() { + return shards; + } + + public void setShards(Map<String, String> shards) { + this.shards = shards; + } + + public int getCount() { + return count; + } + + public void setCount(int count) { + this.count = count; + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/GroupByAggregationEntity.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/GroupByAggregationEntity.java new file mode 100644 index 0000000..4a3bbf4 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/GroupByAggregationEntity.java @@ -0,0 +1,60 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.sas.entity; + +import java.util.ArrayList; +import java.util.List; + +import org.onap.aai.sparky.dal.elasticsearch.entity.BucketEntity; + +public class GroupByAggregationEntity { + private int totalChartHits; + List<BucketEntity> buckets; + + public GroupByAggregationEntity() { + this.buckets = new ArrayList<BucketEntity>(); + } + + public int getTotalChartHits() { + return totalChartHits; + } + + public void setTotalChartHits(int totalChartHits) { + this.totalChartHits = totalChartHits; + } + + public List<BucketEntity> getBuckets() { + return buckets; + } + + public void setBuckets(List<BucketEntity> buckets) { + this.buckets = buckets; + } + + public void addBucket(BucketEntity bucket) { + this.buckets.add(bucket); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/GroupByAggregationResponseEntity.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/GroupByAggregationResponseEntity.java new file mode 100644 index 0000000..fd2e17d --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/GroupByAggregationResponseEntity.java @@ -0,0 +1,48 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. 
+ * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.sas.entity; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class GroupByAggregationResponseEntity { + + @JsonProperty("groupby_aggregation") + private GroupByAggregationEntity aggEntity; + + public GroupByAggregationResponseEntity() { + + } + + public GroupByAggregationEntity getAggEntity() { + return aggEntity; + } + + public void setAggEntity(GroupByAggregationEntity aggEntity) { + this.aggEntity = aggEntity; + } + + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/HitEntity.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/HitEntity.java new file mode 100644 index 0000000..73b2d12 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/HitEntity.java @@ -0,0 +1,48 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.dal.sas.entity; + +public class HitEntity { + + private String score; + private DocumentEntity document; + + public String getScore() { + return score; + } + + public void setScore(String score) { + this.score = score; + } + + public DocumentEntity getDocument() { + return document; + } + + public void setDocument(DocumentEntity document) { + this.document = document; + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/SearchAbstractionEntityBuilder.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/SearchAbstractionEntityBuilder.java new file mode 100644 index 0000000..88dff42 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/SearchAbstractionEntityBuilder.java @@ -0,0 +1,294 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.onap.aai.sparky.dal.sas.entity; + +import java.util.ArrayList; +import java.util.List; + +public class SearchAbstractionEntityBuilder { + + + public static HitEntity getHitSample1() { + + HitEntity hitEntity = new HitEntity(); + DocumentEntity doc = new DocumentEntity(); + + hitEntity.setDocument(doc); + hitEntity.setScore("17.073963"); + + doc.addContent("entityPrimaryKeyValue", "example-vnf-id-val-4394"); + doc.addContent("entityType", "vpe"); + doc.addContent("searchTags", "example-vnf-id-val-4394;example-vnf-name-val-4394;example-vnf-name2-val-4394"); + doc.addContent("link", "https://ext1.test.onap.com:8443/aai/v9/network/vpes/vpe/example-vnf-id-val-4394"); + doc.addContent("searchTagIDs", "0;1;2"); + doc.addContent("lastmodTimestamp", "2017-04-18T17:20:48.072-0400"); + + doc.setUrl("services/search-data-service/v1/search/indexes/entitysearchindex-localhost-ist-apr18/documents/e317a35256717f10e88d1b2c995efcdddfc911bf350c73e37e8afca6dfb11553"); + doc.setEtag("1"); + + + return hitEntity; + + } + + public static HitEntity getHitSample2() { + + HitEntity hitEntity = new HitEntity(); + DocumentEntity doc = new DocumentEntity(); + + hitEntity.setDocument(doc); + hitEntity.setScore("17.073963"); + + doc.addContent("entityPrimaryKeyValue", "vpe-vnf-id-team4-11"); + doc.addContent("entityType", "vpe"); + doc.addContent("searchTags", "vpe-vnf-id-team4-11;example-vnf-name-val-9512;example-vnf-name2-val-9512"); + doc.addContent("link", "https://ext1.test.onap.com:8443/aai/v9/network/vpes/vpe/vpe-vnf-id-team4-11"); + doc.addContent("searchTagIDs", "0;1;2"); + doc.addContent("lastmodTimestamp", "2017-04-18T17:20:48.175-0400"); + + doc.setUrl("services/search-data-service/v1/search/indexes/entitysearchindex-localhost-ist-apr18/documents/80f6d1a252e047e50e0adbeb90ad30876bb5b63cf70c9dd53f3fe46aeb50c74b"); + doc.setEtag("1"); + + + return hitEntity; + + } + + public static HitEntity getHitSample3() { + + HitEntity hitEntity = new HitEntity(); + DocumentEntity doc = new DocumentEntity(); + + hitEntity.setDocument(doc); + hitEntity.setScore("17.030035"); + + doc.addContent("entityPrimaryKeyValue", "example-vnf-id-val-6176"); + doc.addContent("entityType", "generic-vnf"); + doc.addContent("searchTags", "example-vnf-id-val-6176;example-vnf-name-val-6176;example-vnf-name2-val-6176"); + doc.addContent("link", "https://ext1.test.onap.com:8443/aai/v9/network/generic-vnfs/generic-vnf/example-vnf-id-val-6176"); + doc.addContent("searchTagIDs", "0;1;2"); + doc.addContent("lastmodTimestamp", "2017-04-18T17:29:39.889-0400"); + + doc.setUrl("services/search-data-service/v1/search/indexes/entitysearchindex-localhost-ist-apr18/documents/8dfd1136f943296508fee11efcda35a0719aa490aa60e9abffecce0b220d8c94"); + doc.setEtag("1"); + + + return hitEntity; + + } + + public static HitEntity getHitSample4() { + + HitEntity hitEntity = new HitEntity(); + DocumentEntity doc = new DocumentEntity(); + + hitEntity.setDocument(doc); + hitEntity.setScore("17.01174"); + + doc.addContent("entityPrimaryKeyValue", "vnf-id-team4-11"); + doc.addContent("entityType", "newvce"); + doc.addContent("searchTags", "vnf-id-team4-11;example-vnf-name-val-5313;example-vnf-name2-val-5313"); + doc.addContent("link", "https://ext1.test.onap.com:8443/aai/v9/network/newvces/newvce/vnf-id-team4-11"); + doc.addContent("searchTagIDs", "0;1;2"); + doc.addContent("lastmodTimestamp", "2017-04-18T17:21:08.142-0400"); + + 
doc.setUrl("services/search-data-service/v1/search/indexes/entitysearchindex-localhost-ist-apr18/documents/83dcab92d75b20eb94578039c8cec5e7b6b4717791e3c367d8af5069ce76dc90"); + doc.setEtag("1"); + + + return hitEntity; + + } + + public static HitEntity getHitSample5() { + + HitEntity hitEntity = new HitEntity(); + DocumentEntity doc = new DocumentEntity(); + + hitEntity.setDocument(doc); + hitEntity.setScore("17.01174"); + + doc.addContent("entityPrimaryKeyValue", "example-vnf-id2-val-9501"); + doc.addContent("entityType", "newvce"); + doc.addContent("searchTags", "example-vnf-id2-val-9501;example-vnf-name-val-9501;example-vnf-name2-val-9501"); + doc.addContent("link", "https://ext1.test.onap.com:8443/aai/v9/network/newvces/newvce/example-vnf-id2-val-9501"); + doc.addContent("searchTagIDs", "0;1;2"); + doc.addContent("lastmodTimestamp", "2017-04-18T17:21:23.323-0400"); + + doc.setUrl("services/search-data-service/v1/search/indexes/entitysearchindex-localhost-ist-apr18/documents/461816ba8aa94d01f2c978999b843dbaf10e0509db58d1945d6f5999d6db8f5e"); + doc.setEtag("1"); + + + return hitEntity; + + } + + public static HitEntity getHitSample6() { + + HitEntity hitEntity = new HitEntity(); + DocumentEntity doc = new DocumentEntity(); + + hitEntity.setDocument(doc); + hitEntity.setScore("17.01174"); + + doc.addContent("entityPrimaryKeyValue", "vnf-id-dm-auto-10"); + doc.addContent("entityType", "vce"); + doc.addContent("searchTags", "vpe-id-dm-auto-10;vnf-id-dm-auto-10;vnf-name-dm-auto-10;vnf-name2-dm-auto-10"); + doc.addContent("link", "https://ext1.test.onap.com:8443/aai/v9/network/vces/vce/vnf-id-dm-auto-10"); + doc.addContent("searchTagIDs", "0;1;2;3"); + doc.addContent("lastmodTimestamp", "2017-04-18T17:24:57.209-0400"); + + doc.setUrl("services/search-data-service/v1/search/indexes/entitysearchindex-localhost-ist-apr18/documents/1ead4512e65ee0eafb24e0156cc1abdf97368f08dfe065f02580aa09661bbcd8"); + doc.setEtag("1"); + + + return hitEntity; + + } + + public static HitEntity getHitSample7() { + + HitEntity hitEntity = new HitEntity(); + DocumentEntity doc = new DocumentEntity(); + + hitEntity.setDocument(doc); + hitEntity.setScore("13.940832"); + + doc.addContent("entityPrimaryKeyValue", "e3e59c5b-ad48-44d0-b3e4-80eacdcee4c7"); + doc.addContent("entityType", "generic-vnf"); + doc.addContent("searchTags", "e3e59c5b-ad48-44d0-b3e4-80eacdcee4c7;VNF_Test_vNF_modules_01"); + doc.addContent("link", "https://ext1.test.onap.com:8443/aai/v9/network/generic-vnfs/generic-vnf/e3e59c5b-ad48-44d0-b3e4-80eacdcee4c7"); + doc.addContent("searchTagIDs", "0;1"); + doc.addContent("lastmodTimestamp", "2017-04-18T17:26:34.603-0400"); + + doc.setUrl("services/search-data-service/v1/search/indexes/entitysearchindex-localhost-ist-apr18/documents/1462582e8fd7786f72f26548e4247b72ab6cd101cca0bbb68a60dd3ad16500d0"); + doc.setEtag("1"); + + + return hitEntity; + + } + + public static HitEntity getHitSample8() { + + HitEntity hitEntity = new HitEntity(); + DocumentEntity doc = new DocumentEntity(); + + hitEntity.setDocument(doc); + hitEntity.setScore("13.940832"); + + doc.addContent("entityPrimaryKeyValue", "fusion-jitsi-vnf-001"); + doc.addContent("entityType", "generic-vnf"); + doc.addContent("searchTags", "fusion-jitsi-vnf-001;fusion-jitsi-vnf"); + doc.addContent("link", "https://ext1.test.onap.com:8443/aai/v9/network/generic-vnfs/generic-vnf/fusion-jitsi-vnf-001"); + doc.addContent("searchTagIDs", "0;1"); + doc.addContent("lastmodTimestamp", "2017-04-18T17:28:14.293-0400"); + + 
doc.setUrl("services/search-data-service/v1/search/indexes/entitysearchindex-localhost-ist-apr18/documents/b79ddfec9a00184445174c91e7490a0d407f351983bba4ae53bfec0584f73ee3"); + doc.setEtag("1"); + + + return hitEntity; + + } + + public static HitEntity getHitSample9() { + + HitEntity hitEntity = new HitEntity(); + DocumentEntity doc = new DocumentEntity(); + + hitEntity.setDocument(doc); + hitEntity.setScore("13.940832"); + + doc.addContent("entityPrimaryKeyValue", "vnfm0003v"); + doc.addContent("entityType", "generic-vnf"); + doc.addContent("searchTags", "vnfm0003v;vnfm0003v"); + doc.addContent("link", "https://ext1.test.onap.com:8443/aai/v9/network/generic-vnfs/generic-vnf/vnfm0003v"); + doc.addContent("searchTagIDs", "0;1"); + doc.addContent("lastmodTimestamp", "2017-04-18T17:29:39.594-0400"); + + doc.setUrl("services/search-data-service/v1/search/indexes/entitysearchindex-localhost-ist-apr18/documents/52ae232ea5506d6de8ef35c4f46a1ceafe35f3717ff578b83531bc7615870b12"); + doc.setEtag("1"); + + + return hitEntity; + + } + + public static HitEntity getHitSample10() { + + HitEntity hitEntity = new HitEntity(); + DocumentEntity doc = new DocumentEntity(); + + hitEntity.setDocument(doc); + hitEntity.setScore("13.928098"); + + doc.addContent("entityPrimaryKeyValue", "amist456vnf"); + doc.addContent("entityType", "generic-vnf"); + doc.addContent("searchTags", "amist456vnf;amist456vnf"); + doc.addContent("link", "https://ext1.test.onap.com:8443/aai/v9/network/generic-vnfs/generic-vnf/amist456vnf"); + doc.addContent("searchTagIDs", "0;1"); + doc.addContent("lastmodTimestamp", "2017-04-18T17:28:28.163-0400"); + + doc.setUrl("services/search-data-service/v1/search/indexes/entitysearchindex-localhost-ist-apr18/documents/3424afea5963696380a0fdc78ee5320cf5fa9bc0459f1f9376db208d31196434"); + doc.setEtag("1"); + + + return hitEntity; + + } + + + + public static SearchAbstractionResponse getSuccessfulEntitySearchResponse() { + + SearchAbstractionResponse sasResponse = new SearchAbstractionResponse(); + + SearchResult searchResult = new SearchResult(); + sasResponse.setSearchResult(searchResult); + + searchResult.setTotalHits(3257); + + List<HitEntity> hits = new ArrayList<HitEntity>(); + + hits.add(getHitSample1()); + hits.add(getHitSample2()); + hits.add(getHitSample3()); + hits.add(getHitSample4()); + hits.add(getHitSample5()); + hits.add(getHitSample6()); + hits.add(getHitSample7()); + hits.add(getHitSample8()); + hits.add(getHitSample9()); + hits.add(getHitSample10()); + + searchResult.setHits(hits); + + return sasResponse; + + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/SearchAbstractionResponse.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/SearchAbstractionResponse.java new file mode 100644 index 0000000..b937f66 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/SearchAbstractionResponse.java @@ -0,0 +1,39 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.sas.entity; + +public class SearchAbstractionResponse { + + private SearchResult searchResult; + + public SearchResult getSearchResult() { + return searchResult; + } + + public void setSearchResult(SearchResult searchResult) { + this.searchResult = searchResult; + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/SearchResult.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/SearchResult.java new file mode 100644 index 0000000..a656491 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/dal/sas/entity/SearchResult.java @@ -0,0 +1,49 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.dal.sas.entity; + +import java.util.List; + +public class SearchResult { + + private int totalHits; + private List<HitEntity> hits; + + public int getTotalHits() { + return totalHits; + } + public void setTotalHits(int totalHits) { + this.totalHits = totalHits; + } + public List<HitEntity> getHits() { + return hits; + } + public void setHits(List<HitEntity> hits) { + this.hits = hits; + } + + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/editattributes/AttributeUpdaterTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/editattributes/AttributeUpdaterTest.java new file mode 100644 index 0000000..63b3ff4 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/editattributes/AttributeUpdaterTest.java @@ -0,0 +1,164 @@ +package org.onap.aai.sparky.editattributes; +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. 
+ * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +/* +package org.openecomp.sparky.editattributes; + +import static org.junit.Assert.assertEquals; + +import java.util.HashMap; +import java.util.Map; + +import javax.ws.rs.core.Response.Status; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.sparky.dal.aai.config.ActiveInventoryConfig; +import org.onap.aai.sparky.editattributes.AttributeUpdater; +import org.openecomp.sparky.dal.aai.config.ActiveInventoryConfigUtil; + + +/** + * The Class AttributeUpdaterTest. + * +public class AttributeUpdaterTest { + + /** + * Sets the up. + * + * @throws Exception the exception + * + @Before + public void setUp() throws Exception {} + + /** + * @throws Exception + */ + /* + @Test + public void testUpdateObjectAttribute() throws Exception { + + OxmEntityDescriptor desc = new OxmEntityDescriptor(); + desc.addPrimaryKeyName("hostname"); + desc.setEntityName("pserver"); + + OxmEntityLookup entityLookup = OxmEntityLookup.getInstance(); + entityLookup.addEntityDescriptor("pserver", desc); + + AttributeUpdater updater = new AttributeUpdater(new OxmModelLoader(), entityLookup, + new ActiveInventoryConfig(ActiveInventoryConfigUtil.getValidTestProperties())); + Map<String, Object> attributes = new HashMap<>(); + attributes.put("prov-status", "PREPROV"); + attributes.put("in-maint", "true"); + OperationResult result = updater.updateObjectAttribute( + "cloud-infrastructure/pservers/pserver/something", attributes, "someid"); + assertEquals(Status.FORBIDDEN.getStatusCode(), result.getResultCode()); + } + */ + + // This needs the OXM file in place to work. + /** + * Test get edit object from uri. + * + * @throws Exception the exception + */ + //@Test + /*public void testGetEditObjectFromUri() throws Exception { + + OxmModelLoader loader = new OxmModelLoader(); + loader.setLatestVersionNum(11); + + OxmEntityDescriptor desc = new OxmEntityDescriptor(); + desc.addPrimaryKeyName("hostname"); + desc.setEntityName("pserver"); + + OxmEntityLookup entityLookup = OxmEntityLookup.getInstance(); + entityLookup.addEntityDescriptor("pserver", desc); + + + DynamicType mockType = Mockito.mock(DynamicType.class); + Class<? 
extends DynamicEntity> mockDynamicEntity = Mockito.mock(DynamicEntity.class); + + Mockito.when(mockType.getJavaClass()).thenReturn(mockDynamicEntity); + + + + + HashMap<String, DynamicType> typeLookup = new HashMap<String,DynamicType>(); + typeLookup.put("pserver", mockType); + + entityLookup.setEntityTypeLookup(typeLookup); + + + AttributeUpdater updater = new AttributeUpdater(new OxmModelLoader(), entityLookup, + new ActiveInventoryConfig(ActiveInventoryConfigUtil.getValidTestProperties())); + AaiEditObject result = + updater.getEditObjectFromUri("cloud-infrastructure/pservers/pserver/mtznjtax101"); + assertEquals("Pserver", result.getObjectType()); + assertEquals("pserver", result.getRootElement()); + assertEquals("hostname", result.getKeyName()); + assertEquals("mtznjtax101", result.getKeyValue()); + }*/ + + /** + * Test get relative uri. + * + * @throws Exception the exception + */ + /* + @Test + public void testGetRelativeUri() throws Exception { + + OxmEntityDescriptor desc = new OxmEntityDescriptor(); + desc.addPrimaryKeyName("hostname"); + desc.setEntityName("pserver"); + + OxmEntityLookup entityLookup = OxmEntityLookup.getInstance(); + entityLookup.addEntityDescriptor("pserver", desc); + + AttributeUpdater updater = new AttributeUpdater(new OxmModelLoader(), entityLookup, + new ActiveInventoryConfig(ActiveInventoryConfigUtil.getValidTestProperties())); + // Test entity uri without "/aai/version/" + String result = updater.getRelativeUri("cloud-infrastructure/pservers/pserver/mtznjtax101"); + assertEquals("/cloud-infrastructure/pservers/pserver/mtznjtax101", result); + result = updater.getRelativeUri("/aai/v8/cloud-infrastructure/pservers/pserver/mtznjtax101"); + assertEquals("/cloud-infrastructure/pservers/pserver/mtznjtax101", result); + + result = updater.getRelativeUri("/v8/cloud-infrastructure/pservers/pserver/mtznjtax101"); + assertEquals("/cloud-infrastructure/pservers/pserver/mtznjtax101", result); + + result = updater.getRelativeUri("aai/v88/cloud-infrastructure/pservers/pserver/mtznjtax101"); + assertEquals("/cloud-infrastructure/pservers/pserver/mtznjtax101", result); + + result = updater.getRelativeUri("/cloud-infrastructure/pservers/pserver/mtznjtax101"); + assertEquals("/cloud-infrastructure/pservers/pserver/mtznjtax101", result); + } + +} +*/
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/editattributes/EditAttributesTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/editattributes/EditAttributesTest.java new file mode 100644 index 0000000..4e7e80a --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/editattributes/EditAttributesTest.java @@ -0,0 +1,171 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.editattributes; + +import java.io.IOException; +import java.io.InputStream; + +import javax.servlet.ReadListener; +import javax.servlet.ServletInputStream; + +import org.apache.commons.io.IOUtils; +import org.junit.Before; +import org.junit.BeforeClass; + + +/** + * The Class EditAttributesTest. + */ +public class EditAttributesTest { + String sampleJsonRequest = + "{ \"entity-uri\" : \"some/uri/value/here\", \"entity-type\" : \"complex\"," + + " \"attributes\" : { \"prov-status\" : \"PREPROV\", \"inMaint\" : \"true\"," + + " \"isClosedLoop\" : \"false\" }}"; + + /** + * Sets the up before class. + * + * @throws Exception the exception + */ + @BeforeClass + public static void setUpBeforeClass() throws Exception {} + + /** + * Sets the up. + * + * @throws Exception the exception + */ + @Before + public void setUp() throws Exception {} + + + /** + * Test analyze edit request body. + */ + /* + @Test + public void testAnalyzeEditRequestBody() { + AttributeEditProcessor aes = new AttributeEditProcessor(); + EditRequest request = aes.analyzeEditRequestBody(sampleJsonRequest); + System.out.println("JSON Body : " + sampleJsonRequest); + assertNotNull(request); + assertEquals("URI should match", "some/uri/value/here", request.getEntityUri()); + assertEquals("Entity Type should match", "complex", request.getEntityType()); + assertEquals("Attribute ProvStatus should match", "PREPROV", + request.getAttributes().get("prov-status")); + assertEquals("Attribute inMaint should be true", "true", + request.getAttributes().get("inMaint")); + assertEquals("Attribute isClosedLoop should be false", "false", + request.getAttributes().get("isClosedLoop")); + + } + */ + + + /** + * Test edit request. + * + * @throws IOException Signals that an I/O exception has occurred. 
+ * @throws ServletException the servlet exception + * @throws JSONException the JSON exception + */ + /* + @Test + public void testEditRequest() throws IOException, ServletException, JSONException { + HttpServletRequest mockRequest = mock(HttpServletRequest.class); + HttpServletResponse mockResponse = mock(HttpServletResponse.class); + ServletOutputStream mockOutput = mock(ServletOutputStream.class); + ServletInputStream mockInput = new MockServletInputStream(sampleJsonRequest); + + when(mockRequest.getRequestURI()).thenReturn("editAttributes"); + when(mockResponse.getOutputStream()).thenReturn(mockOutput); + + when(mockRequest.getInputStream()).thenReturn(mockInput); + + Principal princip = new UserPrincipal("ds1150"); + + when(mockRequest.getUserPrincipal()).thenReturn(princip); + + PrintWriter writer = new PrintWriter("editServletTest.txt"); + when(mockResponse.getWriter()).thenReturn(writer); + AttributeEditProcessor aes = new AttributeEditProcessor(); + aes.doPost(mockRequest, mockResponse); + JSONObject result = null; + try { + writer.close(); + result = new JSONObject(FileUtils.readFileToString(new File("editServletTest.txt"), "UTF-8")); + } catch (JSONException ex) { + // Nothing to catch + } + assertNotNull(result); + // assertEquals("Attributes updated successfully (just need PATCH !!!)", result.get("result")); + } + */ + + + + /** + * The Class MockServletInputStream. + */ + class MockServletInputStream extends ServletInputStream { + InputStream inputStream; + + /** + * Instantiates a new mock servlet input stream. + * + * @param string the string + */ + MockServletInputStream(String string) { + this.inputStream = IOUtils.toInputStream(string); + } + + /* (non-Javadoc) + * @see java.io.InputStream#read() + */ + @Override + public int read() throws IOException { + return inputStream.read(); + } + + @Override + public boolean isFinished() { + // TODO Auto-generated method stub + return false; + } + + @Override + public boolean isReady() { + // TODO Auto-generated method stub + return false; + } + + @Override + public void setReadListener(ReadListener readListener) { + // TODO Auto-generated method stub + + } + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/editattributes/TestUserAuthorizationReader.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/editattributes/TestUserAuthorizationReader.java new file mode 100644 index 0000000..4f99b6d --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/editattributes/TestUserAuthorizationReader.java @@ -0,0 +1,113 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.editattributes; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; +import static org.junit.Assert.assertThat; + +import java.io.File; +import java.nio.file.Paths; +import java.util.List; + +import org.junit.BeforeClass; +import org.junit.Test; +import org.onap.aai.sparky.editattributes.UserAuthorizationReader; + +/** + * The Class TestUserAuthorizationReader. + */ +public class TestUserAuthorizationReader { + + private static File userAuthFile; + private static File userAuthFileEmpty; + + /** + * Sets the up before class. + * + * @throws Exception the exception + */ + @BeforeClass + public static void setUpBeforeClass() throws Exception { + userAuthFile = Paths.get(TestData.USER_AUTH_FILE.getFilename()).toFile(); + userAuthFileEmpty = Paths.get(TestData.USER_AUTH_FILE_EMPTY.getFilename()).toFile(); + } + + /** + * The Enum TestData. + */ + enum TestData { + // @formatter:off + USER_AUTH_FILE( + "src/test/resources/user-auth-reader/authorized-users.config"), USER_AUTH_FILE_EMPTY( + "src/test/resources/user-auth-reader/authorized-users-empty.config"); + + private String filename; + + /** + * Instantiates a new test data. + * + * @param filename the filename + */ + TestData(String filename) { + this.filename = filename; + } + + public String getFilename() { + return this.filename; + } + // @formatter:on + } + + /** + * Test get users. + * + * @throws Exception the exception + */ + @Test + public void testGetUsers() throws Exception { + UserAuthorizationReader userAuthorizationReader = new UserAuthorizationReader(userAuthFile); + + // Method under test + List<String> userList = userAuthorizationReader.getUsers(); + + assertThat(userList, containsInAnyOrder("user1", "user2 user3", "user4")); + } + + /** + * Test get users passing empty config. + * + * @throws Exception the exception + */ + @Test + public void testGetUsersPassingEmptyConfig() throws Exception { + UserAuthorizationReader userConfigReader = new UserAuthorizationReader(userAuthFileEmpty); + + List<String> userList = userConfigReader.getUsers(); + + assertThat(userList, empty()); + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/editattributes/TestUserValidator.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/editattributes/TestUserValidator.java new file mode 100644 index 0000000..e20defa --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/editattributes/TestUserValidator.java @@ -0,0 +1,137 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.editattributes; + +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThat; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.nio.file.Paths; + +import org.apache.log4j.BasicConfigurator; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; +import org.onap.aai.sparky.editattributes.UserAuthorizationReader; +import org.onap.aai.sparky.editattributes.UserValidator; + +/** + * The Class TestUserValidator. + */ +@RunWith(MockitoJUnitRunner.class) +public class TestUserValidator { + + @Mock + private UserAuthorizationReader userAuthorizationReader; + + @InjectMocks + private UserValidator userValidator; + + private static File userAuthFile; + private static File missingUserAuthFile; + + /** + * Sets the up before class. + * + * @throws Exception the exception + */ + @BeforeClass + public static void setUpBeforeClass() throws Exception { + BasicConfigurator.configure(); + userAuthFile = Paths.get(TestData.USER_AUTH_FILE.getFilename()).toFile(); + missingUserAuthFile = Paths.get(TestData.MISSING_USER_AUTH_FILE.getFilename()).toFile(); + } + + /** + * The Enum TestData. + */ + enum TestData { + // @formatter:off + USER_AUTH_FILE( + "src/test/resources/user-validator/authorized-users.config"), MISSING_USER_AUTH_FILE( + "src/test/resources/user-validator/missing.config"); + + private String filename; + + /** + * Instantiates a new test data. + * + * @param filename the filename + */ + TestData(String filename) { + this.filename = filename; + } + + public String getFilename() { + return this.filename; + } + // @formatter:on + } + + /** + * Test is authorized user. + * + * @throws Exception the exception + */ + @Test + public void testIsAuthorizedUser() throws Exception { + when(userAuthorizationReader.getUsers()).thenCallRealMethod(); + when(userAuthorizationReader.getUserAuthorizationFile()).thenReturn(userAuthFile); + + boolean isAuthUser = userValidator.isAuthorizedUser("user1"); + assertThat(isAuthUser, is(true)); + + boolean isAuthUser2 = userValidator.isAuthorizedUser("user2"); + assertThat(isAuthUser2, is(false)); + + boolean isAuthUser3 = userValidator.isAuthorizedUser("user3"); + assertThat(isAuthUser3, is(false)); + + boolean isAuthUser4 = userValidator.isAuthorizedUser("not-in-file"); + assertThat(isAuthUser4, is(false)); + + boolean isAuthUser5 = userValidator.isAuthorizedUser("user4"); + assertThat(isAuthUser5, is(true)); + } + + /** + * Test not authorized if file not present. 
+ * + * @throws Exception the exception + */ + @Test + public void testNotAuthorizedIfFileNotPresent() throws Exception { + when(userAuthorizationReader.getUsers()).thenCallRealMethod(); + when(userAuthorizationReader.getUserAuthorizationFile()).thenReturn(missingUserAuthFile); + + boolean isAuthUser = userValidator.isAuthorizedUser("user1"); + assertThat(isAuthUser, is(false)); + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/editattributes/entity/EditRequestTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/editattributes/entity/EditRequestTest.java new file mode 100644 index 0000000..5f87a27 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/editattributes/entity/EditRequestTest.java @@ -0,0 +1,43 @@ +package org.onap.aai.sparky.editattributes.entity; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +import java.util.HashMap; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.viewandinspect.EntityTypeAggregation; +import org.onap.aai.sparky.viewandinspect.entity.GraphMeta; + +import com.fasterxml.jackson.databind.node.JsonNodeFactory; + +public class EditRequestTest { + + private EditRequest editAttribute; + private HashMap<String,Object> attributes; + + + + @Before + public void init() throws Exception { + + editAttribute = new EditRequest(); + attributes = new HashMap<String,Object>(); + + } + + + @Test + public void updateValues() { + + editAttribute.setEntityUri(""); + assertNotNull(editAttribute.getEntityUri()); + editAttribute.setEntityType(""); + assertNotNull(editAttribute.getEntityType()); + editAttribute.setAttributes(attributes); + assertNotNull(editAttribute.getAttributes()); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/inventory/EntityHistoryQueryBuilderTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/inventory/EntityHistoryQueryBuilderTest.java new file mode 100644 index 0000000..9cd7781 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/inventory/EntityHistoryQueryBuilderTest.java @@ -0,0 +1,34 @@ +package org.onap.aai.sparky.inventory; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +//import java.security.SecureRandom; + +import org.junit.Before; +import org.junit.Test; +//import org.openecomp.sparky.analytics.HistoricalCounter; +import org.onap.aai.sparky.inventory.EntityHistoryQueryBuilder; + +public class EntityHistoryQueryBuilderTest { + + private EntityHistoryQueryBuilder entityHistoryQueryBuilder; + + @Before + public void init() throws Exception { + entityHistoryQueryBuilder = new EntityHistoryQueryBuilder(); + } + + @SuppressWarnings("static-access") + @Test + public void successfullBuild() { + entityHistoryQueryBuilder.getQuery("table"); + assertNotNull(entityHistoryQueryBuilder.createTableQuery()); + entityHistoryQueryBuilder.getQuery("graph"); + assertNotNull(entityHistoryQueryBuilder.createGraphQuery()); + entityHistoryQueryBuilder.getQuery("tree"); + assertNull(entityHistoryQueryBuilder.getQuery("tree")); + + } + +}
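The EntityHistoryQueryBuilderTest above only pins down the builder's dispatch contract: createTableQuery() and createGraphQuery() return non-null queries, while getQuery("tree") returns null. Below is a minimal standalone sketch of that contract, assuming a javax.json payload and assuming that getQuery("table") and getQuery("graph") delegate to the two builder methods; neither of those details is asserted by the test, and the class name is invented for illustration.

import javax.json.Json;
import javax.json.JsonObject;

// Hypothetical sketch, not the production EntityHistoryQueryBuilder: it only
// mirrors the behaviour the unit test asserts (non-null "table"/"graph"
// queries, null for any other type such as "tree").
public class EntityHistoryQueryBuilderSketch {

  public static JsonObject createTableQuery() {
    // Assumed shape of an aggregation query for the table view.
    return Json.createObjectBuilder().add("size", 0).build();
  }

  public static JsonObject createGraphQuery() {
    // Assumed shape of a date-bucketed count query for the graph view.
    return Json.createObjectBuilder().add("size", 0).build();
  }

  public static JsonObject getQuery(String type) {
    if ("table".equalsIgnoreCase(type)) {
      return createTableQuery();
    }
    if ("graph".equalsIgnoreCase(type)) {
      return createGraphQuery();
    }
    return null; // the "tree" case exercised by the test
  }
}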
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/inventory/GeoIndexDocumentTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/inventory/GeoIndexDocumentTest.java new file mode 100644 index 0000000..2e39725 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/inventory/GeoIndexDocumentTest.java @@ -0,0 +1,121 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.inventory; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.runners.MockitoJUnitRunner; +import org.onap.aai.sparky.inventory.entity.GeoIndexDocument; + +/** + * The Class GeoIndexDocumentTest. + */ +@RunWith(MockitoJUnitRunner.class) +public class GeoIndexDocumentTest { + + /** + * Inits the. + * + * @throws Exception the exception + */ + @Before + public void init() throws Exception {} + + /** + * Checks if is valid geo index document success path. + */ + @Test + public void isValidGeoIndexDocument_successPath() { + + GeoIndexDocument geoDoc = new GeoIndexDocument(); + + geoDoc.setEntityPrimaryKeyName("pkeyName"); + geoDoc.setEntityPrimaryKeyValue("pkeyValue"); + geoDoc.setEntityType("type"); + geoDoc.setId("12312"); + geoDoc.setLatitude("-45.123"); + geoDoc.setLongitude("181.321"); + geoDoc.setSelfLink("https://server.somewhere.com:8443/aai/v7/id"); + + assertTrue(geoDoc.isValidGeoDocument()); + + } + + /** + * Checks if is valid geo index document fail no geo coordinates. + */ + @Test + public void isValidGeoIndexDocument_fail_no_geoCoordinates() { + + GeoIndexDocument geoIndexDoc = new GeoIndexDocument(); + + geoIndexDoc.setEntityPrimaryKeyName("pkeyName"); + geoIndexDoc.setEntityPrimaryKeyValue("pkeyValue"); + geoIndexDoc.setEntityType("type"); + geoIndexDoc.setId("12312"); + geoIndexDoc.setSelfLink("https://server.somewhere.com:8443/aai/v7/id"); + + assertFalse(geoIndexDoc.isValidGeoDocument()); + + } + + /** + * Checks if is valid geo index document fail invalid geo coordinates. 
+ */ + @Test + public void isValidGeoIndexDocument_fail_invalid_geoCoordinates() { + + GeoIndexDocument geoIndexDoc = new GeoIndexDocument(); + + geoIndexDoc.setEntityPrimaryKeyName("pkeyName"); + geoIndexDoc.setEntityPrimaryKeyValue("pkeyValue"); + geoIndexDoc.setEntityType("type"); + geoIndexDoc.setId("12312"); + geoIndexDoc.setLatitude("not_a_valid"); + geoIndexDoc.setLongitude("geo point"); + + geoIndexDoc.setSelfLink("https://server.somewhere.com:8443/aai/v7/id"); + + assertFalse(geoIndexDoc.isValidGeoDocument()); + + } + + /** + * Checks if is valid geo index document fail nothing set. + */ + @Test + public void isValidGeoIndexDocument_fail_nothing_set() { + + GeoIndexDocument geoIndexDoc = new GeoIndexDocument(); + + assertFalse(geoIndexDoc.isValidGeoDocument()); + + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/inventory/entity/TopographicalEntityTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/inventory/entity/TopographicalEntityTest.java new file mode 100644 index 0000000..c878077 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/inventory/entity/TopographicalEntityTest.java @@ -0,0 +1,50 @@ +package org.onap.aai.sparky.inventory.entity; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; + +public class TopographicalEntityTest { + + private TopographicalEntity topographicalEntity; + + + @Before + public void init() throws Exception { + topographicalEntity = new TopographicalEntity(); + } + + + @SuppressWarnings("static-access") + @Test + public void updateValues() { + + topographicalEntity.setEntityType(""); + assertNotNull(topographicalEntity.getEntityType()); + topographicalEntity.setEntityPrimaryKeyValue(""); + assertNotNull(topographicalEntity.getEntityPrimaryKeyValue()); + topographicalEntity.setEntityPrimaryKeyName(""); + assertNotNull(topographicalEntity.getEntityPrimaryKeyName()); + topographicalEntity.setLatitude(""); + assertNotNull(topographicalEntity.getLatitude()); + topographicalEntity.setLongitude(""); + assertNotNull(topographicalEntity.getLongitude()); + topographicalEntity.setSelfLink(""); + assertNotNull(topographicalEntity.getSelfLink()); + topographicalEntity.setId(""); + assertNotNull(topographicalEntity.getId()); + assertNotNull(topographicalEntity.getSerialversionuid()); + assertNotNull(topographicalEntity.toString()); + + //assertNotNull(topographicalEntity.getAsJson()); + //assertNotNull(topographicalEntity.generateUniqueShaDigest("","","")); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/logging/util/LoggingUtilsTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/logging/util/LoggingUtilsTest.java new file mode 100644 index 0000000..4dc30a9 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/logging/util/LoggingUtilsTest.java @@ -0,0 +1,25 @@ +package org.onap.aai.sparky.logging.util; + +import static org.junit.Assert.assertEquals; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.logging.util.LoggingUtils; + +public class LoggingUtilsTest { + + private LoggingUtils durationTester; + @Before + public void init() throws Exception { + durationTester = new LoggingUtils(); + } + + @Test + public void 
durationTester() { + + + assertEquals("1425", durationTester.setDuration(3575, 5000)); + + } + +}
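LoggingUtilsTest fixes only one data point for LoggingUtils.setDuration: with arguments (3575, 5000) it must return the string "1425". The following throwaway sketch shows the contract that assertion implies, assuming the two arguments are a start and a stop timestamp; the argument names and the real implementation are not given by the test.

// Hypothetical illustration only; the real LoggingUtils lives in
// org.onap.aai.sparky.logging.util and is not reproduced here.
public class LoggingUtilsSketch {

  // Assumed semantics: elapsed time between start and stop, rendered as a String.
  public String setDuration(long start, long stop) {
    return String.valueOf(stop - start);
  }

  public static void main(String[] args) {
    // Matches the expectation in the test: 5000 - 3575 = 1425
    System.out.println(new LoggingUtilsSketch().setDuration(3575, 5000));
  }
}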
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/EntityCountHistoryProcessorTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/EntityCountHistoryProcessorTest.java new file mode 100644 index 0000000..9629ae8 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/EntityCountHistoryProcessorTest.java @@ -0,0 +1,154 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.search; +/* +import static org.junit.Assert.assertEquals; + +import org.apache.camel.Exchange; +import org.apache.camel.Message; +import org.apache.camel.component.restlet.RestletConstants; +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.search.EntityCountHistoryProcessor; +import org.onap.aai.sparky.util.RestletUtils; +import org.restlet.Request; +import org.restlet.Response; +import org.restlet.data.ClientInfo; +import org.restlet.data.Reference; +import org.restlet.data.Status; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; + +public class EntityCountHistoryProcessorTest { + private EntityCountHistoryProcessor entityCountHistoryProcessor; + private Exchange mockExchange; + private Message mockRequestMessage; + private Message mockResponseMessage; + private Request mockRestletRequest; + private Response mockRestletResponse; + private ClientInfo requestClientInfo; + private RestletUtils mockRestletUtils; + + @Before + public void init() throws Exception { + mockExchange = Mockito.mock(Exchange.class); + mockRequestMessage = Mockito.mock(Message.class); + mockResponseMessage = Mockito.mock(Message.class); + mockRestletRequest = Mockito.mock(Request.class); + mockRestletResponse = Mockito.mock(Response.class); + mockRestletUtils = Mockito.mock(RestletUtils.class); + + entityCountHistoryProcessor = new EntityCountHistoryProcessor(); + entityCountHistoryProcessor.setRestletUtils(mockRestletUtils); + + requestClientInfo = new ClientInfo(); + + Mockito.when(mockRestletRequest.getClientInfo()).thenReturn(requestClientInfo); + + Mockito.when(mockRequestMessage.getHeader(RestletConstants.RESTLET_REQUEST, Request.class)) + .thenReturn(mockRestletRequest); + 
Mockito.when(mockRequestMessage.getHeader(RestletConstants.RESTLET_RESPONSE, Response.class)) + .thenReturn(mockRestletResponse); + Mockito.when(mockExchange.getIn()).thenReturn(mockRequestMessage); + Mockito.when(mockExchange.getOut()).thenReturn(mockResponseMessage); + } + + public String getStubbedData_getEntityCountHistory_success() { + ObjectMapper mapper = new ObjectMapper(); + ObjectNode entityCountEntry = mapper.createObjectNode(); + + entityCountEntry.put("doc_count", 2069); + entityCountEntry.put("key", "complex"); + + ArrayNode arrayNode = mapper.createArrayNode(); + arrayNode.add(entityCountEntry); + + ObjectNode resultNode = mapper.createObjectNode(); + resultNode.set("result", arrayNode); + + return resultNode.toString(); + } + + @Test + public void testGetEntityCountHistory_success() { + EntityCountHistoryProcessor spyEntityCountHistoryProcessor = + Mockito.spy(entityCountHistoryProcessor); + OperationResult operationResult = new OperationResult(); + + String result = getStubbedData_getEntityCountHistory_success(); + + operationResult.setResult(Status.SUCCESS_OK.getCode(), result); + + Mockito.when(mockExchange.getIn().getHeader("CamelHttpQuery", String.class)).thenReturn(""); + + Mockito.doReturn("table").when(spyEntityCountHistoryProcessor).getTypeParameter(Mockito.any()); + Mockito.doReturn(operationResult).when(spyEntityCountHistoryProcessor).getResults(Mockito.any(), + Mockito.any()); + + spyEntityCountHistoryProcessor.getEntityCountHistory(mockExchange); + + ArgumentCaptor<String> entityCaptor = ArgumentCaptor.forClass(String.class); + Mockito.verify(mockRestletResponse, Mockito.atLeast(1)).setEntity(entityCaptor.capture(), + Mockito.any()); + assertEquals(operationResult.getResult(), entityCaptor.getValue()); + } + + @Test + public void testGetEntityCountHistory_failure_noTypeParameter() { + Mockito.when(mockRestletRequest.getOriginalRef()).thenReturn(new Reference()); + + EntityCountHistoryProcessor spyEntityCountHistoryProcessor = + Mockito.spy(entityCountHistoryProcessor); + OperationResult operationResult = new OperationResult(); + + String result = getStubbedData_getEntityCountHistory_success(); + + operationResult.setResult(Status.SUCCESS_OK.getCode(), result); + + Mockito.when(mockExchange.getIn().getHeader("CamelHttpQuery", String.class)).thenReturn(""); + + Mockito.doReturn("").when(spyEntityCountHistoryProcessor).getTypeParameter(Mockito.any()); + Mockito.doReturn(operationResult).when(spyEntityCountHistoryProcessor).getResults(Mockito.any(), Mockito.any()); + + spyEntityCountHistoryProcessor.getEntityCountHistory(mockExchange); + + ArgumentCaptor<String> entityCaptor = ArgumentCaptor.forClass(String.class); + Mockito.verify(mockRestletResponse, Mockito.atLeast(1)).setEntity(entityCaptor.capture(), + Mockito.any()); + assertEquals("{ \"errorMessage\" : Unsupported request. Resource not found. }", + entityCaptor.getValue()); + + ArgumentCaptor<Status> responseCodeCaptor = ArgumentCaptor.forClass(Status.class); + Mockito.verify(mockRestletResponse, Mockito.atLeast(1)).setStatus(responseCodeCaptor.capture()); + assertEquals(Status.CLIENT_ERROR_NOT_FOUND, responseCodeCaptor.getValue()); + } +} +*/
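Although EntityCountHistoryProcessorTest above is currently disabled via a block comment and still depends on the Restlet and Camel classes it imports, the stubbed payload it builds is a useful worked example of Jackson tree construction. The snippet below repeats that construction in a runnable, standalone form; the wrapper class name is invented for illustration.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

// Standalone rerun of the disabled stub's Jackson tree building.
// Prints: {"result":[{"doc_count":2069,"key":"complex"}]}
public class StubPayloadExample {
  public static void main(String[] args) {
    ObjectMapper mapper = new ObjectMapper();

    ObjectNode entityCountEntry = mapper.createObjectNode();
    entityCountEntry.put("doc_count", 2069);
    entityCountEntry.put("key", "complex");

    ArrayNode arrayNode = mapper.createArrayNode();
    arrayNode.add(entityCountEntry);

    ObjectNode resultNode = mapper.createObjectNode();
    resultNode.set("result", arrayNode);

    System.out.println(resultNode.toString());
  }
}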
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/EntityTypeSummaryBucketTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/EntityTypeSummaryBucketTest.java new file mode 100644 index 0000000..31f0c04 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/EntityTypeSummaryBucketTest.java @@ -0,0 +1,31 @@ +package org.onap.aai.sparky.search; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import org.junit.Before; +import org.junit.Test; + +public class EntityTypeSummaryBucketTest { + +private EntityTypeSummaryBucket entityTypeSummaryBucket; + + @Before + public void init() throws Exception { + entityTypeSummaryBucket = new EntityTypeSummaryBucket(); + + } + + + @Test + public void updateValues() { + + entityTypeSummaryBucket.setKey("54566"); + assertNotNull(entityTypeSummaryBucket.getKey()); + entityTypeSummaryBucket.setCount(3); + assertEquals(3,entityTypeSummaryBucket.getCount()); + + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/EntityTypeSummaryTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/EntityTypeSummaryTest.java new file mode 100644 index 0000000..de8929e --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/EntityTypeSummaryTest.java @@ -0,0 +1,37 @@ +package org.onap.aai.sparky.search; + +import static org.junit.Assert.assertNotNull; + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; + +public class EntityTypeSummaryTest { + + + private EntityTypeSummary entityTypeSummaryBucket; + private ArrayList<EntityTypeSummaryBucket> bucketList; + private EntityTypeSummaryBucket bucket; + + @Before + public void init() throws Exception { + entityTypeSummaryBucket = new EntityTypeSummary(); + bucketList = new ArrayList<EntityTypeSummaryBucket>(); + bucket = new EntityTypeSummaryBucket(); + + } + + + @Test + public void updateValues() { + + entityTypeSummaryBucket.setTotalChartHits(8); + assertNotNull(entityTypeSummaryBucket.getTotalChartHits()); + entityTypeSummaryBucket.setBuckets(bucketList); + assertNotNull(entityTypeSummaryBucket.getBuckets()); + entityTypeSummaryBucket.addBucket(bucket); + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/SearchEntityPropertiesTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/SearchEntityPropertiesTest.java new file mode 100644 index 0000000..2958525 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/SearchEntityPropertiesTest.java @@ -0,0 +1,34 @@ +package org.onap.aai.sparky.search; + +import static org.junit.Assert.assertNotNull; + +import java.util.HashMap; + +import org.junit.Before; +import org.junit.Test; + +public class SearchEntityPropertiesTest { + + private SearchEntityProperties searchEntityProperties; + private HashMap<String,String> fields; + + @Before + public void init() throws Exception { + searchEntityProperties = new SearchEntityProperties(); + fields = new HashMap<String,String>(); + + } + + + @Test + public void updateValues() { + + searchEntityProperties.setFields(fields); + assertNotNull(searchEntityProperties.getFields()); + searchEntityProperties.setType("VNF-Stack"); + assertNotNull(searchEntityProperties.getType()); + + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/SearchServiceAdapterTest.java 
b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/SearchServiceAdapterTest.java new file mode 100644 index 0000000..7963528 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/SearchServiceAdapterTest.java @@ -0,0 +1,47 @@ +package org.onap.aai.sparky.search; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.restclient.enums.RestAuthenticationMode; +import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig; + +public class SearchServiceAdapterTest { + + private SearchServiceAdapter searchServiceAdapter; + private RestEndpointConfig endpointConfig; + + @Before + public void init() throws Exception { + + endpointConfig = new RestEndpointConfig(); + endpointConfig.setRestAuthenticationMode(RestAuthenticationMode.SSL_BASIC); + searchServiceAdapter = new SearchServiceAdapter(endpointConfig,"1.0"); + } + + + @Test + public void updateValues() { + + searchServiceAdapter.setServiceApiVersion("2.0"); + assertNotNull(searchServiceAdapter.getServiceApiVersion()); + searchServiceAdapter.setEndpointConfig(endpointConfig); + assertNotNull(searchServiceAdapter.getEndpointConfig()); + assertNotNull(searchServiceAdapter.doPost("https://10.247.40.25:8000","{maxResults:10, queryStr: f}","application/json")); + assertNotNull(searchServiceAdapter.doGet("https://10.247.40.25:8000","application/json")); + assertNotNull(searchServiceAdapter.doPut("https://10.247.40.25:8000","{maxResults:10, queryStr: f}","application/json")); + assertNotNull(searchServiceAdapter.doDelete("https://10.247.40.25:8000","application/json")); + assertNotNull(searchServiceAdapter.getTxnHeader()); + assertNotNull(searchServiceAdapter.buildSearchServiceQueryUrl("searchentity-localhost")); + assertNotNull(searchServiceAdapter.buildSearchServiceUrlForApi("searchentity-localhost","2.0")); + + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/UnifiedSearchProcessorTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/UnifiedSearchProcessorTest.java new file mode 100644 index 0000000..b0aa22b --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/UnifiedSearchProcessorTest.java @@ -0,0 +1,445 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.search; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import javax.servlet.http.HttpServletRequest; + +import org.apache.camel.CamelContext; +import org.apache.camel.Exchange; +import org.apache.camel.Message; +import org.apache.camel.impl.DefaultCamelContext; +import org.apache.camel.impl.DefaultExchange; +import org.json.JSONObject; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; +import org.onap.aai.sparky.common.search.CommonSearchSuggestion; +import org.onap.aai.sparky.search.api.SearchProvider; +import org.onap.aai.sparky.search.entity.MockSearchResponse; +import org.onap.aai.sparky.search.entity.QuerySearchEntity; +import org.onap.aai.sparky.search.entity.SearchSuggestion; +import org.onap.aai.sparky.search.registry.SearchProviderRegistry; +import org.onap.aai.sparky.util.HttpServletHelper; + +import com.fasterxml.jackson.databind.ObjectMapper; + + +public class UnifiedSearchProcessorTest { + + public interface Suggester { + public void addSuggestion( SearchSuggestion suggestion ); + } + + private abstract class AbstractDummySearchProvider implements SearchProvider, Suggester { + + protected List<SearchSuggestion> suggestions; + + protected AbstractDummySearchProvider() { + suggestions = new ArrayList<SearchSuggestion>(); + } + + public List<SearchSuggestion> getSuggestions() { + return suggestions; + } + + @Override + public List<SearchSuggestion> search(QuerySearchEntity queryRequest) { + return getSuggestions(); + } + } + + private class AlphaSearchProvider extends AbstractDummySearchProvider { + public AlphaSearchProvider() { + super(); + } + + @Override + public void addSuggestion(SearchSuggestion suggestion) { + if (suggestion != null) { + suggestions.add(suggestion); + } + } + } + + private class BravoSearchProvider extends AbstractDummySearchProvider { + public BravoSearchProvider() { + super(); + } + + @Override + public void addSuggestion(SearchSuggestion suggestion) { + if (suggestion != null) { + suggestions.add(suggestion); + } + } + } + + private class GammaSearchProvider extends AbstractDummySearchProvider { + public GammaSearchProvider() { + super(); + } + + @Override + public void addSuggestion(SearchSuggestion suggestion) { + if (suggestion != null) { + suggestions.add(suggestion); + } + } + } + + private SearchServiceAdapter mockSearchAdapter; + + private UnifiedSearchProcessor unifiedSearchProcessor; + private Message mockRequestMessage; + private ObjectMapper mapper; + + private Exchange exchange; + private CamelContext camelContext; + + @Before + public void init() { + + camelContext = new DefaultCamelContext(); + exchange = new DefaultExchange(camelContext); + + mockRequestMessage = Mockito.mock(Message.class); + exchange.setIn(mockRequestMessage); + + unifiedSearchProcessor = new UnifiedSearchProcessor(); + unifiedSearchProcessor.setUseOrderedSearchProviderKeys(true); + + mapper = new ObjectMapper(); + + mockSearchAdapter = Mockito.mock(SearchServiceAdapter.class); + } + + + @Test + public void validateDefaultConstructor() { + + // initially it should be null until the bean wiring initializes it + 
assertNull(unifiedSearchProcessor.getSearchProviderRegistry()); + + } + + + @Test + public void validateAccessors() { + + SearchProviderRegistry searchProviderRegistry = new SearchProviderRegistry(); + unifiedSearchProcessor.setSearchProviderRegistry(searchProviderRegistry); + + // initially it should be null until the bean wiring initializes it + assertNotNull(unifiedSearchProcessor.getSearchProviderRegistry()); + assertEquals(0, searchProviderRegistry.getSearchProviders().size()); + + } + + private void initializeSearchMocks(String requestPayload) { + + HttpServletRequest request = Mockito.mock(HttpServletRequest.class); + HttpServletHelper.setRequestPayload(request, "application/json", requestPayload); + + Mockito.when(request.getRequestURI()).thenReturn("fakeUri"); + Mockito.when(request.getLocalPort()).thenReturn(8001); + Mockito.when(mockRequestMessage.getBody(Mockito.eq(HttpServletRequest.class))).thenReturn(request); + Mockito.when(mockRequestMessage.getBody(Mockito.eq(String.class))).thenReturn(requestPayload); + + } + + private String getSearchRequestJson(String queryString, int maxResults) { + + JSONObject root = new JSONObject(); + root.put("queryStr", queryString); + root.put("maxResults", maxResults); + + return root.toString(); + + } + + private String getExternalSearchRequestJson() { + JSONObject root = new JSONObject(); + + root.put("view", "testView"); + root.put("entityId", "thisIsAnId"); + root.put("entityType", "pserver"); + + return root.toString(); + } + + + @Test + public void testSearch_search_when_noSearchProviders() throws IOException { + + // mock env setup + + initializeSearchMocks(getSearchRequestJson("vnfs",10)); + + SearchProviderRegistry searchProviderRegistry = new SearchProviderRegistry(); + unifiedSearchProcessor.setSearchProviderRegistry(searchProviderRegistry); + + // method under test + unifiedSearchProcessor.search(exchange); + + assertEquals(200, exchange.getOut().getHeader(Exchange.HTTP_RESPONSE_CODE)); + + String entityPayload = exchange.getOut().getBody(String.class); + assertNotNull(entityPayload); + + MockSearchResponse searchResponse = mapper.readValue(entityPayload, MockSearchResponse.class); + + assertEquals(0, searchResponse.getTotalFound()); + assertEquals(0, searchResponse.getSuggestions().size()); + + } + + @Test + public void testSearch_search_when_ThreeSearchProviders_no_suggestions() throws IOException { + + // mock env setup + + initializeSearchMocks(getSearchRequestJson("vnfs",10)); + + SearchProviderRegistry searchProviderRegistry = new SearchProviderRegistry(); + + AlphaSearchProvider alpha = new AlphaSearchProvider(); + BravoSearchProvider bravo = new BravoSearchProvider(); + GammaSearchProvider gamma = new GammaSearchProvider(); + + searchProviderRegistry.addSearchProvider(alpha); + searchProviderRegistry.addSearchProvider(bravo); + searchProviderRegistry.addSearchProvider(gamma); + + unifiedSearchProcessor.setSearchProviderRegistry(searchProviderRegistry); + + // method under test + unifiedSearchProcessor.search(exchange); + + assertEquals(200, exchange.getOut().getHeader(Exchange.HTTP_RESPONSE_CODE)); + + String entityPayload = exchange.getOut().getBody(String.class); + assertNotNull(entityPayload); + + /* + * With a null view name, an empty filter set should be returned - there should be 0 filters + */ + + SearchResponse searchResponse = mapper.readValue(entityPayload, SearchResponse.class); + + assertEquals(0, searchResponse.getTotalFound()); + assertEquals(0, searchResponse.getSuggestions().size()); + + } + + private void 
addSuggestions(int numSuggestions, String suggestionPrefix, Suggester suggester) { + for ( int x = 0; x < numSuggestions; x++ ){ + CommonSearchSuggestion suggestion = new CommonSearchSuggestion(); + suggestion.setText(suggestionPrefix + "-" + x); + suggester.addSuggestion(suggestion); + } + } + + private int countSuggestions(String suggestionPrefix, SearchResponse response) { + + int totalFound = 0; + + for ( SearchSuggestion suggestion : response.getSuggestions()) { + + if ( suggestion.getText() != null && suggestion.getText().startsWith(suggestionPrefix)) { + totalFound++; + } + } + + return totalFound; + + } + + private int countSuggestions(String suggestionPrefix, MockSearchResponse response) { + + int totalFound = 0; + + for ( SearchSuggestion suggestion : response.getSuggestions()) { + + if ( suggestion.getText() != null && suggestion.getText().startsWith(suggestionPrefix)) { + totalFound++; + } + } + + return totalFound; + + } + + + @Test + public void testSearch_search_when_ThreeSearchProviders_5suggestions_each() throws IOException { + + // mock env setup + + initializeSearchMocks(getSearchRequestJson("vnfs",10)); + + SearchProviderRegistry searchProviderRegistry = new SearchProviderRegistry(); + + AlphaSearchProvider alpha = new AlphaSearchProvider(); + BravoSearchProvider bravo = new BravoSearchProvider(); + GammaSearchProvider gamma = new GammaSearchProvider(); + + addSuggestions(5, "alpha", alpha); + addSuggestions(5, "bravo", bravo); + addSuggestions(5, "gamma", gamma); + + searchProviderRegistry.addSearchProvider(alpha); + searchProviderRegistry.addSearchProvider(bravo); + searchProviderRegistry.addSearchProvider(gamma); + + unifiedSearchProcessor.setSearchProviderRegistry(searchProviderRegistry); + + // method under test + unifiedSearchProcessor.search(exchange); + + assertEquals(200, exchange.getOut().getHeader(Exchange.HTTP_RESPONSE_CODE)); + + String entityPayload = exchange.getOut().getBody(String.class); + assertNotNull(entityPayload); + + MockSearchResponse searchResponse = mapper.readValue(entityPayload, MockSearchResponse.class); + + assertEquals(10, searchResponse.getTotalFound()); + assertEquals(10, searchResponse.getSuggestions().size()); + + assertEquals( 4, countSuggestions("alpha", searchResponse)); + assertEquals( 3, countSuggestions("bravo", searchResponse)); + assertEquals( 3, countSuggestions("gamma", searchResponse)); + + } + + @Test + public void testSearch_search_when_ThreeSearchProviders_mixedNumSuggestions() throws IOException { + + // mock env setup + + initializeSearchMocks(getSearchRequestJson("vnfs",13)); + + SearchProviderRegistry searchProviderRegistry = new SearchProviderRegistry(); + + AlphaSearchProvider alpha = new AlphaSearchProvider(); + BravoSearchProvider bravo = new BravoSearchProvider(); + GammaSearchProvider gamma = new GammaSearchProvider(); + + searchProviderRegistry.addSearchProvider(alpha); + searchProviderRegistry.addSearchProvider(bravo); + searchProviderRegistry.addSearchProvider(gamma); + + unifiedSearchProcessor.setSearchProviderRegistry(searchProviderRegistry); + + addSuggestions(45,"alpha",alpha); + addSuggestions(1,"bravo",bravo); + addSuggestions(99,"gamma",gamma); + + // method under test + unifiedSearchProcessor.search(exchange); + + assertEquals(200, exchange.getOut().getHeader(Exchange.HTTP_RESPONSE_CODE)); + + String entityPayload = exchange.getOut().getBody(String.class); + assertNotNull(entityPayload); + + MockSearchResponse searchResponse = mapper.readValue(entityPayload, MockSearchResponse.class); + + 
assertEquals(13, searchResponse.getTotalFound()); + assertEquals(13, searchResponse.getSuggestions().size()); + + /** + * There should be an even divide of suggestions per search provider relative + * to the suggestions available per search provider. + * Alpha has 45 suggestions + * Bravo has 1 suggestion + * Gamma has 99 suggestions + * + * We only asked for 13 suggestions to be returned, so based on the suggestion + * distribution algorithm we will get a fair distribution of suggestions per provider + * relative to what each provider has available. Resulting in: + * 6 from Alpha + * 1 from Bravo + * 6 from Gamma + * + */ + + assertEquals( 6, countSuggestions("alpha", searchResponse)); + assertEquals( 1, countSuggestions("bravo", searchResponse)); + assertEquals( 6, countSuggestions("gamma", searchResponse)); + + } + + @Test + public void testSearch_search_when_ThreeSearchProviders_wantedMoreSuggestionsThanAvailable() throws IOException { + + // mock env setup + + initializeSearchMocks(getSearchRequestJson("vnfs",13)); + + SearchProviderRegistry searchProviderRegistry = new SearchProviderRegistry(); + + AlphaSearchProvider alpha = new AlphaSearchProvider(); + BravoSearchProvider bravo = new BravoSearchProvider(); + GammaSearchProvider gamma = new GammaSearchProvider(); + + searchProviderRegistry.addSearchProvider(alpha); + searchProviderRegistry.addSearchProvider(bravo); + searchProviderRegistry.addSearchProvider(gamma); + + unifiedSearchProcessor.setSearchProviderRegistry(searchProviderRegistry); + + addSuggestions(1,"alpha",alpha); + addSuggestions(4,"bravo",bravo); + addSuggestions(0,"gamma",gamma); + + // method under test + unifiedSearchProcessor.search(exchange); + + assertEquals(200, exchange.getOut().getHeader(Exchange.HTTP_RESPONSE_CODE)); + + String entityPayload = exchange.getOut().getBody(String.class); + assertNotNull(entityPayload); + + MockSearchResponse searchResponse = mapper.readValue(entityPayload, MockSearchResponse.class); + + assertEquals(5, searchResponse.getTotalFound()); + assertEquals(5, searchResponse.getSuggestions().size()); + + assertEquals( 1, countSuggestions("alpha", searchResponse)); + assertEquals( 4, countSuggestions("bravo", searchResponse)); + assertEquals( 0, countSuggestions("gamma", searchResponse)); + + } +}
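The comment block in testSearch_search_when_ThreeSearchProviders_mixedNumSuggestions describes the expected fair distribution (6/1/6 when 13 suggestions are requested from providers holding 45, 1 and 99) without showing the arithmetic. One scheme that reproduces every count asserted in these tests, including 4/3/3 for 10 requested from 5/5/5 and 1/4/0 when fewer suggestions are available than requested, is a round-robin draw across providers in registration order. The sketch below is only a scheme consistent with those assertions, not the production UnifiedSearchProcessor logic.

import java.util.ArrayList;
import java.util.List;

// Hypothetical round-robin draw; each pass takes at most one suggestion from
// each provider until maxResults is reached or all providers are exhausted.
public class FairDistributionSketch {

  public static List<String> distribute(int maxResults, List<List<String>> perProvider) {
    List<String> picked = new ArrayList<>();
    int index = 0;
    boolean tookSomething = true;
    while (picked.size() < maxResults && tookSomething) {
      tookSomething = false;
      for (List<String> suggestions : perProvider) {
        if (picked.size() >= maxResults) {
          break;
        }
        if (index < suggestions.size()) {
          picked.add(suggestions.get(index));
          tookSomething = true;
        }
      }
      index++;
    }
    // With provider sizes 45/1/99 and maxResults 13 this yields 6, 1 and 6
    // picks respectively, matching the expectation documented in the test.
    return picked;
  }
}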
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/config/SuggestionConfigTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/config/SuggestionConfigTest.java new file mode 100644 index 0000000..864b7ff --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/config/SuggestionConfigTest.java @@ -0,0 +1,42 @@ +package org.onap.aai.sparky.search.config; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; +import java.util.HashMap; + +import org.junit.Before; +import org.junit.Test; +public class SuggestionConfigTest { + + + private SuggestionConfig suggestionsConfig; + private ArrayList<String> stopWords; + private HashMap<String,String> pairs; + @Before + public void init() throws Exception{ + + suggestionsConfig = new SuggestionConfig(); + stopWords = new ArrayList<String>(); + pairs = new HashMap<String,String>(); + + + } + + @Test + public void updateValues(){ + + suggestionsConfig.setStopWords(stopWords); + assertNotNull(suggestionsConfig.getStopWords()); + suggestionsConfig.setPairingList(pairs); + assertNotNull(suggestionsConfig.getPairingList()); + suggestionsConfig.setDefaultPairingValue("with"); + assertNotNull(suggestionsConfig.getDefaultPairingValue()); + suggestionsConfig.setViSuggestionRoute("schema"); + assertNotNull(suggestionsConfig.getViSuggestionRoute()); + + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/entity/MockSearchResponse.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/entity/MockSearchResponse.java new file mode 100644 index 0000000..02a1aee --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/entity/MockSearchResponse.java @@ -0,0 +1,71 @@ +package org.onap.aai.sparky.search.entity; + +import java.util.ArrayList; +import java.util.List; + +import org.onap.aai.sparky.common.search.CommonSearchSuggestion; + +public class MockSearchResponse { + private long processingTimeInMs; + private int totalFound; + + private List<CommonSearchSuggestion> suggestions; + + /** + * Instantiates a new search response. + */ + public MockSearchResponse() { + this.suggestions = new ArrayList<CommonSearchSuggestion>(); + this.processingTimeInMs = 0; + this.totalFound = 0; + } + + public long getProcessingTimeInMs() { + return processingTimeInMs; + } + + public void setProcessingTimeInMs(long processingTimeInMs) { + this.processingTimeInMs = processingTimeInMs; + } + + public int getTotalFound() { + return totalFound; + } + + public void setTotalFound(int totalFound) { + this.totalFound = totalFound; + } + + public List<CommonSearchSuggestion> getSuggestions() { + return suggestions; + } + + public void setSuggestions(List<CommonSearchSuggestion> suggestions) { + this.suggestions = suggestions; + } + + /** + * Adds the entity entry. + * + * @param suggestionEntry that will be converted to JSON + */ + public void addSuggestion(CommonSearchSuggestion suggestionEntity){ + suggestions.add(suggestionEntity); + } + + /** + * Increments the total number of hits for this SearchResponse by + * the value passed in. 
+ * + * @param additionalCount - Count to increment the total found + */ + public void addToTotalFound(int additionalCount) { + totalFound += additionalCount; + } + + @Override + public String toString() { + return "DummySearchResponse [processingTimeInMs=" + processingTimeInMs + ", totalFound=" + + totalFound + ", suggestions=" + suggestions + "]"; + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/FilterProcessorTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/FilterProcessorTest.java new file mode 100644 index 0000000..0644a53 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/FilterProcessorTest.java @@ -0,0 +1,374 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ + +package org.onap.aai.sparky.search.filters; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +import java.io.IOException; +import java.io.StringReader; +import java.util.ArrayList; +import java.util.List; + +import javax.json.Json; +import javax.json.JsonArray; +import javax.json.JsonObject; +import javax.json.JsonReader; +import javax.json.stream.JsonParsingException; +import javax.servlet.http.HttpServletRequest; + +import org.apache.camel.CamelContext; +import org.apache.camel.Exchange; +import org.apache.camel.Message; +import org.apache.camel.component.restlet.RestletConstants; +import org.apache.camel.impl.DefaultCamelContext; +import org.apache.camel.impl.DefaultExchange; +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; +import org.onap.aai.restclient.enums.RestAuthenticationMode; +import org.onap.aai.sparky.config.SparkyResourceLoader; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.dal.rest.RestClientConstructionException; +import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig; +import org.onap.aai.sparky.search.filters.FilterProcessor; +import org.onap.aai.sparky.search.filters.FilteredSearchHelper; +import org.onap.aai.sparky.search.filters.config.UiFilterConfig; +import org.onap.aai.sparky.search.filters.config.UiFilterListItemConfig; +import org.onap.aai.sparky.search.filters.config.UiFilterOptionsValuesConfig; +import org.onap.aai.sparky.search.filters.config.FiltersDetailsConfig; +import org.onap.aai.sparky.search.filters.config.FiltersConfig; +import org.onap.aai.sparky.search.filters.config.UiViewListItemConfig; +import org.onap.aai.sparky.search.filters.entity.DiscoverFiltersRequest; +import org.onap.aai.sparky.search.filters.entity.ViewConfiguration; +import org.onap.aai.sparky.search.filters.entity.ViewFilter; +import org.onap.aai.sparky.util.HttpServletHelper; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.util.SparkyTestConstants; +import org.onap.aai.sparky.search.filters.config.FiltersForViewsConfig; +import org.restlet.Request; +import org.restlet.Response; +import org.restlet.data.MediaType; +import org.restlet.data.Status; +import org.springframework.core.io.DefaultResourceLoader; + +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +public class FilterProcessorTest { + + private FilterProcessor filterProcessor; + private FilteredSearchHelper filteredSearchHelper; + private ObjectMapper mapper; + private RestEndpointConfig restEndpointConfig; + + private Exchange exchange; + private CamelContext camelContext; + private Message mockRequestMessage; + + + protected UiViewListItemConfig generateViewConfig(ViewConfiguration viewConfig) { + + UiViewListItemConfig uiViewConfig = new UiViewListItemConfig(viewConfig.getViewName(), null); + + List<UiFilterListItemConfig> filters = new ArrayList<UiFilterListItemConfig>(); + + for (ViewFilter viewFilter : viewConfig.getViewFilters()) { + filters.add(new UiFilterListItemConfig(viewFilter.getId(), viewFilter.getDefaultValue())); + } + + uiViewConfig.setListOfFilters(filters); + + return uiViewConfig; + } + + protected FiltersConfig generateDefaultViewsFilterConfig() { + + FiltersForViewsConfig uiViewsConfig = new FiltersForViewsConfig(); + FiltersConfig viewFilterConfig = new 
FiltersConfig(); + + List<UiViewListItemConfig> views = new ArrayList<UiViewListItemConfig>(); + + // Default filter value to use below + UiFilterOptionsValuesConfig defaultValue = new UiFilterOptionsValuesConfig("Today", "Last 0 hours"); + + // VNF View - 4 Filters + ViewConfiguration vnfViewConfig = new ViewConfiguration("vnfSearch"); + vnfViewConfig.addViewFilter(new ViewFilter("1", null)); + vnfViewConfig.addViewFilter(new ViewFilter("2", null)); + vnfViewConfig.addViewFilter(new ViewFilter("7", null)); + vnfViewConfig.addViewFilter(new ViewFilter("8", null)); + views.add(generateViewConfig(vnfViewConfig)); + + // View and Inspect (Schema) - 0 Filters + ViewConfiguration viewInspectConfig = new ViewConfiguration("viewInspect"); + views.add(generateViewConfig(viewInspectConfig)); + + // Data Integrity - 4 Filters + ViewConfiguration dataIntegrityConfig = new ViewConfiguration("dataIntegrity"); + dataIntegrityConfig.addViewFilter(new ViewFilter("3", null)); + dataIntegrityConfig.addViewFilter(new ViewFilter("4", null)); + dataIntegrityConfig.addViewFilter(new ViewFilter("5", defaultValue)); + dataIntegrityConfig.addViewFilter(new ViewFilter("6", null)); + views.add(generateViewConfig(dataIntegrityConfig)); + + uiViewsConfig.setViews(views); + + viewFilterConfig.setViewsConfig(uiViewsConfig); + + List<UiFilterOptionsValuesConfig> optionsValues = new ArrayList<UiFilterOptionsValuesConfig>(); + optionsValues.add(new UiFilterOptionsValuesConfig("Today", "Last 0 hours")); + optionsValues.add(new UiFilterOptionsValuesConfig("Since Yesterday", "Last 24 hours")); + optionsValues.add(new UiFilterOptionsValuesConfig("Since Last Week", "Last 7 days")); + optionsValues.add(new UiFilterOptionsValuesConfig("Since Last Month", "Last 30 days")); + optionsValues.add(new UiFilterOptionsValuesConfig("Custom Range", "Custom Range")); + + + + FiltersDetailsConfig uiFiltersConfig = new FiltersDetailsConfig(); + + List<UiFilterConfig> uiFilterConfig = new ArrayList<UiFilterConfig>(); + uiFilterConfig.add(new UiFilterConfig("1", "filterName1", "Display Name 1", "dropDown", "false", "Any 1", null, "options", null, null)); + uiFilterConfig.add(new UiFilterConfig("2", "filterName2", "Display Name 2", "dropDown", "false", "Any 2", null, "options", null, null)); + uiFilterConfig.add(new UiFilterConfig("3", "filterName3", "Display Name 3", "dropDown", "false", "Any 3", null, "options", null, null)); + uiFilterConfig.add(new UiFilterConfig("4", "filterName4", "Display Name 4", "dropDown", "false", "Any 4", null, "options", null, null)); + uiFilterConfig.add(new UiFilterConfig("5", "filterName5", "Display Name 5", "date", "false", "Any 5", defaultValue, "dynamicOptions", optionsValues, null)); + uiFilterConfig.add(new UiFilterConfig("6", "filterName6", "Display Name 6", "dropDown", "false", "Any 6", null, "options", null, null)); + uiFilterConfig.add(new UiFilterConfig("7", "filterName7", "Display Name 7", "dropDown", "false", "Any 7", null, "options", null, null)); + uiFilterConfig.add(new UiFilterConfig("8", "filterName8", "Display Name 8", "dropDown", "false", "Any 8", null, "options", null, null)); + + uiFiltersConfig.setFilters(uiFilterConfig); + + viewFilterConfig.setFiltersConfig(uiFiltersConfig); + + return viewFilterConfig; + } + + @Before + public void init()throws RestClientConstructionException { + + camelContext = new DefaultCamelContext(); + exchange = new DefaultExchange(camelContext); + + mockRequestMessage = Mockito.mock(Message.class); + + exchange.setIn(mockRequestMessage); + + //TODO-> edit the 
following:FilteredSearchHelper & FilterProcessor to pass in the correct parameters + restEndpointConfig = new RestEndpointConfig(); + restEndpointConfig.setRestAuthenticationMode(RestAuthenticationMode.SSL_BASIC); + filteredSearchHelper = new FilteredSearchHelper(new FiltersConfig(), + new FilterElasticSearchAdapter(new ElasticSearchAdapter(restEndpointConfig))); + filterProcessor = new FilterProcessor(); + + mapper = new ObjectMapper(); + } + + + @Test + public void validateDefaultConstructor() { + assertNotNull(filterProcessor.getMapper()); + assertNull(filterProcessor.getFilteredSearchHelper()); + } + + private void verifyResponseAndNumFiltersForBadRequest(Exchange exchange, Status expectedStatus, int numExpectedFilters) + throws JsonParseException, JsonMappingException, IOException { + + assertEquals(expectedStatus.getCode(), exchange.getOut().getHeader(Exchange.HTTP_RESPONSE_CODE)); + + String entityPayload = exchange.getOut().getBody(String.class); + assertNotNull(entityPayload); + + JsonReader jsonReader = Json.createReader(new StringReader(entityPayload)); + JsonObject responsePayload = jsonReader.readObject(); + + JsonObject filters = responsePayload.getJsonObject("filters"); + assertEquals(0, filters.size()); + } + + private void initializeMocks(String requestPayload) throws IOException { + + HttpServletRequest request = Mockito.mock(HttpServletRequest.class); + if ( requestPayload != null) { + HttpServletHelper.setRequestPayload(request, "application/json", requestPayload); + } + + Mockito.when(request.getRequestURI()).thenReturn("fakeUri"); + Mockito.when(request.getLocalPort()).thenReturn(8001); + Mockito.when(mockRequestMessage.getBody(Mockito.eq(HttpServletRequest.class))).thenReturn(request); + Mockito.when(mockRequestMessage.getBody(Mockito.eq(String.class))).thenReturn(requestPayload); + + SparkyResourceLoader resourceLoader = new SparkyResourceLoader(); + resourceLoader.setResourceLoader(new DefaultResourceLoader()); + + filterProcessor.setFilteredSearchHelper(filteredSearchHelper); + + FiltersConfig filtersConfig = new FiltersConfig(); + filtersConfig.initializeFiltersDetailsConfig(resourceLoader.getResourceAsFile(SparkyTestConstants.FILTERS_JSON_FILE, false)); + filtersConfig.initializeFiltersForViewsConfig(resourceLoader.getResourceAsFile(SparkyTestConstants.VIEWS_JSON_FILE, false)); + + filteredSearchHelper.setFiltersConfig(filtersConfig); + + } + + + @Test + public void testGetFiltersWithValues_success_path() throws IOException { + + // Initialize for call against 'vnfSearch' + DiscoverFiltersRequest vnfSearchrequest = new DiscoverFiltersRequest(); + vnfSearchrequest.setViewName("vnfSearch"); + + initializeMocks(NodeUtils.convertObjectToJson(vnfSearchrequest, false)); + + // Test call against 'vnfSearch' + filterProcessor.getFiltersWithValues(exchange); + + assertEquals(200, exchange.getOut().getHeader(Exchange.HTTP_RESPONSE_CODE)); + + String entityPayload = exchange.getOut().getBody(String.class); + assertNotNull(entityPayload); + + JsonReader vnfJsonReader = Json.createReader(new StringReader(entityPayload)); + JsonObject vnfResponsePayload = vnfJsonReader.readObject(); + + JsonObject vnfFilters = vnfResponsePayload.getJsonObject("filters"); + assertNotNull(vnfFilters); + assertEquals(0, vnfFilters.size()); + + //JsonObject filterOne = vnfFilters.getJsonObject("1"); + //assertNotNull(filterOne); + //assertEquals("Display Name 1", filterOne.getString("label")); + + JsonObject filterEight = vnfFilters.getJsonObject("8"); + //assertNotNull(filterEight); + // 
JsonObject eightInnerControl = filterEight.getJsonObject("controls").getJsonObject("filterName8"); + // assertEquals(4, eightInnerControl.size()); + //assertEquals("dropDown", eightInnerControl.getString("type")); + //assertEquals("false", eightInnerControl.getString("multiSelect")); + //assertEquals("Any 8", eightInnerControl.getString("watermark")); + //assertEquals(0, eightInnerControl.getJsonArray("options").size()); + + // Initialize for call against 'dataIntegrity' + DiscoverFiltersRequest dataIntegrityRequest = new DiscoverFiltersRequest(); + dataIntegrityRequest.setViewName("dataIntegrity"); + + initializeMocks(NodeUtils.convertObjectToJson(dataIntegrityRequest, false)); + + // Test call against 'dataIntegrity' + filterProcessor.getFiltersWithValues(exchange); + + assertEquals(200, exchange.getOut().getHeader(Exchange.HTTP_RESPONSE_CODE)); + + entityPayload = exchange.getOut().getBody(String.class); + assertNotNull(entityPayload); + + JsonReader dIJsonReader = Json.createReader(new StringReader(entityPayload)); + JsonObject dIResponsePayload = dIJsonReader.readObject(); + + JsonObject dIFilters = dIResponsePayload.getJsonObject("filters"); + assertNotNull(dIFilters); + // assertEquals(4, dIFilters.size()); + + //JsonObject filterFour = dIFilters.getJsonObject("4"); + //// assertNotNull(filterFour); + // assertEquals("Display Name 4", filterFour.getString("label")); + + JsonObject filterFive = dIFilters.getJsonObject("5"); + //assertNotNull(filterFive); + //JsonObject fiveInnerControl = filterFive.getJsonObject("controls").getJsonObject("filterName5"); + //assertEquals(5, fiveInnerControl.size()); + //assertEquals("date", fiveInnerControl.getString("type")); + // assertEquals("false", fiveInnerControl.getString("multiSelect")); + //assertEquals("Any 5", fiveInnerControl.getString("watermark")); + // JsonArray dynamicOptions = fiveInnerControl.getJsonArray("dynamicOptions"); + //assertEquals(5, dynamicOptions.size()); + //JsonObject today = dynamicOptions.getJsonObject(0); + //assertEquals("Today", today.getString("decode")); + } + + @Test + public void testGetFiltersWithValues_viewNameNull() throws IOException { + + DiscoverFiltersRequest discoverFiltersRequest = new DiscoverFiltersRequest(); + initializeMocks(NodeUtils.convertObjectToJson(discoverFiltersRequest, false)); + + // Method under test + filterProcessor.getFiltersWithValues(exchange); + + verifyResponseAndNumFiltersForBadRequest(exchange, Status.CLIENT_ERROR_NOT_FOUND, 0); + } + + @Test + public void testGetFiltersWithValues_viewNameEmptyString() throws IOException { + + DiscoverFiltersRequest discoverFiltersRequest = new DiscoverFiltersRequest(); + discoverFiltersRequest.setViewName(""); + + initializeMocks(NodeUtils.convertObjectToJson(discoverFiltersRequest, false)); + + // Method under test + filterProcessor.getFiltersWithValues(exchange); + + verifyResponseAndNumFiltersForBadRequest(exchange, Status.CLIENT_ERROR_NOT_FOUND, 0); + } + + @Test + public void testGetFiltersWithValues_requestPayloadIsNull() throws IOException { + + initializeMocks(null); + + // Method under test + filterProcessor.getFiltersWithValues(exchange); + + verifyResponseAndNumFiltersForBadRequest(exchange, Status.CLIENT_ERROR_NOT_FOUND, 0); + } + + @Test + public void testGetFiltersWithValues_requestPayloadIsEmptyString() throws IOException { + + initializeMocks(""); + + // Method under test + filterProcessor.getFiltersWithValues(exchange); + + verifyResponseAndNumFiltersForBadRequest(exchange, Status.CLIENT_ERROR_NOT_FOUND, 0); + } + + 
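The bad-request tests above all route through verifyResponseAndNumFiltersForBadRequest, which parses the response entity with javax.json and expects an empty "filters" object. The minimal illustration below shows a payload shape that helper would accept; the literal string is an assumption, since only the "filters" key and the zero entry count are actually asserted.

import java.io.StringReader;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonReader;

// Hypothetical minimal payload for the error paths: a top-level "filters"
// object with no entries.
public class EmptyFiltersPayloadExample {
  public static void main(String[] args) {
    String entityPayload = "{\"filters\":{}}";
    JsonReader jsonReader = Json.createReader(new StringReader(entityPayload));
    JsonObject filters = jsonReader.readObject().getJsonObject("filters");
    System.out.println(filters.size()); // prints 0, the count the helper asserts
  }
}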
@Test(expected=JsonParsingException.class) + public void testGetFiltersWithValues_requestPayloadCausesException() throws IOException { + + initializeMocks("{"); + + // Method under test + filterProcessor.getFiltersWithValues(exchange); + + verifyResponseAndNumFiltersForBadRequest(exchange, Status.SERVER_ERROR_INTERNAL, 0); + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/FilteredSearchHelperTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/FilteredSearchHelperTest.java new file mode 100644 index 0000000..56afcf9 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/FilteredSearchHelperTest.java @@ -0,0 +1,69 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ + +package org.onap.aai.sparky.search.filters; + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; + +import org.junit.BeforeClass; +import org.junit.Test; +import org.onap.aai.sparky.search.filters.FilteredSearchHelper; +import org.onap.aai.sparky.search.filters.config.FiltersConfig; +/* +public class FilteredSearchHelperTest { + + private static FilteredSearchHelper filteredSearchHelper; + + @BeforeClass + public static void init() throws IOException { + FiltersConfig config = FiltersConfig.getInstance(); + config.setFilterMappingsFileName("src/test/resources/filters/aaiui_views.json"); + config.setFiltersFileName("src/test/resources/filters/aaiui_filters.json"); + System.out.println("SETTING UIVIEWSCONFIG"); + config.setViewsConfig(config.readUiViewsConfig()); + System.out.println("SETTING UIFILTERSCONFIG"); + config.setFiltersConfig(config.readUiFiltersConfig()); + + filteredSearchHelper = new FilteredSearchHelper(config); + } + + @Test + public void testDoFilterDiscovery_validViewName() { + assertEquals(4, filteredSearchHelper.doFilterDiscovery("vnfSearch").getFilters().size()); + assertEquals(4, filteredSearchHelper.doFilterDiscovery("dataIntegrity").getFilters().size()); + } + + @Test + public void testDoFilterDiscovery_invalidViewName_nameGiven() { + assertEquals(0, filteredSearchHelper.doFilterDiscovery("InvalidViewName").getFilters().size()); + } + + @Test + public void testDoFilterDiscovery_invalidViewName_emptyString() { + assertEquals(0, filteredSearchHelper.doFilterDiscovery("").getFilters().size()); + } +}*/ diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/FiltersConfigTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/FiltersConfigTest.java new file mode 100644 index 0000000..54ebe99 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/FiltersConfigTest.java @@ -0,0 +1,47 @@ +package org.onap.aai.sparky.search.filters.config; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; + +public class FiltersConfigTest { + + private FiltersConfig filtersConfig; + private FiltersForViewsConfig viewsConfig; + private FiltersDetailsConfig filtersDetailsConfig; + + @Before + public void init() throws Exception { + filtersConfig = new FiltersConfig(); + viewsConfig = new FiltersForViewsConfig(); + filtersDetailsConfig = new FiltersDetailsConfig(); + + + } + + + @Test + public void updateValues() { + + filtersConfig.setViewsFileName(""); + assertNotNull(filtersConfig.getViewsFileName()); + filtersConfig.setFiltersFileName(""); + assertNotNull(filtersConfig.getFiltersFileName()); + filtersConfig.setViewsConfig(viewsConfig); + assertNotNull(filtersConfig.getViewsConfig()); + filtersConfig.setFiltersConfig(filtersDetailsConfig); + assertNotNull(filtersConfig.getFiltersConfig()); + assertNull(filtersConfig.getFilterById("")); + assertNull(filtersConfig.readUiViewsConfig()); + + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/FiltersDetailsConfigTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/FiltersDetailsConfigTest.java new file mode 100644 index 0000000..3bf4842 
--- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/FiltersDetailsConfigTest.java @@ -0,0 +1,40 @@ +package org.onap.aai.sparky.search.filters.config; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; + +public class FiltersDetailsConfigTest { + + private FiltersDetailsConfig filtersDetailsConfig; + private ArrayList<UiFilterConfig> filters; + private FiltersDetailsConfig filtersDetailsConfigs; + + @Before + public void init() throws Exception { + filtersDetailsConfig = new FiltersDetailsConfig(); + filters = new ArrayList<UiFilterConfig>(); + filtersDetailsConfigs = new FiltersDetailsConfig(filters); + + } + + + @Test + public void updateValues() { + + filtersDetailsConfig.setFilters(filters); + assertNotNull(filtersDetailsConfig.getFilters()); + assertNotNull(filtersDetailsConfig.toString()); + + + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/FiltersForViewsConfigTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/FiltersForViewsConfigTest.java new file mode 100644 index 0000000..7740610 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/FiltersForViewsConfigTest.java @@ -0,0 +1,40 @@ +package org.onap.aai.sparky.search.filters.config; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; + +public class FiltersForViewsConfigTest { + + private FiltersForViewsConfig filtersForView; + private ArrayList<UiViewListItemConfig> views; + private FiltersForViewsConfig filtersForViews; + + @Before + public void init() throws Exception { + filtersForView = new FiltersForViewsConfig(); + views = new ArrayList<UiViewListItemConfig>(); + filtersForViews = new FiltersForViewsConfig(views); + + } + + + @Test + public void updateValues() { + + filtersForView.setViews(views); + assertNotNull(filtersForView.getViews()); + assertNotNull(filtersForView.toString()); + + + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/UiFilterConfigTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/UiFilterConfigTest.java new file mode 100644 index 0000000..7cd539d --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/UiFilterConfigTest.java @@ -0,0 +1,57 @@ +package org.onap.aai.sparky.search.filters.config; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; + +public class UiFilterConfigTest { + + private UiFilterConfig uiFiltersConfig; + private UiFilterOptionsValuesConfig defaultValue; + private UiFilterDataSourceConfig dataSource; + private ArrayList<UiFilterOptionsValuesConfig> optionValues; + + + @Before + public void init() throws Exception { + defaultValue = new UiFilterOptionsValuesConfig("",""); + dataSource = 
new UiFilterDataSourceConfig(); + optionValues = new ArrayList<UiFilterOptionsValuesConfig>(); + uiFiltersConfig = new UiFilterConfig("","","","","","",defaultValue,"",optionValues,dataSource); + + + + } + + + @Test + public void updateValues() { + + uiFiltersConfig.setFilterId(""); + assertNotNull(uiFiltersConfig.getFilterId()); + uiFiltersConfig.setFilterName(""); + assertNotNull(uiFiltersConfig.getFilterName()); + uiFiltersConfig.setDisplayName(""); + assertNotNull(uiFiltersConfig.getDisplayName()); + uiFiltersConfig.setDataType(""); + assertNotNull(uiFiltersConfig.getDataType()); + uiFiltersConfig.setMultiSelect(""); + assertNotNull(uiFiltersConfig.getMultiSelect()); + uiFiltersConfig.setWatermark(""); + assertNotNull(uiFiltersConfig.getWatermark()); + uiFiltersConfig.setOptionsType(""); + assertNotNull(uiFiltersConfig.getOptionsType()); + uiFiltersConfig.setDataSource(dataSource); + assertNotNull(uiFiltersConfig.getDataSource()); + uiFiltersConfig.setOptionsValues(optionValues); + assertNotNull(uiFiltersConfig.getOptionsValues()); + uiFiltersConfig.setDefaultValue(defaultValue); + assertNotNull(uiFiltersConfig.getDefaultValue()); + assertNotNull(uiFiltersConfig.toString()); + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/UiFilterDataSourceConfigTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/UiFilterDataSourceConfigTest.java new file mode 100644 index 0000000..e4402cf --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/UiFilterDataSourceConfigTest.java @@ -0,0 +1,44 @@ +package org.onap.aai.sparky.search.filters.config; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; + +public class UiFilterDataSourceConfigTest { + + private UiFilterDataSourceConfig uiFiltersDataSourceConfig; + private UiFilterDataSourceConfig uiFiltersDataSourceConfigs; + + + @Before + public void init() throws Exception { + + uiFiltersDataSourceConfig = new UiFilterDataSourceConfig(); + uiFiltersDataSourceConfigs = new UiFilterDataSourceConfig("","","",""); + + + + } + + + @Test + public void updateValues() { + + uiFiltersDataSourceConfig.setIndexName(""); + assertNotNull(uiFiltersDataSourceConfig.getIndexName()); + uiFiltersDataSourceConfig.setDocType(""); + assertNotNull(uiFiltersDataSourceConfig.getDocType()); + uiFiltersDataSourceConfig.setFieldName(""); + assertNotNull(uiFiltersDataSourceConfig.getFieldName()); + uiFiltersDataSourceConfig.setPathToField(""); + assertNotNull(uiFiltersDataSourceConfig.getPathToField()); + assertNotNull(uiFiltersDataSourceConfig.toString()); + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/UiFilterListItemConfigTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/UiFilterListItemConfigTest.java new file mode 100644 index 0000000..48e7955 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/UiFilterListItemConfigTest.java @@ -0,0 +1,34 @@ +package org.onap.aai.sparky.search.filters.config; + +import static org.junit.Assert.assertNotNull; + +import org.junit.Before; +import org.junit.Test; + +public class UiFilterListItemConfigTest { 
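+ // Exercises setter/getter round-trips and toString() for UiFilterListItemConfig.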
+ + private UiFilterListItemConfig uiFilterLstItem; + private UiFilterOptionsValuesConfig defaultValue; + + + @Before + public void init() throws Exception { + defaultValue = new UiFilterOptionsValuesConfig("",""); + uiFilterLstItem = new UiFilterListItemConfig("",defaultValue); + + + + } + + + @Test + public void updateValues() { + + uiFilterLstItem.setFilterId(""); + assertNotNull(uiFilterLstItem.getFilterId()); + uiFilterLstItem.setDefaultValue(defaultValue); + assertNotNull(uiFilterLstItem.getDefaultValue()); + assertNotNull(uiFilterLstItem.toString()); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/UiFilterOptionsValuesConfigTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/UiFilterOptionsValuesConfigTest.java new file mode 100644 index 0000000..ca15771 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/UiFilterOptionsValuesConfigTest.java @@ -0,0 +1,31 @@ +package org.onap.aai.sparky.search.filters.config; + +import static org.junit.Assert.assertNotNull; + +import org.junit.Before; +import org.junit.Test; + +public class UiFilterOptionsValuesConfigTest { + + private UiFilterOptionsValuesConfig uiFilterOptionsValues; + + + @Before + public void init() throws Exception { + + uiFilterOptionsValues = new UiFilterOptionsValuesConfig("",""); + + } + + + @Test + public void updateValues() { + + uiFilterOptionsValues.setCode(""); + assertNotNull(uiFilterOptionsValues.getCode()); + uiFilterOptionsValues.setDecode(""); + assertNotNull(uiFilterOptionsValues.getDecode()); + assertNotNull(uiFilterOptionsValues.toString()); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/UiViewListItemConfigTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/UiViewListItemConfigTest.java new file mode 100644 index 0000000..39b919f --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/config/UiViewListItemConfigTest.java @@ -0,0 +1,38 @@ +package org.onap.aai.sparky.search.filters.config; + +import static org.junit.Assert.assertNotNull; + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; + +public class UiViewListItemConfigTest { + + private ArrayList<UiFilterListItemConfig> uiFilterLstItem; + private UiViewListItemConfig uiViewLstItem; + + + @Before + public void init() throws Exception { + + uiFilterLstItem = new ArrayList<UiFilterListItemConfig>(); + uiViewLstItem = new UiViewListItemConfig("",uiFilterLstItem); + + + + } + + + @Test + public void updateValues() { + + uiViewLstItem.setViewName(""); + assertNotNull(uiViewLstItem.getViewName()); + uiViewLstItem.setListOfFilters(uiFilterLstItem); + assertNotNull(uiViewLstItem.getFilters()); + assertNotNull(uiViewLstItem.toString()); + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/entity/DiscoverFiltersRequest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/entity/DiscoverFiltersRequest.java new file mode 100644 index 0000000..336e3b1 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/entity/DiscoverFiltersRequest.java @@ -0,0 +1,40 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T 
Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.search.filters.entity; + +public class DiscoverFiltersRequest { + + private String viewName; + + public String getViewName() { + return viewName; + } + + public void setViewName(String viewName) { + this.viewName = viewName; + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/entity/ViewConfiguration.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/entity/ViewConfiguration.java new file mode 100644 index 0000000..f6c352b --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/entity/ViewConfiguration.java @@ -0,0 +1,68 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ + +package org.onap.aai.sparky.search.filters.entity; + +import java.util.ArrayList; +import java.util.List; + +public class ViewConfiguration { + + private String viewName; + private List<ViewFilter> viewFilters; + + public ViewConfiguration() { + viewFilters = new ArrayList<ViewFilter>(); + } + + public ViewConfiguration(String viewName) { + viewFilters = new ArrayList<ViewFilter>(); + this.setViewName(viewName); + } + + public String getViewName() { + return viewName; + } + + public void setViewName(String viewName) { + this.viewName = viewName; + } + + public List<ViewFilter> getViewFilters() { + return viewFilters; + } + + public void setViewFilters(List<ViewFilter> viewFilters) { + this.viewFilters = viewFilters; + } + + public void addViewFilter(ViewFilter viewFilter) { + if (viewFilters != null) { + if (!viewFilters.contains(viewFilter)) { + viewFilters.add(viewFilter); + } + } + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/entity/ViewFilter.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/entity/ViewFilter.java new file mode 100644 index 0000000..94832d5 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/search/filters/entity/ViewFilter.java @@ -0,0 +1,57 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ + +package org.onap.aai.sparky.search.filters.entity; + +import org.onap.aai.sparky.search.filters.config.UiFilterOptionsValuesConfig; + +public class ViewFilter { + + private String id; + private UiFilterOptionsValuesConfig defaultValue; + + public ViewFilter() {} + + public ViewFilter(String id, UiFilterOptionsValuesConfig defaultValue) { + this.id = id; + this.defaultValue = defaultValue; + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public UiFilterOptionsValuesConfig getDefaultValue() { + return defaultValue; + } + + public void setDefaultValue(UiFilterOptionsValuesConfig defaultValue) { + this.defaultValue = defaultValue; + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/security/SecurityContextFactoryImplTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/security/SecurityContextFactoryImplTest.java new file mode 100644 index 0000000..0c3a8ce --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/security/SecurityContextFactoryImplTest.java @@ -0,0 +1,143 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.security; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.io.FileInputStream; + +import javax.net.ssl.SSLContext; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; +import org.onap.aai.sparky.security.SecurityContextFactory; +import org.onap.aai.sparky.security.SecurityContextFactoryImpl; +import org.onap.aai.sparky.util.LogValidator; + +import ch.qos.logback.classic.Level; + +/** + * The Class SecurityContextFactoryImplTest. + */ +public class SecurityContextFactoryImplTest { + + private LogValidator logValidator; + + /** + * Inits the. + * + * @throws Exception the exception + */ + @Before + public void init() throws Exception { + logValidator = new LogValidator(); + logValidator.initializeLogger(Level.WARN); + } + + /** + * Basic construction test. 
+ * + * @throws Exception the exception + */ + @Test + public void basicConstructionTest() throws Exception { + + SecurityContextFactory sslContextFactory = new SecurityContextFactoryImpl(); + + assertEquals("TLS", sslContextFactory.getSslAlgorithm()); + assertEquals("SunX509", sslContextFactory.getKeyManagerAlgortihm()); + assertEquals("PKCS12", sslContextFactory.getKeyStoreType()); + assertEquals(false, sslContextFactory.isServerCertificationChainValidationEnabled()); + assertEquals(null, sslContextFactory.getClientCertFileInputStream()); + } + + /** + * Validate secure context. + * + * @throws Exception the exception + */ + @Test + public void validateSecureContext() throws Exception { + + SecurityContextFactory sslContextFactory = new SecurityContextFactoryImpl(); + + SSLContext sslContext = sslContextFactory.getSecureContext(); + + assertNotNull(sslContext); + } + + /** + * Validate secure context with server cert chain validation. + * + * @throws Exception the exception + */ + @Test + public void validateSecureContext_withServerCertChainValidation() throws Exception { + + SecurityContextFactory sslContextFactory = new SecurityContextFactoryImpl(); + sslContextFactory.setServerCertificationChainValidationEnabled(true); + sslContextFactory.setTrustStoreFileName("filename"); + + sslContextFactory.setClientCertFileName(null); + + SSLContext sslContext = sslContextFactory.getSecureContext(); + + assertNotNull(sslContext); + } + + /** + * Validate accessors. + * + * @throws Exception the exception + */ + @Test + public void validateAccessors() throws Exception { + + SecurityContextFactory sslContextFactory = new SecurityContextFactoryImpl(); + + FileInputStream mockInputStream = Mockito.mock(FileInputStream.class); + + sslContextFactory.setSslAlgorithm("sslAlgorithm"); + sslContextFactory.setKeyManagerAlgortihm("keyManagerAlgorithm"); + sslContextFactory.setKeyStoreType("keyStoreType"); + sslContextFactory.setClientCertFileInputStream(mockInputStream); + sslContextFactory.setServerCertificationChainValidationEnabled(true); + sslContextFactory.setTrustStoreFileName("truststoreFileName"); + sslContextFactory.setClientCertPassword("password"); + + assertEquals("sslAlgorithm", sslContextFactory.getSslAlgorithm()); + assertEquals("keyManagerAlgorithm", sslContextFactory.getKeyManagerAlgortihm()); + assertEquals("keyStoreType", sslContextFactory.getKeyStoreType()); + assertEquals(mockInputStream, sslContextFactory.getClientCertFileInputStream()); + assertEquals(true, sslContextFactory.isServerCertificationChainValidationEnabled()); + assertEquals("truststoreFileName", sslContextFactory.getTrustStoreFileName()); + assertEquals("password", sslContextFactory.getClientCertPassword()); + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/security/portal/TestPortalRestAPIServiceImpl.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/security/portal/TestPortalRestAPIServiceImpl.java new file mode 100644 index 0000000..bdd9093 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/security/portal/TestPortalRestAPIServiceImpl.java @@ -0,0 +1,283 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + * + +package org.onap.aai.sparky.security.portal; + +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertThat; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; + +import javax.servlet.http.HttpServletRequest; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.onap.aai.sparky.security.portal.PortalRestAPIServiceImpl; +import org.onap.aai.sparky.security.portal.UserManager; +import org.onap.aai.sparky.security.portal.config.PortalAuthenticationConfig; +import org.onap.aai.sparky.security.portal.config.RolesConfig; +import org.openecomp.portalsdk.core.onboarding.exception.PortalAPIException; +import org.openecomp.portalsdk.core.restful.domain.EcompRole; +import org.openecomp.portalsdk.core.restful.domain.EcompUser; +import org.powermock.core.classloader.annotations.PowerMockIgnore; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; +import org.powermock.reflect.Whitebox; + +@PowerMockIgnore({ "javax.crypto.*" }) +@RunWith(PowerMockRunner.class) +@PrepareForTest({ PortalAuthenticationConfig.class, RolesConfig.class }) +public class TestPortalRestAPIServiceImpl { + + private static File testUsersFile; + private static final String LOGINID_1 = "200"; + private static final String LOGINID_2 = "201"; + private static final String VIEW_ROLE = "View"; + + enum TestData { + // @formatter:off + TEST_USERS ("src/test/resources/portal/test-users.config"), + PORTAL_AUTHENTICATION_PROPERTIES ("src/test/resources/portal/portal-authentication.properties"), + ROLES_CONFIG_FILE ("src/test/resources/portal/roles.config"); + + private String filename; + TestData(String filename) {this.filename = filename;} + public String getFilename() {return this.filename;} + // @formatter:on + } + + @Mock + private UserManager userManager = new UserManager(testUsersFile); + + @InjectMocks + private PortalRestAPIServiceImpl portalApi = new PortalRestAPIServiceImpl(); + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + testUsersFile = Paths.get(TestData.TEST_USERS.getFilename()).toFile(); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + 
Files.deleteIfExists(testUsersFile.toPath()); + } + + @Before + public void setUp() throws Exception { + Whitebox.setInternalState(RolesConfig.class, "ROLES_CONFIG_FILE", + TestData.ROLES_CONFIG_FILE.getFilename()); + } + + @After + public void tearDown() throws Exception { + Files.deleteIfExists(testUsersFile.toPath()); + } + + @Test + public void testPushAndGetUser() throws Exception { + EcompUser user = new EcompUser(); + user.setLoginId(LOGINID_1); + + portalApi.pushUser(user); + EcompUser storedUser = portalApi.getUser(user.getLoginId()); + + assertThat(storedUser.getLoginId(), is(user.getLoginId())); + } + + @Test(expected = PortalAPIException.class) + public void testCannotPushUserTwice() throws Exception { + EcompUser user = new EcompUser(); + user.setLoginId(LOGINID_1); + + portalApi.pushUser(user); + portalApi.pushUser(user); + } + + @Test(expected = PortalAPIException.class) + public void testGetUnknownUser() throws Exception { + EcompUser user = new EcompUser(); + user.setLoginId(LOGINID_1); + portalApi.pushUser(user); + + portalApi.getUser("does-not-exist"); + } + + @Test + public void testGetUsers() throws Exception { + EcompUser user = new EcompUser(); + user.setLoginId(LOGINID_1); + + EcompUser user2 = new EcompUser(); + user2.setLoginId(LOGINID_2); + + portalApi.pushUser(user); + portalApi.pushUser(user2); + + List<EcompUser> users = portalApi.getUsers(); + + assertThat(users.size(), is(2)); + assertThat(users.get(0).getLoginId(), is(LOGINID_1)); + assertThat(users.get(1).getLoginId(), is(LOGINID_2)); + } + + @Test + public void testEditUser() throws Exception { + EcompUser user = new EcompUser(); + user.setLoginId(LOGINID_1); + user.setFirstName("Bob"); + + portalApi.pushUser(user); + + user.setFirstName("Jen"); + portalApi.editUser(LOGINID_1, user); + + assertThat(portalApi.getUser(LOGINID_1).getFirstName(), is("Jen")); + } + + @Test(expected = PortalAPIException.class) + public void testEditUnknowUser() throws Exception { + EcompUser user = new EcompUser(); + user.setLoginId(LOGINID_1); + portalApi.pushUser(user); + + portalApi.editUser("does-no-exist", new EcompUser()); + } + + @Test + public void testGetRoles() throws Exception { + EcompUser user = new EcompUser(); + user.setLoginId(LOGINID_1); + user.setRoles(new HashSet<>(portalApi.getAvailableRoles())); + + portalApi.pushUser(user); + + List<EcompRole> userRoles = portalApi.getUserRoles(LOGINID_1); + + assertThat(userRoles.size(), is(1)); + assertThat(userRoles.get(0).getId(), is(1L)); + assertThat(userRoles.get(0).getName(), is(VIEW_ROLE)); + } + + @Test + public void testPushUserRoles() throws Exception { + EcompUser user = new EcompUser(); + user.setLoginId(LOGINID_1); + portalApi.pushUser(user); + + EcompUser storedUser = portalApi.getUser(LOGINID_1); + assertThat(storedUser.getRoles(), nullValue()); + + portalApi.pushUserRole(LOGINID_1, UserManager.getRoles()); + + Set<EcompRole> storedUserRoles = portalApi.getUser(LOGINID_1).getRoles(); + ArrayList<EcompRole> rolesList = new ArrayList<>(storedUserRoles); + + assertThat(rolesList.size(), is(1)); + assertThat(rolesList.get(0).getId(), is(1L)); + assertThat(rolesList.get(0).getName(), is(VIEW_ROLE)); + } + + @Test + public void testCannotPushRoleTwice() throws Exception { + EcompUser user = new EcompUser(); + user.setLoginId(LOGINID_1); + portalApi.pushUser(user); + + EcompUser storedUser = portalApi.getUser(LOGINID_1); + assertThat(storedUser.getRoles(), nullValue()); + + portalApi.pushUserRole(LOGINID_1, UserManager.getRoles()); + 
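+ // pushing the identical role set a second time should not create duplicate role entries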
portalApi.pushUserRole(LOGINID_1, UserManager.getRoles()); + + Set<EcompRole> storedUserRoles = portalApi.getUser(LOGINID_1).getRoles(); + ArrayList<EcompRole> rolesList = new ArrayList<>(storedUserRoles); + + assertThat(rolesList.size(), is(1)); + assertThat(rolesList.get(0).getId(), is(1L)); + assertThat(rolesList.get(0).getName(), is(VIEW_ROLE)); + } + + @Test + public void testDeleteUserRoles() throws Exception { + EcompUser user = new EcompUser(); + user.setLoginId(LOGINID_1); + user.setFirstName("Bob"); + List<EcompRole> availableRoles = portalApi.getAvailableRoles(); + user.setRoles(new LinkedHashSet<EcompRole>(availableRoles)); + + portalApi.pushUser(user); + + portalApi.pushUserRole(LOGINID_1, new ArrayList<EcompRole>()); + + EcompUser userWithNoRoles = portalApi.getUser(LOGINID_1); + + assertThat(userWithNoRoles.getRoles(), empty()); + } + + @Test + public void testPushNullRoles() throws Exception { + EcompUser user = new EcompUser(); + user.setLoginId(LOGINID_1); + user.setFirstName("Bob"); + List<EcompRole> availableRoles = portalApi.getAvailableRoles(); + user.setRoles(new LinkedHashSet<EcompRole>(availableRoles)); + + portalApi.pushUser(user); + portalApi.pushUserRole(LOGINID_1, null); + + EcompUser userWithNoRoles = portalApi.getUser(LOGINID_1); + + assertThat(userWithNoRoles.getRoles(), empty()); + } + + @Test + public void testIsAppAuthenticated() throws Exception { + Whitebox.setInternalState(PortalAuthenticationConfig.class, "AUTHENTICATION_CONFIG_FILE", + TestData.PORTAL_AUTHENTICATION_PROPERTIES.getFilename()); + + HttpServletRequest request = Mockito.mock(HttpServletRequest.class); + when(request.getHeader("username")).thenReturn("testuser"); + when(request.getHeader("password")).thenReturn("testpassword"); + + assertThat(portalApi.isAppAuthenticated(request), is(true)); + } +}*/
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/security/portal/TestUserManager.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/security/portal/TestUserManager.java new file mode 100644 index 0000000..c34854a --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/security/portal/TestUserManager.java @@ -0,0 +1,265 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.security.portal; + +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThat; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.stream.Collectors; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.onap.aai.sparky.security.portal.UserManager; +import org.onap.aai.sparky.util.NodeUtils; +import org.openecomp.portalsdk.core.restful.domain.EcompUser; +import org.powermock.modules.junit4.PowerMockRunner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.gson.Gson; + +@RunWith(PowerMockRunner.class) +// @PrepareForTest(RolesConfig.class) +public class TestUserManager { + + private static final String LOGINID_3 = "3"; + private static File noFile; + private static File concurrentUsers; + private static File concurrentEditUsers; + + private static final Gson GSON = new Gson(); + private static final String LOGINID_1 = "1"; + private static final String LOGINID_2 = "2"; + + private static Logger logger = LoggerFactory.getLogger(TestUserManager.class); + + enum TestData { + // @formatter:off + NO_FILE ("src/test/resources/portal/no-users.config"), + CONCURRENT_USERS ("src/test/resources/portal/concurrent-users.config"), + CONCURRENT_EDIT_USERS ("src/test/resources/portal/concurrent-edit-users.config"); +// ROLES_CONFIG_FILE ("src/test/resources/portal/roles.config"); + + private String filename; + TestData(String filename) {this.filename = filename;} + public String getFilename() {return this.filename;} + // @formatter:on + } + + @BeforeClass + public static void setUpBeforeClass() throws 
Exception { + noFile = Paths.get(TestData.NO_FILE.getFilename()).toFile(); + concurrentUsers = Paths.get(TestData.CONCURRENT_USERS.getFilename()).toFile(); + concurrentEditUsers = Paths.get(TestData.CONCURRENT_EDIT_USERS.getFilename()).toFile(); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + Files.deleteIfExists(concurrentUsers.toPath()); + Files.deleteIfExists(concurrentEditUsers.toPath()); + } + + @Before + public void setUp() throws Exception { + EcompUser user1 = new EcompUser(); + user1.setLoginId(LOGINID_1); + + EcompUser user2 = new EcompUser(); + user2.setLoginId(LOGINID_2); + + List<EcompUser> users = Arrays.asList(user1, user2); + Files.write(concurrentEditUsers.toPath(), GSON.toJson(users).getBytes()); + + // Whitebox.setInternalState(RolesConfig.class, "ROLES_CONFIG_FILE", + // TestData.ROLES_CONFIG_FILE.getFilename()); + } + + @After + public void tearDown() throws Exception { + Files.deleteIfExists(concurrentUsers.toPath()); + Files.deleteIfExists(concurrentEditUsers.toPath()); + } + + @Test + public void testGetUsersNoFile() throws Exception { + UserManager userManager = new UserManager(noFile); + List<EcompUser> users = userManager.getUsers(); + + assertThat(users, empty()); + } + + @Test + public void testConcurrentPush() throws Exception { + Callable<EcompUser> pushTask = () -> { + return pushTask(concurrentUsers, String.valueOf(NodeUtils.getRandomTxnId())); + }; + + List<Callable<EcompUser>> callables = Arrays.asList(pushTask, pushTask, pushTask, pushTask, + pushTask); + + ExecutorService executor = Executors.newWorkStealingPool(); + executor.invokeAll(callables).stream().map(future -> { + try { + return future.get(); + } catch (Exception e) { + throw new IllegalStateException(e); + } + }); + + UserManager userManager = new UserManager(concurrentUsers); + assertThat(userManager.getUsers().size(), is(5)); + } + + /** + * Concurrent push/edit with sequential retry on failure. + * + * @throws Exception + */ + @Test + public void testConcurrentPushAndEdit() throws Exception { + Callable<EcompUser> pushTaskRandomId = () -> { + return pushTask(concurrentEditUsers, String.valueOf(NodeUtils.getRandomTxnId())); + }; + + Callable<EcompUser> pushTaskId3 = () -> { + return pushTask(concurrentEditUsers, LOGINID_3); + }; + + Callable<EcompUser> editTaskId1 = () -> { + return editTask(LOGINID_1, "Bob"); + }; + + Callable<EcompUser> editTaskId2 = () -> { + return editTask(LOGINID_2, "Jen"); + }; + + Callable<EcompUser> editTaskId3 = () -> { + return editTask(LOGINID_3, "Amy"); + }; + + List<Callable<EcompUser>> callables = Arrays.asList(pushTaskRandomId, pushTaskRandomId, + pushTaskId3, editTaskId1, pushTaskRandomId, pushTaskRandomId, editTaskId3, editTaskId2, + pushTaskRandomId); + + ExecutorService executor = Executors.newWorkStealingPool(); + List<EcompUser> userTasks = executor.invokeAll(callables).stream().map(future -> { + try { + return future.get(); + } catch (Exception e) { + throw new IllegalStateException(e); + } + }).collect(Collectors.toList()); + + assertThat(userTasks.size(), is(9)); + + assertUserPushEdit(concurrentEditUsers); + } + + /** + * Retry push/edit if assert fails following concurrent attempt. 
+ * + * @param userFile + * @throws Exception + */ + private void assertUserPushEdit(File userFile) throws Exception { + UserManager userManager = new UserManager(userFile); + try { + assertThat(userManager.getUsers().size(), is(8)); + } catch (Throwable t) { + int size = userManager.getUsers().size(); + logger.error("Failed to push all users. Only created: " + size + " users. " + t.getMessage()); + pushTask(concurrentEditUsers, String.valueOf(NodeUtils.getRandomTxnId())); + assertThat(userManager.getUsers().size(), is(size + 1)); + } + + try { + assertThat(userManager.getUser(LOGINID_1).get().getFirstName(), is("Bob")); + } catch (Throwable t) { + logger.error("Failed to edit user. " + t.getMessage()); + retryEdit(userManager, LOGINID_1, "Bob"); + } + + try { + assertThat(userManager.getUser(LOGINID_2).get().getFirstName(), is("Jen")); + } catch (Throwable t) { + logger.error("Failed to edit user. " + t.getMessage()); + retryEdit(userManager, LOGINID_2, "Jen"); + } + + try { + assertThat(userManager.getUser(LOGINID_3).isPresent(), is(true)); + } catch (Throwable t) { + logger.error("Failed to push user. " + t.getMessage()); + pushTask(concurrentEditUsers, LOGINID_3); + assertThat(userManager.getUser(LOGINID_3).isPresent(), is(true)); + } + + try { + assertThat(userManager.getUser(LOGINID_3).get().getFirstName(), is("Amy")); + } catch (Throwable t) { + logger.error("Failed to edit user. " + t.getMessage()); + retryEdit(userManager, LOGINID_3, "Amy"); + } + } + + private void retryEdit(UserManager userManager, String loginId, String firstName) + throws IOException { + editTask(loginId, firstName); + assertThat(userManager.getUser(loginId).get().getFirstName(), is(firstName)); + } + + private EcompUser pushTask(File fileStore, String loginId) throws IOException { + UserManager userManager = new UserManager(fileStore); + EcompUser user = new EcompUser(); + user.setLoginId(loginId); + userManager.pushUser(user); + return user; + } + + private EcompUser editTask(String loginId, String firstName) throws IOException { + UserManager userManager = new UserManager(concurrentEditUsers); + EcompUser user = new EcompUser(); + user.setLoginId(loginId); + user.setFirstName(firstName); + userManager.editUser(loginId, user); + return user; + } + +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/subscription/config/SubscriptionConfigTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/subscription/config/SubscriptionConfigTest.java new file mode 100644 index 0000000..6aaca1a --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/subscription/config/SubscriptionConfigTest.java @@ -0,0 +1,54 @@ +package org.onap.aai.sparky.subscription.config; + + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; + + +public class SubscriptionConfigTest { + + + + private SubscriptionConfig subscriptionConfig; + private ArrayList<String> annEntities; + + @Before + public void init() throws Exception { + subscriptionConfig = new SubscriptionConfig(); + annEntities = new ArrayList<String>(); + + } + + + @Test + public void updateValues() { + + subscriptionConfig.setSubscriptionTarget("portal"); + assertNotNull(subscriptionConfig.getSubscriptionTarget()); + subscriptionConfig.setSubscriptionOrigin("sparky"); + assertNotNull(subscriptionConfig.getSubscriptionOrigin()); + subscriptionConfig.setSubscriptionMessageType("subscribe"); + assertNotNull(subscriptionConfig.getSubscriptionMessageType()); + subscriptionConfig.setSubscriptionTopic("portal-dispalyNotification"); + assertNotNull(subscriptionConfig.getSubscriptionTopic()); + subscriptionConfig.setAnnEntitiyTypes(annEntities); + assertNotNull(subscriptionConfig.getAnnEntitiyTypes()); + subscriptionConfig.setLaunchOITarget("portal"); + assertNotNull(subscriptionConfig.getLaunchOITarget()); + subscriptionConfig.setLaunchOIOrigin("sparky"); + assertNotNull(subscriptionConfig.getLaunchOIOrigin()); + subscriptionConfig.setLaunchOIMessageType("publish"); + assertNotNull(subscriptionConfig.getLaunchOIMessageType()); + subscriptionConfig.setLaunchOITopic("portal-displayNotification"); + assertNotNull(subscriptionConfig.getLaunchOITopic()); + subscriptionConfig.setIsLaunchOIEnabled(true); + assertTrue(subscriptionConfig.getIsLaunchOIEnabled()); + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/subscription/payload/entity/MessageTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/subscription/payload/entity/MessageTest.java new file mode 100644 index 0000000..bd3297e --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/subscription/payload/entity/MessageTest.java @@ -0,0 +1,32 @@ +package org.onap.aai.sparky.subscription.payload.entity; + +import static org.junit.Assert.assertNotNull; + + +import org.junit.Before; +import org.junit.Test; + +public class MessageTest { + + private Message message; + private Payload payLoad; + + @Before + public void init() throws Exception { + message = new Message(); + payLoad = new Payload(); + + } + + + @Test + public void updateValues() { + + message.setApplicationName("Network Navigator"); + assertNotNull(message.getApplicationName()); + message.setPayload(payLoad); + assertNotNull(message.getPayload()); + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/subscription/payload/entity/ObjectInspectorPayloadTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/subscription/payload/entity/ObjectInspectorPayloadTest.java new file mode 100644 index 0000000..1bbddcb --- /dev/null +++ 
b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/subscription/payload/entity/ObjectInspectorPayloadTest.java @@ -0,0 +1,43 @@ +package org.onap.aai.sparky.subscription.payload.entity; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +import java.io.IOException; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.subscription.config.SubscriptionConfig; + +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.databind.JsonMappingException; + +public class ObjectInspectorPayloadTest { + + private ObjectInspectorPayload objectInspectorPayload; + private Message message; + + @Before + public void init() throws Exception { + objectInspectorPayload = new ObjectInspectorPayload(); + message = new Message(); + + } + + + @Test + public void updateValues() throws JsonParseException, JsonMappingException, IOException { + + objectInspectorPayload.setMessage(message); + assertNotNull(objectInspectorPayload.getMessage()); + objectInspectorPayload.setTopic("sparky"); + assertNotNull(objectInspectorPayload.getTopic()); + objectInspectorPayload.setMessageType("subscribe"); + assertNotNull(objectInspectorPayload.getMessageType()); + objectInspectorPayload.setOrigin("portal-dispalyNotification"); + assertNotNull(objectInspectorPayload.getOrigin()); + objectInspectorPayload.setTarget(""); + assertNotNull(objectInspectorPayload.getTarget()); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/subscription/payload/entity/ParamsTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/subscription/payload/entity/ParamsTest.java new file mode 100644 index 0000000..eac6f74 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/subscription/payload/entity/ParamsTest.java @@ -0,0 +1,30 @@ +package org.onap.aai.sparky.subscription.payload.entity; + +import static org.junit.Assert.assertNotNull; + +import org.junit.Before; +import org.junit.Test; + +public class ParamsTest { + + + + private Params params; + + + @Before + public void init() throws Exception { + params = new Params(); + } + + + @Test + public void updateValues() { + + params.setObjectName(""); + assertNotNull(params.getObjectName()); + params.setExternalClassId("222"); + assertNotNull(params.getExternalClassId()); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/subscription/payload/entity/PayloadTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/subscription/payload/entity/PayloadTest.java new file mode 100644 index 0000000..95d43e9 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/subscription/payload/entity/PayloadTest.java @@ -0,0 +1,32 @@ +package org.onap.aai.sparky.subscription.payload.entity; + +import static org.junit.Assert.assertNotNull; + +import org.junit.Before; +import org.junit.Test; + +public class PayloadTest { + + + private Payload payload; + private Params params; + + @Before + public void init() throws Exception { + payload = new Payload(); + params = new Params(); + + } + + + @Test + public void updateValues() { + + payload.setAction("Launch-tab"); + assertNotNull(payload.getAction()); + payload.setParams(params); + assertNotNull(payload.getParams()); + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/AbstractEntitySynchronizerTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/AbstractEntitySynchronizerTest.java new file mode 100644 index 
0000000..3bd4928 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/AbstractEntitySynchronizerTest.java @@ -0,0 +1,463 @@ +package org.onap.aai.sparky.sync; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.security.SecureRandom; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.regex.Pattern; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.SearchableEntityLookup; +import org.onap.aai.sparky.config.oxm.SearchableOxmEntityDescriptor; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.dal.rest.HttpMethod; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; + + + +public class AbstractEntitySynchronizerTest { + + public class QuantumSynchronizer extends AbstractEntitySynchronizer { + + private boolean amIDone; + private SearchableEntityLookup searchableEntityLookup; + + protected QuantumSynchronizer(Logger logger, String syncName, int numSyncWorkers, int numActiveInventoryWorkers, + int numElasticsearchWorkers, String indexName, NetworkStatisticsConfig aaiStatConfig, + NetworkStatisticsConfig esStatConfig) throws Exception { + super(logger, syncName, numSyncWorkers, numActiveInventoryWorkers, numElasticsearchWorkers, indexName, + aaiStatConfig, esStatConfig); + + amIDone = false; + + } + + public void setSearchableEntityLookup(SearchableEntityLookup searchableEntityLookup) { + this.searchableEntityLookup = searchableEntityLookup; + } + + public void initCounters() { + if (this.searchableEntityLookup != null) { + this.aaiEntityStats + .intializeEntityCounters(searchableEntityLookup.getSearchableEntityDescriptors().keySet()); + this.esEntityStats + .intializeEntityCounters(searchableEntityLookup.getSearchableEntityDescriptors().keySet()); + + } + } + + public void setSyncDone(boolean done) { + this.amIDone = done; + } + + @Override + protected boolean isSyncDone() { + return amIDone; + } + + } + + private static Logger LOG = LoggerFactory.getInstance().getLogger(AbstractEntitySynchronizerTest.class); + private static SecureRandom secureRandom = new SecureRandom(); + + private QuantumSynchronizer quantumSync; + private NetworkStatisticsConfig aaiStatConfig; + private NetworkStatisticsConfig esStatConfig; + private ElasticSearchAdapter esAdapter; + private ActiveInventoryAdapter aaiAdapter; + private SearchableEntityLookup searchableEntityLookup; + + + @Before + public void init() throws Exception { + + aaiStatConfig = new NetworkStatisticsConfig(); + + aaiStatConfig.setNumSamplesPerThreadForRunningAverage(100); + + aaiStatConfig.setBytesHistogramLabel("[Response Size In Bytes]"); + aaiStatConfig.setBytesHistogramMaxYAxis(1000000L); + aaiStatConfig.setBytesHistogramNumBins(20); + aaiStatConfig.setBytesHistogramNumDecimalPoints(2); + + aaiStatConfig.setQueueLengthHistogramLabel("[Queue Item Length]"); + aaiStatConfig.setQueueLengthHistogramMaxYAxis(20000); + aaiStatConfig.setQueueLengthHistogramNumBins(20); + aaiStatConfig.setQueueLengthHistogramNumDecimalPoints(2); + + aaiStatConfig.setTaskAgeHistogramLabel("[Task Age In Ms]"); + 
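+ // task-age histogram: y-axis capped at 600000 ms (10 minutes), 20 bins, 2 decimal places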
aaiStatConfig.setTaskAgeHistogramMaxYAxis(600000L); + aaiStatConfig.setTaskAgeHistogramNumBins(20); + aaiStatConfig.setTaskAgeHistogramNumDecimalPoints(2); + + aaiStatConfig.setResponseTimeHistogramLabel("[Response Time In Ms]"); + aaiStatConfig.setResponseTimeHistogramMaxYAxis(1000L); + aaiStatConfig.setResponseTimeHistogramNumBins(20); + aaiStatConfig.setResponseTimeHistogramNumDecimalPoints(2); + + aaiStatConfig.setTpsHistogramLabel("[Transactions Per Second]"); + aaiStatConfig.setTpsHistogramMaxYAxis(100); + aaiStatConfig.setTpsHistogramNumBins(20); + aaiStatConfig.setTpsHistogramNumDecimalPoints(2); + + esStatConfig = new NetworkStatisticsConfig(); + + esStatConfig.setNumSamplesPerThreadForRunningAverage(100); + + esStatConfig.setBytesHistogramLabel("[Response Size In Bytes]"); + esStatConfig.setBytesHistogramMaxYAxis(1000000L); + esStatConfig.setBytesHistogramNumBins(20); + esStatConfig.setBytesHistogramNumDecimalPoints(2); + + esStatConfig.setQueueLengthHistogramLabel("[Queue Item Length]"); + esStatConfig.setQueueLengthHistogramMaxYAxis(20000); + esStatConfig.setQueueLengthHistogramNumBins(20); + esStatConfig.setQueueLengthHistogramNumDecimalPoints(2); + + esStatConfig.setTaskAgeHistogramLabel("[Task Age In Ms]"); + esStatConfig.setTaskAgeHistogramMaxYAxis(600000L); + esStatConfig.setTaskAgeHistogramNumBins(20); + esStatConfig.setTaskAgeHistogramNumDecimalPoints(2); + + esStatConfig.setResponseTimeHistogramLabel("[Response Time In Ms]"); + esStatConfig.setResponseTimeHistogramMaxYAxis(10000L); + esStatConfig.setResponseTimeHistogramNumBins(20); + esStatConfig.setResponseTimeHistogramNumDecimalPoints(2); + + esStatConfig.setTpsHistogramLabel("[Transactions Per Second]"); + esStatConfig.setTpsHistogramMaxYAxis(100); + esStatConfig.setTpsHistogramNumBins(20); + esStatConfig.setTpsHistogramNumDecimalPoints(2); + + esAdapter = Mockito.mock(ElasticSearchAdapter.class); + aaiAdapter = Mockito.mock(ActiveInventoryAdapter.class); + + } + + @Test + public void validateNetworkStatConfig() { + + // validate aaiStatConfig + + assertEquals(100, aaiStatConfig.getNumSamplesPerThreadForRunningAverage()); + + assertEquals("[Response Size In Bytes]",aaiStatConfig.getBytesHistogramLabel()); + assertEquals(1000000L,aaiStatConfig.getBytesHistogramMaxYAxis()); + assertEquals(20,aaiStatConfig.getBytesHistogramNumBins()); + assertEquals(2,aaiStatConfig.getBytesHistogramNumDecimalPoints()); + + assertEquals("[Queue Item Length]",aaiStatConfig.getQueueLengthHistogramLabel()); + assertEquals(20000,aaiStatConfig.getQueueLengthHistogramMaxYAxis()); + assertEquals(20,aaiStatConfig.getQueueLengthHistogramNumBins()); + assertEquals(2,aaiStatConfig.getQueueLengthHistogramNumDecimalPoints()); + + assertEquals("[Task Age In Ms]",aaiStatConfig.getTaskAgeHistogramLabel()); + assertEquals(600000L,aaiStatConfig.getTaskAgeHistogramMaxYAxis()); + assertEquals(20,aaiStatConfig.getTaskAgeHistogramNumBins()); + assertEquals(2,aaiStatConfig.getTaskAgeHistogramNumDecimalPoints()); + + assertEquals("[Response Time In Ms]",aaiStatConfig.getResponseTimeHistogramLabel()); + assertEquals(1000L,aaiStatConfig.getResponseTimeHistogramMaxYAxis()); + assertEquals(20,aaiStatConfig.getResponseTimeHistogramNumBins()); + assertEquals(2, aaiStatConfig.getResponseTimeHistogramNumDecimalPoints()); + + assertEquals("[Transactions Per Second]",aaiStatConfig.getTpsHistogramLabel()); + assertEquals(100,aaiStatConfig.getTpsHistogramMaxYAxis()); + assertEquals(20,aaiStatConfig.getTpsHistogramNumBins()); + 
assertEquals(2,aaiStatConfig.getTpsHistogramNumDecimalPoints()); + + // validate esStatConfig + + assertEquals(100, esStatConfig.getNumSamplesPerThreadForRunningAverage()); + + assertEquals("[Response Size In Bytes]",esStatConfig.getBytesHistogramLabel()); + assertEquals(1000000L,esStatConfig.getBytesHistogramMaxYAxis()); + assertEquals(20,esStatConfig.getBytesHistogramNumBins()); + assertEquals(2,esStatConfig.getBytesHistogramNumDecimalPoints()); + + assertEquals("[Queue Item Length]",esStatConfig.getQueueLengthHistogramLabel()); + assertEquals(20000,esStatConfig.getQueueLengthHistogramMaxYAxis()); + assertEquals(20,esStatConfig.getQueueLengthHistogramNumBins()); + assertEquals(2,esStatConfig.getQueueLengthHistogramNumDecimalPoints()); + + assertEquals("[Task Age In Ms]",esStatConfig.getTaskAgeHistogramLabel()); + assertEquals(600000L,esStatConfig.getTaskAgeHistogramMaxYAxis()); + assertEquals(20,esStatConfig.getTaskAgeHistogramNumBins()); + assertEquals(2,esStatConfig.getTaskAgeHistogramNumDecimalPoints()); + + assertEquals("[Response Time In Ms]",esStatConfig.getResponseTimeHistogramLabel()); + assertEquals(10000L,esStatConfig.getResponseTimeHistogramMaxYAxis()); + assertEquals(20,esStatConfig.getResponseTimeHistogramNumBins()); + assertEquals(2, esStatConfig.getResponseTimeHistogramNumDecimalPoints()); + + assertEquals("[Transactions Per Second]",esStatConfig.getTpsHistogramLabel()); + assertEquals(100,esStatConfig.getTpsHistogramMaxYAxis()); + assertEquals(20,esStatConfig.getTpsHistogramNumBins()); + assertEquals(2,esStatConfig.getTpsHistogramNumDecimalPoints()); + + } + + @Test + public void validateBasicConstruction() throws Exception { + + quantumSync = new QuantumSynchronizer(LOG, "quanumSynchronizer", 5, 5, 5, "quantum-search-index", aaiStatConfig, + esStatConfig); + + quantumSync.setAaiAdapter(aaiAdapter); + quantumSync.setElasticSearchAdapter(esAdapter); + + quantumSync.clearCache(); + + assertNotNull(quantumSync.getAaiAdapter()); + assertNotNull(quantumSync.getElasticSearchAdapter()); + assertEquals("quantum-search-index", quantumSync.getIndexName()); + + quantumSync.setIndexName("new-search-index-name"); + assertEquals("new-search-index-name", quantumSync.getIndexName()); + + quantumSync.shutdownExecutors(); + } + + private static String REST_STAT_LINE_FORMAT = ".*%s.*1XX:.*%d.*2XX:.*%d.*3XX:.*%d.*4XX:.*%d.*5XX:.*%d.*6XX:.*%d.*"; + private static String ENTITY_STATS_LINE_FORMAT = ".*%s.*TOTAL:.*%d.*FOUND:.*%d.*NO_PAYLOAD:.*%d.*NOT_FOUND:.*%d.*NUM_RETRIES:.*%d.*ERROR:.*%d.*"; + + private boolean reportContainsRestStatistics(String testString, HttpMethod httpMethod, long oneXX, long twoXX, + long threeXX, long fourXX, long fiveXX, long sixXX) { + + Pattern pattern = Pattern.compile(String.format(REST_STAT_LINE_FORMAT, httpMethod.toString(), oneXX, twoXX, + threeXX, fourXX, fiveXX, sixXX)); + + String lines[] = testString.split("\\r?\\n"); + + /* + * if we get a match on any of the lines in the report, then we + * succeeded + */ + + for (String line : lines) { + if (pattern.matcher(line).matches()) { + return true; + } + } + + return false; + } + + private boolean reportContainsEntityStatistics(String testString, String entityType, long total, long found, + long noPayload, long notFound, long numRetries, long numErrors) { + + Pattern pattern = Pattern.compile(String.format(ENTITY_STATS_LINE_FORMAT, entityType, total, found, + noPayload, notFound, numRetries, numErrors)); + + String lines[] = testString.split("\\r?\\n"); + + /* + * if we get a match on any of the lines in the 
report, then we + * succeeded + */ + + for (String line : lines) { + if (pattern.matcher(line).matches()) { + return true; + } + } + + return false; + } + + + @Test + public void validateStatisticTrackingAndReporting() throws Exception { + + quantumSync = new QuantumSynchronizer(LOG, "quanumSynchronizer", 5, 5, 5, "quantum-search-index", aaiStatConfig, + esStatConfig); + + quantumSync.setAaiAdapter(aaiAdapter); + quantumSync.setElasticSearchAdapter(esAdapter); + + searchableEntityLookup = new SearchableEntityLookup(); + + Map<String,SearchableOxmEntityDescriptor> searchableDescriptors = new HashMap<String,SearchableOxmEntityDescriptor>(); + + SearchableOxmEntityDescriptor complexDescriptor = new SearchableOxmEntityDescriptor(); + complexDescriptor.setEntityName("complex"); + List<String> pkeyNames = new ArrayList<String>(); + pkeyNames.add("physical-location-id"); + + complexDescriptor.setPrimaryKeyAttributeNames(pkeyNames); + complexDescriptor.setSearchableAttributes(pkeyNames); + + searchableDescriptors.put("complex", complexDescriptor); + + searchableEntityLookup.setSearchableEntityDescriptors(searchableDescriptors); + + quantumSync.setSearchableEntityLookup(searchableEntityLookup); + quantumSync.initCounters(); + + int randomMaxTimesToPegCounters = secureRandom.nextInt(1000); + + NetworkTransaction txn = null; + OperationResult opResult = null; + + + /* + * The result of this block is that for all HttpMethod types [ PUT, POST, GET, etc ] we'll peg a complex entity + * type counter a random number of times (set before the for loop, and for each status code category 1XX -> 6XX. + */ + + // GET, PUT, POST, DELETE, PATCH, HEAD + for (HttpMethod httpMethod : HttpMethod.values()) { + + // for randomMaxTimesToPegCounters + for (int numTimesToPegCounter = 0; numTimesToPegCounter < randomMaxTimesToPegCounters; numTimesToPegCounter++) { + txn = new NetworkTransaction(); + txn.setOperationType(httpMethod); + txn.setEntityType("complex"); + + /* + * set the txn optime to a random value between 0 and 10000 ms. + * Over thousands of counter statistics the random sample + * averages should be approximately uniform, but is highly + * dependent on the pseudo-RNG. + */ + txn.setOpTimeInMs(secureRandom.nextInt(10000)); + + // 1XX, 2XX, 3XX, 4XX, 5XX, 6XX + for ( int resultCode = 100; resultCode < 700; resultCode += 100) { + opResult = new OperationResult(); + opResult.setResultCode(resultCode); + + txn.setOperationResult(opResult); + + // peg both AAI and ES statistics + + quantumSync.updateActiveInventoryCounters(txn); + quantumSync.updateActiveInventoryCounters(httpMethod, "complex", opResult); + + quantumSync.updateElasticSearchCounters(txn); + quantumSync.updateElasticSearchCounters(httpMethod, "complex", opResult); + + } + } + } + + for (int numTimesToPegCounter = 0; numTimesToPegCounter < randomMaxTimesToPegCounters; numTimesToPegCounter++) { + quantumSync.incActiveInventoryWorkOnHandCounter(); + quantumSync.incElasticSearchWorkOnHandCounter(); + } + + assertEquals( randomMaxTimesToPegCounters, quantumSync.aaiWorkOnHand.get()); + assertEquals( randomMaxTimesToPegCounters, quantumSync.esWorkOnHand.get()); + + /* + * now we'll generate various reports and try to validate the output + */ + + String aaiStatsReport = quantumSync.getActiveInventoryStatisticsReport(); + + /* + * We double the expected validation check number because we peg each + * counter in each category twice (with different APIs for the same + * entity. 
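+ * With N = randomMaxTimesToPegCounters, the expected values below follow
+ * directly from the loops above (a sketch of the arithmetic, not a documented
+ * counter contract): every 1XX..6XX REST bucket per HTTP method should read
+ * N * 2, and the "complex" entity stats should read
+ *
+ *   TOTAL      = N * 2 * 6  (all six status-code families)
+ *   FOUND      = N * 2      (presumably the 2XX family only)
+ *   NO_PAYLOAD = N * 2 * 6  (the mocked OperationResults carry no payload)
+ *   NOT_FOUND  = 0, NUM_RETRIES = 0
+ *   ERROR      = N * 2 * 5  (the remaining five families)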
+ */ + + assertTrue(reportContainsRestStatistics(aaiStatsReport, HttpMethod.DELETE, randomMaxTimesToPegCounters * 2, + randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2, + randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2)); + + assertTrue(reportContainsRestStatistics(aaiStatsReport, HttpMethod.PUT, randomMaxTimesToPegCounters * 2, + randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2, + randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2)); + + assertTrue(reportContainsRestStatistics(aaiStatsReport, HttpMethod.POST, randomMaxTimesToPegCounters * 2, + randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2, + randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2)); + + assertTrue(reportContainsRestStatistics(aaiStatsReport, HttpMethod.GET, randomMaxTimesToPegCounters * 2, + randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2, + randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2)); + + long total = (randomMaxTimesToPegCounters*2)*6; + long found = randomMaxTimesToPegCounters*2; + long noPayload = (randomMaxTimesToPegCounters*2)*6; + long notFound = 0; + long numRetries = 0; + long numErrors = (randomMaxTimesToPegCounters*2)*5; + + assertTrue(reportContainsEntityStatistics(aaiStatsReport, "complex", total, found, + noPayload, notFound, numRetries, numErrors)); + + String esStatsReport = quantumSync.getElasticSearchStatisticsReport(); + + assertTrue(reportContainsRestStatistics(esStatsReport, HttpMethod.DELETE, randomMaxTimesToPegCounters * 2, + randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2, + randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2)); + + assertTrue(reportContainsRestStatistics(esStatsReport, HttpMethod.PUT, randomMaxTimesToPegCounters * 2, + randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2, + randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2)); + + assertTrue(reportContainsRestStatistics(esStatsReport, HttpMethod.POST, randomMaxTimesToPegCounters * 2, + randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2, + randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2)); + + assertTrue(reportContainsRestStatistics(esStatsReport, HttpMethod.GET, randomMaxTimesToPegCounters * 2, + randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2, + randomMaxTimesToPegCounters * 2, randomMaxTimesToPegCounters * 2)); + + + assertNotNull(quantumSync.getStatReport(10000L, false)); + assertNotNull(quantumSync.getStatReport(20000L, true)); + + for (int numTimesToPegCounter = 0; numTimesToPegCounter < randomMaxTimesToPegCounters; numTimesToPegCounter++) { + quantumSync.decActiveInventoryWorkOnHandCounter(); + quantumSync.decElasticSearchWorkOnHandCounter(); + } + + assertEquals( 0, quantumSync.aaiWorkOnHand.get()); + assertEquals( 0, quantumSync.esWorkOnHand.get()); + + /* + * Validate stat reports display zero stats for complex + */ + + quantumSync.resetCounters(); + + aaiStatsReport = quantumSync.getActiveInventoryStatisticsReport(); + + /* + * We double the expected validation check number because we peg each + * counter in each category twice (with different APIs for the same + * entity. 
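+ *
+ * resetCounters() has already been invoked at this point, so every REST and
+ * entity counter for "complex" is expected to report zero in the regenerated
+ * reports below.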
+ */ + + assertTrue(reportContainsRestStatistics(aaiStatsReport, HttpMethod.DELETE, 0, 0, 0, 0, 0, 0)); + assertTrue(reportContainsRestStatistics(aaiStatsReport, HttpMethod.PUT, 0, 0, 0, 0, 0, 0)); + assertTrue(reportContainsRestStatistics(aaiStatsReport, HttpMethod.POST, 0, 0, 0, 0, 0, 0)); + assertTrue(reportContainsRestStatistics(aaiStatsReport, HttpMethod.GET, 0, 0, 0, 0, 0, 0)); + + assertTrue(reportContainsEntityStatistics(aaiStatsReport, "complex", 0, 0, 0, 0, 0, 0)); + + esStatsReport = quantumSync.getElasticSearchStatisticsReport(); + + assertTrue(reportContainsRestStatistics(esStatsReport, HttpMethod.DELETE, 0, 0, 0, 0, 0, 0)); + assertTrue(reportContainsRestStatistics(esStatsReport, HttpMethod.PUT, 0, 0, 0, 0, 0, 0)); + assertTrue(reportContainsRestStatistics(esStatsReport, HttpMethod.POST, 0, 0, 0, 0, 0, 0)); + assertTrue(reportContainsRestStatistics(esStatsReport, HttpMethod.GET, 0, 0, 0, 0, 0, 0)); + + quantumSync.shutdownExecutors(); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/AggregationSynchronizerTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/AggregationSynchronizerTest.java new file mode 100644 index 0000000..65e6e70 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/AggregationSynchronizerTest.java @@ -0,0 +1,345 @@ +package org.onap.aai.sparky.sync; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.security.SecureRandom; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import javax.ws.rs.core.MediaType; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.Matchers; +import org.mockito.Mockito; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.aggregation.sync.AggregationSynchronizer; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.onap.aai.sparky.util.TestResourceLoader; + +public class AggregationSynchronizerTest { + + //private static Logger LOG = LoggerFactory.getInstance().getLogger(AggregationSynchronizerTest.class); + private static SecureRandom secureRandom = new SecureRandom(); + private AggregationSynchronizer aggregationSynchronizer; + + private ElasticSearchSchemaConfig esSchemaConfig; + private NetworkStatisticsConfig aaiStatConfig; + private NetworkStatisticsConfig esStatConfig; + private OxmEntityLookup oxmEntityLookup; + private ElasticSearchAdapter esAdapter; + private ActiveInventoryAdapter aaiAdapter; + + + + @Before + public void init() throws Exception { + + esSchemaConfig = new ElasticSearchSchemaConfig(); + esSchemaConfig.setIndexDocType("default"); + esSchemaConfig.setIndexMappingsFileName(null); + esSchemaConfig.setIndexName("aggregation-index-name"); + esSchemaConfig.setIndexSettingsFileName(null); + + + aaiStatConfig = new NetworkStatisticsConfig(); + + aaiStatConfig.setNumSamplesPerThreadForRunningAverage(100); + + aaiStatConfig.setBytesHistogramLabel("[Response Size In Bytes]"); + aaiStatConfig.setBytesHistogramMaxYAxis(1000000L); + 
aaiStatConfig.setBytesHistogramNumBins(20); + aaiStatConfig.setBytesHistogramNumDecimalPoints(2); + + aaiStatConfig.setQueueLengthHistogramLabel("[Queue Item Length]"); + aaiStatConfig.setQueueLengthHistogramMaxYAxis(20000); + aaiStatConfig.setQueueLengthHistogramNumBins(20); + aaiStatConfig.setQueueLengthHistogramNumDecimalPoints(2); + + aaiStatConfig.setTaskAgeHistogramLabel("[Task Age In Ms]"); + aaiStatConfig.setTaskAgeHistogramMaxYAxis(600000L); + aaiStatConfig.setTaskAgeHistogramNumBins(20); + aaiStatConfig.setTaskAgeHistogramNumDecimalPoints(2); + + aaiStatConfig.setResponseTimeHistogramLabel("[Response Time In Ms]"); + aaiStatConfig.setResponseTimeHistogramMaxYAxis(1000L); + aaiStatConfig.setResponseTimeHistogramNumBins(20); + aaiStatConfig.setResponseTimeHistogramNumDecimalPoints(2); + + aaiStatConfig.setTpsHistogramLabel("[Transactions Per Second]"); + aaiStatConfig.setTpsHistogramMaxYAxis(100); + aaiStatConfig.setTpsHistogramNumBins(20); + aaiStatConfig.setTpsHistogramNumDecimalPoints(2); + + esStatConfig = new NetworkStatisticsConfig(); + + esStatConfig.setNumSamplesPerThreadForRunningAverage(100); + + esStatConfig.setBytesHistogramLabel("[Response Size In Bytes]"); + esStatConfig.setBytesHistogramMaxYAxis(1000000L); + esStatConfig.setBytesHistogramNumBins(20); + esStatConfig.setBytesHistogramNumDecimalPoints(2); + + esStatConfig.setQueueLengthHistogramLabel("[Queue Item Length]"); + esStatConfig.setQueueLengthHistogramMaxYAxis(20000); + esStatConfig.setQueueLengthHistogramNumBins(20); + esStatConfig.setQueueLengthHistogramNumDecimalPoints(2); + + esStatConfig.setTaskAgeHistogramLabel("[Task Age In Ms]"); + esStatConfig.setTaskAgeHistogramMaxYAxis(600000L); + esStatConfig.setTaskAgeHistogramNumBins(20); + esStatConfig.setTaskAgeHistogramNumDecimalPoints(2); + + esStatConfig.setResponseTimeHistogramLabel("[Response Time In Ms]"); + esStatConfig.setResponseTimeHistogramMaxYAxis(10000L); + esStatConfig.setResponseTimeHistogramNumBins(20); + esStatConfig.setResponseTimeHistogramNumDecimalPoints(2); + + esStatConfig.setTpsHistogramLabel("[Transactions Per Second]"); + esStatConfig.setTpsHistogramMaxYAxis(100); + esStatConfig.setTpsHistogramNumBins(20); + esStatConfig.setTpsHistogramNumDecimalPoints(2); + + oxmEntityLookup = new OxmEntityLookup(); + + esAdapter = Mockito.mock(ElasticSearchAdapter.class); + aaiAdapter = Mockito.mock(ActiveInventoryAdapter.class); + + Map<String,OxmEntityDescriptor> oxmEntityDescriptors = new HashMap<String,OxmEntityDescriptor>(); + + OxmEntityDescriptor complexDescriptor = new OxmEntityDescriptor(); + complexDescriptor.setEntityName("complex"); + List<String> pkeyNames = new ArrayList<String>(); + pkeyNames.add("physical-location-id"); + + complexDescriptor.setPrimaryKeyAttributeNames(pkeyNames); + + oxmEntityDescriptors.put("complex", complexDescriptor); + + oxmEntityLookup.setEntityDescriptors(oxmEntityDescriptors); + + + + } + + @Test + public void validateBasicConstruction() throws Exception { + + aggregationSynchronizer = new AggregationSynchronizer("complex", esSchemaConfig, 5, 5, 5, aaiStatConfig, + esStatConfig, oxmEntityLookup); + + aggregationSynchronizer.setAaiAdapter(aaiAdapter); + aggregationSynchronizer.setElasticSearchAdapter(esAdapter); + + assertNotNull(aggregationSynchronizer.getAaiAdapter()); + assertNotNull(aggregationSynchronizer.getElasticSearchAdapter()); + + } + + @Test + public void validateSmallSync() throws Exception { + + aggregationSynchronizer = new AggregationSynchronizer("complex", esSchemaConfig, 5, 5, 5, 
aaiStatConfig, + esStatConfig, oxmEntityLookup); + + aggregationSynchronizer.setAaiAdapter(aaiAdapter); + aggregationSynchronizer.setElasticSearchAdapter(esAdapter); + + String nodesQueryResponse = TestResourceLoader + .getTestResourceDataJson("/sync/aai/activeInventory_complex_nodesQuery_response.json"); + + OperationResult complexSelfLinks = new OperationResult(); + + complexSelfLinks.setResultCode(200); + complexSelfLinks.setResult(nodesQueryResponse); + + Mockito.when( aaiAdapter.getSelfLinksByEntityType("complex")).thenReturn(complexSelfLinks); + + for (int x = 1; x <= 5; x++) { + + Mockito.when(aaiAdapter.repairSelfLink(Matchers.contains("complex" + x), Mockito.anyString())) + .thenReturn("https://server.proxy:8443/aai/v11/cloud-infrastructure/complexes/complex" + x); + + Mockito.when(aaiAdapter.queryActiveInventoryWithRetries(Matchers.contains("complex" + x), + Mockito.anyString(), Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader + .getTestResourceDataJson("/sync/aai/complex" + x + "_fullDepth_aaiEntityRetrieval.json"))); + + } + + Mockito.when(esAdapter.buildElasticSearchGetDocUrl(Mockito.anyString(), Mockito.anyString())).thenReturn( + "http://localhost:9200/myindex/mytype/doc1", "http://localhost:9200/myindex/mytype/doc2", + "http://localhost:9200/myindex/mytype/doc3", "http://localhost:9200/myindex/mytype/doc4", + "http://localhost:9200/myindex/mytype/doc5"); + + /* + * Our initial gets from elastic search should be record-not-found + */ + Mockito.when( esAdapter.doGet(Matchers.contains("doc1"), Mockito.any())).thenReturn(new OperationResult(404,null)); + Mockito.when( esAdapter.doGet(Matchers.contains("doc2"), Mockito.any())).thenReturn(new OperationResult(404,null)); + Mockito.when( esAdapter.doGet(Matchers.contains("doc3"), Mockito.any())).thenReturn(new OperationResult(404,null)); + Mockito.when( esAdapter.doGet(Matchers.contains("doc4"), Mockito.any())).thenReturn(new OperationResult(404,null)); + Mockito.when( esAdapter.doGet(Matchers.contains("doc5"), Mockito.any())).thenReturn(new OperationResult(404,null)); + + + Mockito.when(esAdapter.doPut(Matchers.contains("doc"), Mockito.any(), Mockito.any())) + .thenReturn(new OperationResult(200, null)); + + OperationState syncState = aggregationSynchronizer.doSync(); + assertEquals(OperationState.OK, syncState); + + assertEquals(SynchronizerState.IDLE, aggregationSynchronizer.getState()); + assertNotNull(aggregationSynchronizer.getStatReport(false)); + assertNotNull(aggregationSynchronizer.getStatReport(true)); + + aggregationSynchronizer.clearCache(); + aggregationSynchronizer.shutdown(); + + + } + + @Test + public void validateSmallSyncWithRetries() throws Exception { + + aggregationSynchronizer = new AggregationSynchronizer("complex", esSchemaConfig, 5, 5, 5, aaiStatConfig, + esStatConfig, oxmEntityLookup); + + aggregationSynchronizer.setAaiAdapter(aaiAdapter); + aggregationSynchronizer.setElasticSearchAdapter(esAdapter); + + String nodesQueryResponse = TestResourceLoader + .getTestResourceDataJson("/sync/aai/activeInventory_complex_nodesQuery_response.json"); + + OperationResult complexSelfLinks = new OperationResult(); + + complexSelfLinks.setResultCode(200); + complexSelfLinks.setResult(nodesQueryResponse); + + Mockito.when( aaiAdapter.getSelfLinksByEntityType("complex")).thenReturn(complexSelfLinks); + + for (int x = 1; x <= 5; x++) { + + Mockito.when(aaiAdapter.repairSelfLink(Matchers.contains("complex" + x), Mockito.anyString())) + 
.thenReturn("https://server.proxy:8443/aai/v11/cloud-infrastructure/complexes/complex" + x); + + Mockito.when(aaiAdapter.queryActiveInventoryWithRetries(Matchers.contains("complex" + x), + Mockito.anyString(), Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader + .getTestResourceDataJson("/sync/aai/complex" + x + "_fullDepth_aaiEntityRetrieval.json"))); + + } + + Mockito.when(esAdapter.buildElasticSearchGetDocUrl(Mockito.anyString(), Mockito.anyString())).thenReturn( + "http://localhost:9200/myindex/mytype/doc1", "http://localhost:9200/myindex/mytype/doc2", + "http://localhost:9200/myindex/mytype/doc3", "http://localhost:9200/myindex/mytype/doc4", + "http://localhost:9200/myindex/mytype/doc5"); + + /* + * Our initial gets from elastic search should be record-not-found + */ + Mockito.when( esAdapter.doGet(Matchers.contains("doc1"), Mockito.any())).thenReturn(new OperationResult(404,null)); + Mockito.when( esAdapter.doGet(Matchers.contains("doc2"), Mockito.any())).thenReturn(new OperationResult(404,null)); + Mockito.when( esAdapter.doGet(Matchers.contains("doc3"), Mockito.any())).thenReturn(new OperationResult(404,null)); + Mockito.when( esAdapter.doGet(Matchers.contains("doc4"), Mockito.any())).thenReturn(new OperationResult(404,null)); + Mockito.when( esAdapter.doGet(Matchers.contains("doc5"), Mockito.any())).thenReturn(new OperationResult(404,null)); + + + // 409 is the elastic search version conflict code, which will result in the entries being added + // to our retry queue and re-attempted a couple times. + + Mockito.when(esAdapter.doPut(Matchers.contains("doc"), Mockito.any(), Mockito.any())) + .thenReturn(new OperationResult(409, null)); + + OperationState syncState = aggregationSynchronizer.doSync(); + assertEquals(OperationState.OK, syncState); + + assertEquals(SynchronizerState.IDLE, aggregationSynchronizer.getState()); + assertNotNull(aggregationSynchronizer.getStatReport(false)); + assertNotNull(aggregationSynchronizer.getStatReport(true)); + + aggregationSynchronizer.clearCache(); + aggregationSynchronizer.shutdown(); + + } + + @Test + public void validateSmallSyncWithDocumentElementMerges() throws Exception { + + aggregationSynchronizer = new AggregationSynchronizer("complex", esSchemaConfig, 5, 5, 5, aaiStatConfig, + esStatConfig, oxmEntityLookup); + + aggregationSynchronizer.setAaiAdapter(aaiAdapter); + aggregationSynchronizer.setElasticSearchAdapter(esAdapter); + + String nodesQueryResponse = TestResourceLoader + .getTestResourceDataJson("/sync/aai/activeInventory_complex_nodesQuery_response.json"); + + OperationResult complexSelfLinks = new OperationResult(); + + complexSelfLinks.setResultCode(200); + complexSelfLinks.setResult(nodesQueryResponse); + + Mockito.when( aaiAdapter.getSelfLinksByEntityType("complex")).thenReturn(complexSelfLinks); + + for (int x = 1; x <= 5; x++) { + + Mockito.when(aaiAdapter.repairSelfLink(Matchers.contains("complex" + x), Mockito.anyString())) + .thenReturn("https://server.proxy:8443/aai/v11/cloud-infrastructure/complexes/complex" + x); + + Mockito.when(aaiAdapter.queryActiveInventoryWithRetries(Matchers.contains("complex" + x), + Mockito.anyString(), Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader + .getTestResourceDataJson("/sync/aai/complex" + x + "_fullDepth_aaiEntityRetrieval.json"))); + + } + + Mockito.when(esAdapter.buildElasticSearchGetDocUrl(Mockito.anyString(), Mockito.anyString())).thenReturn( + "http://localhost:9200/myindex/mytype/doc1", 
"http://localhost:9200/myindex/mytype/doc2", + "http://localhost:9200/myindex/mytype/doc3", "http://localhost:9200/myindex/mytype/doc4", + "http://localhost:9200/myindex/mytype/doc5"); + + /* + * Our initial gets from elastic search return 200 ok with a found entity document requiring a doc update + */ + Mockito.when(esAdapter.doGet(Matchers.contains("doc1"), Mockito.any())).thenReturn(new OperationResult(200, + TestResourceLoader.getTestResourceDataJson("/sync/ElasticSearch/docEntityFromElasticSearch1.json"))); + + Mockito.when(esAdapter.doGet(Matchers.contains("doc2"), Mockito.any())).thenReturn(new OperationResult(200, + TestResourceLoader.getTestResourceDataJson("/sync/ElasticSearch/docEntityFromElasticSearch2.json"))); + + Mockito.when(esAdapter.doGet(Matchers.contains("doc3"), Mockito.any())).thenReturn(new OperationResult(200, + TestResourceLoader.getTestResourceDataJson("/sync/ElasticSearch/docEntityFromElasticSearch3.json"))); + + Mockito.when(esAdapter.doGet(Matchers.contains("doc4"), Mockito.any())).thenReturn(new OperationResult(200, + TestResourceLoader.getTestResourceDataJson("/sync/ElasticSearch/docEntityFromElasticSearch4.json"))); + + Mockito.when(esAdapter.doGet(Matchers.contains("doc5"), Mockito.any())).thenReturn(new OperationResult(200, + TestResourceLoader.getTestResourceDataJson("/sync/ElasticSearch/docEntityFromElasticSearch5.json"))); + + Mockito.when(esAdapter.doPut(Matchers.contains("doc"), Mockito.any(), Mockito.any())) + .thenReturn(new OperationResult(200, null)); + + OperationState syncState = aggregationSynchronizer.doSync(); + assertEquals(OperationState.OK, syncState); + + assertEquals(SynchronizerState.IDLE, aggregationSynchronizer.getState()); + assertNotNull(aggregationSynchronizer.getStatReport(false)); + assertNotNull(aggregationSynchronizer.getStatReport(true)); + + aggregationSynchronizer.clearCache(); + aggregationSynchronizer.shutdown(); + + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/ElasticSearchIndexCleanerTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/ElasticSearchIndexCleanerTest.java new file mode 100644 index 0000000..30394de --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/ElasticSearchIndexCleanerTest.java @@ -0,0 +1,122 @@ +package org.onap.aai.sparky.sync; + +import static org.junit.Assert.*; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.util.TestResourceLoader; + +public class ElasticSearchIndexCleanerTest { + + private ElasticSearchIndexCleaner esIndexCleaner; + + private ElasticSearchAdapter esAdapter; + private ElasticSearchEndpointConfig esRestEndpointConfig; + private ElasticSearchSchemaConfig esSchemaConfig; + + @Before + public void init() throws Exception { + + esAdapter = Mockito.mock( ElasticSearchAdapter.class); + esRestEndpointConfig = new ElasticSearchEndpointConfig(); + esSchemaConfig = new ElasticSearchSchemaConfig(); + + esRestEndpointConfig.setScrollContextBatchRequestSize(5000); + esRestEndpointConfig.setEsIpAddress("127.0.0.1"); + esRestEndpointConfig.setEsServerPort("9200"); + esRestEndpointConfig.setScrollContextTimeToLiveInMinutes(5); + + } + + @Test + 
public void validateBasicConstruction() throws Exception { + esIndexCleaner = new ElasticSearchIndexCleaner(esAdapter, esRestEndpointConfig, esSchemaConfig); + } + + @Test + public void validatePreOperationCollection() throws Exception { + + String beforeSyncScrollApiResponse = TestResourceLoader.getTestResourceDataJson( + "/sync/ElasticSearch/BeforeSync_ElasticSearch_ScrollApi_Successful.json"); + + OperationResult scrollApiOpResult = new OperationResult(); + scrollApiOpResult.setResultCode(200); + scrollApiOpResult.setResult(beforeSyncScrollApiResponse); + + Mockito.when(esAdapter.doPost(Mockito.anyString(), Mockito.anyString(), Mockito.anyObject())) + .thenReturn(scrollApiOpResult); + + esIndexCleaner = new ElasticSearchIndexCleaner(esAdapter, esRestEndpointConfig, esSchemaConfig); + OperationState opState = esIndexCleaner.populatePreOperationCollection(); + + assertEquals(OperationState.OK, opState); + + } + + @Test + public void validatePostOperationCollection() throws Exception { + + String afterSyncScrollApiResponse = TestResourceLoader.getTestResourceDataJson( + "/sync/ElasticSearch/AfterSync_ElasticSearch_ScrollApi_Successful.json"); + + OperationResult scrollApiOpResult = new OperationResult(); + scrollApiOpResult.setResultCode(200); + scrollApiOpResult.setResult(afterSyncScrollApiResponse); + + Mockito.when(esAdapter.doPost(Mockito.anyString(), Mockito.anyString(), Mockito.anyObject())) + .thenReturn(scrollApiOpResult); + + esIndexCleaner = new ElasticSearchIndexCleaner(esAdapter, esRestEndpointConfig, esSchemaConfig); + OperationState opState = esIndexCleaner.populatePostOperationCollection(); + + assertEquals(OperationState.OK, opState); + + } + + + @Test + public void validatePerformCleanup() throws Exception { + + String beforeSyncScrollApiResponse = TestResourceLoader.getTestResourceDataJson( + "/sync/ElasticSearch/BeforeSync_ElasticSearch_ScrollApi_Successful.json"); + + OperationResult beforeScrollApiOpResult = new OperationResult(); + beforeScrollApiOpResult.setResultCode(200); + beforeScrollApiOpResult.setResult(beforeSyncScrollApiResponse); + + String afterSyncScrollApiResponse = TestResourceLoader.getTestResourceDataJson( + "/sync/ElasticSearch/AfterSync_ElasticSearch_ScrollApi_Successful.json"); + + OperationResult afterScrollApiOpResult = new OperationResult(); + afterScrollApiOpResult.setResultCode(200); + afterScrollApiOpResult.setResult(afterSyncScrollApiResponse); + + Mockito.when(esAdapter.doPost(Mockito.anyString(), Mockito.anyString(), Mockito.anyObject())) + .thenReturn(beforeScrollApiOpResult,afterScrollApiOpResult); + + esIndexCleaner = new ElasticSearchIndexCleaner(esAdapter, esRestEndpointConfig, esSchemaConfig); + + OperationState beforeOpState = esIndexCleaner.populatePreOperationCollection(); + OperationState afterOpState = esIndexCleaner.populatePostOperationCollection(); + + assertEquals(OperationState.OK, beforeOpState); + assertEquals(OperationState.OK, afterOpState); + + /* + * Now we can start the test work + */ + + OperationState cleanupState = esIndexCleaner.performCleanup(); + assertEquals(OperationState.OK, cleanupState); + + + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/SyncControllerImplTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/SyncControllerImplTest.java new file mode 100644 index 0000000..a02d5e4 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/SyncControllerImplTest.java @@ -0,0 +1,81 @@ +package org.onap.aai.sparky.sync; + +import 
static org.junit.Assert.*; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; +import org.onap.aai.sparky.sync.IndexCleaner; +import org.onap.aai.sparky.sync.IndexSynchronizer; +import org.onap.aai.sparky.sync.IndexValidator; +import org.onap.aai.sparky.sync.SyncControllerImpl; +import org.onap.aai.sparky.sync.SyncControllerImpl.SyncActions; +import org.onap.aai.sparky.sync.config.SyncControllerConfig; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; + +public class SyncControllerImplTest { + + private SyncControllerConfig syncControllerConfig; + + private IndexSynchronizer mockSynchronizer = Mockito.mock(IndexSynchronizer.class); + private IndexValidator mockValidator = Mockito.mock(IndexValidator.class); + private IndexCleaner mockCleaner = Mockito.mock(IndexCleaner.class); + + @Before + public void init() throws Exception { + + syncControllerConfig = new SyncControllerConfig(); + + syncControllerConfig.setSyncTaskDelayInMs(0); + syncControllerConfig.setSyncTaskFrequencyInDays(2); + syncControllerConfig.setTargetSyncStartTimeStamp("05:00:00 UTC+00:00"); + syncControllerConfig.setControllerName("Base-Sync-Controller-Impl"); + + syncControllerConfig.setPeriodicSyncEnabled(true); + syncControllerConfig.setRunOnceSyncEnabled(true); + + } + + + @Test + public void validateBasicConstruction() throws Exception { + + SyncControllerImpl syncController = new SyncControllerImpl(syncControllerConfig); + + assertTrue(syncController.isPeriodicSyncEnabled()); + assertTrue(syncController.isRunOnceSyncEnabled()); + assertEquals(0, syncController.getDelayInMs()); + + } + + @Test + public void validateSmallSync() throws Exception { + + SyncControllerImpl syncController = new SyncControllerImpl(syncControllerConfig); + + Mockito.when( mockSynchronizer.getIndexName() ).thenReturn("mock-sync-index"); + Mockito.when( mockCleaner.getIndexName()).thenReturn("mock-sync-index"); + Mockito.when( mockValidator.getIndexName()).thenReturn("mock-sync-index"); + + Mockito.when(mockSynchronizer.getStatReport(Boolean.TRUE)).thenReturn("mock-sync-index stat report"); + Mockito.when(mockValidator.exists()).thenReturn(false); + Mockito.when(mockSynchronizer.getState()).thenReturn(SynchronizerState.PERFORMING_SYNCHRONIZATION, + SynchronizerState.PERFORMING_SYNCHRONIZATION, SynchronizerState.PERFORMING_SYNCHRONIZATION, + SynchronizerState.PERFORMING_SYNCHRONIZATION, SynchronizerState.PERFORMING_SYNCHRONIZATION, + SynchronizerState.IDLE); + + syncController.registerEntitySynchronizer(mockSynchronizer); + syncController.registerIndexValidator(mockValidator); + syncController.registerIndexCleaner(mockCleaner); + + + + + syncController.performAction(SyncActions.SYNCHRONIZE); + + + } + + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/config/ElasticSearchEndpointConfigTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/config/ElasticSearchEndpointConfigTest.java new file mode 100644 index 0000000..35dd0bd --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/config/ElasticSearchEndpointConfigTest.java @@ -0,0 +1,39 @@ +package org.onap.aai.sparky.sync.config; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + + + + +import org.junit.Before; +import org.junit.Test; + + +public class ElasticSearchEndpointConfigTest { + + private ElasticSearchEndpointConfig elasticSearchEndpointConfig; + + @Before + public void init() throws Exception { + elasticSearchEndpointConfig = 
new ElasticSearchEndpointConfig(); + + } + + + @Test + public void updateValues() { + + elasticSearchEndpointConfig.setEsIpAddress("10.247.25.26"); + assertNotNull(elasticSearchEndpointConfig.getEsIpAddress()); + elasticSearchEndpointConfig.setEsServerPort("6585"); + assertNotNull(elasticSearchEndpointConfig.getEsServerPort()); + elasticSearchEndpointConfig.setScrollContextTimeToLiveInMinutes(3); + assertEquals(3,elasticSearchEndpointConfig.getScrollContextTimeToLiveInMinutes()); + elasticSearchEndpointConfig.setScrollContextBatchRequestSize(3); + assertEquals(3,elasticSearchEndpointConfig.getScrollContextBatchRequestSize()); + + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/config/ElasticSearchSchemaConfigTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/config/ElasticSearchSchemaConfigTest.java new file mode 100644 index 0000000..88e96b0 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/config/ElasticSearchSchemaConfigTest.java @@ -0,0 +1,47 @@ +package org.onap.aai.sparky.sync.config; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +import org.junit.Before; +import org.junit.Test; + +public class ElasticSearchSchemaConfigTest { + +private ElasticSearchSchemaConfig elasticSchemaConfig; + + @Before + public void init() throws Exception { + elasticSchemaConfig = new ElasticSearchSchemaConfig(); + + } + + + @Test + public void updateValues() { + + elasticSchemaConfig.setIndexName("entitysearchindex-localhost"); + assertNotNull(elasticSchemaConfig.getIndexName()); + elasticSchemaConfig.setIndexDocType("default"); + assertNotNull(elasticSchemaConfig.getIndexDocType()); + elasticSchemaConfig.setIndexSettingsFileName("/etc/es_settings.json"); + assertNotNull(elasticSchemaConfig.getIndexSettingsFileName()); + elasticSchemaConfig.setIndexMappingsFileName("/etc/es_mappings.json"); + assertNotNull(elasticSchemaConfig.getIndexMappingsFileName()); + assertNotNull(elasticSchemaConfig.toString()); + elasticSchemaConfig.setIndexName(null); + assertNull(elasticSchemaConfig.getIndexName()); + elasticSchemaConfig.setIndexDocType(null); + assertNull(elasticSchemaConfig.getIndexDocType()); + elasticSchemaConfig.setIndexSettingsFileName(null); + assertNull(elasticSchemaConfig.getIndexSettingsFileName()); + elasticSchemaConfig.setIndexMappingsFileName(null); + assertNull(elasticSchemaConfig.getIndexMappingsFileName()); + assertNotNull(elasticSchemaConfig.toString()); + + + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/config/NetworkStatisticsConfigTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/config/NetworkStatisticsConfigTest.java new file mode 100644 index 0000000..4e97c6d --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/config/NetworkStatisticsConfigTest.java @@ -0,0 +1,74 @@ +package org.onap.aai.sparky.sync.config; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + + +import org.junit.Before; +import org.junit.Test; + +public class NetworkStatisticsConfigTest { + + private NetworkStatisticsConfig networkStatsConfig; + + @Before + public void init() throws Exception { + networkStatsConfig = new NetworkStatisticsConfig(); + + } + + + @Test + public void updateValues() { + + + + networkStatsConfig.setNumSamplesPerThreadForRunningAverage(300); + 
assertEquals(300,networkStatsConfig.getNumSamplesPerThreadForRunningAverage()); + networkStatsConfig.setBytesHistogramMaxYAxis(25000); + assertEquals(25000,networkStatsConfig.getBytesHistogramMaxYAxis()); + networkStatsConfig.setBytesHistogramLabel("[Response Size In Bytes]"); + assertNotNull(networkStatsConfig.getBytesHistogramLabel()); + networkStatsConfig.setQueueLengthHistogramLabel("[Queue Item Length]"); + assertNotNull(networkStatsConfig.getQueueLengthHistogramLabel()); + networkStatsConfig.setTaskAgeHistogramLabel("[Task Age In Ms]"); + assertNotNull(networkStatsConfig.getTaskAgeHistogramLabel()); + networkStatsConfig.setResponseTimeHistogramLabel("[Response Time In Ms]"); + assertNotNull(networkStatsConfig.getResponseTimeHistogramLabel()); + networkStatsConfig.setBytesHistogramNumBins(30); + assertEquals(30,networkStatsConfig.getBytesHistogramNumBins()); + networkStatsConfig.setBytesHistogramNumDecimalPoints(5); + assertEquals(5,networkStatsConfig.getBytesHistogramNumDecimalPoints()); + networkStatsConfig.setQueueLengthHistogramMaxYAxis(30000); + assertEquals(30000,networkStatsConfig.getQueueLengthHistogramMaxYAxis()); + networkStatsConfig.setQueueLengthHistogramNumBins(25); + assertEquals(25,networkStatsConfig.getQueueLengthHistogramNumBins()); + networkStatsConfig.setQueueLengthHistogramNumDecimalPoints(3); + assertEquals(3,networkStatsConfig.getQueueLengthHistogramNumDecimalPoints()); + networkStatsConfig.setTaskAgeHistogramMaxYAxis(2500000); + assertEquals(2500000,networkStatsConfig.getTaskAgeHistogramMaxYAxis()); + networkStatsConfig.setTaskAgeHistogramNumBins(3); + assertEquals(3,networkStatsConfig.getTaskAgeHistogramNumBins()); + networkStatsConfig.setTaskAgeHistogramNumDecimalPoints(25); + assertEquals(25,networkStatsConfig.getTaskAgeHistogramNumDecimalPoints()); + networkStatsConfig.setTpsHistogramLabel("[Transactions Per Second]"); + assertNotNull(networkStatsConfig.getTpsHistogramLabel()); + networkStatsConfig.setResponseTimeHistogramMaxYAxis(3); + assertEquals(3,networkStatsConfig.getResponseTimeHistogramMaxYAxis()); + networkStatsConfig.setResponseTimeHistogramNumBins(25); + assertEquals(25,networkStatsConfig.getResponseTimeHistogramNumBins()); + networkStatsConfig.setResponseTimeHistogramNumDecimalPoints(3); + assertEquals(3,networkStatsConfig.getResponseTimeHistogramNumDecimalPoints()); + networkStatsConfig.setTpsHistogramMaxYAxis(25); + assertEquals(25,networkStatsConfig.getTpsHistogramMaxYAxis()); + networkStatsConfig.setTpsHistogramNumBins(3); + assertEquals(3,networkStatsConfig.getTpsHistogramNumBins()); + networkStatsConfig.setTpsHistogramNumDecimalPoints(25); + assertEquals(25,networkStatsConfig.getTpsHistogramNumDecimalPoints()); + + + + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/config/SyncControllerConfigTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/config/SyncControllerConfigTest.java new file mode 100644 index 0000000..5aa4650 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/sync/config/SyncControllerConfigTest.java @@ -0,0 +1,64 @@ +package org.onap.aai.sparky.sync.config; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + + +import org.junit.Before; +import org.junit.Test; + + +public class SyncControllerConfigTest { + + private SyncControllerConfig syncControllerConfig; + + @Before + public void init() throws Exception { + 
syncControllerConfig = new SyncControllerConfig(); + + + } + + + @Test + public void updateValues() { + + + + + + syncControllerConfig.setNumInternalSyncWorkers(3); + assertEquals(3,syncControllerConfig.getNumInternalSyncWorkers()); + syncControllerConfig.setNumSyncElasticWorkers(5); + assertEquals(5,syncControllerConfig.getNumSyncElasticWorkers()); + syncControllerConfig.setNumSyncActiveInventoryWorkers(6); + assertEquals(6,syncControllerConfig.getNumSyncActiveInventoryWorkers()); + syncControllerConfig.setTargetSyncStartTimeStamp("05:00:00 UTC+00:00"); + assertNotNull(syncControllerConfig.getTargetSyncStartTimeStamp()); + syncControllerConfig.setControllerName("Historical-Entity-Sync-Controller"); + assertNotNull(syncControllerConfig.getControllerName()); + syncControllerConfig.setEnabled(true); + assertTrue(syncControllerConfig.isEnabled()); + syncControllerConfig.setSyncTaskDelayInMs(8); + assertEquals(8,syncControllerConfig.getSyncTaskDelayInMs()); + syncControllerConfig.setSyncTaskFrequencyInDays(3); + assertEquals(3,syncControllerConfig.getSyncTaskFrequencyInDays()); + syncControllerConfig.setNumSyncControllerWorkers(25); + assertEquals(25,syncControllerConfig.getNumSyncControllerWorkers()); + syncControllerConfig.setRunOnceSyncEnabled(true); + assertTrue(syncControllerConfig.isRunOnceSyncEnabled()); + syncControllerConfig.setPeriodicSyncEnabled(true); + assertTrue(syncControllerConfig.isPeriodicSyncEnabled()); + assertNotNull(syncControllerConfig.getSyncFrequencyInMs()); + assertNotNull(syncControllerConfig.getTargetSyncTime()); + assertNotNull(syncControllerConfig.getNextSyncTime()); + syncControllerConfig.setTimeZoneOfSyncStartTimeStamp("UTC+00:00"); + assertNotNull(syncControllerConfig.getTimeZoneOfSyncStartTimeStamp()); + + } + + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/AsyncRateControlTester.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/AsyncRateControlTester.java new file mode 100644 index 0000000..00bec1e --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/AsyncRateControlTester.java @@ -0,0 +1,242 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ + +package org.onap.aai.sparky.synchronizer; + +import java.util.concurrent.atomic.AtomicInteger; + +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The Class AsyncRateControlTester. + */ +public class AsyncRateControlTester { + + private static Logger logger = LoggerFactory.getLogger(AsyncRateControlTester.class); + + private long startTimeInMs; + + private AtomicInteger counter; + + protected boolean syncInProgress; + + /** + * Instantiates a new async rate control tester. + * + * @throws Exception the exception + */ + public AsyncRateControlTester() throws Exception { + + NetworkStatisticsConfig tpc = new NetworkStatisticsConfig(); + + tpc.setNumSamplesPerThreadForRunningAverage(100); + + tpc.setBytesHistogramLabel("bytesHistoLabel"); + tpc.setBytesHistogramMaxYAxis(1000000); + tpc.setBytesHistogramNumBins(20); + tpc.setBytesHistogramNumDecimalPoints(2); + + tpc.setQueueLengthHistogramLabel("queueHistoLabel"); + tpc.setQueueLengthHistogramMaxYAxis(1000000); + tpc.setQueueLengthHistogramNumBins(20); + tpc.setQueueLengthHistogramNumDecimalPoints(2); + + // ZeroDelayProcessor zdp = new ZeroDelayProcessor(LinkProcessorType.AAI, tpc); + // zdp.setStatCollector(this.aaiStatCollector); + /* + * zdp.setTaskProcessorConfig(tpc); + * + * this.resolver.registerProcessor(zdp); this.resolver.registerEventListener(this); this.counter + * = new AtomicInteger(0); this.syncInProgress = false; } + * + * @Override public void handleEvent(AsyncEvent event) { + * + * if(event.getEventType() == AsyncEventType.RESOLVER_IDLE) { + * + * if(syncInProgress) { long duration = System.currentTimeMillis() - startTimeInMs; + * System.out.println(getStatReport(duration)); syncInProgress = false; } + * + * // shutdown(); } else if(event.getEventType() == AsyncEventType.TRANSACTION_PROCESSED) { + * + * this.syncInProgress = true; + * + * ExternalResource resource = (ExternalResource)event.getPayload(); + * + * //aaiStatCollector.updateCounters(resource); + * + * counter.incrementAndGet(); + * + * } + * + * }; + * + * public void shutdown() { resolver.shutdown(); } + * + * private int getCounterValue(AtomicInteger counter) { + * + * if(counter == null) { return 0; } + * + * return counter.get(); } + * + * private void addActiveInventoryStatReport(StringBuilder sb) { + * + * if(sb == null) { return; } + * + * sb.append("\n\n ").append(LinkProcessorType.AAI.name()); + * + * sb.append("\n\n ").append("REST Operational Stats:"); + * + * /* Map<String, AtomicInteger> procOperationalCounters = + * aaiStatCollector.getActiveInventoryOperationalCounters(); + * + * if(procOperationalCounters != null) { + * + * int _1XX = + * getCounterValue(procOperationalCounters.get(ActiveInventoryStatCollector.GET_1XX)); int _2XX + * = getCounterValue(procOperationalCounters.get(ActiveInventoryStatCollector.GET_2XX)); int + * _3XX = getCounterValue(procOperationalCounters.get(ActiveInventoryStatCollector.GET_3XX)); + * int _4XX = + * getCounterValue(procOperationalCounters.get(ActiveInventoryStatCollector.GET_4XX)); int _5XX + * = getCounterValue(procOperationalCounters.get(ActiveInventoryStatCollector.GET_5XX)); int + * _6XX = getCounterValue(procOperationalCounters.get(ActiveInventoryStatCollector.GET_6XX)); + * + * sb.append("\n ").append(String.format( + * "%-12s 1XX: %-12d 2XX: %-12d 3XX: %-12d 4XX: %-12d 5XX: %-12d 6XX: %-12d ", HttpMethod.GET, + * _1XX, _2XX, _3XX, _4XX, _5XX, _6XX)); } + */ + + // sb.append("\n\n ").append("Entity Stats:"); + + /* 
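+ * NOTE: from here down to the closing braces of the constructor and class,
+ * the remaining resolver / stat-collector wiring is commented out and never
+ * executes; only the NetworkStatisticsConfig setup above runs. The block
+ * appears to be retained for reference only.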
+ * sort entities, then sort nested op codes + */ + + /* + * TreeMap<String, HashMap<String, AtomicInteger>> activeInventoryEntitySortedTreeMap = new + * TreeMap<String, HashMap<String, AtomicInteger>>( new Comparator<String>() { + * + * public int compare(String o1, String o2) { return + * o1.toLowerCase().compareTo(o2.toLowerCase()); } }); + */ + + /* + * activeInventoryEntitySortedTreeMap.putAll(aaiStatCollector.getActiveInventoryEntityCounters() + * ); + * + * for(String counterEntityKey : activeInventoryEntitySortedTreeMap.keySet()) { + * + * HashMap<String, AtomicInteger> entityCounters = + * activeInventoryEntitySortedTreeMap.get(counterEntityKey); + * + * AtomicInteger total = entityCounters.get(ActiveInventoryStatCollector.TOTAL); AtomicInteger + * found = entityCounters.get(ActiveInventoryStatCollector.FOUND); AtomicInteger notFound = + * entityCounters.get(ActiveInventoryStatCollector.NOT_FOUND); AtomicInteger error = + * entityCounters.get(ActiveInventoryStatCollector.ERROR); + * + * int totalValue = (total == null) ? 0 : total.get(); int foundValue = (found == null) ? 0 : + * found.get(); int notFoundValue = (found == null) ? 0 : notFound.get(); int errorValue = + * (error == null) ? 0 : error.get(); + * + * sb.append("\n ").append(String.format( + * "%-30s TOTAL: %-12d FOUND: %-12d NOT_FOUND: %-12d ERROR: %-12d", counterEntityKey, + * totalValue, foundValue, notFoundValue, errorValue)); + * + * } + */ + + // sb.append("\n\n ").append("Task Processor Stats:"); + + // int totalRetries = + // getCounterValue(procOperationalCounters.get(ActiveInventoryStatCollector.NUM_RETRIES)); + // int currentQueueLength = resolver.getCurrentQueueLength(LinkProcessorType.AAI.name()); + + /* + * sb.append("\n " + * ).append(resolver.getProcessorTaskAgeStats(LinkProcessorType.AAI.name(), false, " " + * )); sb.append("\n " + * ).append(resolver.getProcessorResponseStats(LinkProcessorType.AAI.name(), false, " " + * )); sb.append("\n") + * .append(resolver.getQueueItemLengthHistogram(LinkProcessorType.AAI.name(), false, + * " ")); sb.append("\n") + * .append(resolver.getResponseByteSizeHistogram(LinkProcessorType.AAI.name(), false, + * " ")); sb.append("\n " + * ).append("TPS=").append(resolver.getTPS(LinkProcessorType.AAI.name())).append(", NumRetries=" + * ).append(totalRetries) .append(", CurrentQueueLength=").append(currentQueueLength); + */ + /* + * } + * + * private String getStatReport(long syncOpTimeInMs) { + * + * StringBuilder sb = new StringBuilder(128); + * + * sb.append("\n").append("Async Resolver Statistics: ( Sync Operation Duration = " + + * NodeUtils.getDurationBreakdown(syncOpTimeInMs) + " )"); + * + * addActiveInventoryStatReport(sb); + * + * return sb.toString(); + * + * } + * + * public void loadResolver(int numItems) { + * + * if(numItems <= 0) { return; } + * + * startTimeInMs = System.currentTimeMillis(); + * + * DummyPerformanceTask dpt = null; + * + * for(int i = 0; i < numItems; i++) { + * + * dpt = new DummyPerformanceTask(); dpt.setLinkProcessorType(LinkProcessorType.AAI); + * dpt.setResourceEntityType("DummyPerformanceEntity"); dpt.setOperationType(HttpMethod.GET); + * + * resolver.resolve(dpt); + * + * } + * + * } + * + * public static void main(String[] args) throws Exception { + * + * System.getProperties().setProperty("AJSC_HOME", "x:\\aaiui\\"); + * + * System.out.println("Available processors = " + Runtime.getRuntime().availableProcessors()); + * + * AsyncRateControlTester arcTester = new AsyncRateControlTester(); + * + * // give us time to instrument the jvm 
with jvisualvm // Thread.sleep(30000); + * Thread.sleep(5000); + * + * arcTester.loadResolver(1000); + * + * + * } + */ + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/GizmoEntitySummarizer.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/GizmoEntitySummarizer.java new file mode 100644 index 0000000..5ea5280 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/GizmoEntitySummarizer.java @@ -0,0 +1,251 @@ +package org.onap.aai.sparky.synchronizer; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Map; +import java.util.Map.Entry; +import java.util.TreeMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.function.Supplier; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.restclient.enums.RestAuthenticationMode; +import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.sparky.dal.GizmoAdapter; +import org.onap.aai.sparky.dal.exception.ElasticSearchOperationException; +import org.onap.aai.sparky.dal.rest.RestClientConstructionException; +import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.util.OxmModelAndProcessorHelper; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; + +public class GizmoEntitySummarizer { + + protected ObjectMapper mapper; + protected OxmModelLoader oxmModelLoader; + private static final Logger logger = LoggerFactory.getInstance().getLogger(GizmoEntitySummarizer.class); + protected ExecutorService gizmoExecutor; + protected GizmoAdapter gizmoAdapter; + protected OxmModelAndProcessorHelper oxmHelper; + + /* + * We need to add another concept to the OxmModelLoader which is to generate + * a list of entity containers from the OXM JaxbContext + */ + + public GizmoEntitySummarizer() + throws ElasticSearchOperationException, IOException, RestClientConstructionException { + + OxmModelAndProcessorHelper.API_VERSION_OVERRIDE = 11; + + this.gizmoExecutor = NodeUtils.createNamedExecutor("GIZMO-WORKER", 5, logger); + + oxmHelper = OxmModelAndProcessorHelper.getInstance(); + this.oxmModelLoader = oxmHelper.getModelLoader(); + + this.mapper = new ObjectMapper(); + + RestEndpointConfig gizmoConfig = new RestEndpointConfig(); + + gizmoConfig.setEndpointIpAddress("10.147.138.153"); + gizmoConfig.setEndpointServerPort("9520"); + gizmoConfig.setNumRequestRetries(5); + gizmoConfig.setRestAuthenticationMode(RestAuthenticationMode.SSL_CERT); + gizmoConfig.setConnectTimeoutInMs(60000); + gizmoConfig.setReadTimeoutInMs(30000); + gizmoConfig.setCertFileName("client-cert-onap.p12"); + gizmoConfig.setCertPassword("OBF:1y0q1uvc1uum1uvg1pil1pjl1uuq1uvk1uuu1y10"); + gizmoConfig.setTruststoreFileName("synchronizer.jks"); + gizmoConfig.setValidateServerCertChain(false); + gizmoConfig.setValidateServerHostname(false); + + gizmoAdapter = new GizmoAdapter(oxmModelLoader, gizmoConfig); + + gizmoAdapter.setInventoryBasePath("/services/inventory/v12/"); + gizmoAdapter.setRelationshipsBasePath("/services/inventory/relationships/v12/"); + + } + + private Map<String, 
Integer> getNumEntitiesPerType() { + + Collection<String> containerTypes = oxmHelper.getOxmEntityContainerLookup().getEntityContainers(); + Collection<String> links = new ArrayList<String>(); + Map<String, Integer> entityTypeCounts = new TreeMap<String, Integer>(); + + final CountDownLatch latch = new CountDownLatch(containerTypes.size()); + + for (String entityType : containerTypes) { + + supplyAsync(new Supplier<Void>() { + + @Override + public Void get() { + + OperationResult typeLinksResult = null; + try { + typeLinksResult = gizmoAdapter.queryGizmoWithRetries( + gizmoAdapter.getFullInventoryUrl(entityType), "application/json", 1); + + if (typeLinksResult != null) { + + if (typeLinksResult.wasSuccessful() && typeLinksResult.getResult() != null) { + + JsonNode rootNode = mapper.readValue(typeLinksResult.getResult(), JsonNode.class); + + if (rootNode.isArray()) { + ArrayNode arrayNode = (ArrayNode) rootNode; + entityTypeCounts.put(entityType, new Integer(arrayNode.size())); + } else { + entityTypeCounts.put(entityType, new Integer(-1)); + } + + } else { + // -1 + entityTypeCounts.put(entityType, new Integer(-1)); + } + + } + + } catch (Exception exc) { + entityTypeCounts.put(entityType, new Integer(-1)); + } + + return null; + } + + }, gizmoExecutor).whenComplete((result, error) -> { + + latch.countDown(); + + if (error != null) { + logger.error(AaiUiMsgs.ERROR_GENERIC, + "An error occurred getting data from AAI. Error = " + error.getMessage()); + } + + }); + + } + + // System.out.println("self links size = " + selflinks.size()); + + try { + latch.await(); + } catch (InterruptedException e) { + + } + + return entityTypeCounts; + } + + private Map<String, Integer> getNumRelationshipsPerType() { + + Map<String, Integer> entityTypeCounts = new TreeMap<String, Integer>(); + + final CountDownLatch latch = new CountDownLatch(1); + + supplyAsync(new Supplier<Void>() { + + @Override + public Void get() { + + OperationResult typeLinksResult = null; + try { + typeLinksResult = gizmoAdapter.queryGizmoWithRetries(gizmoAdapter.getFullRelationshipUrl("has"), + "application/json", 1); + + if (typeLinksResult != null) { + + if (typeLinksResult.wasSuccessful() && typeLinksResult.getResult() != null) { + + JsonNode rootNode = mapper.readValue(typeLinksResult.getResult(), JsonNode.class); + + if (rootNode.isArray()) { + ArrayNode arrayNode = (ArrayNode) rootNode; + entityTypeCounts.put("has", new Integer(arrayNode.size())); + } else { + entityTypeCounts.put("has", new Integer(-1)); + } + + } else { + // -1 + entityTypeCounts.put("has", new Integer(-1)); + } + + } else { + entityTypeCounts.put("has", new Integer(-1)); + } + + } catch (Exception exc) { + entityTypeCounts.put("has", new Integer(-1)); + } + + return null; + } + + }, gizmoExecutor).whenComplete((result, error) -> { + + latch.countDown(); + + if (error != null) { + logger.error(AaiUiMsgs.ERROR_GENERIC, + "An error occurred getting data from AAI. 
Error = " + error.getMessage()); + } + + }); + + // System.out.println("self links size = " + selflinks.size()); + + try { + latch.await(); + } catch (InterruptedException e) { + + } + + return entityTypeCounts; + } + + public void shutdown() { + this.gizmoExecutor.shutdown(); + } + + public static void main(String[] args) + throws ElasticSearchOperationException, IOException, RestClientConstructionException { + + System.setProperty("CONFIG_HOME", "X:\\2018_dev\\OSEAAI\\gizmo_integration\\onap_sparky-be\\appconfig-local\\"); + GizmoEntitySummarizer gizmoSummarizer = new GizmoEntitySummarizer(); + + Map<String, Integer> entityCounts = gizmoSummarizer.getNumEntitiesPerType(); + Map<String, Integer> relationshipCounts = gizmoSummarizer.getNumRelationshipsPerType(); + gizmoSummarizer.shutdown(); + + System.out.println("Gizmo Entities:"); + + for (Entry<String, Integer> entry : entityCounts.entrySet()) { + String key = entry.getKey(); + Integer value = entry.getValue(); + + System.out.printf("\t%s : %d\n", key, value); + } + + System.out.println("\nGizmo Relationships:"); + + for (Entry<String, Integer> entry : relationshipCounts.entrySet()) { + String key = entry.getKey(); + Integer value = entry.getValue(); + + System.out.printf("\t%s : %d\n", key, value); + } + + } + +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/IndexDocumentTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/IndexDocumentTest.java new file mode 100644 index 0000000..0451ec5 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/IndexDocumentTest.java @@ -0,0 +1,104 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.synchronizer; + +import java.io.IOException; +import java.security.NoSuchAlgorithmException; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.runners.MockitoJUnitRunner; +import org.onap.aai.sparky.util.LogValidator; + +import ch.qos.logback.classic.Level; + + +/** + * The Class IndexDocumentTest. + */ +@RunWith(MockitoJUnitRunner.class) +public class IndexDocumentTest { + + private LogValidator logValidator; + + + /** + * Inits the. + * + * @throws Exception the exception + */ + @Before + public void init() throws Exception { + logValidator = new LogValidator(); + logValidator.initializeLogger(Level.WARN); + } + + /** + * Validate basic construction. + * + * @throws NoSuchAlgorithmException the no such algorithm exception + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + @Test + public void validateBasicConstruction() throws NoSuchAlgorithmException, IOException { + + /* + * String testDate = "2016-12-21 00:00:00.00"; OxmEntityDescriptor d = new + * OxmEntityDescriptor(); d.setEntityName("service-instance"); + * d.setPrimaryKeyAttributeName(Arrays.asList("service-instance-id")); + * d.setSearchableAttributes(Arrays.asList("service-instance-id")); + * + * Mockito.when(oxmModelLoader.getEntityDescriptor(anyString())).thenReturn(d); + * + * SearchableEntity id1 = new SearchableEntity(oxmModelLoader); + * + * id1.setEntityType("service-instance"); id1.setEntityPrimaryKeyValue("DUP2"); + * id1.addSearchTagWithIdx("DUP2", String.valueOf(1)); + * + * id1.deriveFields(); id1.setEntityTimeStamp(testDate); ObjectMapper mapper = new + * ObjectMapper(); + * + * String objStr = id1.getIndexDocumentJson(); + * + * JsonNode indexDocNode = mapper.readTree(objStr); + * + * /// + * + * ObjectNode expectedNode = mapper.createObjectNode(); expectedNode.put("entityType", + * "service-instance"); expectedNode.put("entityPrimaryKeyValue", "DUP2"); + * expectedNode.put("searchTagIDs", "1"); expectedNode.put("searchTags", "DUP2"); + * expectedNode.put("crossEntityReferenceValues", ""); expectedNode.put("lastmodTimestamp", + * testDate); + * + * assertTrue(NodeUtils.isEqual(expectedNode, indexDocNode)); // Test if the timestamp is + * calculated when the node is being created + * assertTrue(NodeUtils.getNodeFieldAsText(indexDocNode, "lastmodTimestamp") != null); + */ + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/IndexableCrossEntityReferenceTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/IndexableCrossEntityReferenceTest.java new file mode 100644 index 0000000..a7c34d0 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/IndexableCrossEntityReferenceTest.java @@ -0,0 +1,72 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.synchronizer; + +import java.io.IOException; +import java.security.NoSuchAlgorithmException; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.sync.entity.IndexableCrossEntityReference; + +public class IndexableCrossEntityReferenceTest { + + /** + * Inits the. + * + * @throws Exception the exception + */ + @Before + public void init() throws Exception { + + } + + /** + * Validate basic construction. 
+ * + * @throws NoSuchAlgorithmException the no such algorithm exception + * @throws IOException Signals that an I/O exception has occurred. + */ + @Test + public void validateBasicConstruction() throws NoSuchAlgorithmException, IOException { + + IndexableCrossEntityReference icer = new IndexableCrossEntityReference(); + + icer.setId("MyId"); + icer.setEntityType("Scott"); + icer.setEntityPrimaryKeyValue("woot"); + icer.setLink("me.link.yeeeeeaahhhh"); + + icer.addCrossEntityReferenceValue("meow"); + icer.addCrossEntityReferenceValue("kitty-goes"); + icer.addCrossEntityReferenceValue("kitty-goes-meow"); + + icer.deriveFields(); + + System.out.println(icer.getAsJson()); + + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/SyncControllerBuilder.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/SyncControllerBuilder.java new file mode 100644 index 0000000..84d7fd2 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/SyncControllerBuilder.java @@ -0,0 +1,512 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.synchronizer; + +/** + * The Class SyncControllerBuilder. + */ +public class SyncControllerBuilder { + + + /* + * We'll have to revisit this class, as the sync controllers are wired up pretty differently now + */ + + /** + * Test elastic search update api. 
+ */ +/* public void testElasticSearchUpdateApi() { + try { + + RestClientBuilder clientBuilder = new RestClientBuilder(); + clientBuilder.setUseHttps(false); + + RestfulDataAccessor nonCachingRestProvider = new RestfulDataAccessor(clientBuilder); + + ElasticSearchConfig esConfig = ElasticSearchConfig.getConfig(); + ElasticSearchDataProvider elasticSearchDataProvider = + new ElasticSearchAdapter(nonCachingRestProvider, esConfig); + + String payload = + "{ \"entityType\": \"complex\", \"pkey\": \"MORRISTOWN0075\", \"location\": { \"lat\": \"40.793414\", \"lon\": \"-74.480432\" }, \"selfLink\": \"https://aai-int1.test.att.com:8443/aai/v8/cloud-infrastructure/complexes/complex/MORRISTOWN0075?nodes-only\" }\n"; + + String updateRequest = elasticSearchDataProvider.buildBulkImportOperationRequest( + "topographysearchindex-localhost", "default", + "1e2a6ba9e09d5e1bcb016b3a0b8d50273b42828e47957bd2a2f3ce1854744f5f", "6", payload); + + OperationResult or = + elasticSearchDataProvider.doBulkOperation("http://localhost:9200/_bulk", updateRequest); + + System.out.println(or.toString()); + + /* + * String BULK_IMPORT_INDEX_TEMPLATE = + * "{\"index\":{\"_index\":\"%s\",\"_type\":\"%s\",\"_id\":\"%s\", \"_version\":\"%s\"}}\n"; + * + * StringBuilder updateRequestPayload = new StringBuilder(128); + * updateRequestPayload.append(String.format(BULK_IMPORT_INDEX_TEMPLATE, + * "topographysearchindex-localhost", "default", + * "1e2a6ba9e09d5e1bcb016b3a0b8d50273b42828e47957bd2a2f3ce1854744f5f", "5")); + * + * + * updateRequestPayload.append(payload); + * + * OperationResult or = nonCachingRestProvider.doRestfulOperation(HttpMethod.PUT, + * "http://localhost:9200/_bulk", updateRequestPayload.toString(), + * RestfulDataAccessor.APPLICATION_X_WWW_FORM_URL_ENCODED, + * RestfulDataAccessor.APPLICATION_JSON); + */ + + +/* + } catch (Exception exc) { + exc.printStackTrace(); + System.out.println("Error: failed to sync with message = " + exc.getMessage()); + } + } + + /** + * Do historical entity sync. 
+ *//* + public void doHistoricalEntitySync() { + try { + SyncController syncController = new SyncControllerImpl("historicalEntityTestController"); + + ActiveInventoryAdapter aaiAdapter = new ActiveInventoryAdapter(new OxmModelLoader(), new RestClientBuilder()); + + RestClientBuilder clientBuilder = new RestClientBuilder(); + clientBuilder.setUseHttps(false); + + RestfulDataAccessor nonCachingRestProvider = new RestfulDataAccessor(clientBuilder); + + ElasticSearchConfig esConfig = ElasticSearchConfig.getConfig(); + + ElasticSearchAdapter esAdapter = new ElasticSearchAdapter(nonCachingRestProvider,esConfig); + + + IndexIntegrityValidator entityCounterHistoryValidator = + new IndexIntegrityValidator(nonCachingRestProvider, esConfig.getEntityCountHistoryIndex(), + esConfig.getType(), esConfig.getIpAddress(), esConfig.getHttpPort(), + esConfig.buildElasticSearchEntityCountHistoryTableConfig()); + + syncController.registerIndexValidator(entityCounterHistoryValidator); + + + ////// + + + + HistoricalEntitySummarizer historicalSummarizer = + new HistoricalEntitySummarizer(esConfig.getEntityCountHistoryIndex()); + historicalSummarizer.setAaiDataProvider(aaiAdapter); + historicalSummarizer.setEsDataProvider(esAdapter); + syncController.registerEntitySynchronizer(historicalSummarizer); + + //// + + /* + * IndexIntegrityValidator entitySearchIndexValidator = new IndexIntegrityValidator(new + * RestClientBuilder()); + * + * entitySearchIndexValidator.setIndexName("topographysearchindex-localhost"); + * entitySearchIndexValidator.setIndexType("default"); + * entitySearchIndexValidator.setIndexSettings(""); + * entitySearchIndexValidator.setIndexSettings(""); + * + * syncController.registerIndexValidator(entitySearchIndexValidator); + */ + + //// + + /* + * IndexCleaner index1Cleaner = new ElasticSearchIndexCleaner(nonCachingRestProvider, + * "topographysearchindex-localhost", "default", "127.0.0.1", "9200", 5, 5000); + */ + + // syncController.registerIndexCleaner(index1Cleaner); + + /// +/* + for (int x = 0; x < 10; x++) { + + syncController.performAction(SyncActions.SYNCHRONIZE); + + while (syncController.getState() == SynchronizerState.PERFORMING_SYNCHRONIZATION) { + + System.out.println("sync controller state = " + syncController.getState()); + + Thread.sleep(1000); + } + } + + syncController.shutdown(); + + } catch (Exception exc) { + exc.printStackTrace(); + System.out.println("Error: failed to sync with message = " + exc.getMessage()); + } + } + + /** + * Do geo entity sync. 
+ *//* + public void doGeoEntitySync() { + try { + + ActiveInventoryAdapter aaiAdapter = new ActiveInventoryAdapter(new OxmModelLoader(), new RestClientBuilder()); + + RestClientBuilder clientBuilder = new RestClientBuilder(); + clientBuilder.setUseHttps(false); + + RestfulDataAccessor nonCachingRestProvider = new RestfulDataAccessor(clientBuilder); + ElasticSearchConfig esConfig = ElasticSearchConfig.getConfig(); + + ElasticSearchAdapter esAdapter = new ElasticSearchAdapter(nonCachingRestProvider,esConfig); + + IndexIntegrityValidator entitySearchIndexValidator = + new IndexIntegrityValidator(nonCachingRestProvider, esConfig.getIndexName(), + esConfig.getType(), esConfig.getIpAddress(), esConfig.getHttpPort(), + esConfig.buildElasticSearchTableConfig()); + + SyncController syncController = new SyncControllerImpl("geoEntitySyncTestController"); + syncController.registerIndexValidator(entitySearchIndexValidator); + + + ////// + + GeoSynchronizer geoSync = new GeoSynchronizer("topographysearchindex-localhost"); + geoSync.setAaiDataProvider(aaiAdapter); + geoSync.setEsDataProvider(esAdapter); + syncController.registerEntitySynchronizer(geoSync); + + //// + + /* + * IndexIntegrityValidator entitySearchIndexValidator = new IndexIntegrityValidator(new + * RestClientBuilder()); + * + * entitySearchIndexValidator.setIndexName("topographysearchindex-localhost"); + * entitySearchIndexValidator.setIndexType("default"); + * entitySearchIndexValidator.setIndexSettings(""); + * entitySearchIndexValidator.setIndexSettings(""); + * + * syncController.registerIndexValidator(entitySearchIndexValidator); + */ + + //// + + /* + * IndexCleaner index1Cleaner = new ElasticSearchIndexCleaner(nonCachingRestProvider, + * "topographysearchindex-localhost", "default", "127.0.0.1", "9200", 5, 5000); + */ + + // syncController.registerIndexCleaner(index1Cleaner); + + /// +/* + syncController.performAction(SyncActions.SYNCHRONIZE); + + while (syncController.getState() == SynchronizerState.PERFORMING_SYNCHRONIZATION) { + Thread.sleep(1000); + } + + syncController.shutdown(); + + } catch (Exception exc) { + exc.printStackTrace(); + System.out.println("Error: failed to sync with message = " + exc.getMessage()); + } + } + + /** + * Do searchable entitysync. 
+ *//* + public void doSearchableEntitysync() { + try { + + + ActiveInventoryAdapter aaiAdapter = new ActiveInventoryAdapter(new OxmModelLoader(), new RestClientBuilder()); + + RestClientBuilder clientBuilder = new RestClientBuilder(); + clientBuilder.setUseHttps(false); + + RestfulDataAccessor nonCachingRestProvider = new RestfulDataAccessor(clientBuilder); + ElasticSearchConfig esConfig = ElasticSearchConfig.getConfig(); + + ElasticSearchAdapter esAdapter = new ElasticSearchAdapter(nonCachingRestProvider,esConfig); + + ////// + + SyncController syncController = new SyncControllerImpl("searchtableEntityTestController"); + + ViewInspectEntitySynchronizer ses = + new ViewInspectEntitySynchronizer("entitysearchindex-localhost"); + ses.setAaiDataProvider(aaiAdapter); + ses.setEsDataProvider(esAdapter); + syncController.registerEntitySynchronizer(ses); + + //// + + /* + * IndexIntegrityValidator entitySearchIndexValidator = new IndexIntegrityValidator(new + * RestClientBuilder()); + * + * entitySearchIndexValidator.setIndexName("esi-sync2-localhost"); + * entitySearchIndexValidator.setIndexType("default"); + * + * syncController.registerIndexValidator(entitySearchIndexValidator); + */ + + //// + + /* + * IndexCleaner index1Cleaner = new ElasticSearchIndexCleaner(nonCachingRestProvider, + * "entitysearchindex-localhost", "default", "127.0.0.1", "9200", 5, 5000); + * + * syncController.registerIndexCleaner(index1Cleaner); + */ + + /// +/* + syncController.performAction(SyncActions.SYNCHRONIZE); + + while (syncController.getState() == SynchronizerState.PERFORMING_SYNCHRONIZATION) { + Thread.sleep(1000); + } + + syncController.shutdown(); + + } catch (Exception exc) { + exc.printStackTrace(); + System.out.println("Error: failed to sync with message = " + exc.getMessage()); + } + } + + /** + * Do cross entity reference sync. 
+ *//* + public void doCrossEntityReferenceSync() { + try { + + ActiveInventoryAdapter aaiAdapter = new ActiveInventoryAdapter(new OxmModelLoader(), new RestClientBuilder()); + + RestClientBuilder clientBuilder = new RestClientBuilder(); + clientBuilder.setUseHttps(false); + + RestfulDataAccessor nonCachingRestProvider = new RestfulDataAccessor(clientBuilder); + ElasticSearchConfig esConfig = ElasticSearchConfig.getConfig(); + + ElasticSearchAdapter esAdapter = new ElasticSearchAdapter(nonCachingRestProvider,esConfig); + + SyncController syncController = new SyncControllerImpl("crossEntityRefSyncController"); + + CrossEntityReferenceSynchronizer cers = + new CrossEntityReferenceSynchronizer("entitysearchindex-localhost", ActiveInventoryConfig.getConfig()); + cers.setAaiDataProvider(aaiAdapter); + cers.setEsDataProvider(esAdapter); + syncController.registerEntitySynchronizer(cers); + + ViewInspectEntitySynchronizer ses = + new ViewInspectEntitySynchronizer("entitysearchindex-localhost"); + ses.setAaiDataProvider(aaiAdapter); + ses.setEsDataProvider(esAdapter); + syncController.registerEntitySynchronizer(ses); + + ElasticSearchConfig config = ElasticSearchConfig.getConfig(); + + IndexIntegrityValidator entitySearchIndexValidator = new IndexIntegrityValidator( + nonCachingRestProvider, config.getIndexName(), config.getType(), config.getIpAddress(), + config.getHttpPort(), config.buildElasticSearchTableConfig()); + + syncController.registerIndexValidator(entitySearchIndexValidator); + + //// + + IndexCleaner index1Cleaner = + new ElasticSearchIndexCleaner(nonCachingRestProvider, config.getIndexName(), + config.getType(), config.getIpAddress(), config.getHttpPort(), 5, 5000); + + syncController.registerIndexCleaner(index1Cleaner); + + /// + + syncController.performAction(SyncActions.SYNCHRONIZE); + + while (syncController.getState() == SynchronizerState.PERFORMING_SYNCHRONIZATION) { + Thread.sleep(1000); + } + + syncController.shutdown(); + + } catch (Exception exc) { + exc.printStackTrace(); + System.out.println("Error: Failed to sync with message = " + exc.getMessage()); + } + } + + /** + * Do suggestion entitysync. + *//* + public void doSuggestionEntitySync() { + try { + ActiveInventoryAdapter aaiAdapter = new ActiveInventoryAdapter(new OxmModelLoader(), new RestClientBuilder()); + + RestClientBuilder clientBuilder = new RestClientBuilder(); + clientBuilder.setUseHttps(false); + + RestfulDataAccessor nonCachingRestProvider = new RestfulDataAccessor(clientBuilder); + ElasticSearchConfig esConfig = ElasticSearchConfig.getConfig(); + + ElasticSearchAdapter esAdapter = new ElasticSearchAdapter(nonCachingRestProvider, esConfig); + + SyncController syncController = new SyncControllerImpl("suggestionEntityTestController"); + + AutosuggestionSynchronizer ses = + new AutosuggestionSynchronizer("entityautosuggestindex-localhost"); + ses.setAaiDataProvider(aaiAdapter); + ses.setEsDataProvider(esAdapter); + syncController.registerEntitySynchronizer(ses); + + syncController.performAction(SyncActions.SYNCHRONIZE); + + while (syncController.getState() == SynchronizerState.PERFORMING_SYNCHRONIZATION) { + Thread.sleep(1000); + } + + syncController.shutdown(); + + } catch (Exception exc) { + exc.printStackTrace(); + System.out.println("Error: failed to sync with message = " + exc.getMessage()); + } + } + + /* + * Do no op sync. 
+ *//* + public void doNoOpSync() { + try { + SyncController syncController = new SyncControllerImpl("noopSyncTestController"); + + /* + * ActiveInventoryAdapter aaiAdapter = new ActiveInventoryAdapter(new RestClientBuilder()); + * + * aaiAdapter.setCacheEnabled(true); + * + * /*InMemoryEntityCache aaiInMemoryCache = new InMemoryEntityCache(); + * aaiAdapter.setEntityCache(aaiInMemoryCache); + */ + + /* + * PersistentEntityCache aaiDiskCache = new PersistentEntityCache(); + * aaiAdapter.setEntityCache(aaiDiskCache); + * + * ElasticSearchConfig config = ElasticSearchConfig.getConfig(); OXMModelLoader loader = + * OXMModelLoader.getInstance(); SyncAdapter syncAdapter = new SyncAdapter(new + * RestClientBuilder(), config, loader); + * + * ////// + * + * SearchableEntitySynchronizer ses = new SearchableEntitySynchronizer(); + * ses.setAaiDataProvider(aaiAdapter); ses.setEsDataProvider(syncAdapter); + * syncController.registerEntitySynchronizer(ses); + * + * //// + * + * IndexIntegrityValidator entitySearchIndexValidator = new IndexIntegrityValidator(new + * RestClientBuilder()); + * + * entitySearchIndexValidator.setIndexName("esi-sync2-localhost"); + * entitySearchIndexValidator.setIndexType("default"); + * entitySearchIndexValidator.setIndexSettings(""); + * entitySearchIndexValidator.setIndexSettings(""); + * + * syncController.registerIndexValidator(entitySearchIndexValidator); + * + * //// + * + * ElasticSearchEntityPurger p1 = new ElasticSearchEntityPurger(new RestClientBuilder()); + * p1.setIndexName("esi-blal-blah"); + * + * ElasticSearchEntityPurger p2 = new ElasticSearchEntityPurger(new RestClientBuilder()); + * p2.setIndexName("esi-topo-blah"); + */ + /// +/* + syncController.performAction(SyncActions.SYNCHRONIZE); + + while (syncController.getState() == SynchronizerState.PERFORMING_SYNCHRONIZATION) { + Thread.sleep(1000); + } + + syncController.shutdown(); + + } catch (Exception exc) { + System.out.println("Error: failed to sync with message = " + exc.getMessage()); + } + } + + + /** + * The main method. 
+ * + * @param args the arguments + *//* + public static void main(String[] args) { + //boolean runSearchableEntitySync = false; + //boolean runGeoEntitySync = true; + + //System.setProperty("AJSC_HOME", "e:\\dev"); + // System.getProperties().setProperty("AJSC_HOME", + // "c:\\rpo\\tier-support-ui\\target\\swm\\package\\nix\\" + // + "dist_files\\opt\\app\\ajsc-tier-support-ui"); + + System.setProperty("CONFIG_HOME", "appconfig-local"); + System.setProperty("AJSC_HOME", "x:\\1710_extensibility\\"); + + SyncControllerBuilder syncBuilder = new SyncControllerBuilder(); + + /* + * if (runSearchableEntitySync) syncBuilder.doSearchableEntitysync(); + */ +/* + syncBuilder.doSearchableEntitysync(); + // syncBuilder.doCrossEntityReferenceSync(); + // syncBuilder.doHistoricalEntitySync(); + // syncBuilder.doGeoEntitySync(); + //syncBuilder.doSuggestionEntitySync(); + //syncBuilder.doMasterEntitySync(); + + // syncBuilder.testElasticSearchUpdateAPI(); + + /* + * if (runGeoEntitySync) { syncBuilder.doGeoEntitySync(); } + */ + + + + //} +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/SyncControllerServiceTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/SyncControllerServiceTest.java new file mode 100644 index 0000000..d4d62ba --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/SyncControllerServiceTest.java @@ -0,0 +1,34 @@ +package org.onap.aai.sparky.synchronizer; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.sync.SyncControllerRegistry; +import org.onap.aai.sparky.sync.SyncControllerService; + +public class SyncControllerServiceTest { + + private TestSyncController alpha; + private SyncControllerService syncService; + private SyncControllerRegistry syncControllerRegistry; + + @Before + public void init() { + alpha = new TestSyncController("alpha"); + syncControllerRegistry = new SyncControllerRegistry(); + syncService = new SyncControllerService(syncControllerRegistry, 5,5); + + } + + @Test + public void validateControllerRegistration() { + + syncControllerRegistry.registerSyncController( alpha ); + + syncService.startSync(); + + syncService.shutdown(); + + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/TestSyncController.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/TestSyncController.java new file mode 100644 index 0000000..c61f78b --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/TestSyncController.java @@ -0,0 +1,177 @@ +package org.onap.aai.sparky.synchronizer; + +import java.util.Calendar; +import java.util.Date; +import java.util.concurrent.Semaphore; + +import org.onap.aai.sparky.sync.IndexCleaner; +import org.onap.aai.sparky.sync.IndexSynchronizer; +import org.onap.aai.sparky.sync.IndexValidator; +import org.onap.aai.sparky.sync.SyncController; +import org.onap.aai.sparky.sync.SyncControllerImpl.SyncActions; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; + +public class TestSyncController implements SyncController { + + private String controllerName; + private boolean periodic; + private SynchronizerState internalState; + private Semaphore gate; + + public TestSyncController(String name) { + this.controllerName = name; + this.internalState = SynchronizerState.IDLE; + this.gate = new Semaphore(1); + } + + + @Override + public String getControllerName() { + return 
this.controllerName; + } + + @Override + public OperationState performAction(SyncActions requestedAction) { + + if (gate.tryAcquire()) { + + internalState = SynchronizerState.PERFORMING_SYNCHRONIZATION; + + // System.out.println("performaAction = " + requestedAction); + + System.out.println("Sync started with thread = " + Thread.currentThread().getName() + + " at date = " + new Date(Calendar.getInstance().getTimeInMillis())); + + try { + Thread.sleep(10000L); + } catch (InterruptedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + System.out.println("Sync done with thread = " + Thread.currentThread().getName() + + " at date = " + new Date(Calendar.getInstance().getTimeInMillis())); + internalState = SynchronizerState.IDLE; + + System.out.println("Next Sync at = " + Thread.currentThread().getName() + + " at date = " + new Date(Calendar.getInstance().getTimeInMillis() + 30000L)); + + + gate.release(); + + return OperationState.OK; + } else { + return OperationState.IGNORED_SYNC_NOT_IDLE; + } + } + + @Override + public void registerEntitySynchronizer(IndexSynchronizer entitySynchronizer) { + // TODO Auto-generated method stub + + } + + @Override + public void registerIndexValidator(IndexValidator indexValidator) { + // TODO Auto-generated method stub + + } + + @Override + public void registerIndexCleaner(IndexCleaner indexCleaner) { + // TODO Auto-generated method stub + + } + + @Override + public void shutdown() { + // TODO Auto-generated method stub + // System.out.println("shutdown"); + } + + @Override + public SynchronizerState getState() { + // System.out.println("getState()"); + return SynchronizerState.IDLE; + } + + @Override + public long getDelayInMs() { + // TODO Auto-generated method stub + return 1000L; + } + + @Override + public void setDelayInMs(long delayInMs) { + // TODO Auto-generated method stub + + } + + @Override + public long getSyncFrequencyInMs() { + // TODO Auto-generated method stub + return 30000L; + } + + @Override + public void setSyncFrequencyInMs(long syncFrequencyInMs) { + // TODO Auto-generated method stub + + } + + @Override + public Date getSyncStartTime() { +// System.out.println("getSyncStateTime()"); + // TODO Auto-generated method stub + return null; + } + + @Override + public void setSyncStartTime(Date syncStartTime) { + // TODO Auto-generated method stub + + } + + @Override + public Date getLastExecutionDate() { + // TODO Auto-generated method stub + return null; + } + + @Override + public void setLastExecutionDate(Date lastExecutionDate) { + // TODO Auto-generated method stub + + } + + + @Override + public Calendar getCreationTime() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getNextSyncTime() { + // TODO Auto-generated method stub + return null; + } + + + @Override + public boolean isPeriodicSyncEnabled() { + // TODO Auto-generated method stub + return false; + } + + + @Override + public boolean isRunOnceSyncEnabled() { + // TODO Auto-generated method stub + return false; + } + + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/entity/AggregationSuggestionEntityTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/entity/AggregationSuggestionEntityTest.java new file mode 100644 index 0000000..ba3f38f --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/entity/AggregationSuggestionEntityTest.java @@ -0,0 +1,75 @@ +/** + * 
============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.synchronizer.entity; + +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Scanner; + +import org.junit.Test; +import org.onap.aai.sparky.search.filters.config.FiltersConfig; +import org.onap.aai.sparky.sync.entity.AggregationSuggestionEntity; + +public class AggregationSuggestionEntityTest { + public String getResourceFileContents(String filePath) { + StringBuilder result = new StringBuilder(""); + + ClassLoader classLoader = getClass().getClassLoader(); + File file = new File(classLoader.getResource(filePath).getFile()); + + try (Scanner scanner = new Scanner(file)) { + while (scanner.hasNextLine()) { + String line = scanner.nextLine(); + result.append(line).append("\n"); + } + scanner.close(); + } catch (IOException e) { + e.printStackTrace(); + } + + return result.toString(); + } + + @Test + public void testGetIndexDocumentJson() { + FiltersConfig filtersConfig = new FiltersConfig(); + AggregationSuggestionEntity aggregationSuggestionEntity = new AggregationSuggestionEntity(filtersConfig); + + List<String> filterIds = new ArrayList<>(Arrays.asList("1", "2", "7", "8")); + aggregationSuggestionEntity.setFilterIds(filterIds); + + String expectedFilterListPayload = getResourceFileContents( + "filters/AggregationSuggestionEntity_getIndexDocumentJson_expected.json"); + + assertTrue( + aggregationSuggestionEntity.getAsJson().contains(expectedFilterListPayload.trim())); + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/entity/SuggestionSearchEntityTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/entity/SuggestionSearchEntityTest.java new file mode 100644 index 0000000..0f145f5 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/entity/SuggestionSearchEntityTest.java @@ -0,0 +1,187 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.synchronizer.entity; + +import java.io.IOException; +import java.util.ArrayList; + +import org.junit.BeforeClass; +import org.onap.aai.sparky.search.filters.config.FiltersConfig; +import org.onap.aai.sparky.sync.entity.SuggestionSearchEntity; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +public class SuggestionSearchEntityTest { + private SuggestionSearchEntity suggestionSearchEntity; + ObjectMapper mapper = new ObjectMapper(); + private static FiltersConfig config = null; + + @BeforeClass + public static void init() throws IOException { + config = new FiltersConfig(); + config.setViewsFileName("src/test/resources/filters/aaiui_views.json"); + config.setFiltersFileName("src/test/resources/filters/aaiui_filters.json"); + config.setViewsConfig(config.readUiViewsConfig()); + config.setFiltersConfig(config.readUiFiltersConfig()); + + } + + public JsonNode getTestNodeForVnf_withProvAndOrchStatus() throws JsonProcessingException, IOException{ + String str = "{" + + "\"vnf-id\": \"1\"," + + "\"vnf-name\": \"2\"," + + "\"vnf-type\": \"3\"," + + "\"orchestration-status\": \"o1\"," + + "\"prov-status\": \"p1\"" + + "}"; + + return mapper.readTree(str); + } + + public JsonNode getTestNodeForVnf_withOrchStatus() throws JsonProcessingException, IOException{ + String str = "{" + + "\"vnf-id\": \"1\"," + + "\"vnf-name\": \"2\"," + + "\"vnf-type\": \"3\"," + + "\"orchestration-status\": \"o1\"" + + "}"; + + return mapper.readTree(str); + } + + public JsonNode getFilterListForOrchestrationStatusOnly(String orcStat) throws JsonProcessingException, IOException{ + String str = "{\"filterList\":[{\"filterId\":\"2\"},{\"filterId\":\"1\",\"filterValue\":" + + orcStat + "}]}"; + return mapper.readTree(str); + } + + public String getStrFilterListForOrchestrationStatusOnly(String orcStat) + throws JsonProcessingException, IOException{ + String str = "{\"filterList\":[{\"filterId\":\"2\"},{\"filterId\":\"1\",\"filterValue\":" + + orcStat + "}," + + "{\"filterId\":\"7\"}," + + "{\"filterId\":\"8\"}" + + "]}"; + return str; + } + + public String getStrFilterListForOrcStatAndProvStat(String orcStat, String provStat) + throws JsonProcessingException, IOException{ + String str = "{\"filterList\"" + + ":[{\"filterId\":\"2\"," + + "\"filterValue\":" + provStat + + "},{\"filterId\":\"1\",\"filterValue\":" + + orcStat + "}," + + "{\"filterId\":\"7\"}," + + "{\"filterId\":\"8\"}" + + "]}"; + return str; + } + + public ArrayList<String> getSingleElementOrcStatUniqueList(){ + ArrayList<String> list = new ArrayList<String>(); + list.add("orchestration-status"); + return 
list; + } + + public ArrayList<String> getTwoElementUniqueList(){ + ArrayList<String> list = new ArrayList<String>(); + list.add("prov-status"); + list.add("orchestration-status"); + return list; + } + + // Testing the filters payload (for ES) when only one suggestible attribute is present + // Use case: testing a single-element set from the power set of all attributes + /*@Test + public void test_params_for_suggestions_with_orcStat_o1(){ + suggestionSearchEntity = new SuggestionSearchEntity(SuggestionEntityLookup.getInstance(), config); + suggestionSearchEntity.setEntityType("generic-vnf"); + JsonNode node = null; + try{ + node = getTestNodeForVnf_withOrchStatus(); + suggestionSearchEntity.setFilterBasedPayloadFromResponse(node, + suggestionSearchEntity.getEntityType(), this.getSingleElementOrcStatUniqueList()); + JSONObject json = suggestionSearchEntity.getPayload(); + JSONObject exectedFilterPayload = new JSONObject( + this.getStrFilterListForOrchestrationStatusOnly("o1")); + + final JsonNode tree1 = mapper.readTree(json.toString()); + final JsonNode tree2 = mapper.readTree(exectedFilterPayload.toString()); + + assertTrue("Filter list not equal. Found: " + json + ". Expected: " + exectedFilterPayload, + tree1.equals(tree2)); + + Map<String, String> inputOutput = suggestionSearchEntity.getInputOutputData(); + Map<String, String> expectedInputOutput = new HashMap<String, String>(); + expectedInputOutput.put("orchestration-status", "o1"); + final JsonNode tree3 = mapper.readTree(mapper.writeValueAsString(inputOutput)); + final JsonNode tree4 = mapper.readTree(mapper.writeValueAsString(expectedInputOutput)); + + assertTrue("inputs for suggestions are not equal", tree3.equals(tree4)); + + } catch (Exception e){ + fail("Failed to get test node."); + } + }*/ + + //Testing the filters payload (for ES) when multiple suggestible attributes are present + // Use case: testing a 2-element set from the power set of all attributes + /*@Test + public void test_params_for_suggestions_with_orcStat_o1_provStat_p1(){ + suggestionSearchEntity = new SuggestionSearchEntity(); + suggestionSearchEntity.setEntityType("generic-vnf"); + JsonNode node = null; + try{ + node = getTestNodeForVnf_withProvAndOrchStatus(); + suggestionSearchEntity.setFilterBasedPayloadFromResponse(node, + suggestionSearchEntity.getEntityType(), this.getTwoElementUniqueList()); + JSONObject json = suggestionSearchEntity.getPayload(); + JSONObject exectedFilterPayload = new JSONObject( + this.getStrFilterListForOrcStatAndProvStat("o1", "p1")); + + final JsonNode tree1 = mapper.readTree(json.toString()); + final JsonNode tree2 = mapper.readTree(exectedFilterPayload.toString()); + + assertTrue("Filter list not equal. Found: " + json + ". 
Expected: " + exectedFilterPayload, + tree1.equals(tree2)); + + Map<String, String> inputOutput = suggestionSearchEntity.getInputOutputData(); + Map<String, String> expectedInputOutput = new HashMap<String, String>(); + expectedInputOutput.put("orchestration-status", "o1"); + expectedInputOutput.put("prov-status", "p1"); + final JsonNode tree3 = mapper.readTree(mapper.writeValueAsString(inputOutput)); + final JsonNode tree4 = mapper.readTree(mapper.writeValueAsString(expectedInputOutput)); + + assertTrue("inputs for suggestions are not equal", tree3.equals(tree4)); + + } catch (Exception e){ + fail("Failed to get node."); + } + }*/ +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/task/PerformActiveInventoryRetrievalTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/task/PerformActiveInventoryRetrievalTest.java new file mode 100644 index 0000000..60586bd --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/synchronizer/task/PerformActiveInventoryRetrievalTest.java @@ -0,0 +1,93 @@ +package org.onap.aai.sparky.synchronizer.task; + +import org.junit.BeforeClass; +import org.onap.aai.sparky.sync.task.PerformActiveInventoryRetrieval; +/* + * + * TODO-> RestClientBuilder is no longer being use neither is RestAuthenticationMode class + * need to be rewritten to use ONAP RestAuthenticationMode + */ +/* +public class PerformActiveInventoryRetrievalTest { + + private PerformActiveInventoryRetrieval sut; + + @BeforeClass + public static void initBeforeClass() throws Exception { + /* Mock aai.properties values which are used by repairSelfLink(), which is used both to build this + * test's expected URI and also used by PerformActiveInventoryRetrieval.get() which is the method under test + * + ActiveInventoryRestConfig aaiRestConfig = new ActiveInventoryRestConfig(null); + ActiveInventoryConfig.getConfig().setAaiRestConfig(aaiRestConfig); + ActiveInventoryConfig.getConfig().getAaiRestConfig().setHost("localhost"); + ActiveInventoryConfig.getConfig().getAaiRestConfig().setPort("8443"); + ActiveInventoryConfig.getConfig().getAaiRestConfig().setAuthenticationMode(RestAuthenticationMode.SSL_CERT); + } + + +/* @Test + public void testGet_relativeURI() throws Exception { + String uriToProcess = "/my/relative/uri"; // URI to perform test with + + Use uriToProcess + repairSelfLink() method to build the URI that we expect to see: + "https://localhost:8443/my/relative/uri" + String expectedURI = ActiveInventoryConfig.getConfig().repairSelfLink(uriToProcess); + + // Set up PerformActiveInventoryRetrieval dependencies + ActiveInventoryDataProvider aaiProvider = new ActiveInventoryAdapter(new OxmModelLoader(), new RestClientBuilder()); + NetworkTransaction txn = new NetworkTransaction(); + txn.setLink(uriToProcess); + + sut = new PerformActiveInventoryRetrieval(txn, aaiProvider); + sut.setContextMap(new HashMap<>()); + + // Call method under test which should add the missing scheme/host/port to the relative path given in this test + sut.get(); + + assertEquals(expectedURI, txn.getOperationResult().getRequestLink()); + } + + @Test + public void testGet_relativeURIWithSchemaAndAuthority() throws Exception { + String uriToProcess = "https://localhost:8443/my/relative/uri"; // URI to perform test with + + Use uriToProcess + repairSelfLink() method to build the URI that we expect to see: + "https://localhost:8443/my/relative/uri" + String expectedURI = ActiveInventoryConfig.getConfig().repairSelfLink(uriToProcess); + + // Set up 
PerformActiveInventoryRetrieval dependencies + ActiveInventoryDataProvider aaiProvider = new ActiveInventoryAdapter(new OxmModelLoader(), new RestClientBuilder()); + NetworkTransaction txn = new NetworkTransaction(); + txn.setLink(uriToProcess); + + sut = new PerformActiveInventoryRetrieval(txn, aaiProvider); + sut.setContextMap(new HashMap<>()); + + // Call method under test which shouldn't change the absolute path given in this test + sut.get(); + + assertEquals(expectedURI, txn.getOperationResult().getRequestLink()); + } + + @Test + public void testGet_emptyURI() throws Exception { + String uriToProcess = ""; // URI to perform test with + + Use uriToProcess + repairSelfLink() method to build the URI that we expect to see: + "https://localhost:8443" + String expectedURI = ActiveInventoryConfig.getConfig().repairSelfLink(uriToProcess); + + // Set up PerformActiveInventoryRetrieval dependencies + ActiveInventoryDataProvider aaiProvider = new ActiveInventoryAdapter(new OxmModelLoader(), new RestClientBuilder()); + NetworkTransaction txn = new NetworkTransaction(); + txn.setLink(uriToProcess); + + sut = new PerformActiveInventoryRetrieval(txn, aaiProvider); + sut.setContextMap(new HashMap<>()); + + // Call method under test which should add the missing scheme/host/port to the empty URI given in this test + sut.get(); + + assertEquals(expectedURI, txn.getOperationResult().getRequestLink()); + } +}*/ diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/CaptureLoggerAppender.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/CaptureLoggerAppender.java new file mode 100644 index 0000000..443328e --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/CaptureLoggerAppender.java @@ -0,0 +1,247 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.util; + +import java.util.ArrayList; +import java.util.Deque; +import java.util.List; +import java.util.concurrent.ConcurrentLinkedDeque; + +import ch.qos.logback.classic.spi.LoggingEvent; +import ch.qos.logback.core.Appender; +import ch.qos.logback.core.Context; +import ch.qos.logback.core.LogbackException; +import ch.qos.logback.core.filter.Filter; +import ch.qos.logback.core.spi.FilterReply; +import ch.qos.logback.core.status.Status; + +/** + * A test class used to provide a concrete log stub of the Log4j API interface. 
The goal is to + * transparently capture logging paths so we can add log validation during the junit validation + * without post-analyzing on-disk logs. + * + * @author DAVEA + * + */ +@SuppressWarnings("rawtypes") +public class CaptureLoggerAppender implements Appender { + + private Deque<LoggingEvent> capturedLogs; + + /** + * Instantiates a new capture logger appender. + */ + public CaptureLoggerAppender() { + capturedLogs = new ConcurrentLinkedDeque<LoggingEvent>(); + } + + /** + * Drain all logs. + * + * @return the list + */ + public List<LoggingEvent> drainAllLogs() { + List<LoggingEvent> loggingEvents = new ArrayList<LoggingEvent>(); + + LoggingEvent event = null; + + while (capturedLogs.peek() != null) { + event = capturedLogs.pop(); + loggingEvents.add(event); + } + + return loggingEvents; + } + + /** + * Clears the capture logs double-ended queue and returns the size of the queue before it was + * cleared. + * + * @return int numCapturedLogs + */ + public int clearAllLogs() { + int numCapturedLogs = capturedLogs.size(); + capturedLogs.clear(); + return numCapturedLogs; + } + + + + /* (non-Javadoc) + * @see ch.qos.logback.core.spi.LifeCycle#start() + */ + @Override + public void start() {} + + /* (non-Javadoc) + * @see ch.qos.logback.core.spi.LifeCycle#stop() + */ + @Override + public void stop() {} + + @Override + public boolean isStarted() { + // TODO Auto-generated method stub + System.out.println("isStarted"); + return false; + } + + @Override + public void setContext(Context context) { + // TODO Auto-generated method stub + System.out.println("setContext"); + + } + + @Override + public Context getContext() { + // TODO Auto-generated method stub + System.out.println("getContext"); + return null; + } + + /* (non-Javadoc) + * @see ch.qos.logback.core.spi.ContextAware#addStatus(ch.qos.logback.core.status.Status) + */ + @Override + public void addStatus(Status status) { + // TODO Auto-generated method stub + System.out.println("addStatus"); + } + + /* (non-Javadoc) + * @see ch.qos.logback.core.spi.ContextAware#addInfo(java.lang.String) + */ + @Override + public void addInfo(String msg) { + // TODO Auto-generated method stub + + } + + /* (non-Javadoc) + * @see ch.qos.logback.core.spi.ContextAware#addInfo(java.lang.String, java.lang.Throwable) + */ + @Override + public void addInfo(String msg, Throwable ex) { + // TODO Auto-generated method stub + + } + + /* (non-Javadoc) + * @see ch.qos.logback.core.spi.ContextAware#addWarn(java.lang.String) + */ + @Override + public void addWarn(String msg) { + // TODO Auto-generated method stub + + } + + /* (non-Javadoc) + * @see ch.qos.logback.core.spi.ContextAware#addWarn(java.lang.String, java.lang.Throwable) + */ + @Override + public void addWarn(String msg, Throwable ex) { + // TODO Auto-generated method stub + + } + + /* (non-Javadoc) + * @see ch.qos.logback.core.spi.ContextAware#addError(java.lang.String) + */ + @Override + public void addError(String msg) { + // TODO Auto-generated method stub + + } + + /* (non-Javadoc) + * @see ch.qos.logback.core.spi.ContextAware#addError(java.lang.String, java.lang.Throwable) + */ + @Override + public void addError(String msg, Throwable ex) { + // TODO Auto-generated method stub + + } + + /* (non-Javadoc) + * @see ch.qos.logback.core.spi.FilterAttachable#addFilter(ch.qos.logback.core.filter.Filter) + */ + @Override + public void addFilter(Filter newFilter) { + // TODO Auto-generated method stub + + } + + /* (non-Javadoc) + * @see ch.qos.logback.core.spi.FilterAttachable#clearAllFilters() + */ + 
@Override + public void clearAllFilters() { + // TODO Auto-generated method stub + + } + + @Override + public List getCopyOfAttachedFiltersList() { + // TODO Auto-generated method stub + return null; + } + + /* (non-Javadoc) + * @see ch.qos.logback.core.spi.FilterAttachable#getFilterChainDecision(java.lang.Object) + */ + @Override + public FilterReply getFilterChainDecision(Object event) { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getName() { + // TODO Auto-generated method stub + System.out.println("getName"); + return "MOCK"; + } + + /* (non-Javadoc) + * @see ch.qos.logback.core.Appender#doAppend(java.lang.Object) + */ + @Override + public void doAppend(Object event) throws LogbackException { + // TODO Auto-generated method stub + // System.out.println("doAppend(), event = " + event); + // System.out.println("event class = " + event.getClass().getSimpleName()); + capturedLogs.add((LoggingEvent) event); + } + + @Override + public void setName(String name) { + // TODO Auto-generated method stub + System.out.println("setName() name = " + name); + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/ExceptionHelper.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/ExceptionHelper.java new file mode 100644 index 0000000..f05aed8 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/ExceptionHelper.java @@ -0,0 +1,62 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.util; + +/** + * The Class ExceptionHelper. + */ +public class ExceptionHelper { + + /** + * Extract stack trace elements. + * + * @param maxNumberOfElementsToCapture the max number of elements to capture + * @param exc the exc + * @return the string + */ + public static String extractStackTraceElements(int maxNumberOfElementsToCapture, Exception exc) { + StringBuilder sb = new StringBuilder(128); + + StackTraceElement[] stackTraceElements = exc.getStackTrace(); + + if (stackTraceElements != null) { + + /* + * We want to avoid an index out-of-bounds error, so we will make sure to only extract the + * number of frames from the stack trace that actually exist. 
+ */ + + int numFramesToExtract = Math.min(maxNumberOfElementsToCapture, stackTraceElements.length); + + for (int x = 0; x < numFramesToExtract; x++) { + sb.append(stackTraceElements[x]).append("\n"); + } + + } + + return sb.toString(); + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/HttpServletHelper.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/HttpServletHelper.java new file mode 100644 index 0000000..081c93e --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/HttpServletHelper.java @@ -0,0 +1,162 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.util; + +import static org.junit.Assert.fail; + +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.PrintWriter; +import java.io.StringReader; +import java.nio.charset.StandardCharsets; +import java.util.Collections; +import java.util.Map; + +import javax.servlet.ReadListener; +import javax.servlet.ServletInputStream; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.mockito.Mockito; + +/** + * The Class HttpServletHelper. + */ +public class HttpServletHelper { + + public static HttpServletRequest getMockHttpServletRequest() { + return Mockito.mock(HttpServletRequest.class); + } + + /** + * Sets the request payload. 
+ * + * @param request the request + * @param mimeType the mime type + * @param payloadContent the payload content + */ + public static void setRequestPayload(HttpServletRequest request, String mimeType, + String payloadContent) { + + try { + Mockito.when(request.getContentType()).thenReturn(mimeType); + + + final ByteArrayInputStream bais = + new ByteArrayInputStream(payloadContent.getBytes(StandardCharsets.UTF_8)); + + ServletInputStream servletInputStream = new ServletInputStream() { + + @Override + public int read() throws IOException { + return bais.read(); + } + + @Override + public boolean isFinished() { + return true; + } + + @Override + public boolean isReady() { + return true; + } + + @Override + public void setReadListener(ReadListener readListener) { + // TODO Auto-generated method stub + + } + }; + + Mockito.when(request.getInputStream()).thenReturn(servletInputStream); + + Mockito.when(request.getReader()).thenReturn(new BufferedReader(new StringReader(payloadContent))); + + } catch (IOException ioe) { + fail(ExceptionHelper.extractStackTraceElements(5, ioe)); + } + + } + + /** + * Gets the mock http servlet response. + * + * @param printWriter the print writer + * @return the mock http servlet response + */ + public static HttpServletResponse getMockHttpServletResponse(PrintWriter printWriter) { + HttpServletResponse commonResponse = Mockito.mock(HttpServletResponse.class); + + /* + * Use the StringWriter wrapped in a PrintWriter to redirect output stream to an in-memory + * buffer instead of an on-disk file. + */ + + try { + Mockito.when(commonResponse.getWriter()).thenReturn(printWriter); + } catch (IOException ioe) { + fail(ExceptionHelper.extractStackTraceElements(5, ioe)); + } + + return commonResponse; + } + + /** + * Assign request uri. + * + * @param req the req + * @param requestUri the request uri + */ + public static void assignRequestUri(HttpServletRequest req, String requestUri) { + Mockito.when(req.getRequestURI()).thenReturn(requestUri); + } + + /** + * Assign request parameter name map. + * + * @param req the req + * @param paramNameValueMap the param name value map + */ + public static void assignRequestParameterNameMap(HttpServletRequest req, + Map<String, String> paramNameValueMap) { + if (paramNameValueMap != null) { + Mockito.when(req.getParameterNames()) + .thenReturn(Collections.enumeration(paramNameValueMap.keySet())); + + for (String key : paramNameValueMap.keySet()) { + Mockito.when(req.getParameter(key)).thenReturn(paramNameValueMap.get(key)); + } + + } + } + + public static void assignRequestHeader(HttpServletRequest req, String headerName, String headerValue) { + Mockito.when(req.getHeader(headerName)).thenReturn(headerValue); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/LogValidator.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/LogValidator.java new file mode 100644 index 0000000..a138846 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/LogValidator.java @@ -0,0 +1,85 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.util; + +import java.util.List; + +import org.slf4j.LoggerFactory; + +import ch.qos.logback.classic.Level; +import ch.qos.logback.classic.spi.LoggingEvent; + +/** + * The Class LogValidator. + */ +public class LogValidator { + + protected CaptureLoggerAppender logger = null; + + /** + * Initialize logger. + * + * @param level the level + */ + @SuppressWarnings("unchecked") + public void initializeLogger(Level level) { + ch.qos.logback.classic.Logger root = (ch.qos.logback.classic.Logger) LoggerFactory + .getLogger(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME); + root.detachAndStopAllAppenders(); + logger = new CaptureLoggerAppender(); + root.setLevel(level); + root.addAppender(logger); + } + + public CaptureLoggerAppender getLogger() { + return logger; + } + + /** + * Dump and count logs. + * + * @param logToConsole the log to console + * @return the int + */ + public int dumpAndCountLogs(boolean logToConsole) { + + List<LoggingEvent> logs = logger.drainAllLogs(); + + if (logs == null) { + return 0; + } + + if (logToConsole) { + for (LoggingEvent e : logs) { + System.out.println(e); + } + } + + return logs.size(); + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/OxmModelAndProcessorHelper.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/OxmModelAndProcessorHelper.java new file mode 100644 index 0000000..cebb494 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/OxmModelAndProcessorHelper.java @@ -0,0 +1,143 @@ +package org.onap.aai.sparky.util; + +import java.io.IOException; +import java.util.HashSet; +import java.util.Set; + +import org.onap.aai.sparky.config.SparkyResourceLoader; +import org.onap.aai.sparky.config.oxm.CrossEntityReferenceLookup; +import org.onap.aai.sparky.config.oxm.GeoEntityLookup; +import org.onap.aai.sparky.config.oxm.OxmEntityContainerLookup; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.sparky.config.oxm.OxmModelProcessor; +import org.onap.aai.sparky.config.oxm.SearchableEntityLookup; +import org.onap.aai.sparky.config.oxm.SuggestionEntityLookup; +import org.onap.aai.sparky.search.filters.config.FiltersConfig; +import org.springframework.core.io.DefaultResourceLoader; + +public class OxmModelAndProcessorHelper { + + public static int API_VERSION_OVERRIDE = -1; + + private OxmModelLoader modelLoader; + private Set<OxmModelProcessor> processors; + + private CrossEntityReferenceLookup crossEntityReferenceLookup; + private GeoEntityLookup geoEntityLookup; + private OxmEntityLookup oxmEntityLookup; + private SearchableEntityLookup searchableEntityLookup; + 
private SuggestionEntityLookup suggestionEntityLookup; + private OxmEntityContainerLookup oxmEntityContainerLookup; + private FiltersConfig filtersConfig; + + private static OxmModelAndProcessorHelper instance = null; + + private OxmModelAndProcessorHelper() throws IOException { + + SparkyResourceLoader resourceLoader = new SparkyResourceLoader(); + resourceLoader.setResourceLoader(new DefaultResourceLoader()); + + this.filtersConfig = new FiltersConfig(); + this.filtersConfig.initializeFiltersDetailsConfig(resourceLoader.getResourceAsFile(SparkyTestConstants.FILTERS_JSON_FILE, false)); + this.filtersConfig.initializeFiltersForViewsConfig(resourceLoader.getResourceAsFile(SparkyTestConstants.VIEWS_JSON_FILE, false)); + + this.crossEntityReferenceLookup = new CrossEntityReferenceLookup(); + this.geoEntityLookup = new GeoEntityLookup(); + this.oxmEntityLookup = new OxmEntityLookup(); + this.searchableEntityLookup = new SearchableEntityLookup(); + this.suggestionEntityLookup = new SuggestionEntityLookup(filtersConfig); + this.oxmEntityContainerLookup = new OxmEntityContainerLookup(); + + this.processors = new HashSet<OxmModelProcessor>(); + processors.add(crossEntityReferenceLookup); + processors.add(geoEntityLookup); + processors.add(oxmEntityLookup); + processors.add(searchableEntityLookup); + processors.add(suggestionEntityLookup); + processors.add(oxmEntityContainerLookup); + + this.modelLoader = new OxmModelLoader(API_VERSION_OVERRIDE, processors); + modelLoader.loadLatestOxmModel(); + } + + public static OxmModelAndProcessorHelper getInstance() throws IOException { + if (instance == null) { + instance = new OxmModelAndProcessorHelper(); + } + return instance; + } + + public OxmModelLoader getModelLoader() { + return modelLoader; + } + + public void setModelLoader(OxmModelLoader modelLoader) { + this.modelLoader = modelLoader; + } + + public Set<OxmModelProcessor> getProcessors() { + return processors; + } + + public void setProcessors(Set<OxmModelProcessor> processors) { + this.processors = processors; + } + + public CrossEntityReferenceLookup getCrossEntityReferenceLookup() { + return crossEntityReferenceLookup; + } + + public void setCrossEntityReferenceLookup(CrossEntityReferenceLookup crossEntityReferenceLookup) { + this.crossEntityReferenceLookup = crossEntityReferenceLookup; + } + + public GeoEntityLookup getGeoEntityLookup() { + return geoEntityLookup; + } + + public void setGeoEntityLookup(GeoEntityLookup geoEntityLookup) { + this.geoEntityLookup = geoEntityLookup; + } + + public OxmEntityLookup getOxmEntityLookup() { + return oxmEntityLookup; + } + + public void setOxmEntityLookup(OxmEntityLookup oxmEntityLookup) { + this.oxmEntityLookup = oxmEntityLookup; + } + + public SearchableEntityLookup getSearchableEntityLookup() { + return searchableEntityLookup; + } + + public void setSearchableEntityLookup(SearchableEntityLookup searchableEntityLookup) { + this.searchableEntityLookup = searchableEntityLookup; + } + + public SuggestionEntityLookup getSuggestionEntityLookup() { + return suggestionEntityLookup; + } + + public void setSuggestionEntityLookup(SuggestionEntityLookup suggestionEntityLookup) { + this.suggestionEntityLookup = suggestionEntityLookup; + } + + public FiltersConfig getFiltersConfig() { + return filtersConfig; + } + + public void setFiltersConfig(FiltersConfig filtersConfig) { + this.filtersConfig = filtersConfig; + } + + public OxmEntityContainerLookup getOxmEntityContainerLookup() { + return oxmEntityContainerLookup; + } + + public void 
setOxmEntityContainerLookup(OxmEntityContainerLookup oxmEntityContainerLookup) { + this.oxmEntityContainerLookup = oxmEntityContainerLookup; + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/SparkyTestConstants.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/SparkyTestConstants.java new file mode 100644 index 0000000..ffb27fa --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/SparkyTestConstants.java @@ -0,0 +1,19 @@ +package org.onap.aai.sparky.util; + +import java.nio.file.Path; +import java.nio.file.Paths; + +public class SparkyTestConstants { + + /** Default to unix file separator if system property file.separator is null */ + public static final String FILESEP = + (System.getProperty("file.separator") == null) ? "/" : System.getProperty("file.separator"); + + private static Path currentRelativePath = Paths.get(""); + public static final String PATH_TO_TEST_RESOURCES = currentRelativePath.toAbsolutePath().toString() + + FILESEP + "src" + FILESEP + "test" + FILESEP + "resources"; + + public static final String PATH_TO_FILTERS_CONFIG = PATH_TO_TEST_RESOURCES + FILESEP + "filters"; + public static final String FILTERS_JSON_FILE = PATH_TO_FILTERS_CONFIG + FILESEP + "aaiui_filters.json"; + public static final String VIEWS_JSON_FILE = PATH_TO_FILTERS_CONFIG + FILESEP + "aaiui_views.json"; +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/StringCollectionContainsMatcher.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/StringCollectionContainsMatcher.java new file mode 100644 index 0000000..e8b3258 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/StringCollectionContainsMatcher.java @@ -0,0 +1,39 @@ +package org.onap.aai.sparky.util; + +import java.util.List; + +import org.hamcrest.BaseMatcher; +import org.hamcrest.Description; + +public class StringCollectionContainsMatcher extends BaseMatcher<List<String>> { + + private String valueToCheck; + + @SuppressWarnings({"unused", "unchecked"}) + public StringCollectionContainsMatcher(String valToCheck) { + this.valueToCheck = valToCheck; + } + + @Override + public boolean matches(Object arg0) { + + @SuppressWarnings("unchecked") + List<String> argumentList = (List<String>) arg0; + + for ( String listItem : argumentList ) { + + if ( listItem.contains(valueToCheck)) { + return true; + } + } + + return false; + } + + @Override + public void describeTo(Description arg0) { + // TODO Auto-generated method stub + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/SuggestionsPermutationTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/SuggestionsPermutationTest.java new file mode 100644 index 0000000..6aa23b7 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/SuggestionsPermutationTest.java @@ -0,0 +1,130 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.util; + +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; + +import org.junit.Test; +import org.onap.aai.sparky.util.SuggestionsPermutation; + +public class SuggestionsPermutationTest { + + @Test + public void testGetUniqueListForSuggestions() { + List<String> inputList = new ArrayList<String>(); + inputList.add("str1"); + inputList.add("str2"); + inputList.add("str3"); + + List<List<String>> expectedListOfLists = new ArrayList<List<String>>(); + expectedListOfLists.add((new ArrayList<String>(){{add("str1");}})); + expectedListOfLists.add((new ArrayList<String>(){{add("str2");}})); + expectedListOfLists.add((new ArrayList<String>(){{add("str3");}})); + expectedListOfLists.add((new ArrayList<String>(){{add("str1");add("str2");}})); + expectedListOfLists.add((new ArrayList<String>(){{add("str1");add("str3");}})); + expectedListOfLists.add((new ArrayList<String>(){{add("str2");add("str3");}})); + expectedListOfLists.add((new ArrayList<String>(){{add("str1");add("str2");add("str3");}})); + + int expectedCount = expectedListOfLists.size(); + int actualCount = 0; + ArrayList<ArrayList<String>> actualListOfLists = SuggestionsPermutation.getNonEmptyUniqueLists(inputList); + + for (List<String> list: expectedListOfLists){ + for (ArrayList<String> actualList: actualListOfLists) { + if (new HashSet(list).equals (new HashSet(actualList)) ){ + actualCount++; + } + } + } + + assertTrue("Missing entries in the unique list of lists for input: " + inputList.toString() + + ". 
Found: "+ actualListOfLists.toString() + + " expected: " + expectedListOfLists.toString(), actualCount == expectedCount); + } + + @Test + public void testGetListPermutations() { + List<String> inputList = new ArrayList<String>(); + inputList.add("str1"); + inputList.add("str2"); + inputList.add("str3"); + + List<List<String>> expectedPermutations = new ArrayList<List<String>>(); + expectedPermutations.add((new ArrayList<String>(){{add("str1");add("str2");add("str3");}})); + expectedPermutations.add((new ArrayList<String>(){{add("str2");add("str1");add("str3");}})); + expectedPermutations.add((new ArrayList<String>(){{add("str2");add("str3");add("str1");}})); + expectedPermutations.add((new ArrayList<String>(){{add("str1");add("str3");add("str2");}})); + expectedPermutations.add((new ArrayList<String>(){{add("str3");add("str1");add("str2");}})); + expectedPermutations.add((new ArrayList<String>(){{add("str3");add("str2");add("str1");}})); + + int expectedCount = expectedPermutations.size(); + int actualCount = 0; + List<List<String>> actualPermutations = SuggestionsPermutation.getListPermutations(inputList); + + for (List<String> list: expectedPermutations){ + for (List<String> actualList: actualPermutations) { + if (list.toString().equals(actualList.toString()) ){ + actualCount++; + } + } + } + + assertTrue("Missing entries in the permutation of list: " + + inputList.toString() + ". Found: "+ actualPermutations.toString() + + " expected: " + expectedPermutations.toString(), actualCount == expectedCount); + } + + @Test + public void isValidSuggestionInputPermutation_verbose_successPath() { + + List<String> x = new ArrayList<>(Arrays.asList("A", "B", "C", "D")); + + ArrayList<ArrayList<String>> uniqueLists = SuggestionsPermutation.getNonEmptyUniqueLists(x); + + assertTrue(uniqueLists.get(0).toString().equals("[A, B, C, D]")); + assertTrue(uniqueLists.get(1).toString().equals("[B, C, D]")); + assertTrue(uniqueLists.get(2).toString().equals("[A, C, D]")); + assertTrue(uniqueLists.get(3).toString().equals("[C, D]")); + assertTrue(uniqueLists.get(4).toString().equals("[A, B, D]")); + assertTrue(uniqueLists.get(5).toString().equals("[B, D]")); + assertTrue(uniqueLists.get(6).toString().equals("[A, D]")); + assertTrue(uniqueLists.get(7).toString().equals("[D]")); + assertTrue(uniqueLists.get(8).toString().equals("[A, B, C]")); + assertTrue(uniqueLists.get(9).toString().equals("[B, C]")); + assertTrue(uniqueLists.get(10).toString().equals("[A, C]")); + assertTrue(uniqueLists.get(11).toString().equals("[C]")); + assertTrue(uniqueLists.get(12).toString().equals("[A, B]")); + assertTrue(uniqueLists.get(13).toString().equals("[B]")); + assertTrue(uniqueLists.get(14).toString().equals("[A]")); + assertTrue(uniqueLists.size() == 15); + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/TestResourceLoader.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/TestResourceLoader.java new file mode 100644 index 0000000..7b1b198 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/TestResourceLoader.java @@ -0,0 +1,31 @@ +package org.onap.aai.sparky.util; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.net.URL; +import java.nio.file.Files; + +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.databind.JsonMappingException; + +public class TestResourceLoader { + + public static String getTestResourceDataJson(String resourcePath) + throws 
JsonParseException, JsonMappingException, IOException { + + // will look for resource using "src/test/resources" as the base folder + URL url = TestResourceLoader.class.getResource(resourcePath); + File file = new File(url.getFile()); + + byte[] payload = Files.readAllBytes(file.toPath()); + + if (payload.length == 0) { + throw new FileNotFoundException("Could not load '" + resourcePath + "' test data"); + } + + return new String(payload); + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/TreeWalkerTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/TreeWalkerTest.java new file mode 100644 index 0000000..f6947cf --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/util/TreeWalkerTest.java @@ -0,0 +1,563 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.util; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.util.TreeWalker; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * The Class TreeWalkerTest. + */ +public class TreeWalkerTest { + + + /** + * Inits the. + * + * @throws Exception the exception + */ + @Before + public void init() throws Exception { + } + + /** + * Validate json node conversion null input. + */ + @Test + public void validateJsonNodeConversionNullInput() { + + TreeWalker walker = new TreeWalker(); + + try { + JsonNode convertedNode = walker.convertJsonToNode(null); + assertNull("Converted node should have been null", convertedNode); + + } catch (JsonProcessingException exc) { + // expected + } catch (IOException exc) { + // expected + } + + } + + /** + * Validate json node conversion empty non json input. 
+ */ + @Test + public void validateJsonNodeConversionEmptyNonJsonInput() { + + TreeWalker walker = new TreeWalker(); + + try { + JsonNode convertedNode = walker.convertJsonToNode(""); + assertNull("Converted node should have been null", convertedNode); + + } catch (JsonProcessingException exc) { + // expected + } catch (IOException exc) { + // expected + } + + } + + /** + * Validate json node conversion empty json input. + */ + @Test + public void validateJsonNodeConversionEmptyJsonInput() { + + TreeWalker walker = new TreeWalker(); + + try { + JsonNode convertedNode = walker.convertJsonToNode("{}"); + assertNotNull("Converted node should not be null", convertedNode); + + ObjectMapper objectMapper = new ObjectMapper(); + String convertedNodeAsStr = objectMapper.writeValueAsString(convertedNode); + + assertEquals("{}", convertedNodeAsStr); + + } catch (JsonProcessingException exc) { + // expected + } catch (IOException exc) { + // expected + } + + } + + /** + * Validate walk tree null input. + */ + @Test + public void validateWalkTreeNullInput() { + + TreeWalker walker = new TreeWalker(); + + List<String> paths = new ArrayList<String>(); + walker.walkTree(paths, null); + assertEquals(0, paths.size()); + + } + + /** + * Validate walk tree empty node. + */ + @Test + public void validateWalkTreeEmptyNode() { + + try { + TreeWalker walker = new TreeWalker(); + List<String> paths = new ArrayList<String>(); + walker.walkTree(paths, walker.convertJsonToNode("{}")); + assertEquals(0, paths.size()); + } catch (JsonProcessingException exc) { + // expected + } catch (IOException exc) { + // expected + } + + } + + /** + * Validate walk tree one parent node. + */ + @Test + public void validateWalkTreeOneParentNode() { + + try { + TreeWalker walker = new TreeWalker(); + List<String> paths = new ArrayList<String>(); + walker.walkTree(paths, walker.convertJsonToNode("{ \"root\" : { } }")); + assertEquals(1, paths.size()); + } catch (JsonProcessingException exc) { + // expected + } catch (IOException exc) { + // expected + } + + } + + /** + * Validate walk tree one parent node with object array. + */ + @Test + public void validateWalkTreeOneParentNodeWithObjectArray() { + + try { + String jsonStr = + "{\"Employee\":[{\"id\":\"101\",\"name\":\"Pushkar\",\"salary\":\"5000\"}," + + "{\"id\":\"102\",\"name\":\"Rahul\",\"salary\":\"4000\"}," + + "{\"id\":\"103\",\"name\":\"tanveer\",\"salary\":\"56678\"}]}"; + TreeWalker walker = new TreeWalker(); + List<String> paths = new ArrayList<String>(); + walker.walkTree(paths, walker.convertJsonToNode(jsonStr)); + assertEquals(9, paths.size()); + } catch (JsonProcessingException exc) { + // expected + } catch (IOException exc) { + // expected + } + + } + + /** + * Validate walk tree one parent node with value array. + */ + @Test + public void validateWalkTreeOneParentNodeWithValueArray() { + + try { + String jsonStr = "{ \"colors\" : [ \"yellow\", \"blue\", \"red\" ] }"; + TreeWalker walker = new TreeWalker(); + List<String> paths = new ArrayList<String>(); + walker.walkTree(paths, walker.convertJsonToNode(jsonStr)); + + assertEquals(3, paths.size()); + } catch (JsonProcessingException exc) { + // expected + } catch (IOException exc) { + // expected + } + + } + + /** + * Test walk for complex entity type aai entity node descriptors. 
+ */ + @Test + public void testWalkForComplexEntityType_AaiEntityNodeDescriptors() { + + try { + String jsonStr = + "{ \"generalNodeClass\": { \"class\": \"aai-entity-node general-node\"," + + " \"visualElements\": [ { \"type\": \"circle\"," + + " \"class\": \"outer\", \"svgAttributes\": {" + + " \"r\": \"20\" } }, {" + + " \"type\": \"circle\", \"class\": \"inner\", " + + " \"svgAttributes\": { \"r\": \"10\" " + + "} }, { \"type\": \"text\", " + + "\"class\": \"id-type-label\", \"displayKey\": \"itemType\", " + + " \"shapeAttributes\": { \"offset\": { " + + " \"x\": \"0\", \"y\": \"30\" } " + + " } }, { \"type\": \"text\", " + + " \"class\": \"id-value-label\", \"displayKey\":" + + " \"itemNameValue\", \"shapeAttributes\": { " + + " \"offset\": { \"x\": \"0\", " + + " \"y\": \"40\" } } } ] " + + " }, \"searchedNodeClass\": { \"class\": \"aai-entity-node search-node\"," + + " \"visualElements\": [ { \"type\": \"circle\"," + + " \"class\": \"outer\", \"svgAttributes\": { " + + " \"r\": \"20\" } }, { " + + " \"type\": \"circle\", \"class\": \"inner\", " + + " \"svgAttributes\": { \"r\": \"10\" }" + + " }, { \"type\": \"text\", " + + "\"class\": \"id-type-label\", \"displayKey\": \"itemType\", " + + " \"shapeAttributes\": { \"offset\": { " + + " \"x\": \"0\", \"y\": \"30\" }" + + " } }, { \"type\": \"text\", " + + " \"class\": \"id-value-label\", " + + "\"displayKey\": \"itemNameValue\", \"shapeAttributes\": {" + + " \"offset\": { \"x\": \"0\"," + + " \"y\": \"40\" } }" + + " } ] }, \"selectedSearchedNodeClass\": { " + + "\"class\": \"aai-entity-node selected-search-node\", \"visualElements\": [" + + " { \"type\": \"circle\", " + + "\"class\": \"outer\", \"svgAttributes\": {" + + " \"r\": \"20\" } }, {" + + " \"type\": \"circle\", \"class\": \"inner\"," + + " \"svgAttributes\": { \"r\": \"10\" " + + " } }, { \"type\": \"text\", " + + " \"class\": \"id-type-label\", \"displayKey\": \"itemType\"," + + " \"shapeAttributes\": { \"offset\": {" + + " \"x\": \"0\", \"y\": \"30\"" + + " } } }, { " + + " \"type\": \"text\", \"class\": \"id-value-label\", " + + " \"displayKey\": \"itemNameValue\", \"shapeAttributes\": {" + + " \"offset\": { \"x\": \"0\", " + + " \"y\": \"40\" } } } ]" + + " }, \"selectedNodeClass\": { \"class\":" + + " \"aai-entity-node selected-node\"," + + " \"visualElements\": [ { \"type\": \"circle\"," + + " \"class\": \"outer\", \"svgAttributes\": {" + + " \"r\": \"20\" } }, {" + + " \"type\": \"circle\", \"class\": \"inner\"," + + " \"svgAttributes\": { \"r\": \"10\" " + + " } }, { \"type\": \"text\", " + + " \"class\": \"id-type-label\", \"displayKey\": \"itemType\"," + + " \"shapeAttributes\": { \"offset\": " + + "{ " + + " \"x\": \"0\", \"y\": \"30\" } " + + " } }, { \"type\": \"text\"," + + " \"class\": \"id-value-label\", \"displayKey\":" + + " \"itemNameValue\", \"shapeAttributes\": { " + + "\"offset\": { \"x\": \"0\", " + + "\"y\": \"40\" } } } ] }}"; + TreeWalker walker = new TreeWalker(); + List<String> paths = new ArrayList<String>(); + walker.walkTree(paths, walker.convertJsonToNode(jsonStr)); + + assertEquals(68, paths.size()); + + /* + * Example of expected value + * + * generalNodeClass.class=aai-entity-node general-node + * generalNodeClass.visualElements.type=circle generalNodeClass.visualElements.class=outer + * generalNodeClass.visualElements.svgAttributes.r=20 + * generalNodeClass.visualElements.type=circle generalNodeClass.visualElements.class=inner + * generalNodeClass.visualElements.svgAttributes.r=10 + * generalNodeClass.visualElements.type=text + * 
generalNodeClass.visualElements.class=id-type-label + * generalNodeClass.visualElements.displayKey=itemType + * generalNodeClass.visualElements.shapeAttributes.offset.x=0 + * generalNodeClass.visualElements.shapeAttributes.offset.y=30 + * generalNodeClass.visualElements.type=text + * generalNodeClass.visualElements.class=id-value-label + * generalNodeClass.visualElements.displayKey=itemNameValue + * generalNodeClass.visualElements.shapeAttributes.offset.x=0 + * generalNodeClass.visualElements.shapeAttributes.offset.y=40 + * searchedNodeClass.class=aai-entity-node search-node + * searchedNodeClass.visualElements.type=circle searchedNodeClass.visualElements.class=outer + * searchedNodeClass.visualElements.svgAttributes.r=20 + * searchedNodeClass.visualElements.type=circle searchedNodeClass.visualElements.class=inner + * searchedNodeClass.visualElements.svgAttributes.r=10 + * searchedNodeClass.visualElements.type=text + * searchedNodeClass.visualElements.class=id-type-label + * searchedNodeClass.visualElements.displayKey=itemType + * searchedNodeClass.visualElements.shapeAttributes.offset.x=0 + * searchedNodeClass.visualElements.shapeAttributes.offset.y=30 + * searchedNodeClass.visualElements.type=text + * searchedNodeClass.visualElements.class=id-value-label + * searchedNodeClass.visualElements.displayKey=itemNameValue + * searchedNodeClass.visualElements.shapeAttributes.offset.x=0 + * searchedNodeClass.visualElements.shapeAttributes.offset.y=40 + * selectedSearchedNodeClass.class=aai-entity-node selected-search-node + * selectedSearchedNodeClass.visualElements.type=circle + * selectedSearchedNodeClass.visualElements.class=outer + * selectedSearchedNodeClass.visualElements.svgAttributes.r=20 + * selectedSearchedNodeClass.visualElements.type=circle + * selectedSearchedNodeClass.visualElements.class=inner + * selectedSearchedNodeClass.visualElements.svgAttributes.r=10 + * selectedSearchedNodeClass.visualElements.type=text + * selectedSearchedNodeClass.visualElements.class=id-type-label + * selectedSearchedNodeClass.visualElements.displayKey=itemType + * selectedSearchedNodeClass.visualElements.shapeAttributes.offset.x=0 + * selectedSearchedNodeClass.visualElements.shapeAttributes.offset.y=30 + * selectedSearchedNodeClass.visualElements.type=text + * selectedSearchedNodeClass.visualElements.class=id-value-label + * selectedSearchedNodeClass.visualElements.displayKey=itemNameValue + * selectedSearchedNodeClass.visualElements.shapeAttributes.offset.x=0 + * selectedSearchedNodeClass.visualElements.shapeAttributes.offset.y=40 + * selectedNodeClass.class=aai-entity-node selected-node + * selectedNodeClass.visualElements.type=circle selectedNodeClass.visualElements.class=outer + * selectedNodeClass.visualElements.svgAttributes.r=20 + * selectedNodeClass.visualElements.type=circle selectedNodeClass.visualElements.class=inner + * selectedNodeClass.visualElements.svgAttributes.r=10 + * selectedNodeClass.visualElements.type=text + * selectedNodeClass.visualElements.class=id-type-label + * selectedNodeClass.visualElements.displayKey=itemType + * selectedNodeClass.visualElements.shapeAttributes.offset.x=0 + * selectedNodeClass.visualElements.shapeAttributes.offset.y=30 + * selectedNodeClass.visualElements.type=text + * selectedNodeClass.visualElements.class=id-value-label + * selectedNodeClass.visualElements.displayKey=itemNameValue + * selectedNodeClass.visualElements.shapeAttributes.offset.x=0 + * selectedNodeClass.visualElements.shapeAttributes.offset.y=40 + */ + + } catch (JsonProcessingException exc) 
{ + // expected + } catch (IOException exc) { + // expected + } + + } + + /** + * Test complex object inversion equality. + */ + @Test + public void testComplexObjectInversionEquality() { + + /** + * Dave Adams (1-Nov-2016): + * + * Ok.. I agree...weird title of the test-case. This test is focused on the isEqual equality + * test within the NodeUtils helper class which compares the sorted structural paths of two Json + * Object representations. I attempted to normalize unordered structures to produce an equality + * result, as there doesn't seem to be any natural equality test between two JsonNode objects + * that I could find to date. + * + * Basically, this test is confirming that if the same object values are present in different + * orders, they are effectively the same Json Object representation, and pass, at least my + * structural value equality test. + * + * I reordered the aaiEntityNodeDescriptors top level class types, and change the order of the + * x,y coordinates to be y,x. Same values different order. Once again, the expectation is that + * both representations are objectively equal, they just have different json representations. + */ + + try { + String n1Str = + "{ \"generalNodeClass\": { \"class\": \"aai-entity-node general-node\"," + + " \"visualElements\": [ { \"type\": \"circle\"," + + " \"class\": \"outer\", \"svgAttributes\": {" + + " \"r\": \"20\" } }, {" + + " \"type\": \"circle\", \"class\": \"inner\"," + + " \"svgAttributes\": { \"r\": \"10\"" + + " } }, { \"type\": \"text\"," + + " \"class\": \"id-type-label\", \"displayKey\":" + + " \"itemType\", \"shapeAttributes\": { \"offset\":" + + " { \"x\": \"0\", \"y\": \"30\"" + + " } } }, {" + + " \"type\": \"text\", \"class\": \"id-value-label\"," + + " \"displayKey\": \"itemNameValue\"," + + " \"shapeAttributes\": { \"offset\":" + + " { \"x\": \"0\", \"y\": \"40\"" + + " } } } ] }," + + " \"searchedNodeClass\": { \"class\": \"aai-entity-node search-node\"," + + " \"visualElements\": [ { \"type\": \"circle\"," + + " \"class\": \"outer\", \"svgAttributes\": {" + + " \"r\": \"20\" } }, {" + + " \"type\": \"circle\", \"class\": \"inner\"," + + " \"svgAttributes\": { \"r\": \"10\"" + + " } }, { \"type\": \"text\"," + + " \"class\": \"id-type-label\", \"displayKey\":" + + " \"itemType\", \"shapeAttributes\": { \"offset\": {" + + " \"x\": \"0\", \"y\": \"30\"" + + " } } }, {" + + " \"type\": \"text\", \"class\": \"id-value-label\"," + + " \"displayKey\": \"itemNameValue\"," + + " \"shapeAttributes\": { \"offset\": {" + + " \"x\": \"0\", \"y\": \"40\"" + + " } } } ] }," + + " \"selectedSearchedNodeClass\": { \"class\":" + + " \"aai-entity-node selected-search-node\", \"visualElements\": [" + + " { \"type\": \"circle\", \"class\":" + + " \"outer\", \"svgAttributes\": { \"r\": \"20\"" + + " } }, { \"type\": \"circle\"," + + " \"class\": \"inner\", \"svgAttributes\": {" + + " \"r\": \"10\" } }, {" + + " \"type\": \"text\", \"class\": \"id-type-label\"," + + " \"displayKey\": \"itemType\", \"shapeAttributes\": {" + + " \"offset\": { \"x\": \"0\"," + + " \"y\": \"30\" } }" + + " }, { \"type\": \"text\"," + + " \"class\": \"id-value-label\"," + + " \"displayKey\": \"itemNameValue\"," + + " \"shapeAttributes\": { \"offset\": {" + + " \"x\": \"0\", \"y\": \"40\"" + + " } } } ] }," + + " \"selectedNodeClass\": { \"class\": \"aai-entity-node selected-node\"," + + " \"visualElements\": [ { \"type\": \"circle\"," + + " \"class\": \"outer\", \"svgAttributes\": {" + + " \"r\": \"20\" } }, {" + + " \"type\": \"circle\", \"class\": 
\"inner\"," + + " \"svgAttributes\": { \"r\": \"10\"" + + " } }, { \"type\": \"text\"," + + " \"class\": \"id-type-label\", \"displayKey\":" + + " \"itemType\", \"shapeAttributes\": {" + + " \"offset\": { \"x\": \"0\"," + + " \"y\": \"30\" }" + + " } }, { \"type\": \"text\"," + + " \"class\": \"id-value-label\", \"displayKey\":" + + " \"itemNameValue\", \"shapeAttributes\": {" + + " \"offset\": { \"x\": \"0\"," + + " \"y\": \"40\" } }" + + " } ] }}"; + String n2Str = + "{ \"searchedNodeClass\": { \"class\": \"aai-entity-node search-node\"," + + " \"visualElements\": [ { \"type\": \"circle\"," + + " \"class\": \"outer\", \"svgAttributes\": {" + + " \"r\": \"20\" } }," + + " { \"type\": \"circle\"," + + " \"class\": \"inner\", \"svgAttributes\": {" + + " \"r\": \"10\" } }, {" + + " \"type\": \"text\", \"class\": \"id-type-label\"," + + " \"displayKey\": \"itemType\", \"shapeAttributes\": {" + + " \"offset\": { \"y\": \"30\"," + + " \"x\": \"0\" } }" + + " }, { \"type\": \"text\"," + + " \"class\": \"id-value-label\"," + + " \"displayKey\": \"itemNameValue\"," + + " \"shapeAttributes\": { \"offset\": {" + + " \"y\": \"40\", \"x\": \"0\"" + + " } } } ] }," + + " \"selectedSearchedNodeClass\": { \"class\":" + + " \"aai-entity-node selected-search-node\", \"visualElements\": [" + + " { \"type\": \"circle\", \"class\":" + + " \"outer\", \"svgAttributes\": { \"r\": \"20\"" + + " } }, { \"type\": \"circle\"," + + " \"class\": \"inner\", \"svgAttributes\": {" + + " \"r\": \"10\" } }, {" + + " \"type\": \"text\", \"class\": \"id-type-label\"," + + " \"displayKey\": \"itemType\", \"shapeAttributes\": {" + + " \"offset\": { \"y\": \"30\"," + + " \"x\": \"0\" } }" + + " }, { \"type\": \"text\"," + + " \"class\": \"id-value-label\"," + + " \"displayKey\": \"itemNameValue\"," + + " \"shapeAttributes\": { \"offset\": {" + + " \"y\": \"40\", \"x\": \"0\"" + + " } } } ] }," + + " \"selectedNodeClass\": { \"class\": \"aai-entity-node selected-node\"," + + " \"visualElements\": [ { \"type\": \"circle\"," + + " \"class\": \"outer\", \"svgAttributes\": {" + + " \"r\": \"20\" } }, {" + + " \"type\": \"circle\", \"class\": \"inner\"," + + " \"svgAttributes\": { \"r\": \"10\"" + + " } }, { \"type\": \"text\"," + + " \"class\": \"id-type-label\"," + + " \"displayKey\": \"itemType\", \"shapeAttributes\": {" + + " \"offset\": { \"y\": \"30\"," + + " \"x\": \"0\" } }" + + " }, { \"type\": \"text\"," + + " \"class\": \"id-value-label\"," + + " \"displayKey\": \"itemNameValue\"," + + " \"shapeAttributes\": { \"offset\": {" + + " \"y\": \"40\", \"x\": \"0\"" + + " } } } ] }," + + " \"generalNodeClass\": { \"class\":" + + " \"aai-entity-node general-node\", \"visualElements\": [" + + " { \"type\": \"circle\"," + + " \"class\": \"outer\", \"svgAttributes\": {" + + " \"r\": \"20\" } }," + + " { \"type\": \"circle\"," + + " \"class\": \"inner\", \"svgAttributes\": {" + + " \"r\": \"10\" } }," + + " { \"type\": \"text\"," + + " \"class\": \"id-type-label\", \"displayKey\":" + + " \"itemType\", \"shapeAttributes\": {" + + " \"offset\": { \"y\": \"30\"," + + " \"x\": \"0\" }" + + " } }, {" + + " \"type\": \"text\"," + + " \"class\": \"id-value-label\", \"displayKey\":" + + " \"itemNameValue\", \"shapeAttributes\": {" + + " \"offset\": { \"y\": \"40\"," + + " \"x\": \"0\" }" + + " } } ] }}"; + + TreeWalker walker = new TreeWalker(); + List<String> n1Paths = new ArrayList<String>(); + List<String> n2Paths = new ArrayList<String>(); + + JsonNode n1 = walker.convertJsonToNode(n1Str); + JsonNode n2 = walker.convertJsonToNode(n2Str); 
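+ // Both orderings flatten to the same 68 structural paths, so NodeUtils.isEqual, which compares the sorted structural paths of the two JSON documents, should treat them as equivalent despite the different member order.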
+ walker.walkTree(n1Paths, n1); + walker.walkTree(n2Paths, n2); + + assertEquals(68, n1Paths.size()); + assertEquals(68, n2Paths.size()); + + assertTrue(NodeUtils.isEqual(n1, n2)); + + } catch (JsonProcessingException exc) { + // expected + } catch (IOException exc) { + // expected + } + + } + + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/BaseVisualizationContextTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/BaseVisualizationContextTest.java new file mode 100644 index 0000000..5c027da --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/BaseVisualizationContextTest.java @@ -0,0 +1,5 @@ +package org.onap.aai.sparky.viewandinspect; + +public class BaseVisualizationContextTest { + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/BaseVisualizationServiceTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/BaseVisualizationServiceTest.java new file mode 100644 index 0000000..e51c629 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/BaseVisualizationServiceTest.java @@ -0,0 +1,75 @@ +package org.onap.aai.sparky.viewandinspect; + +import static org.junit.Assert.assertEquals; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.dal.GizmoAdapter; +import org.onap.aai.sparky.subscription.config.SubscriptionConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.util.OxmModelAndProcessorHelper; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; +import org.onap.aai.sparky.viewandinspect.entity.QueryRequest; +import org.onap.aai.sparky.viewandinspect.services.BaseVisualizationService; +import org.onap.aai.sparky.viewandinspect.util.SchemaVisualizationTestDataBuilder; + +public class BaseVisualizationServiceTest { + private ActiveInventoryAdapter mockAaiAdapter; + private ElasticSearchAdapter mockEsAdapter; + private VisualizationConfigs visualizationConfigs; + private SubscriptionConfig subConfig; + private ElasticSearchEndpointConfig endpointEConfig; + private ElasticSearchSchemaConfig schemaEConfig; + private OxmEntityLookup oxmEntityLookup; + private GizmoAdapter mockGizmoAdapter; + + private BaseVisualizationService baseVisService; + + @Before + public void init() throws Exception { + this.mockAaiAdapter = Mockito.mock(ActiveInventoryAdapter.class); + this.mockAaiAdapter = Mockito.mock(ActiveInventoryAdapter.class); + this.mockEsAdapter = Mockito.mock(ElasticSearchAdapter.class); + this.mockGizmoAdapter = Mockito.mock(GizmoAdapter.class); + this.visualizationConfigs = new VisualizationConfigs(); + this.subConfig = new SubscriptionConfig(); + this.endpointEConfig = new ElasticSearchEndpointConfig(); + this.schemaEConfig = new ElasticSearchSchemaConfig(); + this.oxmEntityLookup = OxmModelAndProcessorHelper.getInstance().getOxmEntityLookup(); + + OxmModelLoader modelLoader = OxmModelAndProcessorHelper.getInstance().getModelLoader(); + + this.baseVisService = new BaseVisualizationService(modelLoader, visualizationConfigs, + mockAaiAdapter, mockGizmoAdapter, mockEsAdapter, 
endpointEConfig, schemaEConfig, 1, + oxmEntityLookup, subConfig); + } + + @Test + public void testAnalyzeQueryRequestBody() { + QueryRequest validRequest = baseVisService.analyzeQueryRequestBody(SchemaVisualizationTestDataBuilder.getQueryRequest()); + assertEquals(SchemaVisualizationTestDataBuilder.ROOT_NODE_HASH_ID, validRequest.getHashId()); + + QueryRequest nullRequest = baseVisService.analyzeQueryRequestBody("This String should make the request return null eh!"); + assertEquals(null, nullRequest); + } + + @Test + public void testBuildVisualizationUsingGenericQuery() { + + initializeMocksForBuildVisualizationUsingGenericQueryTest(); + + QueryRequest rootNodeQuery = baseVisService.analyzeQueryRequestBody(SchemaVisualizationTestDataBuilder.getQueryRequest()); + + } + + private void initializeMocksForBuildVisualizationUsingGenericQueryTest() { + Mockito.when(mockAaiAdapter.queryActiveInventoryWithRetries(Mockito.anyString(), Mockito.anyString(), Mockito.anyInt())).thenReturn(null); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/SchemaVisualizationProcessorTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/SchemaVisualizationProcessorTest.java new file mode 100644 index 0000000..83e3ee0 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/SchemaVisualizationProcessorTest.java @@ -0,0 +1,78 @@ +package org.onap.aai.sparky.viewandinspect; + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; + +import javax.servlet.http.HttpServletRequest; + +import org.apache.camel.CamelContext; +import org.apache.camel.Exchange; +import org.apache.camel.Message; +import org.apache.camel.impl.DefaultCamelContext; +import org.apache.camel.impl.DefaultExchange; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; +import org.onap.aai.sparky.util.HttpServletHelper; +import org.onap.aai.sparky.viewandinspect.entity.QueryRequest; +import org.onap.aai.sparky.viewandinspect.services.VisualizationService; +import org.onap.aai.sparky.viewandinspect.util.SchemaVisualizationTestDataBuilder; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +public class SchemaVisualizationProcessorTest { + + SchemaVisualizationProcessor schemaVisProcessor; + VisualizationService mockVisualizationService; + + private Exchange exchange; + private CamelContext camelContext; + private Message mockRequestMessage; + + @Before + public void init() throws Exception { + schemaVisProcessor = new SchemaVisualizationProcessor(); + + mockVisualizationService = Mockito.mock(VisualizationService.class); + + camelContext = new DefaultCamelContext(); + exchange = new DefaultExchange(camelContext); + + mockRequestMessage = Mockito.mock(Message.class); + + exchange.setIn(mockRequestMessage); + + } + + @Test + public void testProcessVisualizationRequest() throws JsonParseException, JsonMappingException, IOException { + + String queryRequest = SchemaVisualizationTestDataBuilder.getQueryRequest(); + HttpServletRequest request = Mockito.mock(HttpServletRequest.class); + HttpServletHelper.setRequestPayload(request, "application/json", queryRequest); + + Mockito.when(request.getRequestURI()).thenReturn("fakeUri"); + Mockito.when(request.getLocalPort()).thenReturn(8001); + 
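// Hand the mocked servlet request and its raw JSON payload to the Camel in-message that the processor under test will read + 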
Mockito.when(mockRequestMessage.getBody(Mockito.eq(HttpServletRequest.class))).thenReturn(request); + Mockito.when(mockRequestMessage.getBody(Mockito.eq(String.class))).thenReturn(queryRequest); + + ObjectMapper nonEmptyMapper = new ObjectMapper(); + nonEmptyMapper.setSerializationInclusion(Include.NON_EMPTY); + QueryRequest queryBody = nonEmptyMapper.readValue(queryRequest, QueryRequest.class); + + Mockito.when(mockVisualizationService.analyzeQueryRequestBody(Mockito.anyString())).thenReturn(queryBody); + Mockito.when(mockVisualizationService.buildVisualizationUsingGenericQuery(Mockito.anyObject())).thenReturn(SchemaVisualizationTestDataBuilder.getSchemaVisResult()); + + schemaVisProcessor.setVisualizationService(mockVisualizationService); + schemaVisProcessor.processVisualizationRequest(exchange); + + assertEquals("{}", exchange.getOut().getBody(String.class)); + assertEquals(200, exchange.getOut().getHeader(Exchange.HTTP_RESPONSE_CODE)); + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/SearchAdapterTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/SearchAdapterTest.java new file mode 100644 index 0000000..5ddf07e --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/SearchAdapterTest.java @@ -0,0 +1,87 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.viewandinspect; +/* +import static org.mockito.Matchers.anyObject; +import static org.mockito.Matchers.anyString; +import static org.mockito.Matchers.same; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; + +import org.junit.Before; +import org.onap.aai.sparky.dal.rest.RestClientBuilder; + +import com.sun.jersey.api.client.Client; +import com.sun.jersey.api.client.ClientResponse; +import com.sun.jersey.api.client.WebResource; +import com.sun.jersey.api.client.WebResource.Builder; + + +/** + * The Class SearchAdapterTest. + * + * + *TODO -> rewrite the test as it no longer uses RestClientBuilder +public class SearchAdapterTest { + + private RestClientBuilder clientBuilderMock; + private Client mockClient; + private ClientResponse mockClientResponse; + private WebResource mockWebResource; + private Builder mockBuilder; + + + + /** + * Inits the. 
+ * + * @throws Exception the exception + * + @Before + public void init() throws Exception { + + /* + * common collaborator mocking setup + * + + clientBuilderMock = mock(RestClientBuilder.class); + mockClient = mock(Client.class); + mockClientResponse = mock(ClientResponse.class); + mockWebResource = mock(WebResource.class); + mockBuilder = mock(Builder.class); + + doReturn(mockClient).when(clientBuilderMock).getClient(); + doReturn(mockWebResource).when(mockClient).resource(anyString()); + doReturn(mockBuilder).when(mockWebResource).accept(anyString()); + doReturn(mockBuilder).when(mockBuilder).header(anyString(), anyObject()); + + doReturn(mockClientResponse).when(mockBuilder).get(same(ClientResponse.class)); + doReturn(mockClientResponse).when(mockBuilder).put(same(ClientResponse.class), anyObject()); + doReturn(mockClientResponse).when(mockBuilder).post(same(ClientResponse.class), anyObject()); + doReturn(mockClientResponse).when(mockBuilder).delete(same(ClientResponse.class)); + } + +}*/
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/SearchResponseTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/SearchResponseTest.java new file mode 100644 index 0000000..7975c13 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/SearchResponseTest.java @@ -0,0 +1,92 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.viewandinspect; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.runners.MockitoJUnitRunner; +import org.onap.aai.sparky.search.SearchResponse; +import org.onap.aai.sparky.viewandinspect.entity.EntityEntry; + +/** + * The Class SearchResponseTest. + */ +@RunWith(MockitoJUnitRunner.class) +public class SearchResponseTest { + + /** + * Inits the. + * + * @throws Exception the exception + */ + @Before + public void init() throws Exception {} + + /** + * Validate basic construction. 
+ */ + @Test + public void validateBasicConstruction() { + + SearchResponse response = new SearchResponse(); + + //response.setNumReturned(1); + response.setProcessingTimeInMs(512); + //response.setTotalFound(50); + + List<EntityEntry> entities = new ArrayList<EntityEntry>(); + //response.setEntities(entities); + + EntityEntry e1 = new EntityEntry(); + e1.setEntityPrimaryKeyValue("e1"); + e1.setEntityType("e1"); + e1.setSearchTags("e1"); + + //response.addEntityEntry(e1); + + EntityEntry e2 = new EntityEntry(); + + e2.setEntityPrimaryKeyValue("e2"); + e2.setEntityType("e2"); + e2.setSearchTags("e2"); + + //response.addEntityEntry(e2); + + //assertEquals(1, response.getNumReturned()); + //assertEquals(512, response.getProcessingTimeInMs()); + //assertEquals(50, response.getTotalFound()); + + //List<EntityEntry> responseEntities = response.getEntities(); + + //assertEquals(2, responseEntities.size()); + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/SearchServletTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/SearchServletTest.java new file mode 100644 index 0000000..69c408c --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/SearchServletTest.java @@ -0,0 +1,992 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ + +package org.onap.aai.sparky.viewandinspect; + +/* +import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; +import org.openecomp.sparky.config.oxm.OxmModelLoader; +import org.openecomp.sparky.dal.elasticsearch.SearchAdapter; +import org.openecomp.sparky.dal.elasticsearch.entity.AutoSuggestDocumentEntity; +import org.openecomp.sparky.dal.elasticsearch.entity.AutoSuggestDocumentEntityFields; +import org.openecomp.sparky.dal.elasticsearch.entity.AutoSuggestElasticHitEntity; +import org.openecomp.sparky.dal.elasticsearch.entity.AutoSuggestElasticHitsEntity; +import org.openecomp.sparky.dal.elasticsearch.entity.AutoSuggestElasticSearchResponse; +import org.openecomp.sparky.dal.elasticsearch.entity.BucketEntity; +import org.openecomp.sparky.dal.elasticsearch.entity.ElasticHitsEntity; +import org.openecomp.sparky.dal.elasticsearch.entity.ElasticSearchAggegrationResponse; +import org.openecomp.sparky.dal.elasticsearch.entity.ElasticSearchAggregation; +import org.openecomp.sparky.dal.elasticsearch.entity.ElasticSearchCountResponse; +import org.openecomp.sparky.dal.elasticsearch.entity.PayloadEntity; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.dal.sas.config.SearchServiceConfig; +import org.openecomp.sparky.dal.sas.entity.EntityCountResponse; +import org.openecomp.sparky.dal.sas.entity.GroupByAggregationResponseEntity; +import org.openecomp.sparky.dal.sas.entity.SearchAbstractionEntityBuilder; +import org.openecomp.sparky.search.VnfSearchService; +import org.openecomp.sparky.search.config.SuggestionConfig; +import org.openecomp.sparky.search.filters.FilteredSearchHelper; +import org.openecomp.sparky.search.filters.entity.UiFilterEntity; +import org.openecomp.sparky.search.filters.entity.UiFilterValueEntity; +import org.openecomp.sparky.search.filters.entity.UiFiltersEntity; +import org.openecomp.sparky.suggestivesearch.SuggestionEntity; +import org.openecomp.sparky.util.ExceptionHelper; +import org.openecomp.sparky.util.HttpServletHelper; +import org.openecomp.sparky.util.NodeUtils; +import org.openecomp.sparky.viewandinspect.entity.QuerySearchEntity; +import org.openecomp.sparky.viewandinspect.entity.SearchResponse; +import org.slf4j.MDC; + +import org.onap.aai.cl.mdc.MdcContext; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.net.MediaType; + + +/** + * The Class SearchServletTest. 
+ */ + +/*public class SearchServletTest { + + private static final String VNF_ROUTE = "vnf"; + private static final String VIEW_INSPECT_ROUTE = "viewInspect"; + + private HttpServletRequest commonRequest = null; + private HttpServletResponse commonResponse = null; + private PrintWriter printWriter = null; + private StringWriter responseStringWriter = null; + private SearchServiceWrapper searchWrapper = null; + private SearchAdapter searchAdapter = null; + private VnfSearchService vnfSearchService = null; + private ObjectMapper mapper = null; + private SecureRandom rand = null; + private OxmModelLoader loader; + private Map<String, OxmEntityDescriptor> descriptors = null; + private SuggestionConfig suggestionConfig = null; + private SearchServiceConfig esConfig = null; + + @BeforeClass + public static void initBeforeClass() throws IOException { + if (null == System.getProperty("CONFIG_HOME")) { + /* Set "CONFIG_HOME" environment variable so path of filter & view schema files are correct when + they're loaded during SearchServiceWrapper instantiation */ +/* String configHomePath = (new File(".").getCanonicalPath() + "/appconfig-local").replace('\\', '/'); + System.setProperty("CONFIG_HOME", configHomePath); + } + } + + /** + * Inits the. + * + * @throws Exception the exception + */ +/* @Before + public void init() throws Exception { + commonRequest = HttpServletHelper.getMockHttpServletRequest(); + responseStringWriter = new StringWriter(); + printWriter = new PrintWriter(responseStringWriter); + commonResponse = HttpServletHelper.getMockHttpServletResponse(printWriter); + mapper = new ObjectMapper(); + + // permit serialization of objects with no members + mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); + + rand = new SecureRandom(); + + loader = Mockito.mock(OxmModelLoader.class); + descriptors = new HashMap<String, OxmEntityDescriptor>(); + + esConfig = new SearchServiceConfig(); + suggestionConfig = SuggestionConfig.getConfig(); + + // Use SearchServiceWrapper and VnfSearchService for suggestionConfig + Map<String, String> svcs = new HashMap<String, String>(); + svcs.put("autosuggestIndexname", "SearchServiceWrapper"); + svcs.put("indexName", "VnfSearchService"); + suggestionConfig.setSearchIndexToSearchService(svcs); + + esConfig.setIndexName("esi-localhost"); + esConfig.setType("default"); + + searchAdapter = Mockito.mock(SearchAdapter.class); + vnfSearchService = Mockito.mock(VnfSearchService.class); + + initializeEntityDescriptors(); + + searchWrapper = new SearchServiceWrapper(); + searchWrapper.setSasConfig(esConfig); + searchWrapper.setSearch(searchAdapter); + searchWrapper.setVnfSearch(vnfSearchService); + searchWrapper.setSuggestionConfig(suggestionConfig); + searchWrapper.setOxmModelLoader(loader); + } + + @Test + public void validateAccessors() { + assertNotNull("Vnf Search Service should not be null", searchWrapper.getVnfSearch()); + } + + @Test + public void validateInitializer() { + + try { + assertNotNull("Oxm Model loader should not be null", searchWrapper.getOxmModelLoader()); + assertNotNull("SearchAbstractionConfig should not be null", searchWrapper.getSasConfig()); + assertNotNull("SearchAdapter should not be null", searchWrapper.getSearch()); + assertNotNull("Suggestion Config should not be null", searchWrapper.getSuggestionConfig()); + assertNotNull("VnfSearchService should not be null", searchWrapper.getVnfSearch()); + + searchWrapper.setOxmModelLoader(null); + searchWrapper.setSasConfig(null); + searchWrapper.setSearch(null); + 
searchWrapper.setSuggestionConfig(null); + searchWrapper.setVnfSearch(null); + + assertNull("Oxm Model loader should be null", searchWrapper.getOxmModelLoader()); + assertNull("SearchAbstractionConfig should be null", searchWrapper.getSasConfig()); + assertNull("SearchAdapter should be null", searchWrapper.getSearch()); + assertNull("Suggestion Config should be null", searchWrapper.getSuggestionConfig()); + assertNull("VnfSearchService should be null", searchWrapper.getVnfSearch()); + + } catch (Exception exc) { + fail("Servlet Initialization Failed with error = " + exc.getMessage()); + } + + } + + /** + * Test doGet() and doPost() for a non-existent end-point. A test objective would be + * to either return a 404 Not Found. + */ + /* @Test + public void validateMdcContextLoggingVariablesWhenExplicitlySet() { + + final String transactionId = "1234"; + final String serviceName = "AAI-UI"; + final String partnerName = "SparkyApp"; + + HttpServletHelper.assignRequestHeader(commonRequest, "X-TransactionId", transactionId); + HttpServletHelper.assignRequestHeader(commonRequest, "X-FromAppId", partnerName); + + HttpServletHelper.assignRequestUri(commonRequest, "search/this/path/does/not/exist/"); + + try { + + /* + * Testing the doGet() operation will hit the doPost() operation in the servlet as well + */ + + /* OperationResult result = doEvaluationTestMDC(true, commonRequest, commonResponse); + + assertEquals(transactionId,MDC.get(MdcContext.MDC_REQUEST_ID)); + assertEquals(serviceName,MDC.get(MdcContext.MDC_SERVICE_NAME)); + assertEquals(partnerName,MDC.get(MdcContext.MDC_PARTNER_NAME)); + + } catch (Exception exc) { + exc.printStackTrace(); + fail("Unexpected exception = " + exc.getLocalizedMessage()); + } + + } + + /** + * Test doGet() and doPost() for a non-existent end-point. A test objective would be + * to either return a 404 Not Found. + */ + /* @Test + public void validateMdcContextLoggingVariablesWhenNotExplicitlySet() { + + /*final String transactionId = "1234"; + final String serviceName = "AAI-UI"; + final String partnerName = "SparkyApp"; + + HttpServletHelper.assignRequestHeader(commonRequest, "X-TransactionId", transactionId); + HttpServletHelper.assignRequestHeader(commonRequest, "X-FromAppId", serviceName);*/ + +/* HttpServletHelper.assignRequestUri(commonRequest, "search/this/path/does/not/exist/"); + + try { + + /* + * Testing the doGet() operation will hit the doPost() operation in the servlet as well + */ + + /* OperationResult result = doEvaluationTestMDC(true, commonRequest, commonResponse); + + assertNotNull(MDC.get(MdcContext.MDC_REQUEST_ID)); + assertNotNull(MDC.get(MdcContext.MDC_SERVICE_NAME)); + assertNotNull(MDC.get(MdcContext.MDC_PARTNER_NAME)); + + } catch (Exception exc) { + exc.printStackTrace(); + fail("Unexpected exception = " + exc.getLocalizedMessage()); + } + + } + + + + /** + * Test doGet() and doPost() for a non-existent end-point. 
+ */ + /* @Test + public void validateViewAndInspectSearchError_invalidRequestUri() { + + HttpServletHelper.assignRequestUri(commonRequest, "search/this/path/does/not/exist/"); + + try { + + /* + * Testing the doGet() operation will hit the doPost() operation in the servlet as well + */ +/* + OperationResult result = doEvaluation(true, commonRequest, commonResponse); + assertEquals(404, result.getResultCode()); + assertTrue(result.getResult().contains("Ignored request-uri")); + + } catch (Exception exc) { + exc.printStackTrace(); + fail("Unexpected exception = " + exc.getLocalizedMessage()); + } + + } + + /** + * Test doGet() and doPost() for Unified Query Search success path + */ +/* @Test + public void validateQuerySearch_successPath() { + + try { + + QuerySearchEntity searchEntity = new QuerySearchEntity(); + searchEntity.setMaxResults("10"); + searchEntity.setQueryStr("the quick brown fox"); + + HttpServletHelper.assignRequestUri(commonRequest, "search/querysearch"); + HttpServletHelper.setRequestPayload(commonRequest, MediaType.JSON_UTF_8.toString(), + NodeUtils.convertObjectToJson(searchEntity, false)); + + + // set search-abstraction-response that we expect to get back from real system, but stubbed through a mock + // to fulfill collaborator behavior + + OperationResult mockedEntitySearchResponse = new OperationResult(); + mockedEntitySearchResponse.setResultCode(200); + mockedEntitySearchResponse.setResult(NodeUtils.convertObjectToJson( + SearchAbstractionEntityBuilder.getSuccessfulEntitySearchResponse(), false)); + + // TODO: make parameters expect certain values to lock in invocation attempt against a specific input sequence + Mockito.when(searchAdapter.doPost(anyString(), anyString(), anyString())) + .thenReturn(mockedEntitySearchResponse); + + List<SuggestionEntity> autoSuggestions = new ArrayList<SuggestionEntity>(); + + autoSuggestions.add(new SuggestionEntity("1234", "vnf", "VNFs", null)); + autoSuggestions.add(new SuggestionEntity("1111", "vnf", "Created VNFs", null)); + autoSuggestions.add(new SuggestionEntity("1122", "vnf", "ACTIVE VNFs", null)); + autoSuggestions.add(new SuggestionEntity("2233", "vnf", "ACTIVE and Error VNFs", null)); + autoSuggestions.add(new SuggestionEntity("3344", "vnf", "ACTIVE and NOT ORCHESTRATED VNFs", null)); + autoSuggestions.add(new SuggestionEntity("4455", "vnf", "ACTIVE and Running VNFs", null)); + autoSuggestions.add(new SuggestionEntity("5566", "vnf", "Activated VNFs", null)); + autoSuggestions.add(new SuggestionEntity("6677", "vnf", "CAPPED VNFs", null)); + autoSuggestions.add(new SuggestionEntity("7788", "vnf", "CAPPED and Created VNFs", null)); + + Mockito.when(vnfSearchService.getSuggestionsResults(Mockito.anyObject(), Mockito.anyInt())) + .thenReturn(autoSuggestions); + + /* + * Testing the doGet() operation will hit the doPost() operation in the servlet as well + */ +/* + OperationResult result = doEvaluation(true, commonRequest, commonResponse); + + + assertEquals(200, result.getResultCode()); + + SearchResponse searchResponse = mapper.readValue(result.getResult(), SearchResponse.class); + + assertEquals(10, searchResponse.getTotalFound()); + + int numVnf = 0; + int numViewInspect = 0; + + for ( SuggestionEntity suggestion : searchResponse.getSuggestions()) { + + if ( VNF_ROUTE.equals(suggestion.getRoute())) { + numVnf++; + } else if ( VIEW_INSPECT_ROUTE.equals(suggestion.getRoute())) { + numViewInspect++; + } + } + + assertEquals(5, numVnf); + assertEquals(5, numViewInspect); + + //assertTrue(result.getResult().contains("Ignored 
request-uri")); + + } catch (Exception exc) { + fail("Unexpected exception = " + exc.getLocalizedMessage()); + } + + } + + /** + * Test doGet() and doPost() for Unified Query Search success path + */ + /*@Test + @Ignore + public void validateSummaryByEntityTypeCount_successPath() { + + try { + + HttpServletHelper.assignRequestUri(commonRequest, "search/summarybyentitytype/count"); + + Map<String,String> payloadFields = new HashMap<String,String>(); + payloadFields.put("hashId", "662d1b57c31df70d7ef57ec53c0ace81578ec77b6bc5de055a57c7547ec122dd"); + payloadFields.put("groupby", "orchestration-status"); + + HttpServletHelper.setRequestPayload(commonRequest, MediaType.JSON_UTF_8.toString(), NodeUtils.convertObjectToJson(payloadFields, false)); + + /* + * In this test we don't want to mock the vnf search service, only it's collaborator + * interactions with a REST endpoint. + */ + /* vnfSearchService = new VnfSearchService(); + vnfSearchService.setSearch(searchAdapter); + searchWrapper.setVnfSearch(vnfSearchService); + + /* + * The first network response to mock is the one to elastic search to get the suggestion entity by hash id + * + * http://localhost:9200/entityautosuggestindex-localhost/_search + * {"query":{"term":{"_id":"2172a3c25ae56e4995038ffbc1f055692bfc76c0b8ceda1205bc745a9f7a805d"}}} + */ + /* + AutoSuggestElasticSearchResponse elasticResponse = new AutoSuggestElasticSearchResponse(); + + elasticResponse.setTook(1); + + elasticResponse.setTimedOut(false); + elasticResponse.addShard("total", "5"); + elasticResponse.addShard("successful", "5"); + elasticResponse.addShard("failed", "0"); + + AutoSuggestElasticHitEntity elasticHit = new AutoSuggestElasticHitEntity(); + elasticHit.setIndex("entityautosuggestindex-localhost"); + elasticHit.setType("default"); + elasticHit.setId("2172a3c25ae56e4995038ffbc1f055692bfc76c0b8ceda1205bc745a9f7a805d"); + elasticHit.setScore("1"); + + AutoSuggestDocumentEntityFields suggestDocFields = new AutoSuggestDocumentEntityFields(); + suggestDocFields.addInput("VNFs"); + suggestDocFields.addInput("generic-vnfs"); + suggestDocFields.setOutput("VNFs"); + suggestDocFields.setPayload(new PayloadEntity()); + suggestDocFields.setWeight(100); + + AutoSuggestDocumentEntity autoSuggestDoc = new AutoSuggestDocumentEntity(); + autoSuggestDoc.setFields(suggestDocFields); + + elasticHit.setSource(autoSuggestDoc); + + AutoSuggestElasticHitsEntity hits = new AutoSuggestElasticHitsEntity(); + hits.addHit(elasticHit); + + elasticResponse.setHits(hits); + + + OperationResult mockedSearchResponse = new OperationResult(); + mockedSearchResponse.setResultCode(200); + + mockedSearchResponse.setResult(NodeUtils.convertObjectToJson(elasticResponse, false)); + + + /* + * The second response is the count API dip to elastic search + */ + /* + ElasticSearchCountResponse countResponse = new ElasticSearchCountResponse(); + countResponse.setCount(3170); + countResponse.addShard("total", "5"); + countResponse.addShard("successful", "5"); + countResponse.addShard("failed", "0"); + + OperationResult searchResponseForCount = new OperationResult(); + searchResponseForCount.setResultCode(200); + + searchResponseForCount.setResult(NodeUtils.convertObjectToJson(countResponse, false)); + + // TODO: make parameters expect certain values to lock in invocation attempt against a specific input sequence + Mockito.when(searchAdapter.doPost(anyString(), anyString(), anyString())) + .thenReturn(mockedSearchResponse).thenReturn(searchResponseForCount); + + + /* + * Testing the doGet() operation will hit 
the doPost() operation in the servlet as well + */ +/* + OperationResult result = doEvaluation(true, commonRequest, commonResponse); + + + assertEquals(200, result.getResultCode()); + + // + //{"shards":{"total":"5","failed":"0","successful":"5"},"count":3170} + + EntityCountResponse entityCountResponse = mapper.readValue(result.getResult(), EntityCountResponse.class); + + assertEquals(3170, entityCountResponse.getCount()); + + } catch (Exception exc) { + fail("Unexpected exception = " + exc.getLocalizedMessage()); + } + + } + + + /** + * Test doGet() and doPost() for Unified Query Search success path + */ + /*@Test + @Ignore + public void validateSummaryByEntityType_successPath() { + + try { + + HttpServletHelper.assignRequestUri(commonRequest, "search/summarybyentitytype"); + + Map<String,String> payloadFields = new HashMap<String,String>(); + payloadFields.put("hashId", "662d1b57c31df70d7ef57ec53c0ace81578ec77b6bc5de055a57c7547ec122dd"); + payloadFields.put("groupby", "orchestration-status"); + + HttpServletHelper.setRequestPayload(commonRequest, MediaType.JSON_UTF_8.toString(), NodeUtils.convertObjectToJson(payloadFields, false)); + + /* + * In this test we don't want to mock the vnf search service, only it's collaborator + * interactions with a REST endpoint. + */ + /* vnfSearchService = new VnfSearchService(); + vnfSearchService.setSearch(searchAdapter); + searchWrapper.setVnfSearch(vnfSearchService); + + /* + * The first network response to mock is the one to elastic search to get the suggestion entity by hash id + * + * http://localhost:9200/entityautosuggestindex-localhost/_search + * {"query":{"term":{"_id":"2172a3c25ae56e4995038ffbc1f055692bfc76c0b8ceda1205bc745a9f7a805d"}}} + */ + + /* AutoSuggestElasticSearchResponse elasticResponse = new AutoSuggestElasticSearchResponse(); + + elasticResponse.setTook(1); + + elasticResponse.setTimedOut(false); + elasticResponse.addShard("total", "5"); + elasticResponse.addShard("successful", "5"); + elasticResponse.addShard("failed", "0"); + + AutoSuggestElasticHitEntity elasticHit = new AutoSuggestElasticHitEntity(); + elasticHit.setIndex("entityautosuggestindex-localhost"); + elasticHit.setType("default"); + elasticHit.setId("2172a3c25ae56e4995038ffbc1f055692bfc76c0b8ceda1205bc745a9f7a805d"); + elasticHit.setScore("1"); + + AutoSuggestDocumentEntityFields suggestDocFields = new AutoSuggestDocumentEntityFields(); + suggestDocFields.addInput("VNFs"); + suggestDocFields.addInput("generic-vnfs"); + suggestDocFields.setOutput("VNFs"); + suggestDocFields.setPayload(new PayloadEntity()); + suggestDocFields.setWeight(100); + + AutoSuggestDocumentEntity autoSuggestDoc = new AutoSuggestDocumentEntity(); + autoSuggestDoc.setFields(suggestDocFields); + + elasticHit.setSource(autoSuggestDoc); + + AutoSuggestElasticHitsEntity hits = new AutoSuggestElasticHitsEntity(); + hits.addHit(elasticHit); + + elasticResponse.setHits(hits); + + + OperationResult mockedSearchResponse = new OperationResult(); + mockedSearchResponse.setResultCode(200); + + mockedSearchResponse.setResult(NodeUtils.convertObjectToJson(elasticResponse, false)); + + + /* + * The second response is the aggregation API dip to elastic search + */ + /* + ElasticSearchAggegrationResponse aggResponse = new ElasticSearchAggegrationResponse(); + + aggResponse.setTook(20); + aggResponse.setTimedOut(false); + + aggResponse.addShard("total","5"); + aggResponse.addShard("successful","5"); + aggResponse.addShard("failed","0"); + + ElasticHitsEntity hitsEntity = new ElasticHitsEntity(); + + 
hitsEntity.setTotal(3170); + hitsEntity.setMaxScore(0); + + aggResponse.setHits(hitsEntity); + + ElasticSearchAggregation defaultAggregation = new ElasticSearchAggregation(); + + defaultAggregation.setDocCountErrorUpperBound(0); + defaultAggregation.setSumOtherDocCount(0); + defaultAggregation.addBucket(new BucketEntity("created",1876)); + defaultAggregation.addBucket(new BucketEntity("Created",649)); + defaultAggregation.addBucket(new BucketEntity("Activated",158)); + defaultAggregation.addBucket(new BucketEntity("active",59)); + defaultAggregation.addBucket(new BucketEntity("NOT ORCHESTRATED",42)); + defaultAggregation.addBucket(new BucketEntity("Pending-Create",10)); + defaultAggregation.addBucket(new BucketEntity("Running",9)); + defaultAggregation.addBucket(new BucketEntity("Configured",7)); + defaultAggregation.addBucket(new BucketEntity("pending-create",7)); + defaultAggregation.addBucket(new BucketEntity("Error",3)); + defaultAggregation.addBucket(new BucketEntity("planned",3)); + defaultAggregation.addBucket(new BucketEntity("PLANNED",2)); + defaultAggregation.addBucket(new BucketEntity("ERROR",1)); + defaultAggregation.addBucket(new BucketEntity("RUNNING",1)); + defaultAggregation.addBucket(new BucketEntity("example-orchestration-status-val-6176",1)); + + aggResponse.addAggregation("default", defaultAggregation); + + OperationResult searchResponseForAggregation = new OperationResult(); + searchResponseForAggregation.setResultCode(200); + + searchResponseForAggregation.setResult(NodeUtils.convertObjectToJson(aggResponse, false)); + + // TODO: make parameters expect certain values to lock in invocation attempt against a specific input sequence + Mockito.when(searchAdapter.doPost(anyString(), anyString(), anyString())) + .thenReturn(mockedSearchResponse).thenReturn(searchResponseForAggregation); + + + /* + * Testing the doGet() operation will hit the doPost() operation in the servlet as well + */ +/* + OperationResult result = doEvaluation(true, commonRequest, commonResponse); + + + assertEquals(200, result.getResultCode()); + + // + //{"shards":{"total":"5","failed":"0","successful":"5"},"count":3170} + + GroupByAggregationResponseEntity groupByResponse = mapper.readValue(result.getResult(), GroupByAggregationResponseEntity.class); + + assertEquals(2828, groupByResponse.getAggEntity().getTotalChartHits()); + assertEquals(15, groupByResponse.getAggEntity().getBuckets().size()); + + } catch (Exception exc) { + fail("Unexpected exception = " + exc.getLocalizedMessage()); + } + } + + @Test + public void validateHandleDiscoverSearchFilters_vnfSearchViewName() throws IOException { + String requestBody = "{ \"viewName\" : \"VnfSearch\" }"; + String expectedResponse = "{\"filters\":[{\"filterId\":\"1\",\"filterName\":\"Orchestration-Status\",\"displayName\":\"Orchestration Status\",\"dataType\":\"list\"},{\"filterId\":\"2\",\"filterName\":\"Prov-Status\",\"displayName\":\"Provisioning Status\",\"dataType\":\"list\"}]}"; + + HttpServletHelper.assignRequestUri(commonRequest, "search/discoverFilters"); + HttpServletHelper.setRequestPayload(commonRequest, MediaType.JSON_UTF_8.toString(), requestBody); + + OperationResult result = doEvaluation(true, commonRequest, commonResponse); + + assertEquals(expectedResponse, result.getResult().trim()); + } + + @Test + public void validateFilterAggregation_successPath() { + String requestBodyFilePath = "filters/filterAggregationEndpoint_successPath_requestBody.json"; + String expectedResponseFilePath = 
"filters/filterAggregationEndpoint_successPath_expectedResponse.json"; + String operationResultFilePath = "filters/filterAggregationEndpoint_successPath_operationResult.json"; + + String requestBody = getResourceFileContents(requestBodyFilePath); + String expectedResponse = getResourceFileContents(expectedResponseFilePath); + + HttpServletHelper.assignRequestUri(commonRequest, "search/filterAggregation"); + HttpServletHelper.setRequestPayload(commonRequest, MediaType.JSON_UTF_8.toString(), requestBody); + + OperationResult operationResult = new OperationResult(); + operationResult.setResult(getResourceFileContents(operationResultFilePath)); + + vnfSearchService = new VnfSearchService(); + vnfSearchService.setSearch(searchAdapter); + searchWrapper.setVnfSearch(vnfSearchService); + + Mockito.when(searchAdapter.doPost(anyString(), anyString(), anyString())).thenReturn(operationResult); + + OperationResult result = doEvaluation(true, commonRequest, commonResponse); + + assertEquals(expectedResponse.trim(), result.getResult().trim()); + } + + @Test + public void validateFilterAggregation_emptyRequestFilterArray() throws IOException { + String requestBodyFilePath = "filters/filterAggregationEndpoint_emptyRequestFilterArray_requestBody.json"; + String expectedResponseFilePath = "filters/filterAggregationEndpoint_emptyRequestFilterArray_expectedResponse.json"; + + String requestBody = getResourceFileContents(requestBodyFilePath); + String expectedResponse = getResourceFileContents(expectedResponseFilePath); + + HttpServletHelper.assignRequestUri(commonRequest, "search/filterAggregation"); + HttpServletHelper.setRequestPayload(commonRequest, MediaType.JSON_UTF_8.toString(), requestBody); + + vnfSearchService = new VnfSearchService(); + vnfSearchService.setSearch(searchAdapter); + searchWrapper.setVnfSearch(vnfSearchService); + + OperationResult result = doEvaluation(true, commonRequest, commonResponse); + + assertEquals(expectedResponse.trim(), result.getResult().trim()); + } + + @Test + public void validateFilterAggregation_emptyRequestBody() throws IOException { + String expectedResponseFilePath = "filters/filterAggregationEndpoint_emptyRequestBody_expectedResponse.json"; + + String expectedResponse = getResourceFileContents(expectedResponseFilePath); + + HttpServletHelper.assignRequestUri(commonRequest, "search/filterAggregation"); + HttpServletHelper.setRequestPayload(commonRequest, MediaType.JSON_UTF_8.toString(), "{}"); + + vnfSearchService = new VnfSearchService(); + vnfSearchService.setSearch(searchAdapter); + searchWrapper.setVnfSearch(vnfSearchService); + + OperationResult result = doEvaluation(true, commonRequest, commonResponse); + + assertEquals(expectedResponse.trim(), result.getResult().trim()); + } + + @Test + public void validateHandleDiscoverSearchFilters_diuiViewName() throws IOException { + String requestBody = "{ \"viewName\" : \"dataIntegrity\" }"; + String expectedResponse = "{\"filters\":[{\"filterId\":\"3\",\"filterName\":\"Severity\",\"displayName\":\"Severity\",\"dataType\":\"list\"},{\"filterId\":\"4\",\"filterName\":\"Category\",\"displayName\":\"Category\",\"dataType\":\"list\"},{\"filterId\":\"5\",\"filterName\":\"Date\",\"displayName\":\"Date\",\"dataType\":\"date\"},{\"filterId\":\"6\",\"filterName\":\"EntityType\",\"displayName\":\"Entity Type\",\"dataType\":\"list\"}]}"; + + HttpServletHelper.assignRequestUri(commonRequest, "search/discoverFilters"); + HttpServletHelper.setRequestPayload(commonRequest, MediaType.JSON_UTF_8.toString(), requestBody); + + 
OperationResult result = doEvaluation(true, commonRequest, commonResponse); + + assertEquals(expectedResponse, result.getResult().trim()); + } + + @Test + public void validateHandleDiscoverSearchFilterValues_validId() throws IOException { + String requestBody = "{ \"filterIdList\" : [ { \"filterId\" : \"1\" } ] }"; + String expectedResponse = "{\"filters\":[{\"filterId\":\"1\",\"filterName\":\"Orchestration-Status\",\"displayName\":\"Orchestration Status\",\"dataType\":\"list\",\"filterValueList\":[{\"filterValue\":\"created\",\"displayName\":\"created\"}]}]}"; + + HttpServletHelper.assignRequestUri(commonRequest, "search/discoverFilterValues"); + HttpServletHelper.setRequestPayload(commonRequest, MediaType.JSON_UTF_8.toString(), requestBody); + + FilteredSearchHelper filteredSearchHelper = Mockito.mock(FilteredSearchHelper.class); + searchWrapper.setFilteredSearchHelper(filteredSearchHelper); + + UiFilterValueEntity uiFilterValueEntity = new UiFilterValueEntity(null, "created", "created"); + + UiFilterEntity uiFilterEntity = new UiFilterEntity(); + uiFilterEntity.setDataType("list"); + uiFilterEntity.setDisplayName("Orchestration Status"); + uiFilterEntity.setFilterId("1"); + uiFilterEntity.setFilterName("Orchestration-Status"); + uiFilterEntity.addFilterValue(uiFilterValueEntity); + + UiFiltersEntity uiFiltersEntity = new UiFiltersEntity(); + uiFiltersEntity.addFilter(uiFilterEntity); + + Mockito.when(filteredSearchHelper.doFilterEnumeration(Mockito.anyList())) + .thenReturn(uiFiltersEntity); + + OperationResult result = doEvaluation(true, commonRequest, commonResponse); + + assertEquals(expectedResponse, result.getResult().trim()); + } + + @Test + public void validateHandleDiscoverSearchFilterValues_multipleValidIds() throws IOException { + String requestBody = "{ \"filterIdList\" : [ { \"filterId\" : \"1\" }, { \"filterId\" : \"2\" } ] }"; + String expectedResponse = "{\"filters\":[{\"filterId\":\"1\",\"filterName\":\"Orchestration-Status\",\"displayName\":\"Orchestration Status\",\"dataType\":\"list\",\"filterValueList\":[{\"filterValue\":\"created\",\"displayName\":\"created\"}]},{\"filterId\":\"2\",\"filterName\":\"Prov-Status\",\"displayName\":\"Provisioning Status\",\"dataType\":\"list\",\"filterValueList\":[{\"filterValue\":\"active\",\"displayName\":\"active\"}]}]}"; + + HttpServletHelper.assignRequestUri(commonRequest, "search/discoverFilterValues"); + HttpServletHelper.setRequestPayload(commonRequest, MediaType.JSON_UTF_8.toString(), requestBody); + + FilteredSearchHelper filteredSearchHelper = Mockito.mock(FilteredSearchHelper.class); + searchWrapper.setFilteredSearchHelper(filteredSearchHelper); + + UiFiltersEntity uiFiltersEntity = new UiFiltersEntity(); + + UiFilterValueEntity uiFilter1ValueEntity = new UiFilterValueEntity(null, "created", "created"); + UiFilterEntity uiFilterEntity1 = new UiFilterEntity(); + uiFilterEntity1.setDataType("list"); + uiFilterEntity1.setDisplayName("Orchestration Status"); + uiFilterEntity1.setFilterId("1"); + uiFilterEntity1.setFilterName("Orchestration-Status"); + uiFilterEntity1.addFilterValue(uiFilter1ValueEntity); + uiFiltersEntity.addFilter(uiFilterEntity1); + + UiFilterValueEntity uiFilter2ValueEntity = new UiFilterValueEntity(null, "active", "active"); + UiFilterEntity uiFilterEntity2 = new UiFilterEntity(); + uiFilterEntity2.setDataType("list"); + uiFilterEntity2.setDisplayName("Provisioning Status"); + uiFilterEntity2.setFilterId("2"); + uiFilterEntity2.setFilterName("Prov-Status"); + 
uiFilterEntity2.addFilterValue(uiFilter2ValueEntity); + uiFiltersEntity.addFilter(uiFilterEntity2); + + Mockito.when(filteredSearchHelper.doFilterEnumeration(Mockito.anyList())) + .thenReturn(uiFiltersEntity); + + OperationResult result = doEvaluation(true, commonRequest, commonResponse); + + assertEquals(expectedResponse, result.getResult().trim()); + } + + @Test + public void validateHandleDiscoverSearchFilterValues_invalidId() throws IOException { + String requestBody = "{ \"filterIdList\" : [ { \"filterId\" : \"999\" } ] }"; + String expectedResponse = "{\"filters\":[]}"; + + HttpServletHelper.assignRequestUri(commonRequest, "search/discoverFilterValues"); + HttpServletHelper.setRequestPayload(commonRequest, MediaType.JSON_UTF_8.toString(), requestBody); + + OperationResult result = doEvaluation(true, commonRequest, commonResponse); + + assertEquals(expectedResponse, result.getResult().trim()); + } + + @Test + public void validateHandleDiscoverSearchFilterValues_validIdAndInvalidId() throws IOException { + String requestBody = "{ \"filterIdList\" : [ { \"filterId\" : \"1\" }, { \"filterId\" : \"999\" } ] }"; + String expectedResponse = "{\"filters\":[{\"filterId\":\"1\",\"filterName\":\"Orchestration-Status\",\"displayName\":\"Orchestration Status\",\"dataType\":\"list\",\"filterValueList\":[{\"filterValue\":\"created\",\"displayName\":\"created\"}]}]}"; + + HttpServletHelper.assignRequestUri(commonRequest, "search/discoverFilterValues"); + HttpServletHelper.setRequestPayload(commonRequest, MediaType.JSON_UTF_8.toString(), requestBody); + + FilteredSearchHelper filteredSearchHelper = Mockito.mock(FilteredSearchHelper.class); + searchWrapper.setFilteredSearchHelper(filteredSearchHelper); + + UiFilterValueEntity uiFilterValueEntity = new UiFilterValueEntity(null, "created", "created"); + + UiFilterEntity uiFilterEntity = new UiFilterEntity(); + uiFilterEntity.setDataType("list"); + uiFilterEntity.setDisplayName("Orchestration Status"); + uiFilterEntity.setFilterId("1"); + uiFilterEntity.setFilterName("Orchestration-Status"); + uiFilterEntity.addFilterValue(uiFilterValueEntity); + + UiFiltersEntity uiFiltersEntity = new UiFiltersEntity(); + uiFiltersEntity.addFilter(uiFilterEntity); + + Mockito.when(filteredSearchHelper.doFilterEnumeration(Mockito.anyList())) + .thenReturn(uiFiltersEntity); + + OperationResult result = doEvaluation(true, commonRequest, commonResponse); + + assertEquals(expectedResponse, result.getResult().trim()); + } + + /** + * Builds the resource entity descriptor. + * + * @param entityType the entity type + * @param attributeNames the attribute names + * @param searchableAttributes the searchable attributes + * @return the oxm entity descriptor + */ +/* @SuppressWarnings("unchecked") + private OxmEntityDescriptor buildResourceEntityDescriptor(String entityType, + String attributeNames, String searchableAttributes) { + OxmEntityDescriptor descriptor = new OxmEntityDescriptor(); + descriptor.setEntityName(entityType); + + if (attributeNames != null) { + descriptor.setPrimaryKeyAttributeName(Arrays.asList(attributeNames.split(","))); + } + + if (searchableAttributes != null) { + descriptor.setSearchableAttributes(Arrays.asList(searchableAttributes.split(","))); + } + + return descriptor; + } + + /** + * Initialize entity descriptors. + */ + /*private void initializeEntityDescriptors() { + descriptors.put("customer", + buildResourceEntityDescriptor("customer", "service-instance-id", "f1,f2,f3")); + } + + /** + * Builds the view and inspect search request. 
+ * + * @param maxResults the max results + * @param queryStr the query str + * @return the string + * @throws JsonProcessingException the json processing exception + */ + /* public String buildViewAndInspectSearchRequest(Integer maxResults, String queryStr) + throws JsonProcessingException { + + /* + * { "maxResults" : "10", "searchStr" : "<search bar text>" } + */ +/* + ObjectNode rootNode = mapper.createObjectNode(); + + if (maxResults != null) { + rootNode.put("maxResults", maxResults); + } + + if (queryStr != null) { + rootNode.put("queryStr", queryStr); + } + + return NodeUtils.convertObjectToJson(rootNode, true); + + } + + public String getResourceFileContents(String filePath) { + StringBuilder result = new StringBuilder(""); + + ClassLoader classLoader = getClass().getClassLoader(); + File file = new File(classLoader.getResource(filePath).getFile()); + + try (Scanner scanner = new Scanner(file)) { + while (scanner.hasNextLine()) { + String line = scanner.nextLine(); + result.append(line).append("\n"); + } + + scanner.close(); + + } catch (IOException e) { + e.printStackTrace(); + } + + return result.toString(); + } + + + /** + * Do evaluation. + * + * @param doGet the do get + * @param req the req + * @param res the res + * @return the string + *//* + private OperationResult doEvaluationTestMDC(boolean doGet, HttpServletRequest req, HttpServletResponse res) { + + /* + * Test method invocation + */ +/* + SearchServlet searchServlet = new SearchServlet(); + try { + searchServlet.init(); + } catch (ServletException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + ArgumentCaptor<Integer> responseCodeCaptor = ArgumentCaptor.forClass(Integer.class); + + try { + if (doGet) { + searchServlet.doGet(req, res); + } else { + searchServlet.doPost(req, res); + } + } catch (ServletException exc) { + fail(ExceptionHelper.extractStackTraceElements(5, exc)); + } catch (IOException exc) { + fail(ExceptionHelper.extractStackTraceElements(5, exc)); + } + + responseStringWriter.flush(); + Mockito.verify(commonResponse, Mockito.atLeast(1)).setStatus(responseCodeCaptor.capture()); + + OperationResult result = new OperationResult(); + + result.setResultCode(responseCodeCaptor.getValue()); + result.setResult(responseStringWriter.toString()); + + return result; + + } + + /** + * Do evaluation. 
+ * + * @param doGet the do get + * @param req the req + * @param res the res + * @return the string + *//* + private OperationResult doEvaluation(boolean doGet, HttpServletRequest req, HttpServletResponse res) { + + /* + * Test method invocation + */ + /* ArgumentCaptor<Integer> responseCodeCaptor = ArgumentCaptor.forClass(Integer.class); + + try { + if (doGet) { + searchWrapper.doGet(req, res); + } else { + searchWrapper.doPost(req, res); + } + } catch (ServletException exc) { + fail(ExceptionHelper.extractStackTraceElements(5, exc)); + } catch (IOException exc) { + fail(ExceptionHelper.extractStackTraceElements(5, exc)); + } + + responseStringWriter.flush(); + Mockito.verify(commonResponse, Mockito.atLeast(1)).setStatus(responseCodeCaptor.capture()); + + OperationResult result = new OperationResult(); + + result.setResultCode(responseCodeCaptor.getValue()); + result.setResult(responseStringWriter.toString()); + + return result; + + } + +}*/ diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/SearchableGroupsTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/SearchableGroupsTest.java new file mode 100644 index 0000000..3aaf17a --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/SearchableGroupsTest.java @@ -0,0 +1,73 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.viewandinspect; + +/** + * The Class SearchableGroupsTest. 
+ */ +public class SearchableGroupsTest { + + static final String TEST_RESOURCE_PATH = "/src/test/resources"; + + static final String GOOD_TEST_CONFIG = "{\"groups\": [" + "{" + "\"group-name\" : \"inventory\"," + + "\"search-paths\" : [\"cloud-infrastructure\", \"business\", \"network\"]" + "}," + "{" + + "\"group-name\" : \"cloud-infrastructure\"," + + "\"search-paths\" : [\"complexes\", \"cloud-regions\", \"pservers\"]" + "}" + "]" + "}"; + /* + * @Before public void init() throws NoSuchFieldException, SecurityException, + * IllegalArgumentException, IllegalAccessException { Field instance = + * SearchableGroups.class.getDeclaredField("instance"); instance.setAccessible(true); + * instance.set(null, null); } + * + * @Test public void test_FileNotFound() throws ElasticSearchOperationException { + * System.setProperty("AJSC_HOME", ""); SearchableGroups testGroups = + * SearchableGroups.getTestInstance(); assertTrue(testGroups.getGroups().isEmpty()); } + * + * @Test public void test_FileFoundWithProperlyFormatedConfig() throws + * ElasticSearchOperationException { ResolverUtils testUtils = + * Mockito.mock(ResolverUtils.class); + * Mockito.when(testUtils.getConfigSettings(anyString())).thenReturn(GOOD_TEST_CONFIG); + * SearchableGroups testGroups = SearchableGroups.getTestInstance(); + * + * testGroups.setUtils(testUtils); testGroups.initSearchableGroups(); + * + * assertFalse(testGroups.getGroups().isEmpty()); + * + * assertFalse(testGroups.getSearchableGroups("inventory").isEmpty()); } + * + * @Test public void test_FileFoundGroupDoesNotExist() throws + * ElasticSearchOperationException { + * ResolverUtils testUtils = Mockito.mock(ResolverUtils.class); + * Mockito.when(testUtils.getConfigSettings(anyString())).thenReturn(GOOD_TEST_CONFIG); + * SearchableGroups testGroups = SearchableGroups.getTestInstance(); + * + * testGroups.setUtils(testUtils); testGroups.initSearchableGroups(); + * + * assertFalse(testGroups.getGroups().isEmpty()); + * + * assertEquals(null, testGroups.getSearchableGroups("Test")); } + */ +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/ViewAndInspectSearchRequestTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/ViewAndInspectSearchRequestTest.java new file mode 100644 index 0000000..7c05143 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/ViewAndInspectSearchRequestTest.java @@ -0,0 +1,81 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.viewandinspect; + + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.runners.MockitoJUnitRunner; +import org.onap.aai.sparky.search.entity.QuerySearchEntity; + +/** + * The Class ViewAndInspectSearchRequestTest. + */ +@RunWith(MockitoJUnitRunner.class) +public class ViewAndInspectSearchRequestTest { + + /** + * Inits the. + * + * @throws Exception the exception + */ + @Before + public void init() throws Exception {} + + /** + * Validate basic construction. + */ + @Test + public void validateBasicConstruction() { + + QuerySearchEntity request = new QuerySearchEntity(); + + // test constructor defaults + assertNull(request.getQueryStr()); + assertEquals("10", request.getMaxResults()); + + request.setMaxResults("500"); + assertEquals("500", request.getMaxResults()); + + assertNull(request.getSearchTerms()); + + request.setQueryStr(""); + assertEquals(1, request.getSearchTerms().length); + + request.setQueryStr("t1"); + assertEquals(1, request.getSearchTerms().length); + + request.setQueryStr("t1 t2 t3"); + assertEquals(3, request.getSearchTerms().length); + + } + +} + diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/VisualizationTransformerTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/VisualizationTransformerTest.java new file mode 100644 index 0000000..3f5988f --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/VisualizationTransformerTest.java @@ -0,0 +1,77 @@ +package org.onap.aai.sparky.viewandinspect; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.subscription.config.SubscriptionConfig; +import org.onap.aai.sparky.util.OxmModelAndProcessorHelper; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; +import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode; +import org.onap.aai.sparky.viewandinspect.entity.GraphMeta; +import org.onap.aai.sparky.viewandinspect.entity.SparkyGraphNode; +import org.onap.aai.sparky.viewandinspect.services.VisualizationTransformer; +import org.onap.aai.sparky.viewandinspect.util.SchemaVisualizationTestDataBuilder; + +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +public class VisualizationTransformerTest { + + VisualizationTransformer testTransformer; + VisualizationConfigs visConfig; + SubscriptionConfig subConfig; + GraphMeta graphMeta; + OxmEntityLookup oxmEntityLookup; + + + @Before + public void init() throws Exception { + this.visConfig = new VisualizationConfigs(); + this.subConfig = new SubscriptionConfig(); + this.graphMeta = new GraphMeta(); + this.oxmEntityLookup = OxmModelAndProcessorHelper.getInstance().getOxmEntityLookup(); + + this.testTransformer = new VisualizationTransformer(visConfig, subConfig); + } + + @Test + public void 
testGenerateVisualizationOutput() throws JsonParseException, JsonMappingException, IOException { + ObjectMapper mapper = new ObjectMapper(); + mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + + JsonNode elasticValue = mapper.readValue(SchemaVisualizationTestDataBuilder.getRawRootNode(), JsonNode.class); + + } + + public ActiveInventoryNode generateActiveInventoryNode(String id, String type, String selfLink, String primaryKeyName, String primaryKeyValue) { + ActiveInventoryNode testAin = new ActiveInventoryNode(visConfig, oxmEntityLookup); + + testAin.setNodeId(id); + testAin.setEntityType(type); + testAin.setSelfLink(selfLink); + testAin.setPrimaryKeyName(primaryKeyName); + testAin.setPrimaryKeyValue(primaryKeyValue); + + return testAin; + } + + public SparkyGraphNode generateSparkyGraphNode(ActiveInventoryNode ain) { + + SparkyGraphNode testSparkyGraphNode = new SparkyGraphNode(ain, visConfig, subConfig); + + return testSparkyGraphNode; + } + + public Map<String, ActiveInventoryNode> generateFlatNodeArray() { + Map<String, ActiveInventoryNode> nodeArray = new HashMap<String, ActiveInventoryNode>(); + + return nodeArray; + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/config/VisualizationConfigTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/config/VisualizationConfigTest.java new file mode 100644 index 0000000..5dbf7bb --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/config/VisualizationConfigTest.java @@ -0,0 +1,62 @@ +package org.onap.aai.sparky.viewandinspect.config; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; + +public class VisualizationConfigTest { + + + private VisualizationConfigs visualConfig; + private ArrayList<String> shallowEntities; + + @Before + public void init() throws Exception { + visualConfig = new VisualizationConfigs(); + shallowEntities = new ArrayList<String>(); + + } + + + @Test + public void updateValues() { + + visualConfig.setShallowEntities(shallowEntities); + assertNotNull(visualConfig.getShallowEntities()); + visualConfig.setMakeAllNeighborsBidirectional(true); + assertTrue(visualConfig.makeAllNeighborsBidirectional()); + visualConfig.setSelectedSearchedNodeClassName("selectedsearchedNodeClass"); + assertNotNull(visualConfig.getSelectedSearchedNodeClassName()); + visualConfig.setGeneralNodeClassName("generalNodeClass"); + assertNotNull(visualConfig.getGeneralNodeClassName()); + visualConfig.setSearchNodeClassName("searchedNodeClass"); + assertNotNull(visualConfig.getSearchNodeClassName()); + visualConfig.setAaiEntityNodeDescriptors("/etc/aaiEntityNodeDescriptors.json"); + assertNotNull(visualConfig.getAaiEntityNodeDescriptors()); + visualConfig.setVisualizationDebugEnabled(true); + assertTrue(visualConfig.isVisualizationDebugEnabled()); + visualConfig.setMaxSelfLinkTraversalDepth(3); + assertEquals(3,visualConfig.getMaxSelfLinkTraversalDepth()); + visualConfig.setNumOfThreadsToFetchNodeIntegrity(25); + assertEquals(25,visualConfig.getNumOfThreadsToFetchNodeIntegrity()); + assertNotNull(visualConfig.toString()); + visualConfig.setAaiEntityNodeDescriptors(null); + assertNull(visualConfig.getAaiEntityNodeDescriptors()); + visualConfig.setGeneralNodeClassName(null); + 
assertNull(visualConfig.getGeneralNodeClassName()); + visualConfig.setSearchNodeClassName(null); + assertNull(visualConfig.getSearchNodeClassName()); + visualConfig.setSelectedSearchedNodeClassName(null); + assertNull(visualConfig.getSelectedSearchedNodeClassName()); + assertNotNull(visualConfig.toString()); + + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/ActiveInventoryNodeTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/ActiveInventoryNodeTest.java new file mode 100644 index 0000000..8469d1a --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/ActiveInventoryNodeTest.java @@ -0,0 +1,128 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ + +package org.onap.aai.sparky.viewandinspect.entity; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; + +public class ActiveInventoryNodeTest { + + private ActiveInventoryNode activeInventoryNode; + private ArrayList<String> lst = new ArrayList<String>(); + private OperationResult opResult; + private VisualizationConfigs visualConfig; + private OxmEntityLookup oxmEntityLookup; + + @Before + public void init() throws Exception { + + visualConfig = new VisualizationConfigs(); + oxmEntityLookup = new OxmEntityLookup(); + activeInventoryNode = new ActiveInventoryNode(visualConfig,oxmEntityLookup); + + } + + @Test + public void successfulInitializationAndUpdate() { + + activeInventoryNode.addQueryParam("query-1"); + activeInventoryNode.clearQueryParams(); + activeInventoryNode.addQueryParams(lst); + assertNotNull(activeInventoryNode.getQueryParams()); + assertNull(activeInventoryNode.getOxmModelLoader()); + activeInventoryNode.setSelfLinkDeterminationPending(true); + assertTrue(activeInventoryNode.isSelfLinkDeterminationPending()); + assertNotNull(activeInventoryNode.getState()); + assertNotNull(activeInventoryNode.getComplexGroups()); + assertNotNull(activeInventoryNode.getRelationshipLists()); + activeInventoryNode.setOpResult(opResult); + assertNull(activeInventoryNode.getOpResult()); + activeInventoryNode.setPrimaryKeyName("PrimaryKeyName"); + assertNotNull(activeInventoryNode.getPrimaryKeyName()); + activeInventoryNode.setNodeDepth(2); + assertEquals(2,activeInventoryNode.getNodeDepth()); + activeInventoryNode.setvisualizationConfig(visualConfig); + assertNotNull(activeInventoryNode.getvisualizationConfigs()); + activeInventoryNode.setNodeValidated(true); + assertTrue(activeInventoryNode.isNodeValidated()); + activeInventoryNode.setPrimaryKeyValue("PrimaryKeyValue"); + assertNotNull(activeInventoryNode.getPrimaryKeyValue()); + activeInventoryNode.setIgnoredByFilter(true); + assertTrue(activeInventoryNode.isIgnoredByFilter()); + activeInventoryNode.setNodeIssue(true); + assertTrue(activeInventoryNode.isNodeIssue()); + activeInventoryNode.setProcessedNeighbors(true); + assertTrue(activeInventoryNode.hasProcessedNeighbors()); + activeInventoryNode.setResolvedSelfLink(true); + assertTrue(activeInventoryNode.hasResolvedSelfLink()); + activeInventoryNode.addInboundNeighbor("InBoundNodeID"); + activeInventoryNode.addOutboundNeighbor("OutBoundNodeID"); + assertTrue(activeInventoryNode.hasNeighbors()); + activeInventoryNode.addInboundNeighbor(null); + activeInventoryNode.addOutboundNeighbor(null); + assertNotNull(activeInventoryNode.getInboundNeighbors()); + assertNotNull(activeInventoryNode.getOutboundNeighbors()); + assertTrue(activeInventoryNode.isAtMaxDepth()); + activeInventoryNode.setSelfLinkPendingResolve(true); + assertTrue(activeInventoryNode.isSelfLinkPendingResolve()); + activeInventoryNode.setRootNode(true); + assertTrue(activeInventoryNode.isRootNode()); + activeInventoryNode.setSelflinkRetrievalFailure(true); + assertTrue(activeInventoryNode.isSelflinkRetrievalFailure()); + activeInventoryNode.setSelfLinkProcessed(true); + 
assertTrue(activeInventoryNode.getSelfLinkProcessed()); + activeInventoryNode.setNodeIntegrityProcessed(true); + assertTrue(activeInventoryNode.getNodeIntegrityProcessed()); + assertFalse(activeInventoryNode.isDirectSelfLink()); + activeInventoryNode.setProcessingErrorOccurred(true); + assertTrue(activeInventoryNode.isProcessingErrorOccurred()); + activeInventoryNode.setNodeId("NodeId-1"); + assertNotNull(activeInventoryNode.getNodeId()); + activeInventoryNode.setSelfLink("selfLinkSet"); + assertNotNull(activeInventoryNode.getSelfLink()); + activeInventoryNode.setEntityType("EntityId-1"); + assertNotNull(activeInventoryNode.getEntityType()); + assertNotNull(activeInventoryNode.dumpNodeTree(true)); + assertNotNull(activeInventoryNode.getProcessingErrorCauses()); + assertNull(activeInventoryNode.calculateEditAttributeUri("Invalid-link")); + assertNull(activeInventoryNode.calculateEditAttributeUri("aai/v[\\d]/")); + activeInventoryNode.processPathedSelfLinkResponse("jsonResp","startNodeType","startNodeResourceKey"); + activeInventoryNode.processPathedSelfLinkResponse(null,"startNodeType","startNodeResourceKey"); + activeInventoryNode.processPathedSelfLinkResponse("","startNodeType","startNodeResourceKey"); + + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/D3VisualizationOutputTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/D3VisualizationOutputTest.java new file mode 100644 index 0000000..e1b9931 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/D3VisualizationOutputTest.java @@ -0,0 +1,46 @@ +package org.onap.aai.sparky.viewandinspect.entity; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; + +public class D3VisualizationOutputTest { + + private D3VisualizationOutput d3visualization; + private InlineMessage inlineMessage; + private GraphMeta graphMeta; + private ArrayList<SparkyGraphNode> nodes; + private ArrayList<SparkyGraphLink> links; + + @Before + public void init() throws Exception { + + d3visualization = new D3VisualizationOutput(); + nodes = new ArrayList<SparkyGraphNode>(); + links = new ArrayList<SparkyGraphLink>(); + graphMeta = new GraphMeta(); + inlineMessage = new InlineMessage("level-1","Violation"); + } + + + @Test + public void updateValues() { + + d3visualization.setInlineMessage(inlineMessage); + assertNotNull(d3visualization.getInlineMessage()); + d3visualization.addLinks(links); + d3visualization.addNodes(nodes); + d3visualization.setGraphMeta(graphMeta); + assertNotNull(d3visualization.getGraphMeta()); + d3visualization.pegCounter("pegCounter-1"); + + } + +}
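
Editor's note: the entity tests in this patch (VisualizationConfigTest, ActiveInventoryNodeTest, D3VisualizationOutputTest and the ones that follow) all use the same set-then-assert pattern over plain accessor pairs. A minimal, self-contained sketch of that pattern is shown here for reference only; the Widget class and its fields are hypothetical stand-ins and are not part of sparkybe-onap-service. Where the real tests only check assertNotNull after a setter, asserting the exact value (as below) gives a slightly stronger check.

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;

import org.junit.Before;
import org.junit.Test;

// Hypothetical POJO used only to illustrate the set-then-assert style of the
// surrounding entity tests; it is not a type from the Sparky codebase.
class Widget {
  private String name;
  private int depth;

  public String getName() { return name; }
  public void setName(String name) { this.name = name; }
  public int getDepth() { return depth; }
  public void setDepth(int depth) { this.depth = depth; }

  @Override
  public String toString() { return "Widget [name=" + name + ", depth=" + depth + "]"; }
}

public class WidgetTest {

  private Widget widget;

  @Before
  public void init() {
    widget = new Widget();
  }

  @Test
  public void updateValues() {
    // Exercise each accessor pair and assert the exact value that was set.
    widget.setName("node-1");
    assertEquals("node-1", widget.getName());
    widget.setDepth(2);
    assertEquals(2, widget.getDepth());
    assertNotNull(widget.toString());
  }
}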
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/EntityEntryTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/EntityEntryTest.java new file mode 100644 index 0000000..9584d2b --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/EntityEntryTest.java @@ -0,0 +1,94 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.viewandinspect.entity; + + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.io.IOException; +import java.security.NoSuchAlgorithmException; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.runners.MockitoJUnitRunner; +import org.onap.aai.sparky.viewandinspect.entity.EntityEntry; + +/** + * The Class EntityEntryTest. + */ +@RunWith(MockitoJUnitRunner.class) +public class EntityEntryTest { + + /** + * Inits the. + * + * @throws Exception the exception + */ + @Before + public void init() throws Exception {} + + /** + * Validate basic construction. + * + * @throws NoSuchAlgorithmException the no such algorithm exception + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + @Test + public void validateBasicConstruction() throws NoSuchAlgorithmException, IOException { + + EntityEntry entityEntry = new EntityEntry(); + + entityEntry.setEntityType("ShinyEntityType"); + entityEntry.setEntityPrimaryKeyValue("primary_key_value"); + entityEntry.setSearchTags("t1 t2 t3"); + entityEntry.setEntityId("Id-1"); + + assertEquals("ShinyEntityType", entityEntry.getEntityType()); + assertEquals("primary_key_value", entityEntry.getEntityPrimaryKeyValue()); + assertEquals("t1 t2 t3", entityEntry.getSearchTags()); + assertEquals("Id-1",entityEntry.getEntityId()); + assertNotNull(entityEntry.toString()); + + } + + @Test + public void nullEntry() throws NoSuchAlgorithmException, IOException { + + EntityEntry nullEntityEntry = new EntityEntry(); + + nullEntityEntry.setEntityType(null); + nullEntityEntry.setEntityPrimaryKeyValue(null); + nullEntityEntry.setSearchTags(null); + nullEntityEntry.setEntityId(null); + + assertNotNull(nullEntityEntry.toString()); + + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/GraphMetaTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/GraphMetaTest.java new file mode 100644 index 0000000..bd4e454 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/GraphMetaTest.java @@ -0,0 +1,61 @@ +package org.onap.aai.sparky.viewandinspect.entity; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.viewandinspect.EntityTypeAggregation; + +import com.fasterxml.jackson.databind.node.JsonNodeFactory; + +public class GraphMetaTest { + + private GraphMeta graphMeta; + private EntityTypeAggregation entitySummary; + private com.fasterxml.jackson.databind.JsonNode aaiEntityNodeDescriptors; + + + @Before + public void init() throws Exception { + + graphMeta = new GraphMeta(); + entitySummary = new EntityTypeAggregation(); + aaiEntityNodeDescriptors = JsonNodeFactory.instance.objectNode(); + + } + + + @Test + public void updateValues() { + + graphMeta.setEntitySummary(entitySummary); + assertNotNull(graphMeta.getEntitySummary()); + graphMeta.setAaiEntityNodeDescriptors(aaiEntityNodeDescriptors); + assertNotNull(graphMeta.getAaiEntityNodeDescriptors()); + + graphMeta.setNumLinksResolvedSuccessfullyFromCache(3); + assertEquals(3,graphMeta.getNumLinksResolvedSuccessfullyFromCache()); + graphMeta.setNumLinksResolvedSuccessfullyFromServer(25); + assertEquals(25,graphMeta.getNumLinksResolvedSuccessfullyFromServer()); + graphMeta.setNumLinkResolveFailed(3); + assertEquals(3,graphMeta.getNumLinkResolveFailed()); + graphMeta.setNumNodes(25); + assertEquals(25,graphMeta.getNumNodes()); + graphMeta.setNumLinks(3); + assertEquals(3,graphMeta.getNumLinks()); + graphMeta.setRenderTimeInMs(25); + assertEquals(25,graphMeta.getRenderTimeInMs()); + assertNotNull(graphMeta.toString()); + + graphMeta.setEntitySummary(null); + assertNull(graphMeta.getEntitySummary()); + graphMeta.setAaiEntityNodeDescriptors(null); + assertNull(graphMeta.getAaiEntityNodeDescriptors()); + assertNotNull(graphMeta.toString()); + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/GraphRequestTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/GraphRequestTest.java new file mode 100644 index 0000000..7eebb23 --- /dev/null +++ 
b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/GraphRequestTest.java @@ -0,0 +1,61 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.viewandinspect.entity; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.viewandinspect.entity.GraphRequest; + +public class GraphRequestTest { + + private GraphRequest graphRequest; + + @Before + public void init() throws Exception { + graphRequest = new GraphRequest(); + + } + + @Test + public void updateValues() { + + graphRequest.setHashId("364c836b7f4c0d2a5b917693719741fa5e576b3da818a"); + assertNotNull(graphRequest.getHashId()); + graphRequest.setIncludeGraphMeta(true); + assertTrue(graphRequest.isIncludeGraphMeta()); + assertNotNull(graphRequest.toString()); + graphRequest.setHashId(null); + assertNull(graphRequest.getHashId()); + assertNotNull(graphRequest.toString()); + + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/InlineMessageTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/InlineMessageTest.java new file mode 100644 index 0000000..eebc821 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/InlineMessageTest.java @@ -0,0 +1,55 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.viewandinspect.entity; + +import static org.junit.Assert.assertNotNull; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.viewandinspect.entity.InlineMessage; + +public class InlineMessageTest { + + private InlineMessage inlineMsg; + + @Before + public void init() throws Exception { + inlineMsg = new InlineMessage("InlineMessage1","InlineMessage2"); + + } + + @Test + public void successfulInitializationAndUpdate() { + + inlineMsg.setLevel("Level-1"); + assertNotNull(inlineMsg.getLevel()); + inlineMsg.setMessage("InlineMessage3"); + assertNotNull(inlineMsg.getMessage()); + assertNotNull(inlineMsg.toString()); + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/JsonNodeLinkTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/JsonNodeLinkTest.java new file mode 100644 index 0000000..86b9014 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/JsonNodeLinkTest.java @@ -0,0 +1,57 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.viewandinspect.entity; +/* +import static org.junit.Assert.assertNotNull; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.viewandinspect.entity.JsonNodeLink; + +public class JsonNodeLinkTest { + + private JsonNodeLink jsonNodeLink; + + @Before + public void init() throws Exception { + jsonNodeLink = new JsonNodeLink(); + + } + + @Test + public void updateValues() { + + jsonNodeLink.setId("ID-1"); + assertNotNull(jsonNodeLink.getId()); + jsonNodeLink.setSource("SourceSet"); + assertNotNull(jsonNodeLink.getSource()); + jsonNodeLink.setTarget("targetSet"); + assertNotNull(jsonNodeLink.getTarget()); + assertNotNull(jsonNodeLink.toString()); + } + +} +*/
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/NodeDebugTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/NodeDebugTest.java new file mode 100644 index 0000000..b8450be --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/NodeDebugTest.java @@ -0,0 +1,57 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.viewandinspect.entity; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.viewandinspect.entity.NodeDebug; + +public class NodeDebugTest { + + + + private NodeDebug nodeDebug; + + @Before + public void init() throws Exception { + nodeDebug = new NodeDebug(); + + } + + @Test + public void updateValues() { + + nodeDebug.setProcessingErrorCauses("NullPointerException"); + assertNotNull(nodeDebug.getProcessingErrorCauses()); + nodeDebug.setProcessingError(true); + assertTrue(nodeDebug.isProcessingError()); + nodeDebug.setMaxTraversalDepthReached(true); + assertTrue(nodeDebug.isMaxTraversalDepthReached()); + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/NodeMetaTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/NodeMetaTest.java new file mode 100644 index 0000000..7ebe3c6 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/NodeMetaTest.java @@ -0,0 +1,88 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.viewandinspect.entity; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; +import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingState; + +public class NodeMetaTest { + + private NodeMeta nodeMeta; + private NodeProcessingState nodeProcessingState; + private NodeDebug nodeDebug; + private VisualizationConfigs visualizationConfigs; + + @Before + public void init() throws Exception { + this.visualizationConfigs = new VisualizationConfigs(); + nodeMeta = new NodeMeta(visualizationConfigs); + nodeDebug = new NodeDebug(); + + } + + @Test + public void updateValues() { + + nodeMeta.setAtMaxDepth(true); + assertTrue(nodeMeta.isAtMaxDepth()); + nodeMeta.setSelfLinkResolved(true); + assertTrue(nodeMeta.isSelfLinkResolved()); + nodeMeta.setProcessingState(nodeProcessingState.READY); + assertNotNull(nodeMeta.getProcessingState()); + nodeMeta.setProcessingErrorOccurred(true); + assertTrue(nodeMeta.isProcessingErrorOccurred()); + nodeMeta.setHasNeighbors(true); + assertTrue(nodeMeta.isHasNeighbors()); + nodeMeta.setNeighborsProcessed(true); + assertTrue(nodeMeta.isNeighborsProcessed()); + nodeMeta.setNodeDepth(3); + assertEquals(3,nodeMeta.getNodeDepth()); + nodeMeta.setNodeDebug(nodeDebug); + assertNotNull(nodeMeta.getNodeDebug()); + nodeMeta.setClassName("generalNodeClass"); + assertNotNull(nodeMeta.getClassName()); + nodeMeta.setNumInboundNeighbors(34); + assertEquals(34,nodeMeta.getNumInboundNeighbors()); + nodeMeta.setNumOutboundNeighbors(43); + assertEquals(43,nodeMeta.getNumOutboundNeighbors()); + nodeMeta.setSelfLinkResponseTimeInMs(43); + assertEquals(43,nodeMeta.getSelfLinkResponseTimeInMs()); + nodeMeta.setNodeIssue(true); + assertTrue(nodeMeta.isNodeIssue()); + nodeMeta.setNodeValidated(true); + assertTrue(nodeMeta.isNodeValidated()); + nodeMeta.setSearchTarget(true); + assertTrue(nodeMeta.isSearchTarget()); + + + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/NodeProcessingTransactionTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/NodeProcessingTransactionTest.java new file mode 100644 index 0000000..a50ea3b --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/NodeProcessingTransactionTest.java @@ -0,0 +1,62 @@ +package org.onap.aai.sparky.viewandinspect.entity; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; + +public class NodeProcessingTransactionTest { + + + private NodeProcessingTransaction nodeProcessingTransaction; + private ActiveInventoryNode activeInventoryNode; + private 
ArrayList<String> lst = new ArrayList<String>(); + private OperationResult opResult; + private VisualizationConfigs visualConfig; + private OxmEntityLookup oxmEntityLookup; + + @Before + public void init() throws Exception { + + visualConfig = new VisualizationConfigs(); + oxmEntityLookup = new OxmEntityLookup(); + opResult = new OperationResult(); + activeInventoryNode = new ActiveInventoryNode(visualConfig,oxmEntityLookup); + nodeProcessingTransaction = new NodeProcessingTransaction(); + + } + + + @Test + public void updateValues() { + + nodeProcessingTransaction.setRequestParameters(""); + assertNotNull(nodeProcessingTransaction.getRequestParameters()); + nodeProcessingTransaction.setProcessingNode(activeInventoryNode); + assertNotNull(nodeProcessingTransaction.getProcessingNode()); + nodeProcessingTransaction.setOpResult(opResult); + assertNotNull(nodeProcessingTransaction.getOpResult()); + assertNull(nodeProcessingTransaction.getSelfLink()); + assertNotNull(nodeProcessingTransaction.getSelfLinkWithModifiers()); + assertTrue(nodeProcessingTransaction.processingErrorOccurred()); + assertNotNull(nodeProcessingTransaction.toString()); + nodeProcessingTransaction.setProcessingNode(null); + assertNull(nodeProcessingTransaction.getProcessingNode()); + nodeProcessingTransaction.setOpResult(null); + assertNull(nodeProcessingTransaction.getOpResult()); + assertNull(nodeProcessingTransaction.getSelfLink()); + assertNull(nodeProcessingTransaction.getSelfLinkWithModifiers()); + assertNotNull(nodeProcessingTransaction.toString()); + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/QueryParamsTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/QueryParamsTest.java new file mode 100644 index 0000000..d1bd60b --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/QueryParamsTest.java @@ -0,0 +1,56 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ + +package org.onap.aai.sparky.viewandinspect.entity; + + +import static org.junit.Assert.assertNotNull; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.viewandinspect.entity.QueryParams; + +public class QueryParamsTest { + + private QueryParams parameterQuery; + + @Before + public void init() throws Exception { + parameterQuery = new QueryParams(); + + } + + @Test + public void updateValues() { + + parameterQuery.setSearchTargetPrimaryKeyValues("Internet Network"); + assertNotNull(parameterQuery.getSearchTargetPrimaryKeyValues()); + parameterQuery.setSearchTargetNodeId("Internet Network"); + assertNotNull(parameterQuery.getSearchTargetNodeId()); + } + + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/QueryRequestTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/QueryRequestTest.java new file mode 100644 index 0000000..fca4051 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/QueryRequestTest.java @@ -0,0 +1,54 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.viewandinspect.entity; + +import static org.junit.Assert.assertNotNull; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.viewandinspect.entity.QueryRequest; + +public class QueryRequestTest { + + private QueryRequest requestQuery; + + @Before + public void init() throws Exception { + requestQuery = new QueryRequest(); + + } + + @Test + public void updateValues() { + + requestQuery.setHashId("claymore-service-tree"); + assertNotNull(requestQuery.getHashId()); + assertNotNull(requestQuery.toString()); + requestQuery.setHashId(null); + assertNotNull(requestQuery.toString()); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/RelatedToPropertyTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/RelatedToPropertyTest.java new file mode 100644 index 0000000..48a724b --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/RelatedToPropertyTest.java @@ -0,0 +1,54 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. 
+ * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.viewandinspect.entity; + +import static org.junit.Assert.assertNotNull; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.viewandinspect.entity.RelatedToProperty; + +public class RelatedToPropertyTest { + + private RelatedToProperty relatedToProperty; + + @Before + public void init() throws Exception { + relatedToProperty = new RelatedToProperty(); + + } + + @Test + public void successfulInitializationAndUpdate() { + + relatedToProperty.setPropertyKey("PropertyKey-1"); + assertNotNull(relatedToProperty.getPropertyKey()); + relatedToProperty.setPropertyValue("PropertyValue-1"); + assertNotNull(relatedToProperty.getPropertyValue()); + assertNotNull(relatedToProperty.toString()); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipDataTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipDataTest.java new file mode 100644 index 0000000..e20e720 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipDataTest.java @@ -0,0 +1,54 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ + +package org.onap.aai.sparky.viewandinspect.entity; + + +import static org.junit.Assert.assertNotNull; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.viewandinspect.entity.RelationshipData; + +public class RelationshipDataTest { + + private RelationshipData relationshipData; + + @Before + public void init() throws Exception { + relationshipData = new RelationshipData(); + + } + + @Test + public void updateValues() { + + relationshipData.setRelationshipKey("RelationshipKey"); + assertNotNull(relationshipData.getRelationshipKey()); + relationshipData.setRelationshipValue("RelationshipValues"); + assertNotNull(relationshipData.getRelationshipValue()); + assertNotNull(relationshipData.toString()); + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipListTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipListTest.java new file mode 100644 index 0000000..3e81dbf --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipListTest.java @@ -0,0 +1,32 @@ +package org.onap.aai.sparky.viewandinspect.entity; + +import static org.junit.Assert.assertNotNull; + +import org.junit.Before; +import org.junit.Test; + +public class RelationshipListTest { + + + private Relationship[] relationship; + private RelationshipList relationshipList; + + + @Before + public void init() throws Exception { + + relationship = new Relationship[] {}; + relationshipList = new RelationshipList(); + + } + + + @Test + public void updateValues() { + + relationshipList.setRelationshipList(relationship); + assertNotNull(relationshipList.getRelationshipList()); + assertNotNull(relationshipList.toString()); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipTest.java new file mode 100644 index 0000000..2aeffe9 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/RelationshipTest.java @@ -0,0 +1,43 @@ +package org.onap.aai.sparky.viewandinspect.entity; + +import static org.junit.Assert.assertNotNull; + + + +import org.junit.Before; +import org.junit.Test; + +public class RelationshipTest { + + + private Relationship relationship; + private RelationshipData[] relationshipData; + private RelatedToProperty[] relatedToProperty; + + + @Before + public void init() throws Exception { + + relationshipData = new RelationshipData[] {}; + relatedToProperty = new RelatedToProperty[] {}; + relationship = new Relationship(); + + } + + + @Test + public void updateValues() { + + relationship.setRelatedLink(""); + assertNotNull(relationship.getRelatedLink()); + relationship.setRelatedTo("selectedsearchedNodeClass"); + assertNotNull(relationship.getRelatedTo()); + relationship.setRelationshipData(relationshipData); + assertNotNull(relationship.getRelationshipData()); + relationship.setRelatedToProperty(relatedToProperty); + assertNotNull(relationship.getRelatedToProperty()); + assertNotNull(relationship.toString()); + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/SearchResponseTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/SearchResponseTest.java new file mode 100644 index 0000000..6c08c8c --- /dev/null +++ 
b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/SearchResponseTest.java @@ -0,0 +1,52 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.viewandinspect.entity; + +import static org.junit.Assert.assertNotNull; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.search.SearchResponse; + +public class SearchResponseTest { + + private SearchResponse searchResponse; + + @Before + public void init() throws Exception { + searchResponse = new SearchResponse(); + + } + + @Test + public void updateValues() { + + assertNotNull(searchResponse.toString()); + searchResponse.setSuggestions(null); + assertNotNull(searchResponse.toString()); + + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/SearchableEntityListTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/SearchableEntityListTest.java new file mode 100644 index 0000000..d2c6513 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/SearchableEntityListTest.java @@ -0,0 +1,44 @@ +package org.onap.aai.sparky.viewandinspect.entity; + +import static org.junit.Assert.assertNotNull; + +import java.util.ArrayList; +import java.util.HashMap; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.sync.entity.SearchableEntity; + +public class SearchableEntityListTest { + + + private SearchableEntityList searchableEntityList; + private ArrayList<SearchableEntity> searchable; + private SearchableEntity entity; + private HashMap<String,String> searchTags; + + @Before + public void init() throws Exception { + + searchable = new ArrayList<SearchableEntity>(); + entity = new SearchableEntity(); + searchableEntityList = new SearchableEntityList(); + searchTags = new HashMap<String,String>(); + + } + + + @SuppressWarnings("static-access") + @Test + public void updateValues() { + + searchableEntityList.setEntities(searchable); + assertNotNull(searchableEntityList.getEntities()); + searchableEntityList.addEntity(entity); + searchableEntityList.buildEntity("","","",searchTags); + assertNotNull(searchableEntityList.getSearchTagMap("tagtagtag")); + assertNotNull(searchableEntityList.toString()); + } + + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/SelfLinkDeterminationTransactionTest.java 
b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/SelfLinkDeterminationTransactionTest.java new file mode 100644 index 0000000..8dd862a --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/SelfLinkDeterminationTransactionTest.java @@ -0,0 +1,68 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.viewandinspect.entity; + +import static org.junit.Assert.assertNotNull; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; + + +public class SelfLinkDeterminationTransactionTest { + + private SelfLinkDeterminationTransaction selfLinkDeterminationTransaction; + private OperationResult opResult; + private ActiveInventoryNode activeInventoryNode; + private VisualizationConfigs visualizationConfigs; + private OxmEntityLookup oxmEntityLookup; + + @Before + public void init() throws Exception { + selfLinkDeterminationTransaction = new SelfLinkDeterminationTransaction(); + opResult = new OperationResult(); + oxmEntityLookup = new OxmEntityLookup(); + activeInventoryNode = new ActiveInventoryNode(visualizationConfigs, oxmEntityLookup); + + } + + @Test + public void updateValues() { + + selfLinkDeterminationTransaction.setParentNodeId("ID-1"); + assertNotNull(selfLinkDeterminationTransaction.getParentNodeId()); + selfLinkDeterminationTransaction.setOpResult(opResult); + assertNotNull(selfLinkDeterminationTransaction.getOpResult()); + selfLinkDeterminationTransaction.setQueryString("QueryString"); + assertNotNull(selfLinkDeterminationTransaction.getQueryString()); + selfLinkDeterminationTransaction.setEntityUrl("EntityURL"); + assertNotNull(selfLinkDeterminationTransaction.getEntityUrl()); + selfLinkDeterminationTransaction.setNewNode(activeInventoryNode); + assertNotNull(selfLinkDeterminationTransaction.getNewNode()); + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphLinkTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphLinkTest.java new file mode 100644 index 0000000..20d6825 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphLinkTest.java @@ -0,0 +1,56 @@ +/** + * 
============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.onap.aai.sparky.viewandinspect.entity; + +import static org.junit.Assert.assertNotNull; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.viewandinspect.entity.SparkyGraphLink; + +public class SparkyGraphLinkTest { + + private SparkyGraphLink jsonNodeLink; + + @Before + public void init() throws Exception { + jsonNodeLink = new SparkyGraphLink(); + + } + + @Test + public void updateValues() { + + jsonNodeLink.setId("ID-1"); + assertNotNull(jsonNodeLink.getId()); + jsonNodeLink.setSource("SourceSet"); + assertNotNull(jsonNodeLink.getSource()); + jsonNodeLink.setTarget("targetSet"); + assertNotNull(jsonNodeLink.getTarget()); + assertNotNull(jsonNodeLink.toString()); + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphNodeTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphNodeTest.java new file mode 100644 index 0000000..a9a10c5 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/entity/SparkyGraphNodeTest.java @@ -0,0 +1,81 @@ +package org.onap.aai.sparky.viewandinspect.entity; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +import java.util.HashMap; + +import org.junit.Before; +import org.junit.Test; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.subscription.config.SubscriptionConfig; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; + +public class SparkyGraphNodeTest { + + + private SparkyGraphNode graphNode; + private ActiveInventoryNode activeInventoryNode; + private SubscriptionConfig subscriptionConfigs; + private VisualizationConfigs visualConfigs; + private OxmEntityLookup oxmEntityLookup; + private HashMap<String,String> itemProperties; + private NodeMeta nodeMeta; + + @Before + public void init() throws Exception { + + oxmEntityLookup = new OxmEntityLookup(); + visualConfigs = new VisualizationConfigs(); + subscriptionConfigs = new SubscriptionConfig(); + nodeMeta = new NodeMeta(visualConfigs); + itemProperties = new HashMap<String,String>(); + activeInventoryNode = new ActiveInventoryNode(visualConfigs,oxmEntityLookup); + + graphNode = new SparkyGraphNode(activeInventoryNode,visualConfigs,subscriptionConfigs); + + } + + + @Test + public void 
updateValues() { + + graphNode.setId("graphID"); + assertNotNull(graphNode.getId()); + assertFalse(graphNode.isRootNode()); + graphNode.setItemNameKey("selectedsearchedNodeClass"); + assertNotNull(graphNode.getItemNameKey()); + graphNode.setItemNameValue("generalNodeClass"); + assertNotNull(graphNode.getItemNameValue()); + graphNode.setResourceKey("searchedNodeClass"); + assertNotNull(graphNode.getResourceKey()); + graphNode.setItemType("/etc/aaiEntityNodeDescriptors.json"); + assertNotNull(graphNode.getItemType()); + + graphNode.setItemProperties(itemProperties); + assertNotNull(graphNode.getItemProperties()); + graphNode.setNodeMeta(nodeMeta); + assertNotNull(graphNode.getNodeMeta()); + assertNotNull(graphNode.toString()); + + graphNode.setId(null); + assertNull(graphNode.getId()); + assertFalse(graphNode.isRootNode()); + graphNode.setItemNameKey(null); + assertNull(graphNode.getItemNameKey()); + graphNode.setItemNameValue(null); + assertNull(graphNode.getItemNameValue()); + graphNode.setResourceKey(null); + assertNull(graphNode.getResourceKey()); + graphNode.setItemType(null); + assertNull(graphNode.getItemType()); + graphNode.setItemProperties(null); + assertNull(graphNode.getItemProperties()); + graphNode.setNodeMeta(null); + assertNull(graphNode.getNodeMeta()); + assertNotNull(graphNode.toString()); + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/services/BaseVisualizationContextTest.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/services/BaseVisualizationContextTest.java new file mode 100644 index 0000000..09ca3a3 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/services/BaseVisualizationContextTest.java @@ -0,0 +1,273 @@ +package org.onap.aai.sparky.viewandinspect.services; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.security.SecureRandom; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ExecutorService; + +import org.hamcrest.Matcher; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Matchers; +import org.mockito.Mockito; +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.OxmModelLoader; +import org.onap.aai.sparky.config.oxm.OxmModelProcessor; +import org.onap.aai.sparky.dal.ActiveInventoryAdapter; +import org.onap.aai.sparky.dal.rest.config.RestEndpointConfig; +import org.onap.aai.sparky.sync.entity.SearchableEntity; +import org.onap.aai.sparky.util.NodeUtils; +import org.onap.aai.sparky.util.StringCollectionContainsMatcher; +import org.onap.aai.sparky.util.TestResourceLoader; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; +import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode; +import org.onap.aai.sparky.viewandinspect.entity.QueryParams; +import org.onap.aai.sparky.viewandinspect.enumeration.NodeProcessingState; + + +public class BaseVisualizationContextTest { + + private static SecureRandom secureRandom = new SecureRandom(); + private static Logger LOG = LoggerFactory.getInstance().getLogger(BaseVisualizationContextTest.class); + + private BaseVisualizationContext baseVisualizationContext; + private ExecutorService 
aaiExecutorService; + private VisualizationConfigs visualizationConfig; + + private OxmEntityLookup oxmEntityLookup; + + private ActiveInventoryAdapter aaiAdapter; + private RestEndpointConfig aaiRestEndPointConfig; + + @Before + public void init() throws Exception { + + aaiExecutorService = NodeUtils.createNamedExecutor("SLNC-WORKER", 5, LOG); + visualizationConfig = new VisualizationConfigs(); + + ArrayList<String> shallowEntities = new ArrayList<String>(); + shallowEntities.add("cloud-region"); + + visualizationConfig.setShallowEntities(shallowEntities); + visualizationConfig.setMaxSelfLinkTraversalDepth(2); + + oxmEntityLookup = new OxmEntityLookup(); + + aaiAdapter = Mockito.mock(ActiveInventoryAdapter.class); + + + Set<OxmModelProcessor> processors = new HashSet<OxmModelProcessor>(); + + processors.add(oxmEntityLookup); + + + OxmModelLoader oxmModelLoader = new OxmModelLoader(-1, processors); + oxmModelLoader.loadLatestOxmModel(); + + aaiRestEndPointConfig = new RestEndpointConfig(); + aaiRestEndPointConfig.setNumRequestRetries(5); + + Mockito.when(aaiAdapter.getEndpointConfig()).thenReturn(aaiRestEndPointConfig); + + MdcContext.initialize("" + secureRandom.nextLong(), "AAI-UI", "", "partner-name", + "localhost:4242"); + + // all our resources are prefixed already, so the repairSelfLink shouldn't do anything to the link + Mockito.when(aaiAdapter.repairSelfLink(Matchers.contains(""))).thenReturn(""); + + + } + + private Matcher<List<String>> listContainsValue(String expectedValue) { + return new StringCollectionContainsMatcher(expectedValue); + } + + + @Test + public void validateBasicConstruction() throws Exception { + + long contextId = secureRandom.nextLong(); + + baseVisualizationContext = new BaseVisualizationContext(contextId, aaiAdapter, + aaiExecutorService, visualizationConfig, oxmEntityLookup); + + assertEquals(contextId, baseVisualizationContext.getContextId()); + + } + + @Test + public void validateSmallGraphAssembly() throws Exception { + + /** + * We have a tiny graph that we will validate assembly of: + * + * <li>customer -> tenant + * <li>customer -> service-subscription + * <li>service-subscription -> service-instance-1 + * <li>service-subscription -> service-instance-2 + * + * At the end of this success path, we should have 5 nodes in the node cache. Once we have this + * flow we can experiment with error paths involving resource download failures to ensure graph + * nodes are in the correct state and that expected nodes are successfully represented in the + * cache. 
+ */ + + long contextId = secureRandom.nextLong(); + + baseVisualizationContext = new BaseVisualizationContext(contextId, aaiAdapter, + aaiExecutorService, visualizationConfig, oxmEntityLookup); + + SearchableEntity searchableEntity = new SearchableEntity(); + String customerSelfLink = + "https://server.proxy:8443/aai/v11/business/customers/customer/customer-4"; + String customerNodeId = NodeUtils.generateUniqueShaDigest(customerSelfLink); + + searchableEntity.setId(customerNodeId); + searchableEntity.setEntityType("customer"); + searchableEntity.setEntityPrimaryKeyValue("customer-4"); + searchableEntity.setLink(customerSelfLink); + + QueryParams queryParams = new QueryParams(); + queryParams.setSearchTargetNodeId(customerNodeId); + queryParams.setSearchTargetPrimaryKeyValues("customer-4"); + + // aai customer resource dip + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("customer-4"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/customer/customer-4.json"))); + + // aai tenant resource dip + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("tenant/tenant-1"), Mockito.anyString(), + Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-resources/tenant/tenant-1.json"))); + + // generic-queries for service-subscription + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-subscription"), + Matchers.argThat(listContainsValue("service-subscription.service-type:service-subscription-2")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-subscription-2"); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers.contains("generic-query/service-subscription-2"), Mockito.anyString(), Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader + .getTestResourceDataJson("/sync/aai/aai-traversal/generic-query/service-subscription/service-subscription-2.json"))); + + // generic-queries for service-instance-1 + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat( + listContainsValue("service-instance-id:service-instance-54")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-id/service-instance-54"); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers + .contains("generic-query/service-instance-id/service-instance-54"), + Mockito.anyString(), Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance/service-instance-54.json"))); + + // generic-queries for service-instance-2 + + Mockito + .when(aaiAdapter.getGenericQueryForSelfLink(Matchers.contains("service-instance"), + Matchers.argThat( + listContainsValue("service-instance-id:service-instance-55")))) + .thenReturn( + "https://server.proxy:8443/aai/v11/search/generic-query/service-instance-id/service-instance-55"); + + Mockito + .when(aaiAdapter.queryActiveInventoryWithRetries( + Matchers + .contains("generic-query/service-instance-id/service-instance-55"), + Mockito.anyString(), Mockito.anyInt())) + .thenReturn(new OperationResult(200, TestResourceLoader.getTestResourceDataJson( + "/sync/aai/aai-traversal/generic-query/service-instance/service-instance-55.json"))); + + + + // start the test + 
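+ // Exercise the full graph-assembly flow against the mocked ActiveInventoryAdapter responses configured above; the node-cache assertions that follow validate the result.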
+ baseVisualizationContext.processSelfLinks(searchableEntity, queryParams); + + /* + * validation can be in the form of validating nodes + relationships from the node cache + * baseVisualizationContext.getNodeCache(); + */ + + Map<String, ActiveInventoryNode> nodeCache = baseVisualizationContext.getNodeCache(); + + assertEquals(5, nodeCache.size()); + assertNotNull(nodeCache.get(customerNodeId)); + assertEquals("customer", nodeCache.get(customerNodeId).getEntityType()); + + // verify node collection nodes + + ActiveInventoryNode customerNode = nodeCache.get("da4101ad19b3c380a1c12ffeda8ab390e1489fb4a22a392c9a1939db63c3dec5"); + ActiveInventoryNode ssNode = nodeCache.get("f4ceaf19459993c4fc9438a7579dd20d786109f4455e38682c579045b7ae615e"); + ActiveInventoryNode tenantNode = nodeCache.get("4735439b29e446b339535668238076e4b392eaa3eec218936e12f735179bc55e"); + ActiveInventoryNode s1 = nodeCache.get("f975ab453b142197af5d0173e0a9cf2aa22d10502f8ad655c8d17de81b066e8f"); + ActiveInventoryNode s2 = nodeCache.get("de77ef8f76dd6f19662b163527ff839891b9596cac655e3143fdd7ad39e2e4e3"); + + assertNotNull( customerNode ); + assertNotNull( ssNode ); + assertNotNull( tenantNode ); + assertNotNull( s1 ); + assertNotNull( s2 ); + + // verify node depths + + assertEquals( 0, customerNode.getNodeDepth() ); + assertEquals( 1, ssNode.getNodeDepth() ); + + /* + * I think there is a bug in the way the node depth is represented due to the enforcement of + * bidirectional links being disabled. We may have to circle back to this behavior at some point + * and re-verify that the behavior works properly. + */ + + assertEquals( 2, tenantNode.getNodeDepth() ); + assertEquals( 2, s1.getNodeDepth() ); + assertEquals( 2, s2.getNodeDepth() ); + + // verify node states + + assertEquals( NodeProcessingState.READY, customerNode.getState() ); + assertEquals( NodeProcessingState.READY, ssNode.getState() ); + + /* + * these nodes have a NEIGHBORS_UNPROCESSED state because the max traversal depth was hit before + * processing all the nested relationships. I think what we should look at is advancing the state + * to READY if in fact there are no relationships to process, which I think could be the case + * sometimes. + */ + assertEquals( NodeProcessingState.NEIGHBORS_UNPROCESSED, tenantNode.getState() ); + assertEquals( NodeProcessingState.NEIGHBORS_UNPROCESSED, s1.getState() ); + assertEquals( NodeProcessingState.NEIGHBORS_UNPROCESSED, s2.getState() ); + + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/sync/ViewInspectGizmoEntitySynchronizer.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/sync/ViewInspectGizmoEntitySynchronizer.java new file mode 100644 index 0000000..6d63a8a --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/sync/ViewInspectGizmoEntitySynchronizer.java @@ -0,0 +1,792 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.sync; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Deque; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.ExecutorService; +import java.util.function.Supplier; + +import org.onap.aai.cl.api.Logger; +import org.onap.aai.cl.eelf.LoggerFactory; +import org.onap.aai.cl.mdc.MdcContext; +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityContainerLookup; +import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.SearchableEntityLookup; +import org.onap.aai.sparky.config.oxm.SearchableOxmEntityDescriptor; +import org.onap.aai.sparky.dal.GizmoAdapter; +import org.onap.aai.sparky.dal.NetworkTransaction; +import org.onap.aai.sparky.dal.rest.HttpMethod; +import org.onap.aai.sparky.logging.AaiUiMsgs; +import org.onap.aai.sparky.sync.AbstractEntitySynchronizer; +import org.onap.aai.sparky.sync.IndexSynchronizer; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.entity.MergableEntity; +import org.onap.aai.sparky.sync.entity.SearchableEntity; +import org.onap.aai.sparky.sync.entity.SelfLinkDescriptor; +import org.onap.aai.sparky.sync.enumeration.OperationState; +import org.onap.aai.sparky.sync.enumeration.SynchronizerState; +import org.onap.aai.sparky.sync.task.PerformElasticSearchPut; +import org.onap.aai.sparky.sync.task.PerformElasticSearchRetrieval; +import org.onap.aai.sparky.sync.task.PerformElasticSearchUpdate; +import org.onap.aai.sparky.sync.task.PerformGizmoRetrieval; +import org.onap.aai.sparky.util.NodeUtils; +import org.slf4j.MDC; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectReader; +import com.fasterxml.jackson.databind.node.ArrayNode; + +/** + * The Class SearchableEntitySynchronizer. + */ +public class ViewInspectGizmoEntitySynchronizer extends AbstractEntitySynchronizer + implements IndexSynchronizer { + + /** + * The Class RetrySearchableEntitySyncContainer. + */ + private class RetrySearchableEntitySyncContainer { + NetworkTransaction txn; + SearchableEntity se; + + /** + * Instantiates a new retry searchable entity sync container. 
+ * + * @param txn the txn + * @param se the se + */ + public RetrySearchableEntitySyncContainer(NetworkTransaction txn, SearchableEntity se) { + this.txn = txn; + this.se = se; + } + + public NetworkTransaction getNetworkTransaction() { + return txn; + } + + public SearchableEntity getSearchableEntity() { + return se; + } + } + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(ViewInspectGizmoEntitySynchronizer.class); + + private boolean allWorkEnumerated; + private Deque<SelfLinkDescriptor> selflinks; + private Deque<RetrySearchableEntitySyncContainer> retryQueue; + private Map<String, Integer> retryLimitTracker; + protected ExecutorService esPutExecutor; + private OxmEntityLookup oxmEntityLookup; + private SearchableEntityLookup searchableEntityLookup; + private GizmoAdapter gizmoAdapter; + private OxmEntityContainerLookup entityContainerLookup; + + /** + * Instantiates a new searchable entity synchronizer. + * + * @param indexName the index name + * @throws Exception the exception + */ + public ViewInspectGizmoEntitySynchronizer(ElasticSearchSchemaConfig schemaConfig, + int internalSyncWorkers, int gizmoWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig, + NetworkStatisticsConfig esStatConfig, OxmEntityLookup oxmEntityLookup, + SearchableEntityLookup searchableEntityLookup, OxmEntityContainerLookup entityContainerLookup) throws Exception { + super(LOG, "SES", internalSyncWorkers, gizmoWorkers, esWorkers, schemaConfig.getIndexName(), + aaiStatConfig, esStatConfig); + + this.oxmEntityLookup = oxmEntityLookup; + this.searchableEntityLookup = searchableEntityLookup; + this.entityContainerLookup = entityContainerLookup; + this.allWorkEnumerated = false; + this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>(); + this.retryQueue = new ConcurrentLinkedDeque<RetrySearchableEntitySyncContainer>(); + this.retryLimitTracker = new ConcurrentHashMap<String, Integer>(); + this.synchronizerName = "Searchable Entity Synchronizer"; + this.esPutExecutor = NodeUtils.createNamedExecutor("SES-ES-PUT", 5, LOG); + this.aaiEntityStats.intializeEntityCounters( + searchableEntityLookup.getSearchableEntityDescriptors().keySet()); + this.esEntityStats.intializeEntityCounters( + searchableEntityLookup.getSearchableEntityDescriptors().keySet()); + this.syncDurationInMs = -1; + } + + + + public GizmoAdapter getGizmoAdapter() { + return gizmoAdapter; +} + + + +public void setGizmoAdapter(GizmoAdapter gizmoAdapter) { + this.gizmoAdapter = gizmoAdapter; +} + + + +/** + * Collect all the work. 
+ * + * @return the operation state + */ + private OperationState collectAllTheWork() { + final Map<String, String> contextMap = MDC.getCopyOfContextMap(); + + Collection<String> searchableEntityGroups = entityContainerLookup.getSearchableEntityGroups(); + + if (searchableEntityGroups.isEmpty()) { + LOG.error(AaiUiMsgs.ERROR_LOADING_OXM_SEARCHABLE_ENTITIES); + return OperationState.ERROR; + } + + + try { + + /* + * launch a parallel async thread to process the documents for each entity-type (up to the max + * of the configured executor anyway) + */ + + /*searchableEntityGroups = new ArrayList<String>(); + searchableEntityGroups.add("pservers");*/ + + aaiWorkOnHand.set(searchableEntityGroups.size()); + + for (String searchableEntityGroup : searchableEntityGroups) { + + supplyAsync(new Supplier<Void>() { + + @Override + public Void get() { + MDC.setContextMap(contextMap); + OperationResult typeLinksResult = null; + try { + typeLinksResult = gizmoAdapter.getSelfLinksByEntityType(searchableEntityGroup); + aaiWorkOnHand.decrementAndGet(); + processEntityTypeSelfLinks(typeLinksResult); + } catch (Exception exc) { + + exc.printStackTrace(); + } + + return null; + } + + }, aaiExecutor).whenComplete((result, error) -> { + + if (error != null) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "An error occurred getting data from AAI. Error = " + error.getMessage()); + } + }); + + } + + while (aaiWorkOnHand.get() != 0) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); + } + + Thread.sleep(1000); + } + + aaiWorkOnHand.set(selflinks.size()); + allWorkEnumerated = true; + syncEntityTypes(); + + while (!isSyncDone()) { + performRetrySync(); + Thread.sleep(1000); + } + + /* + * Make sure we don't hang on to retries that failed which could cause issues during future + * syncs + */ + retryLimitTracker.clear(); + + } catch (Exception exc) { + // TODO -> LOG, what should be logged here? + } + + return OperationState.OK; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() + */ + @Override + public OperationState doSync() { + this.syncDurationInMs = -1; + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "SearchableEntitySynchronizer", "", "Sync", ""); + + resetCounters(); + this.allWorkEnumerated = false; + syncStartedTimeStampInMs = System.currentTimeMillis(); + collectAllTheWork(); + + return OperationState.OK; + } + + /** + * Process entity type self links. + * + * @param operationResult the operation result + */ + private void processEntityTypeSelfLinks(OperationResult operationResult) { + + JsonNode rootNode = null; + + final String jsonResult = operationResult.getResult(); + + if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { + + try { + rootNode = mapper.readTree(jsonResult); + } catch (IOException exc) { + String message = "Could not deserialize JSON (representing operation result) as node tree. " + + "Operation result = " + jsonResult + ". 
" + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message); + } + + ArrayNode resultDataArrayNode = null; + + if (rootNode.isArray()) { + resultDataArrayNode = (ArrayNode) rootNode; + + Iterator<JsonNode> elementIterator = resultDataArrayNode.elements(); + JsonNode element = null; + + while (elementIterator.hasNext()) { + element = elementIterator.next(); + + final String id = NodeUtils.getNodeFieldAsText(element, "id"); + final String type = NodeUtils.getNodeFieldAsText(element, "type"); + final String url = NodeUtils.getNodeFieldAsText(element, "url"); + + String resourceLink; + try { + resourceLink = gizmoAdapter.getFullInventoryUrl(type + "/" + id); + selflinks.add(new SelfLinkDescriptor(NodeUtils.extractRawGizmoPathWithoutVersion(resourceLink), null, type)); + } catch (Exception e) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, "ERROR: Failed to determine resource link caused by " + e.getMessage()); + } + + } + } + } + + } + + /** + * Sync entity types. + */ + private void syncEntityTypes() { + + while (selflinks.peek() != null) { + + SelfLinkDescriptor linkDescriptor = selflinks.poll(); + aaiWorkOnHand.decrementAndGet(); + + OxmEntityDescriptor descriptor = null; + + if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) { + + descriptor = oxmEntityLookup.getEntityDescriptors().get(linkDescriptor.getEntityType()); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType()); + continue; + } + + NetworkTransaction txn = new NetworkTransaction(); + txn.setDescriptor(descriptor); + txn.setLink(linkDescriptor.getSelfLink()); + txn.setOperationType(HttpMethod.GET); + txn.setEntityType(linkDescriptor.getEntityType()); + + aaiWorkOnHand.incrementAndGet(); + + supplyAsync(new PerformGizmoRetrieval(txn, gizmoAdapter), aaiExecutor) + .whenComplete((result, error) -> { + + aaiWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage()); + } else { + if (result == null) { + LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK, + linkDescriptor.getSelfLink()); + } else { + updateActiveInventoryCounters(result); + fetchDocumentForUpsert(result); + } + } + }); + } + + } + + } + + /** + * Perform document upsert. + * + * @param esGetTxn the es get txn + * @param se the se + */ + protected void performDocumentUpsert(NetworkTransaction esGetTxn, SearchableEntity se) { + /** + * <p> + * <ul> + * As part of the response processing we need to do the following: + * <li>1. Extract the version (if present), it will be the ETAG when we use the + * Search-Abstraction-Service + * <li>2. 
Spawn next task which is to do the PUT operation into elastic with or without the version + * tag + * <li>a) if version is null or RC=404, then standard put, no _update with version tag + * <li>b) if version != null, do PUT with _update?version= versionNumber in the URI to elastic + * </ul> + * </p> + */ + String link = null; + try { + link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), se.getId()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); + return; + } + + String versionNumber = null; + boolean wasEntryDiscovered = false; + if (esGetTxn.getOperationResult().getResultCode() == 404) { + LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, se.getEntityPrimaryKeyValue()); + } else if (esGetTxn.getOperationResult().getResultCode() == 200) { + wasEntryDiscovered = true; + try { + versionNumber = NodeUtils.extractFieldValueFromObject( + NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()), + "_version"); + } catch (IOException exc) { + String message = + "Error extracting version number from response, aborting searchable entity sync of " + + se.getEntityPrimaryKeyValue() + ". Error - " + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); + return; + } + } else { + /* + * A non-200 code is not necessarily a failure; e.g. 201 is returned for a created resource. + * TODO -> should we return here? + */ + LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE, + String.valueOf(esGetTxn.getOperationResult().getResultCode())); + return; + } + + try { + String jsonPayload = null; + if (wasEntryDiscovered) { + try { + ArrayList<JsonNode> sourceObject = new ArrayList<JsonNode>(); + NodeUtils.extractObjectsByKey( + NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()), + "_source", sourceObject); + + if (!sourceObject.isEmpty()) { + String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false); + MergableEntity me = mapper.readValue(responseSource, MergableEntity.class); + ObjectReader updater = mapper.readerForUpdating(me); + MergableEntity merged = updater.readValue(NodeUtils.convertObjectToJson(se,false)); + jsonPayload = mapper.writeValueAsString(merged); + } + } catch (IOException exc) { + String message = + "Error extracting source value from response, aborting searchable entity sync of " + + se.getEntityPrimaryKeyValue() + ". 
Error - " + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); + return; + } + } else { + jsonPayload = se.getAsJson(); + } + + if (wasEntryDiscovered) { + if (versionNumber != null && jsonPayload != null) { + + String requestPayload = elasticSearchAdapter.buildBulkImportOperationRequest(getIndexName(), + "default", se.getId(), versionNumber, jsonPayload); + + NetworkTransaction transactionTracker = new NetworkTransaction(); + transactionTracker.setEntityType(esGetTxn.getEntityType()); + transactionTracker.setDescriptor(esGetTxn.getDescriptor()); + transactionTracker.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync(new PerformElasticSearchUpdate(elasticSearchAdapter.getBulkUrl(), + requestPayload, elasticSearchAdapter, transactionTracker), esPutExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = "Searchable entity sync UPDATE PUT error - " + + error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetTxn, se); + } + }); + } + + } else { + + if (link != null && jsonPayload != null) { + + NetworkTransaction updateElasticTxn = new NetworkTransaction(); + updateElasticTxn.setLink(link); + updateElasticTxn.setEntityType(esGetTxn.getEntityType()); + updateElasticTxn.setDescriptor(esGetTxn.getDescriptor()); + updateElasticTxn.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, elasticSearchAdapter), + esPutExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = + "Searchable entity sync UPDATE PUT error - " + error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetTxn, se); + } + }); + } + } + } catch (Exception exc) { + String message = "Exception caught during searchable entity sync PUT operation. Message - " + + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } + } + + /** + * Populate searchable entity document. + * + * @param doc the doc + * @param result the result + * @param resultDescriptor the result descriptor + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + protected void populateSearchableEntityDocument(SearchableEntity doc, String result, + OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException { + + doc.setEntityType(resultDescriptor.getEntityName()); + + JsonNode entityNode = mapper.readTree(result); + + String id = NodeUtils.getNodeFieldAsText(entityNode, "id"); + String type = NodeUtils.getNodeFieldAsText(entityNode, "type"); + String url = NodeUtils.getNodeFieldAsText(entityNode, "url"); + + JsonNode properties = entityNode.get("properties"); + + Iterator<String> fieldNames = properties.fieldNames(); + + + + List<String> primaryKeyValues = new ArrayList<String>(); + String pkeyValue = null; + + SearchableOxmEntityDescriptor searchableDescriptor = searchableEntityLookup.getSearchableEntityDescriptors().get(resultDescriptor.getEntityName()); + + for (String keyName : searchableDescriptor.getPrimaryKeyAttributeNames()) { + pkeyValue = NodeUtils.getNodeFieldAsText(properties, keyName); + if (pkeyValue != null) { + primaryKeyValues.add(pkeyValue); + } else { + String message = "populateSearchableEntityDocument(), pKeyValue is null for entityType = " + + resultDescriptor.getEntityName(); + LOG.warn(AaiUiMsgs.WARN_GENERIC, message); + } + } + + final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/"); + doc.setEntityPrimaryKeyValue(primaryCompositeKeyValue); + + final List<String> searchTagFields = searchableDescriptor.getSearchableAttributes(); + + /* + * Based on configuration, use the configured field names for this entity-Type to build a + * multi-value collection of search tags for elastic search entity search criteria. + */ + for (String searchTagField : searchTagFields) { + String searchTagValue = NodeUtils.getNodeFieldAsText(properties, searchTagField); + if (searchTagValue != null && !searchTagValue.isEmpty()) { + doc.addSearchTagWithKey(searchTagValue, searchTagField); + } + } + } + + /** + * Fetch document for upsert. + * + * @param txn the txn + */ + private void fetchDocumentForUpsert(NetworkTransaction txn) { + if (!txn.getOperationResult().wasSuccessful()) { + String message = "Self link failure. 
Result - " + txn.getOperationResult().getResult(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + return; + } + + SearchableOxmEntityDescriptor searchableDescriptor = searchableEntityLookup + .getSearchableEntityDescriptors().get(txn.getDescriptor().getEntityName()); + + try { + if (searchableDescriptor.hasSearchableAttributes()) { + + final String jsonResult = txn.getOperationResult().getResult(); + if (jsonResult != null && jsonResult.length() > 0) { + + SearchableEntity se = new SearchableEntity(); + se.setLink( txn.getLink() ); + populateSearchableEntityDocument(se, jsonResult, searchableDescriptor); + se.deriveFields(); + + + String link = null; + try { + link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), se.getId()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction n2 = new NetworkTransaction(); + n2.setLink(link); + n2.setEntityType(txn.getEntityType()); + n2.setDescriptor(txn.getDescriptor()); + n2.setOperationType(HttpMethod.GET); + + esWorkOnHand.incrementAndGet(); + + supplyAsync(new PerformElasticSearchRetrieval(n2, elasticSearchAdapter), esExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, se); + } + }); + } + } + + } + } catch (JsonProcessingException exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, "Processing error while fetching document for elasticsearch update. Error: " + exc.getMessage() ); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, "Processing error while fetching document for elasticsearch update. Error: " + exc.getMessage() ); + } + } + + /** + * Process store document result. + * + * @param esPutResult the es put result + * @param esGetResult the es get result + * @param se the se + */ + private void processStoreDocumentResult(NetworkTransaction esPutResult, + NetworkTransaction esGetResult, SearchableEntity se) { + + OperationResult or = esPutResult.getOperationResult(); + + if (!or.wasSuccessful()) { + if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { + + if (shouldAllowRetry(se.getId())) { + esWorkOnHand.incrementAndGet(); + + RetrySearchableEntitySyncContainer rsc = + new RetrySearchableEntitySyncContainer(esGetResult, se); + retryQueue.push(rsc); + + String message = "Store document failed during searchable entity synchronization" + + " due to version conflict. Entity will be re-synced."; + LOG.warn(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } + } else { + String message = + "Store document failed during searchable entity synchronization with result code " + + or.getResultCode() + " and result message " + or.getResult(); + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } + } + } + + /** + * Perform retry sync. 
+ */ + private void performRetrySync() { + while (retryQueue.peek() != null) { + + RetrySearchableEntitySyncContainer rsc = retryQueue.poll(); + if (rsc != null) { + + SearchableEntity se = rsc.getSearchableEntity(); + NetworkTransaction txn = rsc.getNetworkTransaction(); + + String link = null; + try { + /* + * In this retry flow the se object has already derived its fields + */ + link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), se.getId()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction retryTransaction = new NetworkTransaction(); + retryTransaction.setLink(link); + retryTransaction.setEntityType(txn.getEntityType()); + retryTransaction.setDescriptor(txn.getDescriptor()); + retryTransaction.setOperationType(HttpMethod.GET); + + /* + * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already + * called incrementAndGet when queuing the failed PUT! + */ + + supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, elasticSearchAdapter), + esExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, se); + } + }); + } + + } + } + } + + /** + * Should allow retry. + * + * @param id the id + * @return true, if successful + */ + private boolean shouldAllowRetry(String id) { + boolean isRetryAllowed = true; + if (retryLimitTracker.get(id) != null) { + Integer currentCount = retryLimitTracker.get(id); + if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { + isRetryAllowed = false; + String message = "Searchable entity re-sync limit reached for " + id + + ", re-sync will no longer be attempted for this entity"; + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } else { + Integer newCount = new Integer(currentCount.intValue() + 1); + retryLimitTracker.put(id, newCount); + } + } else { + Integer firstRetryCount = new Integer(1); + retryLimitTracker.put(id, firstRetryCount); + } + + return isRetryAllowed; + } + + @Override + public SynchronizerState getState() { + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) + */ + @Override + public String getStatReport(boolean showFinalReport) { + syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs; + return this.getStatReport(syncDurationInMs, showFinalReport); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() + */ + @Override + public void shutdown() { + this.shutdownExecutors(); + } + + @Override + protected boolean isSyncDone() { + int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); + + if (totalWorkOnHand > 0 || !allWorkEnumerated) { + return false; + } + + return true; + } + +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/sync/ViewInspectGizmoSyncController.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/sync/ViewInspectGizmoSyncController.java new file mode 100644 index 0000000..c2a1150 --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/sync/ViewInspectGizmoSyncController.java @@ -0,0 
+1,107 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * Copyright © 2017 Amdocs + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. + */ +package org.onap.aai.sparky.viewandinspect.sync; + +import org.onap.aai.sparky.config.oxm.OxmEntityContainerLookup; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.config.oxm.SearchableEntityLookup; +import org.onap.aai.sparky.dal.ElasticSearchAdapter; +import org.onap.aai.sparky.dal.GizmoAdapter; +import org.onap.aai.sparky.sync.ElasticSearchIndexCleaner; +import org.onap.aai.sparky.sync.ElasticSearchSchemaFactory; +import org.onap.aai.sparky.sync.IndexCleaner; +import org.onap.aai.sparky.sync.IndexIntegrityValidator; +import org.onap.aai.sparky.sync.SyncControllerImpl; +import org.onap.aai.sparky.sync.SyncControllerRegistrar; +import org.onap.aai.sparky.sync.SyncControllerRegistry; +import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig; +import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig; +import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig; +import org.onap.aai.sparky.sync.config.SyncControllerConfig; + +public class ViewInspectGizmoSyncController extends SyncControllerImpl + implements SyncControllerRegistrar { + + private SyncControllerRegistry syncControllerRegistry; + //private GizmoAdapter gizmoAdapter; + //private ElasticSearchAdapter esAdapter; + //private ElasticSearchSchemaConfig schemaConfig; + //private ElasticSearchEndpointConfig endpointConfig; + + public ViewInspectGizmoSyncController(SyncControllerConfig syncControllerConfig, + GizmoAdapter gizmoAdapter, ElasticSearchAdapter esAdapter, + ElasticSearchSchemaConfig schemaConfig, ElasticSearchEndpointConfig endpointConfig, + NetworkStatisticsConfig gizmoStatConfig, NetworkStatisticsConfig esStatConfig, + OxmEntityLookup oxmEntityLookup, + SearchableEntityLookup searchableEntityLookup, OxmEntityContainerLookup oxmEntityContainerLookup, + ElasticSearchSchemaFactory elasticSearchSchemaFactory) throws Exception { + super(syncControllerConfig); + + // final String controllerName = "View and Inspect Entity Synchronizer"; + + //this.gizmoAdapter = gizmoAdapter; + //this.esAdapter = esAdapter; + //this.schemaConfig = schemaConfig; + //this.endpointConfig = endpointConfig; + + IndexIntegrityValidator indexValidator = new IndexIntegrityValidator(esAdapter, schemaConfig, + endpointConfig, elasticSearchSchemaFactory.getIndexSchema(schemaConfig)); + + registerIndexValidator(indexValidator); + + ViewInspectGizmoEntitySynchronizer ses = new 
ViewInspectGizmoEntitySynchronizer(schemaConfig, + syncControllerConfig.getNumInternalSyncWorkers(), + syncControllerConfig.getNumSyncActiveInventoryWorkers(), + syncControllerConfig.getNumSyncElasticWorkers(), gizmoStatConfig, esStatConfig, + oxmEntityLookup, searchableEntityLookup, oxmEntityContainerLookup); + + ses.setGizmoAdapter(gizmoAdapter); + ses.setElasticSearchAdapter(esAdapter); + + registerEntitySynchronizer(ses); + + IndexCleaner indexCleaner = + new ElasticSearchIndexCleaner(esAdapter, endpointConfig, schemaConfig); + + registerIndexCleaner(indexCleaner); + + } + + public SyncControllerRegistry getSyncControllerRegistry() { + return syncControllerRegistry; + } + + public void setSyncControllerRegistry(SyncControllerRegistry syncControllerRegistry) { + this.syncControllerRegistry = syncControllerRegistry; + } + + @Override + public void registerController() { + if ( syncControllerRegistry != null ) { + if ( syncControllerConfig.isEnabled()) { + syncControllerRegistry.registerSyncController(this); + } + } + + } +} diff --git a/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/util/SchemaVisualizationTestDataBuilder.java b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/util/SchemaVisualizationTestDataBuilder.java new file mode 100644 index 0000000..335e18a --- /dev/null +++ b/sparkybe-onap-service/src/test/java/org/onap/aai/sparky/viewandinspect/util/SchemaVisualizationTestDataBuilder.java @@ -0,0 +1,134 @@ +package org.onap.aai.sparky.viewandinspect.util; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import javax.json.Json; +import javax.json.JsonObjectBuilder; + +import org.onap.aai.restclient.client.OperationResult; +import org.onap.aai.sparky.config.oxm.OxmEntityLookup; +import org.onap.aai.sparky.util.OxmModelAndProcessorHelper; +import org.onap.aai.sparky.viewandinspect.config.VisualizationConfigs; +import org.onap.aai.sparky.viewandinspect.entity.ActiveInventoryNode; +import org.restlet.data.Status; + + +public class SchemaVisualizationTestDataBuilder { + + // All of these hash IDs were generated based off the self links of the aaiNodes populated below + public static final String ROOT_NODE_HASH_ID = "f2f7ca5b565b43d7ff7cffea26d3315caaaff709e03abac127604008fc323b23"; + public static final String DEPTH_ONE_NODE_HASH_ID = "f845ab53e23cad7b9a3fc31aeef7b32bd21a72ba0fcdf568b1cfb116f2cff06f"; + public static final String DEPTH_TWO_NODE_HASH_ID = "372a132a0cd9bacfe408b09e6518cf824dcfbf23297678ddc0a8bf4f55da5c66"; + public static final String DEPTH_ONE_NODE_TWO_HASH_ID = "56a248219414380145b40eaa41b0482da388baa7492020d37687a998951ecdf9"; + + Map<String, ActiveInventoryNode> aaiNodes; + + SchemaVisualizationTestDataBuilder() { + aaiNodes = new HashMap<String, ActiveInventoryNode>(); + } + + public static String getQueryRequest() { + JsonObjectBuilder rootNodeBuilder = Json.createObjectBuilder(); + rootNodeBuilder.add("hashId", "f2f7ca5b565b43d7ff7cffea26d3315caaaff709e03abac127604008fc323b23"); + return rootNodeBuilder.build().toString(); + } + + public static String getRawRootNode() { + + JsonObjectBuilder rootNodeBuilder = Json.createObjectBuilder(); + rootNodeBuilder.add("_index", "magical-test-index"); + rootNodeBuilder.add("_type", "default"); + rootNodeBuilder.add("_id", "f2f7ca5b565b43d7ff7cffea26d3315caaaff709e03abac127604008fc323b23"); // Yes, hashed the link below + rootNodeBuilder.add("_version", 1); + rootNodeBuilder.add("found", true); + + JsonObjectBuilder sourceObjBuilder = 
Json.createObjectBuilder(); + sourceObjBuilder.add("entityType", "vf-module"); + sourceObjBuilder.add("entityPrimaryKeyValue", "root-node-pimary-key-value"); + sourceObjBuilder.add("link", "/aai/v12/network/generic-vnfs/generic-vnf/root-nodes-generic-vnf/vf-modules/vf-module/root-node-pimary-key-value"); + sourceObjBuilder.add("searchTags", "root-node-pimary-key-value;root-node-name"); + sourceObjBuilder.add("searchTagIDs", "vf-module-id;vf-module-name"); + sourceObjBuilder.add("lastmodTimestamp", "2018-02-01T15:48:03.580+0000"); + + rootNodeBuilder.add("_source", sourceObjBuilder.build()); + + return rootNodeBuilder.build().toString(); + } + + public static OperationResult getSchemaVisResult() { + OperationResult result = new OperationResult(); + + result.setResult(Status.SUCCESS_OK.getCode(), "{}"); + + return result; + } + + public static void buildAaiGraph(VisualizationConfigs visualizationConfigs) throws IOException { + OxmEntityLookup lookerOfTheOxmEntities = OxmModelAndProcessorHelper.getInstance().getOxmEntityLookup(); + + ActiveInventoryNode rootNode = new ActiveInventoryNode(visualizationConfigs, lookerOfTheOxmEntities); + rootNode.setEntityType("vf-module"); + rootNode.setPrimaryKeyName("vf-module-id"); + rootNode.setPrimaryKeyValue("root-node-pimary-key-value"); + rootNode.setSelfLink("/aai/v12/network/generic-vnfs/generic-vnf/root-nodes-generic-vnf/vf-modules/vf-module/root-node-pimary-key-value"); + rootNode.addProperty("entityType", "vf-module"); + rootNode.addProperty("vf-module-id", "root-node-pimary-key-value"); + rootNode.addProperty("selfLink", "/aai/v12/network/generic-vnfs/generic-vnf/root-nodes-generic-vnf/vf-modules/vf-module/root-node-pimary-key-value"); + rootNode.addProperty("vf-module-name", "this-is-my-name-eh"); + rootNode.addProperty("is-base-vf-module", "false"); + rootNode.addProperty("uri", "network/generic-vnfs/generic-vnf/root-nodes-generic-vnf/vf-modules/vf-module/root-node-pimary-key-value"); + rootNode.addProperty("orchestration-status", "Created"); + rootNode.addProperty("resource-version", "432156789"); + rootNode.setNodeId(ROOT_NODE_HASH_ID); + + ActiveInventoryNode depthOneNode = new ActiveInventoryNode(visualizationConfigs, lookerOfTheOxmEntities); + depthOneNode.setEntityType("vserver"); + depthOneNode.setPrimaryKeyName("vserver-id"); + depthOneNode.setPrimaryKeyValue("depth-one-from-root-node-vserver-id"); + depthOneNode.setSelfLink("/aai/v12/cloud-infrastructure/cloud-regions/cloud-region/cloud-region-id/ice-cream/tenants/tenant/tenant-id/vservers/vserver/depth-one-from-root-node-vserver-id"); + depthOneNode.addProperty("entityType", "vserver"); + depthOneNode.addProperty("vserver-id", "depth-one-from-root-node-vserver-id"); + depthOneNode.addProperty("selfLink", "/aai/v12/cloud-infrastructure/cloud-regions/cloud-region/cloud-region-id/ice-cream/tenants/tenant/tenant-id/vservers/vserver/depth-one-from-root-node-vserver-id"); + depthOneNode.addProperty("vserver-name", "depth-one-vserver"); + depthOneNode.addProperty("in-maint", "false"); + depthOneNode.addProperty("vserver-name2", "depth-one-vserver-name2"); + depthOneNode.addProperty("vserver-selflink", "https://magicaltest.com:6666/v2/tenant-id/servers/depth-one-from-root-node-vserver-id"); + depthOneNode.addProperty("uri", "cloud-infrastructure/cloud-regions/cloud-region/cloud-region-id/ice-cream/tenants/tenant/tenant-id/vservers/vserver/depth-one-from-root-node-vserver-id"); + depthOneNode.addProperty("is-closed-loop-disabled", "false"); + depthOneNode.addProperty("resource-version", 
"123456789"); + depthOneNode.setNodeId(DEPTH_ONE_NODE_HASH_ID); + + ActiveInventoryNode depthTwoNode = new ActiveInventoryNode(visualizationConfigs, lookerOfTheOxmEntities); + depthTwoNode.setEntityType("pserver"); + depthTwoNode.setPrimaryKeyName("hostname"); + depthTwoNode.setPrimaryKeyValue("I-am-a-host"); + depthTwoNode.setSelfLink("/aai/v12/cloud-infrastructure/pservers/pserver/depth-two-from-root-node-pserver-id"); + depthTwoNode.addProperty("entityType", "pserver"); + depthTwoNode.addProperty("hostname", "I-am-a-host"); + depthTwoNode.addProperty("selfLink", "/aai/v12/cloud-infrastructure/pservers/pserver/depth-two-from-root-node-pserver-id"); + depthTwoNode.addProperty("in-maint", "false"); + depthTwoNode.addProperty("resource-version", "987654321"); + depthTwoNode.addProperty("pserver-id", "depth-two-from-root-node-pserver-id"); + depthTwoNode.addProperty("uri", "cloud-infrastructure/pservers/pserver/depth-two-from-root-node-pserver-id"); + depthTwoNode.setNodeId(DEPTH_TWO_NODE_HASH_ID); + + ActiveInventoryNode depthOneNodeTwo = new ActiveInventoryNode(visualizationConfigs, lookerOfTheOxmEntities); + depthOneNodeTwo.setEntityType("vserver"); + depthOneNodeTwo.setPrimaryKeyName("vserver-id"); + depthOneNodeTwo.setPrimaryKeyValue("depth-one-from-root-node-vserver-id-2"); + depthOneNodeTwo.setSelfLink("/aai/v12/cloud-infrastructure/cloud-regions/cloud-region/cloud-region-id/ice-cream/tenants/tenant/tenant-id/vservers/vserver/depth-one-from-root-node-vserver-id-2"); + depthOneNodeTwo.addProperty("entityType", "vserver"); + depthOneNodeTwo.addProperty("vserver-id", "depth-one-from-root-node-vserver-id-2"); + depthOneNodeTwo.addProperty("selfLink", "/aai/v12/cloud-infrastructure/cloud-regions/cloud-region/cloud-region-id/ice-cream/tenants/tenant/tenant-id/vservers/vserver/depth-one-from-root-node-vserver-id-2"); + depthOneNodeTwo.addProperty("in-maint", "false"); + depthOneNodeTwo.addProperty("resource-version", "678954321"); + depthOneNodeTwo.addProperty("vserver-name", "depth-one-vserver-2"); + depthOneNodeTwo.addProperty("vserver-name2", "depth-one-vserver-2-name2"); + depthOneNodeTwo.addProperty("vserver-selflink", "https://magicaltest.com:6666/v2/tenant-id/servers/depth-one-from-root-node-vserver-id-2"); + depthOneNodeTwo.addProperty("uri", "cloud-infrastructure/cloud-regions/cloud-region/cloud-region-id/ice-cream/tenants/tenant/tenant-id/vservers/vserver/depth-one-from-root-node-vserver-id-2"); + depthOneNodeTwo.addProperty("is-closed-loop-disabled", "false"); + depthOneNodeTwo.setNodeId(DEPTH_ONE_NODE_TWO_HASH_ID); + } +} diff --git a/sparkybe-onap-service/src/test/resources/bundleconfig/etc/appprops/source-of-truth.properties b/sparkybe-onap-service/src/test/resources/bundleconfig/etc/appprops/source-of-truth.properties new file mode 100644 index 0000000..f08722f --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/bundleconfig/etc/appprops/source-of-truth.properties @@ -0,0 +1,47 @@ +# Source of Truth mappings. 
This file maps an entity path to a source of truth identifier +# AAI v7 +/v7/network/ipsec-configurations/ipsec-configuration/requested-vig-address-type=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/requested-encryption-strength=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/requested-dmz-type=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/shared-dmz-network-address=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/requested-customer-name=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/ike-version=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/ikev1-authentication=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/ikev1-encryption=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/ikev1-dh-group=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/ikev1-am-group-id=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/ikev1-am-password=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/ikev1-sa-lifetime=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/ipsec-authentication=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/ipsec-encryption=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/ipsec-sa-lifetime=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/ipsec-pfs=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/xauth-userid=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/xauth-user-password=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/dpd-interval=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/dpd-frequency=service-manager +/v7/network/ipsec-configurations/ipsec-configuration/vig-servers=service-manager + +# AAI v8 +/v8/network/ipsec-configurations/ipsec-configuration/requested-vig-address-type=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/requested-encryption-strength=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/requested-dmz-type=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/shared-dmz-network-address=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/requested-customer-name=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/ike-version=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/ikev1-authentication=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/ikev1-encryption=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/ikev1-dh-group=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/ikev1-am-group-id=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/ikev1-am-password=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/ikev1-sa-lifetime=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/ipsec-authentication=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/ipsec-encryption=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/ipsec-sa-lifetime=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/ipsec-pfs=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/xauth-userid=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/xauth-user-password=service-manager 
+/v8/network/ipsec-configurations/ipsec-configuration/dpd-interval=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/dpd-frequency=service-manager +/v8/network/ipsec-configurations/ipsec-configuration/vig-servers=service-manager + diff --git a/sparkybe-onap-service/src/test/resources/es_test_scripts/commands.txt b/sparkybe-onap-service/src/test/resources/es_test_scripts/commands.txt new file mode 100644 index 0000000..5d25157 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/es_test_scripts/commands.txt @@ -0,0 +1,3 @@ +commands histoty +curl -XPUT localhost:9200/topographyhistorysearchindex?pretty --data-binary @topoHistoryConfigSettings.json +curl -XPUT localhost:9200/_bulk?pretty --data-binary @topoHistoryBulkLoad.json diff --git a/sparkybe-onap-service/src/test/resources/es_test_scripts/geoEntities.json b/sparkybe-onap-service/src/test/resources/es_test_scripts/geoEntities.json new file mode 100644 index 0000000..9af3978 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/es_test_scripts/geoEntities.json @@ -0,0 +1,6 @@ +{"index":{"_index":"topographicalsearchindex-localhost","_type":"default"} +{"pkey": "complex.TEST1", "entityType": "complex", "longitude": "-82.089844", "latitude": "33.642063", "selfLink": "http://localhost:8443/complex/TEST1"} +{"index":{"_index":"topographicalsearchindex-localhost","_type":"default"} +{"pkey": "complex.TEST2", "entityType": "complex", "longitude": "-114.785156", "latitude": "37.640335", "selfLink": "http://localhost:8443/complex/TEST2"} +{"index":{"_index":"topographicalsearchindex-localhost","_type":"default"} +{"pkey": "complex.TEST3", "entityType": "complex", "longitude": "-97.910156", "latitude": "27.595935", "selfLink": "http://localhost:8443/complex/TEST3"} diff --git a/sparkybe-onap-service/src/test/resources/es_test_scripts/prepareGeoEntityBulkImport.pl b/sparkybe-onap-service/src/test/resources/es_test_scripts/prepareGeoEntityBulkImport.pl new file mode 100644 index 0000000..67ed571 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/es_test_scripts/prepareGeoEntityBulkImport.pl @@ -0,0 +1,41 @@ +#!/usr/bin/perl + +use strict; +use warnings; + +my $filename = $ARGV[0]; +my $outputfile= $ARGV[1]; + +open my $fh_input, '<', $filename or die "Cannot open $filename: $!"; +open my $fh_output, '>', $outputfile or die "Cannot open $outputfile: $!"; + +while ( my $line = <$fh_input> ) { + chomp ($line); + + if ( $line =~ /(.*)(\".*\")(.*)/ ) { + + # we have seen examples of the status field containing quoted comma-delimited + # strings which is messing up parsing of the record data which is supposed to be + # comma-separated at the field level. This little block converts sections of + # this type of data into a single-quoted-string with a semi-colon delimiter instead. + + my $beforeBadStr = $1; + my $badStr = $2; + my $afterBadStr = $3; + + $badStr =~ s/,/;/g; + $badStr =~ s/"/'/g; + + $line = $beforeBadStr . $badStr . 
$afterBadStr ; + + } + + my @row = split(",", $line); + print $fh_output "{\"index\":{\"_index\":\"topographicalsearchindex-localhost\",\"_type\":\"default\"}\n"; + print $fh_output "{\"pkey\": \"$row[0]\", \"entityType\": \"$row[1]\", \"location\" : {\"lat\": \"$row[3]\", \"lon\": \"$row[2]\"}, \"selfLink\": \"$row[4]\"}\n"; + +} + +close($fh_input); +close($fh_output); + diff --git a/sparkybe-onap-service/src/test/resources/es_test_scripts/sampleGeoEntities.csv b/sparkybe-onap-service/src/test/resources/es_test_scripts/sampleGeoEntities.csv new file mode 100644 index 0000000..d149e39 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/es_test_scripts/sampleGeoEntities.csv @@ -0,0 +1,4 @@ +complex.TEST1,complex,-82.089844,33.642063,http://localhost:8443/complex/TEST1, +complex.TEST2,complex,-114.785156,37.640335,http://localhost:8443/complex/TEST2, +complex.TEST3,complex,-97.910156,27.595935,http://localhost:8443/complex/TEST3, +pserver.TEST1,pserver,-97.910156,27.595935,http://localhost:8443/pserver/TEST1
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/es_test_scripts/topoHistoryBulkLoad.json b/sparkybe-onap-service/src/test/resources/es_test_scripts/topoHistoryBulkLoad.json new file mode 100644 index 0000000..77d57f7 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/es_test_scripts/topoHistoryBulkLoad.json @@ -0,0 +1,24 @@ +{"index":{"_index":"topographyhistorysearchindex-localhost","_type":"default"} +{"count":4,"entityType":"vServer","timestamp":"31-01-2017 03:00:00"} +{"index":{"_index":"topographyhistorysearchindex-localhost","_type":"default"} +{"count":2,"entityType":"pServer","timestamp":"31-01-2017 03:00:00"} +{"index":{"_index":"topographyhistorysearchindex-localhost","_type":"default"} +{"count":3,"entityType":"pServer","timestamp":"31-01-2017 02:00:00"} +{"index":{"_index":"topographyhistorysearchindex-localhost","_type":"default"} +{"count":4,"entityType":"pServer","timestamp":"31-01-2017 03:00:00"} +{"index":{"_index":"topographyhistorysearchindex-localhost","_type":"default"} +{"count":2,"entityType":"vServer","timestamp":"31-01-2017 01:00:00"} +{"index":{"_index":"topographyhistorysearchindex-localhost","_type":"default"} +{"count":4,"entityType":"pServer","timestamp":"30-01-2017 03:00:00"} +{"index":{"_index":"topographyhistorysearchindex-localhost","_type":"default"} +{"count":4,"entityType":"generic-vnf","timestamp":"30-01-2017 03:00:00"} +{"index":{"_index":"topographyhistorysearchindex-localhost","_type":"default"} +{"count":4,"entityType":"vpe","timestamp":"31-01-2017 03:00:00"} +{"index":{"_index":"topographyhistorysearchindex-localhost","_type":"default"} +{"count":4,"entityType":"newvce","timestamp":"31-01-2017 03:00:00"} +{"index":{"_index":"topographyhistorysearchindex-localhost","_type":"default"} +{"count":4,"entityType":"vce","timestamp":"31-01-2017 03:00:00"} +{"index":{"_index":"topographyhistorysearchindex-localhost","_type":"default"} +{"count":4,"entityType":"vce","timestamp":"30-01-2017 03:00:00"} +{"index":{"_index":"topographyhistorysearchindex-localhost","_type":"default"} +{"count":4,"entityType":"vce","timestamp":"01-02-2017 03:00:00"} diff --git a/sparkybe-onap-service/src/test/resources/es_test_scripts/topoHistoryConfigSettings.json b/sparkybe-onap-service/src/test/resources/es_test_scripts/topoHistoryConfigSettings.json new file mode 100644 index 0000000..875813e --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/es_test_scripts/topoHistoryConfigSettings.json @@ -0,0 +1,20 @@ +{ + "topographyhistorysearchindex-localhost" : { + "mappings" : { + "default" : { + "properties" : { + "count" : { + "type" : "keyword" + }, + "entityType" : { + "type" : "keyword" + }, + "timestamp" : { + "type" : "date", + "format" : "MMM d y HH:m:s||dd-MM-yyyy HH:mm:ss||yyyy-MM-dd'T'HH:mm:ss.SSSZZ||MM/dd/yyyy||yyyyMMdd'T'HHmmssZ" + } + } + } + } + } +} diff --git a/sparkybe-onap-service/src/test/resources/es_test_scripts/topographicalConfigSettings.json b/sparkybe-onap-service/src/test/resources/es_test_scripts/topographicalConfigSettings.json new file mode 100644 index 0000000..c9f5d5d --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/es_test_scripts/topographicalConfigSettings.json @@ -0,0 +1,24 @@ +{ + "mappings": { + "default": { + "properties": { + "pkey": { + "type": "string" + }, + "entityType": { + "type": "string" + }, + "longitude": { + "type": "string" + }, + "latitude": { + "type": "string" + }, + "selfLink": { + "type": "string" + } + } + } + + } +} diff --git 
a/sparkybe-onap-service/src/test/resources/es_test_scripts/topographysearch_schema.json b/sparkybe-onap-service/src/test/resources/es_test_scripts/topographysearch_schema.json new file mode 100644 index 0000000..5de6904 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/es_test_scripts/topographysearch_schema.json @@ -0,0 +1,9 @@ +{ + "fields": [ + {"name": "pkey", "data-type": "string", "searchable": "false"}, + {"name": "entityType", "data-type": "string", "searchable": "false"}, + {"name": "latitude", "data-type": "string", "searchable": "false"}, + {"name": "longitude", "data-type": "string", "searchable": "false"}, + {"name": "selfLink", "data-type": "string", "searchable": "false"} + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/filters/AggregationSuggestionEntity_getIndexDocumentJson_expected.json b/sparkybe-onap-service/src/test/resources/filters/AggregationSuggestionEntity_getIndexDocumentJson_expected.json new file mode 100644 index 0000000..53e9ec9 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/filters/AggregationSuggestionEntity_getIndexDocumentJson_expected.json @@ -0,0 +1 @@ +"filterList":[{"filterId":"1"},{"filterId":"2"},{"filterId":"7"},{"filterId":"8"}]
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/filters/aaiui_filters.json b/sparkybe-onap-service/src/test/resources/filters/aaiui_filters.json new file mode 100644 index 0000000..31716ed --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/filters/aaiui_filters.json @@ -0,0 +1,88 @@ +{ + "filters": [ + { + "filterId": "1", + "filterName": "Orchestration-Status", + "displayName": "Orchestration Status", + "dataType": "list", + "dataSource": { + "indexName": "aggregate_generic-vnf_index", + "docType": "default", + "fieldName": "orchestration-status" + } + }, + { + "filterId": "2", + "filterName": "Prov-Status", + "displayName": "Provisioning Status", + "dataType": "list", + "dataSource": { + "indexName": "aggregate_generic-vnf_index", + "docType": "default", + "fieldName": "prov-status" + } + }, + { + "filterId": "3", + "filterName": "Severity", + "displayName": "Severity", + "dataType": "list", + "dataSource": { + "indexName": "di-violations", + "docType": "default", + "fieldName": "severity" + } + }, + { + "filterId": "4", + "filterName": "Category", + "displayName": "Category", + "dataType": "list", + "dataSource": { + "indexName": "di-violations", + "docType": "default", + "fieldName": "category" + } + }, + { + "filterId": "5", + "filterName": "Date", + "displayName": "Date", + "dataType": "date" + }, + { + "filterId": "6", + "filterName": "EntityType", + "displayName": "Object Type", + "dataType": "list", + "dataSource": { + "indexName": "di-violations", + "docType": "default", + "fieldName": "entityType" + } + }, + { + "filterId": "7", + "filterName": "NF-Type", + "displayName": "Network Function Type", + "dataType": "list", + "dataSource": { + "indexName": "aggregate_generic-vnf_index", + "docType": "default", + "fieldName": "nf-type" + } + }, + { + "filterId": "8", + "filterName": "NF-Role", + "displayName": "Network Function Role", + "dataType": "list", + "dataSource": { + "indexName": "aggregate_generic-vnf_index", + "docType": "default", + "fieldName": "nf-role" + } + } + + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/filters/aaiui_filters_testConfig.json b/sparkybe-onap-service/src/test/resources/filters/aaiui_filters_testConfig.json new file mode 100644 index 0000000..62b6811 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/filters/aaiui_filters_testConfig.json @@ -0,0 +1,79 @@ +{ + "filters": [ + { + "filterId": "1", + "filterName": "Orchestration-Status", + "displayName": "Orchestration Status", + "dataType": "dropDown", + "multiSelect": "false", + "watermark": "Any Orchestration Status", + "optionsType": "options", + "dataSource": { + "indexName": "aggregate_generic-vnf_index", + "docType": "default", + "fieldName": "orchestration-status" + } + }, + { + "filterId": "2", + "filterName": "Prov-Status", + "displayName": "Provisioning Status", + "dataType": "dropDown", + "multiSelect": "false", + "watermark": "Any Provisioning Status", + "optionsType": "options", + "dataSource": { + "indexName": "aggregate_generic-vnf_index", + "docType": "default", + "fieldName": "prov-status" + } + }, + { + "filterId": "5", + "filterName": "Date", + "displayName": "Date", + "dataType": "date", + "multiSelect": "false", + "watermark": "Choose Date Range", + "defaultValue" : {"decode": "Today", "code": "last_0_hours"}, + "optionsType": "dynamicOptions", + "optionsValues": [ + {"decode": "Today", "code": "last_0_hours"}, + {"decode": "Since Yesterday", "code": "last_1_days"}, + {"decode": "Since Last Week", "code": "last_1_weeks"}, + {"decode": "Since Last Month", "code": "last_1_months"}, + {"decode": "Since Last Year", "code": "last_1_years"}, + {"decode": "Custom Range", "code": "custom_range"} + ] + }, + { + "filterId": "7", + "filterName": "NF-Type", + "displayName": "Network Function Type", + "dataType": "dropDown", + "multiSelect": "false", + "watermark": "Any Network Function Type", + "optionsType": "options", + "dataSource": { + "indexName": "aggregate_generic-vnf_index", + "docType": "default", + "fieldName": "nf-type" + } + }, + { + "filterId": "8", + "filterName": "NF-Role", + "displayName": "Network Function Role", + "dataType": "dropDown", + "multiSelect": "false", + "watermark": "Any Network Function Role", + "optionsType": "options", + "dataSource": { + "indexName": "aggregate_generic-vnf_index", + "docType": "default", + "fieldName": "nf-role" + } + } + + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/filters/aaiui_views.json b/sparkybe-onap-service/src/test/resources/filters/aaiui_views.json new file mode 100644 index 0000000..e6ece9f --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/filters/aaiui_views.json @@ -0,0 +1,38 @@ +{ + "views": [ + { + "viewName" : "vnfSearch", + "filters" : [ + { + "filterId": "1" + }, + { + "filterId": "2" + }, + { + "filterId": "7" + }, + { + "filterId": "8" + } + ] + }, + { + "viewName" : "dataIntegrity", + "filters" : [ + { + "filterId": "3" + }, + { + "filterId": "4" + }, + { + "filterId": "5" + }, + { + "filterId": "6" + } + ] + } + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/filters/aaiui_views_testConfig.json b/sparkybe-onap-service/src/test/resources/filters/aaiui_views_testConfig.json new file mode 100644 index 0000000..9ca0119 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/filters/aaiui_views_testConfig.json @@ -0,0 +1,21 @@ +{ + "views": [ + { + "viewName" : "vnfSearch", + "filters" : [ + { + "filterId": "1" + }, + { + "filterId": "2" + }, + { + "filterId": "7" + }, + { + "filterId": "8" + } + ] + } + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/filters/filterAggregationEndpoint_emptyRequestBody_expectedResponse.json b/sparkybe-onap-service/src/test/resources/filters/filterAggregationEndpoint_emptyRequestBody_expectedResponse.json new file mode 100644 index 0000000..95a2b16 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/filters/filterAggregationEndpoint_emptyRequestBody_expectedResponse.json @@ -0,0 +1 @@ +{"count":0}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/filters/filterAggregationEndpoint_emptyRequestFilterArray_expectedResponse.json b/sparkybe-onap-service/src/test/resources/filters/filterAggregationEndpoint_emptyRequestFilterArray_expectedResponse.json new file mode 100644 index 0000000..36ae0a5 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/filters/filterAggregationEndpoint_emptyRequestFilterArray_expectedResponse.json @@ -0,0 +1 @@ +{"groupby_aggregation":[{"totalChartHits":0,"buckets":[]}]}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/filters/filterAggregationEndpoint_emptyRequestFilterArray_requestBody.json b/sparkybe-onap-service/src/test/resources/filters/filterAggregationEndpoint_emptyRequestFilterArray_requestBody.json new file mode 100644 index 0000000..ba7d987 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/filters/filterAggregationEndpoint_emptyRequestFilterArray_requestBody.json @@ -0,0 +1,3 @@ +{ + "filters": [] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/filters/filterAggregationEndpoint_successPath_expectedResponse.json b/sparkybe-onap-service/src/test/resources/filters/filterAggregationEndpoint_successPath_expectedResponse.json new file mode 100644 index 0000000..e2c5766 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/filters/filterAggregationEndpoint_successPath_expectedResponse.json @@ -0,0 +1 @@ +{"total":116,"aggregations":{"prov-status":[{"doc_count":77,"key":""},{"doc_count":2,"key":"PREPROV"}],"orchestration-status":[{"doc_count":116,"key":"Created"}]}} diff --git a/sparkybe-onap-service/src/test/resources/filters/filterAggregationEndpoint_successPath_operationResult.json b/sparkybe-onap-service/src/test/resources/filters/filterAggregationEndpoint_successPath_operationResult.json new file mode 100644 index 0000000..753a11f --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/filters/filterAggregationEndpoint_successPath_operationResult.json @@ -0,0 +1 @@ +{"took":39,"timed_out":false,"_shards":{"total":5,"successful":5,"failed":0},"hits":{"total":116,"max_score":0.0,"hits":[]},"aggregations":{"prov-status":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"","doc_count":77},{"key":"PREPROV","doc_count":2}]},"orchestration-status":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"Created","doc_count":116}]}}}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/filters/filterAggregationEndpoint_successPath_requestBody.json b/sparkybe-onap-service/src/test/resources/filters/filterAggregationEndpoint_successPath_requestBody.json new file mode 100644 index 0000000..c321760 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/filters/filterAggregationEndpoint_successPath_requestBody.json @@ -0,0 +1,11 @@ +{ + "filters": [ + { + "filterId": "1", + "filterValue": "Created" + }, + { + "filterId": "2" + } + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/portal/portal-authentication.properties b/sparkybe-onap-service/src/test/resources/portal/portal-authentication.properties new file mode 100644 index 0000000..c3c34d6 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/portal/portal-authentication.properties @@ -0,0 +1,2 @@ +username=testuser +password=OBF:1vv31vn61xtv1zlo1y0s1v1p1v2p1y0y1zlu1xtn1vnw1vu7
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/portal/roles.config b/sparkybe-onap-service/src/test/resources/portal/roles.config new file mode 100644 index 0000000..b8313bd --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/portal/roles.config @@ -0,0 +1,6 @@ +[ + { + "id":1, + "name":"View" + } +]
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/AfterSync_ElasticSearch_ScrollApi_Successful.json b/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/AfterSync_ElasticSearch_ScrollApi_Successful.json new file mode 100644 index 0000000..014e194 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/AfterSync_ElasticSearch_ScrollApi_Successful.json @@ -0,0 +1,171 @@ +{ + "_scroll_id": "cXVlcnlUaGVuRmV0Y2g7NTs1MzA6T2dvOXI5ekVSWkdTM0JBQ0tnR0Jvdzs1Mjg6T2dvOXI5ekVSWkdTM0JBQ0tnR0Jvdzs1Mjc6T2dvOXI5ekVSWkdTM0JBQ0tnR0Jvdzs1MjY6T2dvOXI5ekVSWkdTM0JBQ0tnR0Jvdzs1Mjk6T2dvOXI5ekVSWkdTM0JBQ0tnR0JvdzswOw==", + "took": 29, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 5, + "failed": 0 + }, + "hits": { + "total": 1345, + "max_score": 1, + "hits": [ + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "7defe67ea13549fcf8163c755db0d8595f32da23022966e3313281474a551742DWH", + "_score": 1, + "_source": { + "entityType": "generic-vnf", + "entityPrimaryKeyValue": "genericvnft2i12bd2", + "link": "/aai/v11/network/generic-vnfs/generic-vnf/genericvnft2i12bd2", + "searchTags": "genericvnft2i12bd2;genericvnft2i12bd2", + "searchTagIDs": "vnf-id;vnf-name", + "lastmodTimestamp": "2018-01-29T03:35:40.413-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "981d7d3e27d91cbf118b01e6b5415ecbe1622a0c6c2afe4e04f91919f37f8fbeDWH", + "_score": 1, + "_source": { + "entityType": "complex", + "entityPrimaryKeyValue": "STUB-609ccabb-7504-3e23-91a4-30d29776e792", + "link": "/aai/v11/cloud-infrastructure/complexes/complex/STUB-609ccabb-7504-3e23-91a4-30d29776e792", + "searchTags": "STUB-609ccabb-7504-3e23-91a4-30d29776e792;Sydney;BennelongPoint; 2000", + "searchTagIDs": "physical-location-id;complex-name;street1;postal-code", + "lastmodTimestamp": "2018-01-25T13:15:57.471-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "89788f6bbcddfe40a5248fdb2c46c380aaea66f61e56c66e00d815163ddf98c4DWH", + "_score": 1, + "_source": { + "entityType": "pnf", + "entityPrimaryKeyValue": "NSG34-ANZBank", + "link": "/aai/v11/network/pnfs/pnf/NSG34-ANZBank", + "searchTags": "NSG34-ANZBank", + "searchTagIDs": "pnf-name", + "lastmodTimestamp": "2018-01-25T13:15:50.890-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "76ec93a5740e6c519dbbe4f20c366bf8114273adc13db228bc3012658ec80e57DWH", + "_score": 1, + "_source": { + "entityType": "service-instance", + "entityPrimaryKeyValue": "84635d51-0183-4078-850b-8eeb1ac88a5a", + "link": "/aai/v11/business/customers/customer/ANZBank/service-subscriptions/service-subscription/SDWAN-CPE/service-instances/service-instance/84635d51-0183-4078-850b-8eeb1ac88a5a", + "searchTags": "84635d51-0183-4078-850b-8eeb1ac88a5a;CPE-PERTH-NSG32", + "searchTagIDs": "service-instance-id;service-instance-name", + "lastmodTimestamp": "2018-01-25T13:15:55.672-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "1ad1fbf279a6f822db790bc1d1143b6c09be33dbcae870b332bcb622b42bb208DWH", + "_score": 1, + "_source": { + "entityType": "service-instance", + "entityPrimaryKeyValue": "69d0ab4c-1ab1-4488-997d-d6f8ae82bfeb", + "link": "/aai/v11/business/customers/customer/AEESDNC/service-subscriptions/service-subscription/SDWAN-DOMAIN/service-instances/service-instance/69d0ab4c-1ab1-4488-997d-d6f8ae82bfeb", + "searchTags": 
"69d0ab4c-1ab1-4488-997d-d6f8ae82bfeb;Domain-AEESDNC", + "searchTagIDs": "service-instance-id;service-instance-name", + "lastmodTimestamp": "2018-01-25T13:15:54.341-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "11b0c483640efdac72d39859387aa623b2059d8ac35bf5310b3ef0d77c0eaebcDWH", + "_score": 1, + "_source": { + "entityType": "generic-vnf", + "entityPrimaryKeyValue": "genericvnft1i1bd2", + "link": "/aai/v11/network/generic-vnfs/generic-vnf/genericvnft1i1bd2", + "searchTags": "genericvnft1i1bd2;genericvnft1i1bd2", + "searchTagIDs": "vnf-id;vnf-name", + "lastmodTimestamp": "2018-01-29T03:35:28.908-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "44182de194b1b4b6ad8002bf79277cf7be2b3143d2cc4e84c2ce80c46574a550DWH", + "_score": 1, + "_source": { + "entityType": "generic-vnf", + "entityPrimaryKeyValue": "genericvnft3i1be2", + "link": "/aai/v11/network/generic-vnfs/generic-vnf/genericvnft3i1be2", + "searchTags": "genericvnft3i1be2;genericvnft3i1be2", + "searchTagIDs": "vnf-id;vnf-name", + "lastmodTimestamp": "2018-01-29T03:35:47.064-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "ee9e7975cb6db6deb2222802b7cac2dd1ec122341742ce4efb1e34c792712679DWH", + "_score": 1, + "_source": { + "entityType": "customer", + "entityPrimaryKeyValue": "Barclays", + "link": "/aai/v11/business/customers/customer/Barclays", + "searchTags": "Barclays;Barclays", + "searchTagIDs": "global-customer-id;subscriber-name", + "lastmodTimestamp": "2018-01-25T13:16:00.301-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "f05c00191ad4746d4a2e7457e5f378df328cf7edfd4f99cc5ad8688be0185fd2DWH", + "_score": 1, + "_source": { + "entityType": "customer", + "entityPrimaryKeyValue": "AEESDNC", + "link": "/aai/v11/business/customers/customer/AEESDNC", + "searchTags": "AEESDNC;AEESDNC", + "searchTagIDs": "global-customer-id;subscriber-name", + "lastmodTimestamp": "2018-01-25T13:16:00.390-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "96be943f01d1db1fe8a3c6ffd03f79385d99dc8d351a6292980fc1af93da987bDWH", + "_score": 1, + "_source": { + "entityType": "generic-vnf", + "entityPrimaryKeyValue": "aeetest_vnf1_under_fw-si1", + "link": "/aai/v11/network/generic-vnfs/generic-vnf/aeetest_vnf1_under_fw-si1", + "searchTags": "aeetest_vnf1_under_fw-si1;aeetest_vnf1_under_fw-si1", + "searchTagIDs": "vnf-id;vnf-name", + "lastmodTimestamp": "2018-01-29T03:35:51.548-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "96be943f01d1db1fe8a3c6ffd03f79385d99dc8d351a6292980fc1af93da987bDWH2", + "_score": 1, + "_source": { + "entityType": "generic-vnf", + "entityPrimaryKeyValue": "aeetest_vnf1_under_fw-si12", + "link": "/aai/v11/network/generic-vnfs/generic-vnf/aeetest_vnf1_under_fw-si12", + "searchTags": "aeetest_vnf1_under_fw-si1;aeetest_vnf1_under_fw-si12", + "searchTagIDs": "vnf-id;vnf-name", + "lastmodTimestamp": "2018-01-29T03:35:51.548-0502" + } + } + + ] + } +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/BeforeSync_ElasticSearch_ScrollApi_Successful.json b/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/BeforeSync_ElasticSearch_ScrollApi_Successful.json new file mode 100644 index 0000000..38fc297 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/BeforeSync_ElasticSearch_ScrollApi_Successful.json @@ -0,0 +1,156 @@ +{ + "_scroll_id": "cXVlcnlUaGVuRmV0Y2g7NTs1MzA6T2dvOXI5ekVSWkdTM0JBQ0tnR0Jvdzs1Mjg6T2dvOXI5ekVSWkdTM0JBQ0tnR0Jvdzs1Mjc6T2dvOXI5ekVSWkdTM0JBQ0tnR0Jvdzs1MjY6T2dvOXI5ekVSWkdTM0JBQ0tnR0Jvdzs1Mjk6T2dvOXI5ekVSWkdTM0JBQ0tnR0JvdzswOw==", + "took": 29, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 5, + "failed": 0 + }, + "hits": { + "total": 1345, + "max_score": 1, + "hits": [ + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "7defe67ea13549fcf8163c755db0d8595f32da23022966e3313281474a551742", + "_score": 1, + "_source": { + "entityType": "generic-vnf", + "entityPrimaryKeyValue": "genericvnft2i12bd2", + "link": "/aai/v11/network/generic-vnfs/generic-vnf/genericvnft2i12bd2", + "searchTags": "genericvnft2i12bd2;genericvnft2i12bd2", + "searchTagIDs": "vnf-id;vnf-name", + "lastmodTimestamp": "2018-01-29T03:35:40.413-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "981d7d3e27d91cbf118b01e6b5415ecbe1622a0c6c2afe4e04f91919f37f8fbe", + "_score": 1, + "_source": { + "entityType": "complex", + "entityPrimaryKeyValue": "STUB-609ccabb-7504-3e23-91a4-30d29776e792", + "link": "/aai/v11/cloud-infrastructure/complexes/complex/STUB-609ccabb-7504-3e23-91a4-30d29776e792", + "searchTags": "STUB-609ccabb-7504-3e23-91a4-30d29776e792;Sydney;BennelongPoint; 2000", + "searchTagIDs": "physical-location-id;complex-name;street1;postal-code", + "lastmodTimestamp": "2018-01-25T13:15:57.471-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "89788f6bbcddfe40a5248fdb2c46c380aaea66f61e56c66e00d815163ddf98c4", + "_score": 1, + "_source": { + "entityType": "pnf", + "entityPrimaryKeyValue": "NSG34-ANZBank", + "link": "/aai/v11/network/pnfs/pnf/NSG34-ANZBank", + "searchTags": "NSG34-ANZBank", + "searchTagIDs": "pnf-name", + "lastmodTimestamp": "2018-01-25T13:15:50.890-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "76ec93a5740e6c519dbbe4f20c366bf8114273adc13db228bc3012658ec80e57", + "_score": 1, + "_source": { + "entityType": "service-instance", + "entityPrimaryKeyValue": "84635d51-0183-4078-850b-8eeb1ac88a5a", + "link": "/aai/v11/business/customers/customer/ANZBank/service-subscriptions/service-subscription/SDWAN-CPE/service-instances/service-instance/84635d51-0183-4078-850b-8eeb1ac88a5a", + "searchTags": "84635d51-0183-4078-850b-8eeb1ac88a5a;CPE-PERTH-NSG32", + "searchTagIDs": "service-instance-id;service-instance-name", + "lastmodTimestamp": "2018-01-25T13:15:55.672-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "1ad1fbf279a6f822db790bc1d1143b6c09be33dbcae870b332bcb622b42bb208", + "_score": 1, + "_source": { + "entityType": "service-instance", + "entityPrimaryKeyValue": "69d0ab4c-1ab1-4488-997d-d6f8ae82bfeb", + "link": "/aai/v11/business/customers/customer/AEESDNC/service-subscriptions/service-subscription/SDWAN-DOMAIN/service-instances/service-instance/69d0ab4c-1ab1-4488-997d-d6f8ae82bfeb", + "searchTags": 
"69d0ab4c-1ab1-4488-997d-d6f8ae82bfeb;Domain-AEESDNC", + "searchTagIDs": "service-instance-id;service-instance-name", + "lastmodTimestamp": "2018-01-25T13:15:54.341-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "11b0c483640efdac72d39859387aa623b2059d8ac35bf5310b3ef0d77c0eaebc", + "_score": 1, + "_source": { + "entityType": "generic-vnf", + "entityPrimaryKeyValue": "genericvnft1i1bd2", + "link": "/aai/v11/network/generic-vnfs/generic-vnf/genericvnft1i1bd2", + "searchTags": "genericvnft1i1bd2;genericvnft1i1bd2", + "searchTagIDs": "vnf-id;vnf-name", + "lastmodTimestamp": "2018-01-29T03:35:28.908-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "44182de194b1b4b6ad8002bf79277cf7be2b3143d2cc4e84c2ce80c46574a550", + "_score": 1, + "_source": { + "entityType": "generic-vnf", + "entityPrimaryKeyValue": "genericvnft3i1be2", + "link": "/aai/v11/network/generic-vnfs/generic-vnf/genericvnft3i1be2", + "searchTags": "genericvnft3i1be2;genericvnft3i1be2", + "searchTagIDs": "vnf-id;vnf-name", + "lastmodTimestamp": "2018-01-29T03:35:47.064-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "ee9e7975cb6db6deb2222802b7cac2dd1ec122341742ce4efb1e34c792712679", + "_score": 1, + "_source": { + "entityType": "customer", + "entityPrimaryKeyValue": "Barclays", + "link": "/aai/v11/business/customers/customer/Barclays", + "searchTags": "Barclays;Barclays", + "searchTagIDs": "global-customer-id;subscriber-name", + "lastmodTimestamp": "2018-01-25T13:16:00.301-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "f05c00191ad4746d4a2e7457e5f378df328cf7edfd4f99cc5ad8688be0185fd2", + "_score": 1, + "_source": { + "entityType": "customer", + "entityPrimaryKeyValue": "AEESDNC", + "link": "/aai/v11/business/customers/customer/AEESDNC", + "searchTags": "AEESDNC;AEESDNC", + "searchTagIDs": "global-customer-id;subscriber-name", + "lastmodTimestamp": "2018-01-25T13:16:00.390-0500" + } + }, + { + "_index": "entitysearchindex-localhost", + "_type": "default", + "_id": "96be943f01d1db1fe8a3c6ffd03f79385d99dc8d351a6292980fc1af93da987b", + "_score": 1, + "_source": { + "entityType": "generic-vnf", + "entityPrimaryKeyValue": "aeetest_vnf1_under_fw-si1", + "link": "/aai/v11/network/generic-vnfs/generic-vnf/aeetest_vnf1_under_fw-si1", + "searchTags": "aeetest_vnf1_under_fw-si1;aeetest_vnf1_under_fw-si1", + "searchTagIDs": "vnf-id;vnf-name", + "lastmodTimestamp": "2018-01-29T03:35:51.548-0500" + } + } + ] + } +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/docEntityFromElasticSearch1.json b/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/docEntityFromElasticSearch1.json new file mode 100644 index 0000000..904c729 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/docEntityFromElasticSearch1.json @@ -0,0 +1,17 @@ +{ + "_index" : "aggregate_generic-vnf_index", + "_type" : "default", + "_id" : "a5583fff3d57e0412363a9edfc265fd95374e6e0ef7b7061c7286622da904848", + "_version" : 1, + "found" : true, + "_source" : { + "link" : "/aai/v11/network/generic-vnfs/generic-vnf/genericvnft2i11bd2", + "lastmodTimestamp" : "2018-01-29T03:35:46.859-0500", + "vnf-id" : "genericvnft2i11bd2", + "in-maint" : "false", + "resource-version" : "1508939769479", + "vnf-name" : "genericvnft2i11bd2", + "vnf-type" : "VirtualNetworkFunctionType", + "is-closed-loop-disabled" : "true" + } + }
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/docEntityFromElasticSearch2.json b/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/docEntityFromElasticSearch2.json new file mode 100644 index 0000000..7eb2317 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/docEntityFromElasticSearch2.json @@ -0,0 +1,17 @@ +{ + "_index" : "aggregate_generic-vnf_index", + "_type" : "default", + "_id" : "a5583fff3d57e0412363a9edfc265fd95374e6e0ef7b7061c7286622da9048482", + "_version" : 1, + "found" : true, + "_source" : { + "link" : "/aai/v11/network/generic-vnfs/generic-vnf/genericvnft2i11bd22", + "lastmodTimestamp" : "2018-01-29T03:35:46.859-0500", + "vnf-id" : "genericvnft2i11bd22", + "in-maint" : "false", + "resource-version" : "1508939769479", + "vnf-name" : "genericvnft2i11bd22", + "vnf-type" : "VirtualNetworkFunctionType", + "is-closed-loop-disabled" : "true" + } + }
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/docEntityFromElasticSearch3.json b/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/docEntityFromElasticSearch3.json new file mode 100644 index 0000000..6332610 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/docEntityFromElasticSearch3.json @@ -0,0 +1,17 @@ +{ + "_index" : "aggregate_generic-vnf_index", + "_type" : "default", + "_id" : "a5583fff3d57e0412363a9edfc265fd95374e6e0ef7b7061c7286622da9048483", + "_version" : 1, + "found" : true, + "_source" : { + "link" : "/aai/v11/network/generic-vnfs/generic-vnf/genericvnft2i11bd3", + "lastmodTimestamp" : "2018-01-29T03:35:46.859-0500", + "vnf-id" : "genericvnft2i11bd3", + "in-maint" : "false", + "resource-version" : "1508939769479", + "vnf-name" : "genericvnft2i11bd3", + "vnf-type" : "VirtualNetworkFunctionType", + "is-closed-loop-disabled" : "true" + } + }
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/docEntityFromElasticSearch4.json b/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/docEntityFromElasticSearch4.json new file mode 100644 index 0000000..71298e9 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/docEntityFromElasticSearch4.json @@ -0,0 +1,17 @@ +{ + "_index" : "aggregate_generic-vnf_index", + "_type" : "default", + "_id" : "a5583fff3d57e0412363a9edfc265fd95374e6e0ef7b7061c7286622da9048484", + "_version" : 1, + "found" : true, + "_source" : { + "link" : "/aai/v11/network/generic-vnfs/generic-vnf/genericvnft2i11bd4", + "lastmodTimestamp" : "2018-01-29T03:35:46.859-0500", + "vnf-id" : "genericvnft2i11bd4", + "in-maint" : "false", + "resource-version" : "1508939769479", + "vnf-name" : "genericvnft2i11bd4", + "vnf-type" : "VirtualNetworkFunctionType", + "is-closed-loop-disabled" : "true" + } +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/docEntityFromElasticSearch5.json b/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/docEntityFromElasticSearch5.json new file mode 100644 index 0000000..08d8cdd --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/ElasticSearch/docEntityFromElasticSearch5.json @@ -0,0 +1,17 @@ +{ + "_index" : "aggregate_generic-vnf_index", + "_type" : "default", + "_id" : "a5583fff3d57e0412363a9edfc265fd95374e6e0ef7b7061c7286622da904848", + "_version" : 1, + "found" : true, + "_source" : { + "link" : "/aai/v11/network/generic-vnfs/generic-vnf/genericvnft2i11bd5", + "lastmodTimestamp" : "2018-01-29T03:35:46.859-0500", + "vnf-id" : "genericvnft2i11bd5", + "in-maint" : "false", + "resource-version" : "1508939769479", + "vnf-name" : "genericvnft2i11bd5", + "vnf-type" : "VirtualNetworkFunctionType", + "is-closed-loop-disabled" : "true" + } + }
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/customer/customer-4.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/customer/customer-4.json new file mode 100644 index 0000000..df805ba --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/customer/customer-4.json @@ -0,0 +1,60 @@ +{ + "global-customer-id": "customer-4", + "subscriber-name": "Gold", + "subscriber-type": "GoldType", + "resource-version": "1494001938080", + "service-subscriptions": { + "service-subscription": [ + { + "service-type": "service-subscription-2", + "resource-version": "1494001891362", + "service-instances": { + "service-instance": [ + { + "service-instance-id": "service-instance-54", + "service-instance-name": "si_failtest", + "model-invariant-id": "732263bd-0655-428d-a347-d65676d1a949", + "resource-version": "1494001997513" + }, + { + "service-instance-id": "service-instance-55", + "service-instance-name": "test-343432", + "model-invariant-id": "709d1be4-9a3f-4a29-8c4d-a20465e808a3", + "model-version-id": "240376de-870e-48df-915a-31f140eedd2c", + "resource-version": "1500370094198", + "orchestration-status": "Active" + } + ] + }, + "relationship-list": { + "relationship": [ + { + "related-to": "tenant", + "related-link": "/aai/v11/cloud-infrastructure/cloud-regions/cloud-region/att-aic/mtn6/tenants/tenant/tenant-1", + "relationship-data": [ + { + "relationship-key": "cloud-region.cloud-owner", + "relationship-value": "cotton-candy" + }, + { + "relationship-key": "cloud-region.cloud-region-id", + "relationship-value": "fluffy-clouds" + }, + { + "relationship-key": "tenant.tenant-id", + "relationship-value": "tenant-1" + } + ], + "related-to-property": [ + { + "property-key": "tenant.tenant-name", + "property-value": "CandyMan" + } + ] + } + ] + } + } + ] + } +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-50.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-50.json new file mode 100644 index 0000000..17a48fc --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-50.json @@ -0,0 +1,26 @@ +{ + "service-instance-id": "service-instance-50", + "service-instance-name": "s01", + "service-type": "", + "service-role": "", + "environment-context": "Universe", + "workload-context": "QuantumContext", + "model-invariant-id": "5b9c0f33-eec1-484a-bf77-736a6644d7a8", + "model-version-id": "b75e0d22-05ff-4448-9266-5f0d4e1dbbd6", + "resource-version": "1510659038818", + "orchestration-status": "Active", + "relationship-list": { + "relationship": [ + { + "related-to": "project", + "related-link": "/aai/v11/business/projects/project/project1", + "relationship-data": [ + { + "relationship-key": "project.project-name", + "relationship-value": "project1" + } + ] + } + ] + } +} diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-51.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-51.json new file mode 100644 index 0000000..bfc5241 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-51.json @@ -0,0 +1,28 @@ +{ + "service-instance-id": "service-instance-51", + "service-instance-name": "test765445g", + "model-invariant-id": "709d1be4-9a3f-4a29-8c4d-a20465e808a3", + "model-version-id": "240376de-870e-48df-915a-31f140eedd2c", + "resource-version": "1499868690949", + "orchestration-status": "Active", + "relationship-list": { + "relationship": [ + { + "related-to": "generic-vnf", + "related-link": "/aai/v11/network/generic-vnfs/generic-vnf/fbb52a16-2c57-4212-802f-32dbba2204f2", + "relationship-data": [ + { + "relationship-key": "generic-vnf.vnf-id", + "relationship-value": "fbb52a16-2c57-4212-802f-32dbba2204f2" + } + ], + "related-to-property": [ + { + "property-key": "generic-vnf.vnf-name", + "property-value": "fdfdfdf" + } + ] + } + ] + } +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-52.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-52.json new file mode 100644 index 0000000..6083a8f --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-52.json @@ -0,0 +1,12 @@ +{ + "service-instance-id": "service-instance-52", + "service-instance-name": "si52", + "service-type": "", + "service-role": "", + "environment-context": "null", + "workload-context": "null", + "model-invariant-id": "d7b48529-6ae2-49f0-8633-b29e7cd4d4ce", + "model-version-id": "44671b15-83dd-4db7-a36e-dfada3eaa2f9", + "resource-version": "1508144995828", + "orchestration-status": "Active" +} diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-53.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-53.json new file mode 100644 index 0000000..5765c43 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-53.json @@ -0,0 +1,12 @@ +{ + "service-instance-id": "service-instance-53", + "service-instance-name": "a1", + "service-type": "si53-type", + "service-role": "si53-role", + "environment-context": "Universe", + "workload-context": "Nano", + "model-invariant-id": "340f3957-ff0a-4503-866d-a34fd1b97450", + "model-version-id": "ee2d8783-8495-4fb1-9553-6cdbd2dd3a50", + "resource-version": "1509355912484", + "orchestration-status": "Active" +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-54.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-54.json new file mode 100644 index 0000000..249c38b --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-54.json @@ -0,0 +1,6 @@ +{ + "service-instance-id": "service-instance-54", + "service-instance-name": "si_failtest", + "model-invariant-id": "732263bd-0655-428d-a347-d65676d1a949", + "resource-version": "1494001997513" +} diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-55.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-55.json new file mode 100644 index 0000000..8db5b2e --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-55.json @@ -0,0 +1,8 @@ +{ + "service-instance-id": "service-instance-55", + "service-instance-name": "test-343432", + "model-invariant-id": "709d1be4-9a3f-4a29-8c4d-a20465e808a3", + "model-version-id": "240376de-870e-48df-915a-31f140eedd2c", + "resource-version": "1500370094198", + "orchestration-status": "Active" +} diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-56.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-56.json new file mode 100644 index 0000000..a87de6f --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-56.json @@ -0,0 +1,4 @@ +{ + "service-instance-id": "service-instance-56", + "resource-version": "1495736709053" +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-57.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-57.json new file mode 100644 index 0000000..ce26fda --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-57.json @@ -0,0 +1,11 @@ +{ + "service-instance-id": "service-instance-57", + "service-instance-name": "first_macro_shani", + "service-type": "", + "service-role": "", + "workload-context": "Universe", + "model-invariant-id": "5b9c0f33-eec1-484a-bf77-736a6644d7a8", + "model-version-id": "b75e0d22-05ff-4448-9266-5f0d4e1dbbd6", + "resource-version": "1508071197869", + "orchestration-status": "Active" +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-58.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-58.json new file mode 100644 index 0000000..7e6f5e9 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-58.json @@ -0,0 +1,32 @@ +{ + "service-instance-id": "service-instance-58", + "service-instance-name": "a3", + "service-type": "", + "service-role": "", + "environment-context": "FluffyClouds", + "workload-context": "TasteGood", + "model-invariant-id": "d5937aa1-37fb-4ed0-8c30-5144b89a64ae", + "model-version-id": "06ac9663-54cf-4c77-b926-6e8757cf6380", + "resource-version": "1509356358573", + "orchestration-status": "Active", + "relationship-list": { + "relationship": [ + { + "related-to": "generic-vnf", + "related-link": "/aai/v11/network/generic-vnfs/generic-vnf/9c925d7e-1a94-4784-a45b-408c8cc96fa8", + "relationship-data": [ + { + "relationship-key": "generic-vnf.vnf-id", + "relationship-value": "9c925d7e-1a94-4784-a45b-408c8cc96fa8" + } + ], + "related-to-property": [ + { + "property-key": "generic-vnf.vnf-name", + "property-value": "dfdsfds" + } + ] + } + ] + } +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-59.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-59.json new file mode 100644 index 0000000..37e7d35 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-instance/service-instance-59.json @@ -0,0 +1,4 @@ +{ + "service-instance-id": "service-instance-59", + "resource-version": "1506087955081" +} diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-subscription/service-subscription-2.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-subscription/service-subscription-2.json new file mode 100644 index 0000000..2d02ccc --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/service-subscription/service-subscription-2.json @@ -0,0 +1,50 @@ +{ + "service-type": "service-subscription-2", + "resource-version": "1494001891362", + "service-instances": { + "service-instance": [ + { + "service-instance-id": "service-instance-54", + "service-instance-name": "si_failtest", + "model-invariant-id": "732263bd-0655-428d-a347-d65676d1a949", + "resource-version": "1494001997513" + }, + { + "service-instance-id": "service-instance-55", + "service-instance-name": "test-343432", + "model-invariant-id": "709d1be4-9a3f-4a29-8c4d-a20465e808a3", + "model-version-id": "240376de-870e-48df-915a-31f140eedd2c", + "resource-version": "1500370094198", + "orchestration-status": "Active" + } + ] + }, + "relationship-list": { + "relationship": [ + { + "related-to": "tenant", + "related-link": "/aai/v11/cloud-infrastructure/cloud-regions/cloud-region/myCloud/isBig/tenants/tenant/tenant-1", + "relationship-data": [ + { + "relationship-key": "cloud-region.cloud-owner", + "relationship-value": "myCloud" + }, + { + "relationship-key": "cloud-region.cloud-region-id", + "relationship-value": "isBig" + }, + { + "relationship-key": "tenant.tenant-id", + "relationship-value": "tenant-1" + } + ], + "related-to-property": [ + { + "property-key": "tenant.tenant-name", + "property-value": "SuperDude" + } + ] + } + ] + } +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/tenant/tenant-1.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/tenant/tenant-1.json new file mode 100644 index 0000000..3dad086 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-resources/tenant/tenant-1.json @@ -0,0 +1,23 @@ +{ + "tenant-id": "tenant-1", + "tenant-name": "SuperDude", + "resource-version": "1494001855362", + "relationship-list": { + "relationship": [ + { + "related-to": "service-subscription", + "related-link": "/aai/v11/business/customers/customer/customer-2/service-subscriptions/service-subscription/service-subscription-2", + "relationship-data": [ + { + "relationship-key": "customer.global-customer-id", + "relationship-value": "customer-4" + }, + { + "relationship-key": "service-subscription.service-type", + "relationship-value": "service-subscription-2" + } + ] + } + ] + } +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-50.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-50.json new file mode 100644 index 0000000..d4e9d8d --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-50.json @@ -0,0 +1,8 @@ +{ + "result-data": [ + { + "resource-type": "service-instance", + "resource-link": "/aai/v11/business/customers/customer/customer-2/service-subscriptions/service-subscription/service-subscription-2/service-instances/service-instance/service-instance-50" + } + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-51.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-51.json new file mode 100644 index 0000000..7c29652 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-51.json @@ -0,0 +1,8 @@ +{ + "result-data": [ + { + "resource-type": "service-instance", + "resource-link": "/aai/v11/business/customers/customer/customer-2/service-subscriptions/service-subscription/service-subscription-2/service-instances/service-instance/service-instance-51" + } + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-52.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-52.json new file mode 100644 index 0000000..5d88946 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-52.json @@ -0,0 +1,8 @@ +{ + "result-data": [ + { + "resource-type": "service-instance", + "resource-link": "/aai/v11/business/customers/customer/customer-2/service-subscriptions/service-subscription/service-subscription-2/service-instances/service-instance/service-instance-52" + } + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-53.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-53.json new file mode 100644 index 0000000..8422ab0 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-53.json @@ -0,0 +1,8 @@ +{ + "result-data": [ + { + "resource-type": "service-instance", + "resource-link": "/aai/v11/business/customers/customer/customer-2/service-subscriptions/service-subscription/service-subscription-2/service-instances/service-instance/service-instance-53" + } + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-54.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-54.json new file mode 100644 index 0000000..d61ee8e --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-54.json @@ -0,0 +1,8 @@ +{ + "result-data": [ + { + "resource-type": "service-instance", + "resource-link": "/aai/v11/business/customers/customer/customer-2/service-subscriptions/service-subscription/service-subscription-2/service-instances/service-instance/service-instance-54" + } + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-55.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-55.json new file mode 100644 index 0000000..7594913 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-55.json @@ -0,0 +1,8 @@ +{ + "result-data": [ + { + "resource-type": "service-instance", + "resource-link": "/aai/v11/business/customers/customer/customer-2/service-subscriptions/service-subscription/service-subscription-2/service-instances/service-instance/service-instance-55" + } + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-56.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-56.json new file mode 100644 index 0000000..dc66079 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-56.json @@ -0,0 +1,8 @@ +{ + "result-data": [ + { + "resource-type": "service-instance", + "resource-link": "/aai/v11/business/customers/customer/DHV_20170525142406/service-subscriptions/service-subscription/uCPE/service-instances/service-instance/service-instance-56" + } + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-57.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-57.json new file mode 100644 index 0000000..427fbdf --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-57.json @@ -0,0 +1,8 @@ +{ + "result-data": [ + { + "resource-type": "service-instance", + "resource-link": "/aai/v11/business/customers/customer/customer-2/service-subscriptions/service-subscription/service-subscription-2/service-instances/service-instance/service-instance-57" + } + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-58.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-58.json new file mode 100644 index 0000000..f459422 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-58.json @@ -0,0 +1,8 @@ +{ + "result-data": [ + { + "resource-type": "service-instance", + "resource-link": "/aai/v11/business/customers/customer/customer-2/service-subscriptions/service-subscription/service-subscription-2/service-instances/service-instance/service-instance-58" + } + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-59.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-59.json new file mode 100644 index 0000000..380f932 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance-59.json @@ -0,0 +1,8 @@ +{ + "result-data": [ + { + "resource-type": "service-instance", + "resource-link": "/aai/v11/business/customers/customer/customer-1/service-subscriptions/service-subscription/service-subscription-1/service-instances/service-instance/service-instance-59" + } + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance/service-instance-54.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance/service-instance-54.json new file mode 100644 index 0000000..d61ee8e --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance/service-instance-54.json @@ -0,0 +1,8 @@ +{ + "result-data": [ + { + "resource-type": "service-instance", + "resource-link": "/aai/v11/business/customers/customer/customer-2/service-subscriptions/service-subscription/service-subscription-2/service-instances/service-instance/service-instance-54" + } + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance/service-instance-55.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance/service-instance-55.json new file mode 100644 index 0000000..7594913 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-instance/service-instance-55.json @@ -0,0 +1,8 @@ +{ + "result-data": [ + { + "resource-type": "service-instance", + "resource-link": "/aai/v11/business/customers/customer/customer-2/service-subscriptions/service-subscription/service-subscription-2/service-instances/service-instance/service-instance-55" + } + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-subscription/service-subscription-2.json b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-subscription/service-subscription-2.json new file mode 100644 index 0000000..d377d5d --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/aai-traversal/generic-query/service-subscription/service-subscription-2.json @@ -0,0 +1,8 @@ +{ + "result-data": [ + { + "resource-type": "service-subscription", + "resource-link": "/aai/v11/business/customers/customer/customer-2/service-subscriptions/service-subscription/service-subscription-2" + } + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/activeInventory_complex_nodesQuery_response.json b/sparkybe-onap-service/src/test/resources/sync/aai/activeInventory_complex_nodesQuery_response.json new file mode 100644 index 0000000..508d711 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/activeInventory_complex_nodesQuery_response.json @@ -0,0 +1,24 @@ +{ + "result-data" : [ + { + "resource-type" : "complex", + "resource-link" : "https://a.server.some.where:8443/aai/v7/complexes/complex/complex1" + }, + { + "resource-type" : "complex", + "resource-link" : "https://a.server.some.where:8443/aai/v7/complexes/complex/complex2" + }, + { + "resource-type" : "complex", + "resource-link" : "https://a.server.some.where:8443/aai/v7/complexes/complex/complex3" + }, + { + "resource-type" : "complex", + "resource-link" : "https://a.server.some.where:8443/aai/v7/complexes/complex/complex4" + }, + { + "resource-type" : "complex", + "resource-link" : "https://a.server.some.where:8443/aai/v7/complexes/complex/complex5" + } + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/activeInventory_generic-vnf_nodesQuery_response.json b/sparkybe-onap-service/src/test/resources/sync/aai/activeInventory_generic-vnf_nodesQuery_response.json new file mode 100644 index 0000000..f1cb3d4 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/activeInventory_generic-vnf_nodesQuery_response.json @@ -0,0 +1,16 @@ +{ + "result-data": [ + { + "resource-type": "generic-vnf", + "resource-link": "/aai/v11/network/generic-vnfs/generic-vnf/generic-vnf-1" + }, + { + "resource-type": "generic-vnf", + "resource-link": "/aai/v11/network/generic-vnfs/generic-vnf/generic-vnf-2" + }, + { + "resource-type": "generic-vnf", + "resource-link": "/aai/v11/network/generic-vnfs/generic-vnf/generic-vnf-3" + } + ] +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/activeInventory_service-subscription_nodesQuery_response.json b/sparkybe-onap-service/src/test/resources/sync/aai/activeInventory_service-subscription_nodesQuery_response.json new file mode 100644 index 0000000..414724f --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/activeInventory_service-subscription_nodesQuery_response.json @@ -0,0 +1,16 @@ +{ + "result-data": [ + { + "resource-type": "service-subscription", + "resource-link": "/aai/v11/business/customers/customer/customer-1/service-subscriptions/service-subscription/service-subscription1" + }, + { + "resource-type": "service-subscription", + "resource-link": "/aai/v11/business/customers/customer/customer-2/service-subscriptions/service-subscription/service-subscription-2" + }, + { + "resource-type": "service-subscription", + "resource-link": "/aai/v11/business/customers/customer/customer-3/service-subscriptions/service-subscription/service-subscription-3" + } + ] +}
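The generic-query and nodes-query fixtures above all share one shape: a result-data array whose entries pair a resource-type with the resource-link that a synchronizer would dereference next to fetch the full entity (the *_fullDepth_aaiEntityRetrieval and *_full_depth files that follow are examples of what such a dereference returns). A minimal sketch of extracting those links, Jackson assumed; ResultDataSketch and extractResourceLinks are illustrative names, not existing sparky utilities:

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.ArrayList;
    import java.util.List;

    public final class ResultDataSketch {
        private static final ObjectMapper MAPPER = new ObjectMapper();

        // Returns every resource-link in a generic-query / nodes-query response,
        // e.g. "/aai/v11/network/generic-vnfs/generic-vnf/generic-vnf-1".
        public static List<String> extractResourceLinks(String resultDataJson) throws Exception {
            List<String> links = new ArrayList<>();
            JsonNode root = MAPPER.readTree(resultDataJson);
            for (JsonNode entry : root.path("result-data")) {
                links.add(entry.path("resource-link").asText());
            }
            return links;
        }
    }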
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/complex1_fullDepth_aaiEntityRetrieval.json b/sparkybe-onap-service/src/test/resources/sync/aai/complex1_fullDepth_aaiEntityRetrieval.json new file mode 100644 index 0000000..3831848 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/complex1_fullDepth_aaiEntityRetrieval.json @@ -0,0 +1,30 @@ +{ + "physical-location-id": "HMBGGEANN02-1", + "resource-version": "1494001844620", + "physical-location-type": "EQUIPMENT LOCATION-1", + "street1": "WENDENSTRASSE 379-1", + "city": "HAMBURG-1", + "state": "GE", + "postal-code": "20097-1", + "country": "USA", + "region": "US", + "relationship-list": { + "relationship": [ + { + "related-to": "pserver", + "related-link": "/aai/v11/cloud-infrastructure/pservers/pserver/GER-45-IIGA-TRANSPORT_ONLY_DEL-1", + "relationship-data": [ + { + "relationship-key": "pserver.hostname", + "relationship-value": "GER-45-IIGA-TRANSPORT_ONLY_DEL-1" + } + ], + "related-to-property": [ + { + "property-key": "pserver.pserver-name2-1" + } + ] + } + ] + } +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/complex2_fullDepth_aaiEntityRetrieval.json b/sparkybe-onap-service/src/test/resources/sync/aai/complex2_fullDepth_aaiEntityRetrieval.json new file mode 100644 index 0000000..7a4d680 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/complex2_fullDepth_aaiEntityRetrieval.json @@ -0,0 +1,30 @@ +{ + "physical-location-id": "HMBGGEANN02-2", + "resource-version": "1494001844620", + "physical-location-type": "EQUIPMENT LOCATION-2", + "street1": "WENDENSTRASSE 379-2", + "city": "HAMBURG-2", + "state": "GE", + "postal-code": "20097-2", + "country": "USA", + "region": "US", + "relationship-list": { + "relationship": [ + { + "related-to": "pserver", + "related-link": "/aai/v11/cloud-infrastructure/pservers/pserver/GER-45-IIGA-TRANSPORT_ONLY_DEL-2", + "relationship-data": [ + { + "relationship-key": "pserver.hostname", + "relationship-value": "GER-45-IIGA-TRANSPORT_ONLY_DEL-2" + } + ], + "related-to-property": [ + { + "property-key": "pserver.pserver-name2-2" + } + ] + } + ] + } +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/complex3_fullDepth_aaiEntityRetrieval.json b/sparkybe-onap-service/src/test/resources/sync/aai/complex3_fullDepth_aaiEntityRetrieval.json new file mode 100644 index 0000000..e4a3789 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/complex3_fullDepth_aaiEntityRetrieval.json @@ -0,0 +1,30 @@ +{ + "physical-location-id": "HMBGGEANN02-3", + "resource-version": "1494001844620", + "physical-location-type": "EQUIPMENT LOCATION-3", + "street1": "WENDENSTRASSE 379-3", + "city": "HAMBURG-3", + "state": "GE", + "postal-code": "20097-3", + "country": "USA", + "region": "US", + "relationship-list": { + "relationship": [ + { + "related-to": "pserver", + "related-link": "/aai/v11/cloud-infrastructure/pservers/pserver/GER-45-IIGA-TRANSPORT_ONLY_DEL-3", + "relationship-data": [ + { + "relationship-key": "pserver.hostname", + "relationship-value": "GER-45-IIGA-TRANSPORT_ONLY_DEL-3" + } + ], + "related-to-property": [ + { + "property-key": "pserver.pserver-name2-3" + } + ] + } + ] + } +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/complex4_fullDepth_aaiEntityRetrieval.json b/sparkybe-onap-service/src/test/resources/sync/aai/complex4_fullDepth_aaiEntityRetrieval.json new file mode 100644 index 0000000..1963b3b --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/complex4_fullDepth_aaiEntityRetrieval.json @@ -0,0 +1,30 @@ +{ + "physical-location-id": "HMBGGEANN02-4", + "resource-version": "1494001844620", + "physical-location-type": "EQUIPMENT LOCATION-4", + "street1": "WENDENSTRASSE 379-4", + "city": "HAMBURG-4", + "state": "GE", + "postal-code": "20097-4", + "country": "USA", + "region": "US", + "relationship-list": { + "relationship": [ + { + "related-to": "pserver", + "related-link": "/aai/v11/cloud-infrastructure/pservers/pserver/GER-45-IIGA-TRANSPORT_ONLY_DEL-4", + "relationship-data": [ + { + "relationship-key": "pserver.hostname", + "relationship-value": "GER-45-IIGA-TRANSPORT_ONLY_DEL-4" + } + ], + "related-to-property": [ + { + "property-key": "pserver.pserver-name2-4" + } + ] + } + ] + } +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/complex5_fullDepth_aaiEntityRetrieval.json b/sparkybe-onap-service/src/test/resources/sync/aai/complex5_fullDepth_aaiEntityRetrieval.json new file mode 100644 index 0000000..70fcba8 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/complex5_fullDepth_aaiEntityRetrieval.json @@ -0,0 +1,30 @@ +{ + "physical-location-id": "HMBGGEANN02-5", + "resource-version": "1494001844620", + "physical-location-type": "EQUIPMENT LOCATION-5", + "street1": "WENDENSTRASSE 379-5", + "city": "HAMBURG-5", + "state": "GE", + "postal-code": "20097-5", + "country": "USA", + "region": "US", + "relationship-list": { + "relationship": [ + { + "related-to": "pserver", + "related-link": "/aai/v11/cloud-infrastructure/pservers/pserver/GER-45-IIGA-TRANSPORT_ONLY_DEL-5", + "relationship-data": [ + { + "relationship-key": "pserver.hostname", + "relationship-value": "GER-45-IIGA-TRANSPORT_ONLY_DEL-5" + } + ], + "related-to-property": [ + { + "property-key": "pserver.pserver-name2-5" + } + ] + } + ] + } +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/generic-vnf-generic-vnf-1_full_depth.json b/sparkybe-onap-service/src/test/resources/sync/aai/generic-vnf-generic-vnf-1_full_depth.json new file mode 100644 index 0000000..14abe18 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/generic-vnf-generic-vnf-1_full_depth.json @@ -0,0 +1,136 @@ +{ + "vnf-id": "generic-vnf-1", + "vnf-name": "vProbe_Fe_VEPMS_sk316t_sd_01", + "vnf-type": "vProbe Fe VEPMS_sk316t/vProbe Fe VEPMS_sk316t 0", + "service-id": "a9a77d5a-123e-4ca2-9eb9-0b015d2ee0fb", + "prov-status": "PREPROV", + "orchestration-status": "Created", + "in-maint": false, + "is-closed-loop-disabled": false, + "resource-version": "1511102454640", + "model-invariant-id": "b8dba4bd-04ee-42bc-8c41-db06addb6866", + "model-version-id": "c9e7826d-c4b8-40c7-83e4-d9f96e7d5827", + "model-customization-id": "d2ca6d16-6529-4362-b53e-7bf4141372b1", + "nf-type": "PROBES", + "nf-function": "vProbes (Frontend )", + "nf-role": "vLB", + "nf-naming-code": "null", + "relationship-list": { + "relationship": [ + { + "related-to": "service-instance", + "related-link": "/aai/v11/business/customers/customer/a9a77d5a-123e-4ca2-9eb9-0b015d2ee0fb/service-subscriptions/service-subscription/Mobility/service-instances/service-instance/22663785-ebb9-49c7-b95b-209537e6627f", + "relationship-data": [ + { + "relationship-key": "customer.global-customer-id", + "relationship-value": "a9a77d5a-123e-4ca2-9eb9-0b015d2ee0fb" + }, + { + "relationship-key": "service-subscription.service-type", + "relationship-value": "Mobility" + }, + { + "relationship-key": "service-instance.service-instance-id", + "relationship-value": "22663785-ebb9-49c7-b95b-209537e6627f" + } + ], + "related-to-property": [ + { + "property-key": "service-instance.service-instance-name", + "property-value": "vProbe_Fe_VEPMS_sk316t_sd" + } + ] + }, + { + "related-to": "line-of-business", + "related-link": "/aai/v11/business/lines-of-business/line-of-business/lob1", + "relationship-data": [ + { + "relationship-key": "line-of-business.line-of-business-name", + "relationship-value": "lob1" + } + ] + }, + { + "related-to": "platform", + "related-link": "/aai/v11/business/platforms/platform/platform1", + "relationship-data": [ + { + "relationship-key": "platform.platform-name", + "relationship-value": "platform1" + } + ] + }, + { + "related-to": "volume-group", + "related-link": "/aai/v11/cloud-infrastructure/cloud-regions/cloud-region/att-aic/mtn6/volume-groups/volume-group/ecf748d5-be43-45ae-a8b1-c8520d144bdf", + "relationship-data": [ + { + "relationship-key": "cloud-region.cloud-owner", + "relationship-value": "att-aic" + }, + { + "relationship-key": "cloud-region.cloud-region-id", + "relationship-value": "mtn6" + }, + { + "relationship-key": "volume-group.volume-group-id", + "relationship-value": "ecf748d5-be43-45ae-a8b1-c8520d144bdf" + } + ] + }, + { + "related-to": "volume-group", + "related-link": "/aai/v11/cloud-infrastructure/cloud-regions/cloud-region/att-aic/mtn6/volume-groups/volume-group/1e26c1a7-8a91-4459-ba02-8cc2d70dd065", + "relationship-data": [ + { + "relationship-key": "cloud-region.cloud-owner", + "relationship-value": "att-aic" + }, + { + "relationship-key": "cloud-region.cloud-region-id", + "relationship-value": "mtn6" + }, + { + "relationship-key": "volume-group.volume-group-id", + "relationship-value": "1e26c1a7-8a91-4459-ba02-8cc2d70dd065" + } + ] + }, + { + "related-to": "volume-group", + "related-link": 
"/aai/v11/cloud-infrastructure/cloud-regions/cloud-region/att-aic/mtn6/volume-groups/volume-group/5223cfdc-50bf-4607-9651-2270384d6414", + "relationship-data": [ + { + "relationship-key": "cloud-region.cloud-owner", + "relationship-value": "att-aic" + }, + { + "relationship-key": "cloud-region.cloud-region-id", + "relationship-value": "mtn6" + }, + { + "relationship-key": "volume-group.volume-group-id", + "relationship-value": "5223cfdc-50bf-4607-9651-2270384d6414" + } + ] + } + ] + }, + "vf-modules": { + "vf-module": [ + { + "vf-module-id": "eba81b96-851e-4374-9b9e-d186527f3c46", + "vf-module-name": "zmtn6afprb01_base1_sd_02", + "heat-stack-id": "zmtn6afprb01_base1_sd_02/cfd1203d-ff2f-49a3-a40b-72ac67cd8432", + "orchestration-status": "active", + "is-base-vf-module": true, + "resource-version": "1511301104226", + "model-invariant-id": "c01b5b9f-0760-4bcc-93b5-e1b24dd9bcfa", + "model-version-id": "30e71b66-93c6-4e0f-b7ff-de26b45c83e9", + "model-customization-id": "caaf8b40-03e9-4c58-9d18-613f564e2ea8", + "module-index": 0 + } + ] + } +} diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/generic-vnf-generic-vnf-2_full_depth.json b/sparkybe-onap-service/src/test/resources/sync/aai/generic-vnf-generic-vnf-2_full_depth.json new file mode 100644 index 0000000..a03e4c8 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/generic-vnf-generic-vnf-2_full_depth.json @@ -0,0 +1,118 @@ +{ + "vnf-id": "generic-vnf-2", + "vnf-name": "zmtn6apndns_v6_01", + "vnf-type": "APNDNS_mm779p_II/APNDNS_mm779p_II 0", + "service-id": "a9a77d5a-123e-4ca2-9eb9-0b015d2ee0fb", + "prov-status": "PREPROV", + "orchestration-status": "Created", + "in-maint": false, + "is-closed-loop-disabled": false, + "resource-version": "1511184323962", + "model-invariant-id": "f3f6ed00-cd41-4e96-b669-0daa10da5491", + "model-version-id": "f76e6281-d80f-4403-9603-4245b0c8d8cd", + "model-customization-id": "f499ebb9-4383-42c1-8ace-2b682f312504", + "nf-type": "DNS", + "nf-function": "APN-DNS", + "nf-role": "VDNS", + "nf-naming-code": "null", + "relationship-list": { + "relationship": [ + { + "related-to": "service-instance", + "related-link": "/aai/v11/business/customers/customer/a9a77d5a-123e-4ca2-9eb9-0b015d2ee0fb/service-subscriptions/service-subscription/Nimbus/service-instances/service-instance/3a743f07-86cc-47db-bee5-03fa91c77748", + "relationship-data": [ + { + "relationship-key": "customer.global-customer-id", + "relationship-value": "a9a77d5a-123e-4ca2-9eb9-0b015d2ee0fb" + }, + { + "relationship-key": "service-subscription.service-type", + "relationship-value": "Nimbus" + }, + { + "relationship-key": "service-instance.service-instance-id", + "relationship-value": "3a743f07-86cc-47db-bee5-03fa91c77748" + } + ], + "related-to-property": [ + { + "property-key": "service-instance.service-instance-name", + "property-value": "APNDNS_mm779p_II_V6" + } + ] + }, + { + "related-to": "line-of-business", + "related-link": "/aai/v11/business/lines-of-business/line-of-business/lob1", + "relationship-data": [ + { + "relationship-key": "line-of-business.line-of-business-name", + "relationship-value": "lob1" + } + ] + }, + { + "related-to": "platform", + "related-link": "/aai/v11/business/platforms/platform/platform1", + "relationship-data": [ + { + "relationship-key": "platform.platform-name", + "relationship-value": "platform1" + } + ] + }, + { + "related-to": "volume-group", + "related-link": 
"/aai/v11/cloud-infrastructure/cloud-regions/cloud-region/att-aic/mtn6/volume-groups/volume-group/03ec51a4-2ea9-4947-b66a-b01c7b9e9ea5", + "relationship-data": [ + { + "relationship-key": "cloud-region.cloud-owner", + "relationship-value": "att-aic" + }, + { + "relationship-key": "cloud-region.cloud-region-id", + "relationship-value": "mtn6" + }, + { + "relationship-key": "volume-group.volume-group-id", + "relationship-value": "03ec51a4-2ea9-4947-b66a-b01c7b9e9ea5" + } + ] + }, + { + "related-to": "volume-group", + "related-link": "/aai/v11/cloud-infrastructure/cloud-regions/cloud-region/att-aic/mtn6/volume-groups/volume-group/1b3bf4c2-a4cc-4c37-afcc-ec49f5c1c653", + "relationship-data": [ + { + "relationship-key": "cloud-region.cloud-owner", + "relationship-value": "att-aic" + }, + { + "relationship-key": "cloud-region.cloud-region-id", + "relationship-value": "mtn6" + }, + { + "relationship-key": "volume-group.volume-group-id", + "relationship-value": "1b3bf4c2-a4cc-4c37-afcc-ec49f5c1c653" + } + ] + } + ] + }, + "vf-modules": { + "vf-module": [ + { + "vf-module-id": "1b5602c0-b0c7-42fd-9bbd-8d503e464c5a", + "vf-module-name": "APNDNS_V6_base", + "heat-stack-id": "APNDNS_V6_base/bd3aa649-179f-4017-a848-f9a90cfa8908", + "orchestration-status": "active", + "is-base-vf-module": true, + "resource-version": "1511271087769", + "model-invariant-id": "874a259a-c4a3-4928-bbaa-0cd391ea3ec8", + "model-version-id": "417a7a36-87f8-4366-8d7b-95e47f1009b9", + "model-customization-id": "76a64957-74c7-4598-84c9-aa0e94bd2a69", + "module-index": 0 + } + ] + } +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/generic-vnf-generic-vnf-3_full_depth.json b/sparkybe-onap-service/src/test/resources/sync/aai/generic-vnf-generic-vnf-3_full_depth.json new file mode 100644 index 0000000..de2481b --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/generic-vnf-generic-vnf-3_full_depth.json @@ -0,0 +1,30 @@ +{ + "vnf-id": "generic-vnf-3", + "vnf-name": "generic-vnf-test01-name19", + "vnf-name2": "generic-vnf-test01-name19", + "vnf-type": "generic-vnf-rx2202-type", + "service-id": "d0217510-514b-429d-9b84-e4ed74e4c552", + "regional-resource-zone": "example-regional-resource-zone-val-8204", + "prov-status": "ACTIVE", + "license-key": "lk", + "equipment-role": "role", + "orchestration-status": "PendingDelete", + "heat-stack-id": "generic-vnf-rx2202-heat-stack-id", + "mso-catalog-key": "generic-vnf-rx2202-mso-catalog-key", + "management-option": "generic-vnf-rx2202-management-option", + "ipv4-oam-address": "1.2.3.4", + "ipv4-loopback0-address": "4.5.6.7", + "nm-lan-v6-address": "33::34", + "management-v6-address": "34::35", + "vcpu": 7957, + "vcpu-units": "example-vcpu-units-val-8204", + "vmemory": 168, + "vmemory-units": "example-vmemory-units-val-8204", + "vdisk": 3227, + "vdisk-units": "example-vdisk-units-val-8204", + "in-maint": false, + "is-closed-loop-disabled": true, + "resource-version": "1500476417001", + "nf-type": "vCE/vFW", + "nf-role": "vSeGW" +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/service-subscription-service-subscription-1.json b/sparkybe-onap-service/src/test/resources/sync/aai/service-subscription-service-subscription-1.json new file mode 100644 index 0000000..3d2d38c --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/service-subscription-service-subscription-1.json @@ -0,0 +1,12 @@ +{ + "service-type": "service-subscription-1", + "resource-version": "1506087955078", + "service-instances": { + "service-instance": [ + { + "service-instance-id": "service-subscription-1", + "resource-version": "1506087955081" + } + ] + } +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/service-subscription-service-subscription-2.json b/sparkybe-onap-service/src/test/resources/sync/aai/service-subscription-service-subscription-2.json new file mode 100644 index 0000000..d4c2414 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/service-subscription-service-subscription-2.json @@ -0,0 +1,350 @@ +{ + "service-type": "service-subscription-2", + "resource-version": "1494001891362", + "service-instances": { + "service-instance": [ + { + "service-instance-id": "service-instance-54", + "service-instance-name": "si_failtest", + "model-invariant-id": "732263bd-0655-428d-a347-d65676d1a949", + "resource-version": "1494001997513" + }, + { + "service-instance-id": "service-instance-55", + "service-instance-name": "test-343432", + "model-invariant-id": "709d1be4-9a3f-4a29-8c4d-a20465e808a3", + "model-version-id": "240376de-870e-48df-915a-31f140eedd2c", + "resource-version": "1500370094198", + "orchestration-status": "Active" + }, + { + "service-instance-id": "service-instance-50", + "service-instance-name": "s01", + "service-type": "", + "service-role": "", + "environment-context": "General_Revenue-Bearing", + "workload-context": "Production", + "model-invariant-id": "5b9c0f33-eec1-484a-bf77-736a6644d7a8", + "model-version-id": "b75e0d22-05ff-4448-9266-5f0d4e1dbbd6", + "resource-version": "1510659038818", + "orchestration-status": "Active", + "relationship-list": { + "relationship": [ + { + "related-to": "project", + "related-link": "/aai/v11/business/projects/project/project1", + "relationship-data": [ + { + "relationship-key": "project.project-name", + "relationship-value": "project1" + } + ] + }, + { + "related-to": "owning-entity", + "related-link": "/aai/v11/business/owning-entities/owning-entity/589fe0db-26c4-45e5-9f4e-a246c74fce76", + "relationship-data": [ + { + "relationship-key": "owning-entity.owning-entity-id", + "relationship-value": "589fe0db-26c4-45e5-9f4e-a246c74fce76" + } + ] + } + ] + } + }, + { + "service-instance-id": "service-instance-52", + "service-instance-name": "shanitest", + "service-type": "", + "service-role": "", + "environment-context": "null", + "workload-context": "null", + "model-invariant-id": "d7b48529-6ae2-49f0-8633-b29e7cd4d4ce", + "model-version-id": "44671b15-83dd-4db7-a36e-dfada3eaa2f9", + "resource-version": "1508144995828", + "orchestration-status": "Active" + }, + { + "service-instance-id": "service-instance-57", + "service-instance-name": "first_macro_shani", + "service-type": "", + "service-role": "", + "environment-context": "General_Revenue-Bearing", + "workload-context": "Production", + "model-invariant-id": "5b9c0f33-eec1-484a-bf77-736a6644d7a8", + "model-version-id": "b75e0d22-05ff-4448-9266-5f0d4e1dbbd6", + "resource-version": "1508071197869", + "orchestration-status": "Active" + }, + { + "service-instance-id": "service-instance-53", + "service-instance-name": "a1", + "service-type": "service_type_shani", + "service-role": "service_role_shani", + "environment-context": "General_Revenue-Bearing", + "workload-context": "Production", + "model-invariant-id": "340f3957-ff0a-4503-866d-a34fd1b97450", + "model-version-id": "ee2d8783-8495-4fb1-9553-6cdbd2dd3a50", + "resource-version": "1509355912484", + "orchestration-status": "Active", + "relationship-list": { + "relationship": [ + { + "related-to": "owning-entity", + "related-link": "/aai/v11/business/owning-entities/owning-entity/2356a43d-ed56-43b6-aefc-8391b82588c9", + 
"relationship-data": [ + { + "relationship-key": "owning-entity.owning-entity-id", + "relationship-value": "2356a43d-ed56-43b6-aefc-8391b82588c9" + } + ] + } + ] + } + }, + { + "service-instance-id": "service-instance-58", + "service-instance-name": "a3", + "service-type": "", + "service-role": "", + "environment-context": "General_Revenue-Bearing", + "workload-context": "Production", + "model-invariant-id": "d5937aa1-37fb-4ed0-8c30-5144b89a64ae", + "model-version-id": "06ac9663-54cf-4c77-b926-6e8757cf6380", + "resource-version": "1509356358573", + "orchestration-status": "Active", + "relationship-list": { + "relationship": [ + { + "related-to": "generic-vnf", + "related-link": "/aai/v11/network/generic-vnfs/generic-vnf/9c925d7e-1a94-4784-a45b-408c8cc96fa8", + "relationship-data": [ + { + "relationship-key": "generic-vnf.vnf-id", + "relationship-value": "9c925d7e-1a94-4784-a45b-408c8cc96fa8" + } + ], + "related-to-property": [ + { + "property-key": "generic-vnf.vnf-name", + "property-value": "dfdsfds" + } + ] + }, + { + "related-to": "owning-entity", + "related-link": "/aai/v11/business/owning-entities/owning-entity/589fe0db-26c4-45e5-9f4e-a246c74fce76", + "relationship-data": [ + { + "relationship-key": "owning-entity.owning-entity-id", + "relationship-value": "589fe0db-26c4-45e5-9f4e-a246c74fce76" + } + ] + } + ] + } + }, + { + "service-instance-id": "service-instance-51", + "service-instance-name": "test765445g", + "model-invariant-id": "709d1be4-9a3f-4a29-8c4d-a20465e808a3", + "model-version-id": "240376de-870e-48df-915a-31f140eedd2c", + "resource-version": "1499868690949", + "orchestration-status": "Active", + "relationship-list": { + "relationship": [ + { + "related-to": "generic-vnf", + "related-link": "/aai/v11/network/generic-vnfs/generic-vnf/fbb52a16-2c57-4212-802f-32dbba2204f2", + "relationship-data": [ + { + "relationship-key": "generic-vnf.vnf-id", + "relationship-value": "fbb52a16-2c57-4212-802f-32dbba2204f2" + } + ], + "related-to-property": [ + { + "property-key": "generic-vnf.vnf-name", + "property-value": "fdfdfdf" + } + ] + } + ] + } + } + ] + }, + "relationship-list": { + "relationship": [ + { + "related-to": "tenant", + "related-link": "/aai/v11/cloud-infrastructure/cloud-regions/cloud-region/att-aic/mtn6/tenants/tenant/tenant-1", + "relationship-data": [ + { + "relationship-key": "cloud-region.cloud-owner", + "relationship-value": "att-aic" + }, + { + "relationship-key": "cloud-region.cloud-region-id", + "relationship-value": "mtn6" + }, + { + "relationship-key": "tenant.tenant-id", + "relationship-value": "tenant-1" + } + ], + "related-to-property": [ + { + "property-key": "tenant.tenant-name", + "property-value": "AIN Web Tool-15-D-sspstMAIL" + } + ] + }, + { + "related-to": "tenant", + "related-link": "/aai/v11/cloud-infrastructure/cloud-regions/cloud-region/att-aic/mtn6/tenants/tenant/23cbbd1872864ea08aa53ade25d34172", + "relationship-data": [ + { + "relationship-key": "cloud-region.cloud-owner", + "relationship-value": "att-aic" + }, + { + "relationship-key": "cloud-region.cloud-region-id", + "relationship-value": "mtn6" + }, + { + "relationship-key": "tenant.tenant-id", + "relationship-value": "23cbbd1872864ea08aa53ade25d34172" + } + ], + "related-to-property": [ + { + "property-key": "tenant.tenant-name", + "property-value": "AIN Web Tool-15-D-rtertet" + } + ] + }, + { + "related-to": "tenant", + "related-link": "/aai/v11/cloud-infrastructure/cloud-regions/cloud-region/att-aic/mtn6/tenants/tenant/ded2432abb5a4100801331d07787afc2", + "relationship-data": [ + { + 
"relationship-key": "cloud-region.cloud-owner", + "relationship-value": "att-aic" + }, + { + "relationship-key": "cloud-region.cloud-region-id", + "relationship-value": "mtn6" + }, + { + "relationship-key": "tenant.tenant-id", + "relationship-value": "ded2432abb5a4100801331d07787afc2" + } + ], + "related-to-property": [ + { + "property-key": "tenant.tenant-name", + "property-value": "AIN/SMS-16-D-SSPecompFlvr2" + } + ] + }, + { + "related-to": "tenant", + "related-link": "/aai/v11/cloud-infrastructure/cloud-regions/cloud-region/att-aic/mtn6/tenants/tenant/ad5aab08f5f7435e9983311af543f311", + "relationship-data": [ + { + "relationship-key": "cloud-region.cloud-owner", + "relationship-value": "att-aic" + }, + { + "relationship-key": "cloud-region.cloud-region-id", + "relationship-value": "mtn6" + }, + { + "relationship-key": "tenant.tenant-id", + "relationship-value": "ad5aab08f5f7435e9983311af543f311" + } + ], + "related-to-property": [ + { + "property-key": "tenant.tenant-name", + "property-value": "AINWebTool-15-X-PODECOMP" + } + ] + }, + { + "related-to": "tenant", + "related-link": "/aai/v11/cloud-infrastructure/cloud-regions/cloud-region/att-aic/mtn6/tenants/tenant/8071dc66981c4294b05483a1cddac801", + "relationship-data": [ + { + "relationship-key": "cloud-region.cloud-owner", + "relationship-value": "att-aic" + }, + { + "relationship-key": "cloud-region.cloud-region-id", + "relationship-value": "mtn6" + }, + { + "relationship-key": "tenant.tenant-id", + "relationship-value": "8071dc66981c4294b05483a1cddac801" + } + ], + "related-to-property": [ + { + "property-key": "tenant.tenant-name", + "property-value": "CESAR-100-X-ecompPODprivate" + } + ] + }, + { + "related-to": "tenant", + "related-link": "/aai/v11/cloud-infrastructure/cloud-regions/cloud-region/att-aic/mtn6/tenants/tenant/036f769581904ca08ead1415c22b9ec0", + "relationship-data": [ + { + "relationship-key": "cloud-region.cloud-owner", + "relationship-value": "att-aic" + }, + { + "relationship-key": "cloud-region.cloud-region-id", + "relationship-value": "mtn6" + }, + { + "relationship-key": "tenant.tenant-id", + "relationship-value": "036f769581904ca08ead1415c22b9ec0" + } + ], + "related-to-property": [ + { + "property-key": "tenant.tenant-name", + "property-value": "AINWebTool-15-X-PODECOMPCLONE" + } + ] + }, + { + "related-to": "tenant", + "related-link": "/aai/v11/cloud-infrastructure/cloud-regions/cloud-region/att-aic/mtn6/tenants/tenant/c2475a36c5e04f12a21593849ae83420", + "relationship-data": [ + { + "relationship-key": "cloud-region.cloud-owner", + "relationship-value": "att-aic" + }, + { + "relationship-key": "cloud-region.cloud-region-id", + "relationship-value": "mtn6" + }, + { + "relationship-key": "tenant.tenant-id", + "relationship-value": "c2475a36c5e04f12a21593849ae83420" + } + ], + "related-to-property": [ + { + "property-key": "tenant.tenant-name", + "property-value": "AIN/SMS-16-X-hope" + } + ] + } + ] + } +}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/aai/service-subscription-service-subscription-3.json b/sparkybe-onap-service/src/test/resources/sync/aai/service-subscription-service-subscription-3.json new file mode 100644 index 0000000..39b46ad --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/aai/service-subscription-service-subscription-3.json @@ -0,0 +1,12 @@ +{ + "service-type": "service-subscription-3", + "resource-version": "1495736709052", + "service-instances": { + "service-instance": [ + { + "service-instance-id": "service-subscription-3", + "resource-version": "1495736709053" + } + ] + } +}
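The service-instance fixtures above all follow the same A&AI relationship-list shape: each relationship names a related-to object type, a related-link URI, a set of relationship-data key/value pairs, and optional related-to-property entries. As a rough illustration of how a test might walk one of these sync/aai fixtures, here is a minimal Jackson tree-model sketch; the class name, argument handling, and traversal are assumptions made for this example and are not taken from the actual sparky-be synchronizer code.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.File;
import java.io.IOException;

// Illustrative sketch only: walks an A&AI service-instance fixture like the ones
// above using Jackson's tree model. Class name and traversal are assumptions for
// this example, not the sparky-be implementation.
public class RelationshipFixtureWalker {

    public static void main(String[] args) throws IOException {
        // Pass the path of any sync/aai fixture, e.g. the
        // service-subscription-service-subscription-3.json file added above
        // (its instances carry no relationship-list, so the inner loop is a no-op there).
        File fixture = new File(args[0]);
        JsonNode root = new ObjectMapper().readTree(fixture);

        for (JsonNode instance : root.path("service-instances").path("service-instance")) {
            System.out.println("service-instance-id: "
                    + instance.path("service-instance-id").asText());

            // Fixtures that carry a relationship-list expose the related object's
            // type, link, and key/value identifiers, exactly as in the JSON above.
            for (JsonNode rel : instance.path("relationship-list").path("relationship")) {
                System.out.println("  related-to=" + rel.path("related-to").asText()
                        + " link=" + rel.path("related-link").asText());
                for (JsonNode data : rel.path("relationship-data")) {
                    System.out.println("    " + data.path("relationship-key").asText()
                            + " = " + data.path("relationship-value").asText());
                }
            }
        }
    }
}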
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/auth/emptyCert.p12 b/sparkybe-onap-service/src/test/resources/sync/auth/emptyCert.p12 new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/auth/emptyCert.p12 diff --git a/sparkybe-onap-service/src/test/resources/sync/etc/autoSuggestMappings.json b/sparkybe-onap-service/src/test/resources/sync/etc/autoSuggestMappings.json new file mode 100644 index 0000000..9e26dfe --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/etc/autoSuggestMappings.json @@ -0,0 +1 @@ +{}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/sync/etc/autoSuggestSettings.json b/sparkybe-onap-service/src/test/resources/sync/etc/autoSuggestSettings.json new file mode 100644 index 0000000..9e26dfe --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/sync/etc/autoSuggestSettings.json @@ -0,0 +1 @@ +{}
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/user-auth-reader/authorized-users-empty.config b/sparkybe-onap-service/src/test/resources/user-auth-reader/authorized-users-empty.config new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/user-auth-reader/authorized-users-empty.config diff --git a/sparkybe-onap-service/src/test/resources/user-auth-reader/authorized-users.config b/sparkybe-onap-service/src/test/resources/user-auth-reader/authorized-users.config new file mode 100644 index 0000000..b9bb4e2 --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/user-auth-reader/authorized-users.config @@ -0,0 +1,3 @@ +user1 +user2 user3 +user4
\ No newline at end of file diff --git a/sparkybe-onap-service/src/test/resources/user-validator/authorized-users.config b/sparkybe-onap-service/src/test/resources/user-validator/authorized-users.config new file mode 100644 index 0000000..58698bc --- /dev/null +++ b/sparkybe-onap-service/src/test/resources/user-validator/authorized-users.config @@ -0,0 +1,3 @@ + user1 +user2 user3 +user4
\ No newline at end of file
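The two authorized-users.config fixtures above (one under user-auth-reader, one under user-validator with a leading space before user1) show the expected on-disk format: user IDs delimited by spaces and/or newlines. Below is a minimal sketch of one way such a file could be parsed; the class name and parsing approach are assumptions for illustration only and are not the actual sparky-be user-auth-reader or user-validator implementation.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.LinkedHashSet;
import java.util.Set;

// Illustrative sketch only: reads a whitespace-delimited user list such as the
// authorized-users.config fixtures above. Users may be separated by spaces or
// newlines, with optional leading whitespace; blank tokens are ignored.
public class AuthorizedUsersFileReader {

    public static Set<String> readUsers(String configPath) throws IOException {
        Set<String> users = new LinkedHashSet<>();
        for (String line : Files.readAllLines(Paths.get(configPath))) {
            for (String token : line.trim().split("\\s+")) {
                if (!token.isEmpty()) {
                    users.add(token);
                }
            }
        }
        return users;
    }

    public static void main(String[] args) throws IOException {
        // Hypothetical path matching the test resource layout above; for either
        // fixture this yields the four IDs user1, user2, user3, user4, with the
        // leading space in the user-validator variant trimmed away.
        Set<String> users = readUsers("src/test/resources/user-auth-reader/authorized-users.config");
        System.out.println(users);
    }
}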