From c38dac308232f3e739fe18ce456b4ad10474d331 Mon Sep 17 00:00:00 2001
From: The n6 Development Team
Date: Wed, 13 Oct 2021 20:06:16 +0200
Subject: [PATCH] Version 3.0.0-beta1

---
 .gitignore | 4 +
 .n6-version | 2 +-
 CHANGELOG.md | 12 +
 N6AdminPanel/adminpanel.wsgi | 4 +
 N6AdminPanel/n6adminpanel/__init__.py | 3 +
 N6AdminPanel/n6adminpanel/admin_panel.conf | 307 +-
 N6AdminPanel/n6adminpanel/app.py | 550 +-
 .../n6adminpanel/mail_notices_helpers.py | 60 +
 .../n6adminpanel/org_request_helpers.py | 629 +
 N6AdminPanel/n6adminpanel/patches.py | 11 +-
 N6AdminPanel/n6adminpanel/static/logo.png | Bin 11724 -> 7689 bytes
 N6AdminPanel/n6adminpanel/tests/__init__.py | 0
 N6AdminPanel/n6adminpanel/tests/test_tools.py | 46 +
 N6AdminPanel/n6adminpanel/tools.py | 17 +-
 N6AdminPanel/setup.py | 50 +-
 N6BrokerAuthApi/n6brokerauthapi/__init__.py | 13 +-
 .../n6brokerauthapi/auth_stream_api.py | 13 +-
 .../tests/test_views_with_auth_stream_api.py | 18 +-
 N6BrokerAuthApi/n6brokerauthapi/views.py | 10 +-
 N6BrokerAuthApi/setup.py | 40 +-
 N6Core/n6/__init__.py | 43 +-
 N6Core/n6/archiver/mysqldb_patch.py | 171 -
 N6Core/n6/archiver/recorder.py | 278 +-
 N6Core/n6/base/config.py | 5 +-
 N6Core/n6/base/queue.py | 258 +-
 N6Core/n6/collectors/abuse_ch.py | 62 +-
 N6Core/n6/collectors/generic.py | 254 +-
 N6Core/n6/data/conf/05_enrich.conf | 14 +-
 N6Core/n6/data/conf/07_aggregator.conf | 5 +
 N6Core/n6/data/conf/09_manage.conf | 21 +-
 N6Core/n6/data/conf/11_jinja_rendering.conf | 59 +
 N6Core/n6/data/conf/11_mailing.conf | 242 +
 N6Core/n6/data/conf/21_recorder.conf | 17 +-
 N6Core/n6/data/conf/70_abuse_ch.conf | 2 +
 N6Core/n6/data/conf/pipeline.conf | 27 +
 N6Core/n6/parsers/abuse_ch.py | 28 +-
 N6Core/n6/parsers/generic.py | 32 +-
 N6Core/n6/parsers/misp.py | 16 +-
 N6Core/n6/parsers/packetmail.py | 17 +-
 .../collectors/_collectors_test_helpers.py | 27 +-
 N6Core/n6/tests/collectors/test_abuse_ch.py | 316 +-
 N6Core/n6/tests/collectors/test_generic.py | 779 +-
 N6Core/n6/tests/parsers/_parser_test_mixin.py | 6 +
 N6Core/n6/tests/parsers/conftest.py | 6 +
 N6Core/n6/tests/parsers/test_abuse_ch.py | 48 +-
 N6Core/n6/tests/parsers/test_generic.py | 46 +-
 N6Core/n6/tests/parsers/test_packetmail.py | 57 +-
 N6Core/n6/tests/utils/test_aggregator.py | 740 +-
 N6Core/n6/tests/utils/test_enrich.py | 1100 +-
 N6Core/n6/tests/utils/test_filter.py | 63 +-
 N6Core/n6/utils/aggregator.py | 143 +-
 N6Core/n6/utils/anonymizer.py | 3 -
 N6Core/n6/utils/comparator.py | 20 +-
 N6Core/n6/utils/enrich.py | 116 +-
 N6Core/n6/utils/filter.py | 17 +-
 N6Core/n6/utils/management/n6manage.py | 4 +-
 N6Core/n6/utils/recorder_conf_generator.py | 384 +
 N6Core/setup.py | 60 +-
 N6CoreLib/.n6-version | 1 +
 N6CoreLib/MANIFEST.in | 3 +
 N6CoreLib/n6corelib/__init__.py | 14 +
 .../n6corelib}/concurrency_helpers.py | 59 +-
 .../n6corelib}/email_message.py | 15 +-
 .../n6corelib}/manage_api/__init__.py | 7 +-
 .../n6corelib}/manage_api/_ca_env.py | 245 +-
 .../n6corelib}/manage_api/_manage_api.py | 67 +-
 .../n6corelib}/pki_related_test_helpers.py | 63 +-
 N6CoreLib/n6corelib/tests/__init__.py | 0
 .../n6corelib}/tests/test_manage_api.py | 211 +-
 .../n6corelib}/timeout_callback_manager.py | 40 +-
 .../n6corelib}/x509_helpers.py | 12 +-
 N6CoreLib/requirements | 10 +
 N6CoreLib/setup.py | 121 +
 N6DataPipeline/.n6-version | 1 +
 N6DataPipeline/MANIFEST.in | 2 +
 N6DataPipeline/console_scripts | 11 +
 N6DataPipeline/n6datapipeline/__init__.py | 3 +
 N6DataPipeline/n6datapipeline/aux/__init__.py | 0
 N6DataPipeline/n6datapipeline/base.py | 1790 +++
 N6DataPipeline/n6datapipeline/data/.gitkeep | 0
 .../n6datapipeline/intelmq/__init__.py | 213 +
 .../n6datapipeline/intelmq/bots_config.py | 328 +
 .../n6datapipeline/intelmq/helpers.py | 519 +
 .../n6datapipeline/intelmq/utils/__init__.py | 0
 .../intelmq/utils/intelmq_adapter.py | 80 +
 .../intelmq/utils/intelmq_converter.py | 494 +
 .../n6datapipeline/intelmq/wrapper.py | 203 +
 .../n6datapipeline/tests/__init__.py | 0
 .../n6datapipeline/tests/test_stub.py | 2 +
 N6DataPipeline/setup.py | 89 +
 N6DataSources/.n6-version | 1 +
 N6DataSources/MANIFEST.in | 2 +
 N6DataSources/console_scripts | 2 +
 N6DataSources/n6datasources/__init__.py | 3 +
 N6DataSources/n6datasources/data/.gitkeep | 0
 N6DataSources/n6datasources/tests/__init__.py | 0
 .../n6datasources/tests/test_stub.py | 2 +
 N6DataSources/setup.py | 89 +
 N6Lib-py2/.n6-version | 1 +
 N6Lib-py2/MANIFEST.in | 5 +
 N6Lib-py2/n6lib/__init__.py | 46 +
 N6Lib-py2/n6lib/_picklable_objs.py | 5 +
 N6Lib-py2/n6lib/amqp_getters_pushers.py | 543 +
 N6Lib-py2/n6lib/amqp_helpers.py | 174 +
 N6Lib-py2/n6lib/argument_parser.py | 66 +
 N6Lib-py2/n6lib/auth_api.py | 1882 +++
 N6Lib-py2/n6lib/auth_db/__init__.py | 66 +
 .../n6lib/auth_db/_before_alembic/__init__.py | 37 +
 .../auth_db/_before_alembic/legacy_models.py | 1714 ++
 .../legacy_simplified_validators.py | 327 +
 .../script_preparing_for_alembic.py | 354 +
 .../n6lib/auth_db/_ddl_naming_convention.py | 210 +
 N6Lib-py2/n6lib/auth_db/alembic.ini | 82 +
 N6Lib-py2/n6lib/auth_db/alembic/README.md | 92 +
 N6Lib-py2/n6lib/auth_db/alembic/env.py | 85 +
 .../n6lib/auth_db/alembic/script.py.mako | 24 +
 ...210c30b4fe6a__initial_alembic_migration.py | 55 +
 ...official_entity_related_stuff_revamped_.py | 248 +
 ...41__registrationrequest_ticket_id_added.py | 25 +
 ..._registrationrequest_orggroup_relation_.py | 31 +
 ...81__registrationrequest_id_type_changed.py | 150 +
 ...__orgconfigupdaterequest_added_related_.py | 165 +
 ...c9__registrationrequest_terms__version_.py | 30 +
 ...ew_official_entity_fields__alert_email_.py | 83 +
 ...user_api_key_id_api_key_id_modified_on_.py | 28 +
 ..._new_stuff_related_to_improved_logging_.py | 95 +
 ...__registrationrequest_csr_made_nullable.py | 56 +
 ...able_user_token_replaced_with_web_token.py | 79 +
 N6Lib-py2/n6lib/auth_db/api.py | 882 ++
 N6Lib-py2/n6lib/auth_db/audit_log.py | 816 +
 N6Lib-py2/n6lib/auth_db/config.py | 363 +
 N6Lib-py2/n6lib/auth_db/conftest.py | 5 +
 N6Lib-py2/n6lib/auth_db/fields.py | 312 +
 N6Lib-py2/n6lib/auth_db/models.py | 2445 +++
 N6Lib-py2/n6lib/auth_db/scripts.py | 630 +
 N6Lib-py2/n6lib/auth_db/sqlalchemy_helpers.py | 163 +
 N6Lib-py2/n6lib/auth_db/validators.py | 399 +
 N6Lib-py2/n6lib/class_helpers.py | 272 +
 N6Lib-py2/n6lib/common_helpers.py | 4215 +++++
 N6Lib-py2/n6lib/config.py | 4807 ++++++
 N6Lib-py2/n6lib/const.py | 169 +
 N6Lib-py2/n6lib/context_helpers.py | 1568 ++
 N6Lib-py2/n6lib/csv_helpers.py | 60 +
 .../mail_notice__mfa_config_done__EN.txt | 28 +
 .../mail_notice__mfa_config_done__PL.txt | 29 +
 .../mail_notice__mfa_config_erased__EN.txt | 26 +
 .../mail_notice__mfa_config_erased__PL.txt | 27 +
 ...l_notice__new_org_and_user_created__EN.txt | 31 +
 ...l_notice__new_org_and_user_created__PL.txt | 32 +
 ..._notice__org_config_update_applied__EN.txt | 210 +
 ..._notice__org_config_update_applied__PL.txt | 211 +
 ...notice__org_config_update_rejected__EN.txt | 209 +
 ...notice__org_config_update_rejected__PL.txt | 210 +
 ...otice__org_config_update_requested__EN.txt | 211 +
 ...otice__org_config_update_requested__PL.txt | 211 +
 .../mail_notice__password_reset_done__EN.txt | 25 +
 .../mail_notice__password_reset_done__PL.txt | 25 +
 ...l_notice__password_reset_requested__EN.txt | 33 +
 ...l_notice__password_reset_requested__PL.txt | 33 +
 N6Lib-py2/n6lib/data_backend_api.py | 1283 ++
 N6Lib-py2/n6lib/data_spec/__init__.py | 29 +
 N6Lib-py2/n6lib/data_spec/_data_spec.py | 1177 ++
 N6Lib-py2/n6lib/data_spec/fields.py | 398 +
 N6Lib-py2/n6lib/data_spec/typing_helpers.py | 12 +
 N6Lib-py2/n6lib/datetime_helpers.py | 738 +
 N6Lib-py2/n6lib/db_events.py | 372 +
 N6Lib-py2/n6lib/db_filtering_abstractions.py | 1494 ++
 N6Lib-py2/n6lib/generate_test_events.py | 514 +
 N6Lib-py2/n6lib/http_helpers.py | 389 +
 N6Lib-py2/n6lib/jinja_helpers.py | 288 +
 N6Lib-py2/n6lib/jwt_helpers.py | 126 +
 N6Lib-py2/n6lib/ldap_api_replacement.py | 1455 ++
 N6Lib-py2/n6lib/ldap_related_test_helpers.py | 329 +
 N6Lib-py2/n6lib/log_helpers.py | 655 +
 N6Lib-py2/n6lib/mail_notices_api.py | 669 +
 N6Lib-py2/n6lib/mail_sending_api.py | 661 +
 N6Lib-py2/n6lib/pyramid_commons/__init__.py | 80 +
 .../pyramid_commons/_config_converters.py | 125 +
 .../pyramid_commons/_generic_view_mixins.py | 401 +
 .../n6lib/pyramid_commons/_pyramid_commons.py | 1715 ++
 .../n6lib/pyramid_commons/_tween_factories.py | 157 +
 .../n6lib/pyramid_commons/data_spec_fields.py | 293 +
 .../n6lib/pyramid_commons/mfa_helpers.py | 94 +
 N6Lib-py2/n6lib/pyramid_commons/renderers.py | 342 +
 .../pyramid_commons/web_token_helpers.py | 28 +
 N6Lib-py2/n6lib/record_dict.py | 1007 ++
 N6Lib-py2/n6lib/rt_client_api.py | 277 +
 .../n6lib/sqlalchemy_related_test_helpers.py | 20 +
 .../structured_data_conversion/__init__.py | 16 +
 .../structured_data_conversion/converters.py | 1268 ++
 .../structured_data_conversion/exceptions.py | 186 +
 .../structured_data_conversion/interfaces.py | 602 +
 .../namespace_conversion_helpers.py | 349 +
 N6Lib-py2/n6lib/tests/__init__.py | 0
 .../IMPORTANT.txt | 2 +
 ...t---n6-service-ca-af2f68651a16f6567e07.pem | 32 +
 ...r---n6-service-ca-af2f68651a16f6567e07.pem | 27 +
 ...t---n6-service-ca-765496b0d44901863497.pem | 32 +
 ...r---n6-service-ca-765496b0d44901863497.pem | 27 +
 ...t---n6-service-ca-00000000000000000018.pem | 31 +
 ...r---n6-service-ca-00000000000000000018.pem | 27 +
 ...y---n6-service-ca-00000000000000000018.pem | 51 +
 ...rt---n6-client-ca-0000000000000000abcd.pem | 31 +
 ...sr---n6-client-ca-0000000000000000abcd.pem | 27 +
 ...ey---n6-client-ca-0000000000000000abcd.pem | 51 +
 ...rt---n6-client-ca-000000000000000eabcd.pem | 31 +
 ...sr---n6-client-ca-000000000000000eabcd.pem | 26 +
 .../ca-cert-n6-client-ca.pem | 30 +
 .../ca-cert-n6-service-ca.pem | 30 +
 .../ca-cert-root.pem | 29 +
 .../ca-config-n6-client-ca.cnf | 29 +
 .../ca-config-n6-service-ca.cnf | 34 +
 .../ca-csr-n6-client-ca.pem | 27 +
 .../ca-csr-n6-service-ca.pem | 27 +
 .../ca-key-n6-client-ca.pem | 51 +
 .../ca-key-n6-service-ca.pem | 51 +
 .../ca-key-root.pem | 51 +
 ...t---n6-service-ca-9956a34b77371f3931c1.pem | 31 +
 ...r---n6-service-ca-9956a34b77371f3931c1.pem | 27 +
 ...t---n6-service-ca-f9962d93676e439cdcb5.pem | 31 +
 ...r---n6-service-ca-f9962d93676e439cdcb5.pem | 27 +
 ...t---n6-service-ca-ce0c519c49fd5659271d.pem | 31 +
 ...r---n6-service-ca-ce0c519c49fd5659271d.pem | 27 +
 ...t---n6-service-ca-5b2637aaa005c88856d9.pem | 32 +
 ...r---n6-service-ca-5b2637aaa005c88856d9.pem | 27 +
 .../empty-ca-sect-config---n6-client-ca.cnf | 28 +
 ...ntCA_policy-sect-config---n6-client-ca.cnf | 28 +
 ...eCA_policy-sect-config---n6-service-ca.cnf | 31 +
 ...th-wrong-user-cert---n6-client-ca-1234.pem | 31 +
 ...ith-wrong-user-csr---n6-client-ca-1234.pem | 27 +
 ...ith-wrong-user-key---n6-client-ca-1234.pem | 51 +
 .../missing-ca-sect-config---n6-client-ca.cnf | 25 +
 ...-certificate-opt-config---n6-client-ca.cnf | 15 +
 ..._extensions-sect-config---n6-client-ca.cnf | 21 +
 ...ntCA_policy-sect-config---n6-client-ca.cnf | 24 +
 ...g-default_ca-opt-config---n6-client-ca.cnf | 15 +
 ...lt_crl_days-opt-config---n6-service-ca.cnf | 13 +
 ...default_days-opt-config---n6-client-ca.cnf | 27 +
 ...g-default_md-opt-config---n6-client-ca.cnf | 27 +
 ...-default_md-opt-config---n6-service-ca.cnf | 13 +
 .../missing-dir-opt-config---n6-client-ca.cnf | 15 +
 ...missing-identifier-keys---n6-client-ca.pem | 25 +
 ...ssing-policy-opt-config---n6-client-ca.cnf | 20 +
 ...eCA_policy-sect-config---n6-service-ca.cnf | 29 +
 ...t---n6-service-ca-fedcba12345678000000.pem | 31 +
 ...r---n6-service-ca-fedcba12345678000000.pem | 26 +
 ...rt---n6-client-ca-00000000000000001200.pem | 31 +
 ...sr---n6-client-ca-00000000000000001200.pem | 26 +
 ...t---n6-service-ca-1a26b67f5df2e5ba3eba.pem | 31 +
 ...r---n6-service-ca-1a26b67f5df2e5ba3eba.pem | 26 +
 ...t---n6-service-ca-f908c0489127701717b4.pem | 31 +
 ...r---n6-service-ca-f908c0489127701717b4.pem | 27 +
 ...rt---n6-client-ca-c55fd65ffe0671c4ba19.pem | 31 +
 ...sr---n6-client-ca-c55fd65ffe0671c4ba19.pem | 26 +
 ...rt---n6-client-ca-48a43f0059fbc1eb82b2.pem | 31 +
 ...sr---n6-client-ca-48a43f0059fbc1eb82b2.pem | 26 +
 ...rt---n6-client-ca-e61753a2f8e887770288.pem | 32 +
 ...sr---n6-client-ca-e61753a2f8e887770288.pem | 26 +
 ...t---n6-service-ca-00000000000000123456.pem | 31 +
 ...r---n6-service-ca-00000000000000123456.pem | 27 +
 ...r---n6-service-ca-2a64f0eee4ce12a2bdc9.pem | 16 +
 .../test_for_n6_manage_admin.csr | 16 +
 .../test_for_n6_manage_client.csr | 16 +
 .../test_for_n6_manage_component-inner.csr | 17 +
 .../test_for_n6_manage_component-outer.csr | 17 +
 .../test_for_n6_manage_private.key | 27 +
 ...t---n6-service-ca-a1717cc76c11b4b84faf.pem | 32 +
 ...r---n6-service-ca-a1717cc76c11b4b84faf.pem | 27 +
 N6Lib-py2/n6lib/threaded_async.py | 662 +
 N6Lib-py2/n6lib/typing_helpers.py | 67 +
 N6Lib-py2/n6lib/unit_test_helpers.py | 1380 ++
 N6Lib-py2/n6lib/unpacking_helpers.py | 99 +
 N6Lib-py2/n6lib/url_helpers.py | 794 +
 N6Lib-py2/requirements | 36 +
 N6Lib-py2/setup.py | 125 +
 N6Lib/.n6sdk-version | 1 -
 N6Lib/MANIFEST.in | 8 +-
 N6Lib/n6lib/__init__.py | 91 +-
 N6Lib/n6lib/_picklable_objs.py | 4 +-
 N6Lib/n6lib/amqp_getters_pushers.py | 74 +-
 N6Lib/n6lib/amqp_helpers.py | 150 +-
 N6Lib/n6lib/argument_parser.py | 4 +-
 N6Lib/n6lib/auth_api.py | 1797 ++-
 N6Lib/n6lib/auth_db/__init__.py | 58 +-
 .../n6lib/auth_db/_before_alembic/__init__.py | 37 +
 .../auth_db/_before_alembic/legacy_models.py | 1706 ++
 .../legacy_simplified_validators.py | 312 +
 .../script_preparing_for_alembic.py | 351 +
 N6Lib/n6lib/auth_db/_ddl_naming_convention.py | 205 +
 N6Lib/n6lib/auth_db/alembic.ini | 82 +
 N6Lib/n6lib/auth_db/alembic/README.md | 92 +
 N6Lib/n6lib/auth_db/alembic/env.py | 85 +
 N6Lib/n6lib/auth_db/alembic/script.py.mako | 24 +
 ...210c30b4fe6a__initial_alembic_migration.py | 55 +
 ...official_entity_related_stuff_revamped_.py | 248 +
 ...41__registrationrequest_ticket_id_added.py | 25 +
 ..._registrationrequest_orggroup_relation_.py | 31 +
 ...81__registrationrequest_id_type_changed.py | 150 +
 ...__orgconfigupdaterequest_added_related_.py | 165 +
 ...c9__registrationrequest_terms__version_.py | 30 +
 ...ew_official_entity_fields__alert_email_.py | 83 +
 ...user_api_key_id_api_key_id_modified_on_.py | 28 +
 ..._new_stuff_related_to_improved_logging_.py | 95 +
 ...__registrationrequest_csr_made_nullable.py | 56 +
 ...able_user_token_replaced_with_web_token.py | 79 +
 N6Lib/n6lib/auth_db/api.py | 800 +-
 N6Lib/n6lib/auth_db/audit_log.py | 205 +-
 N6Lib/n6lib/auth_db/config.py | 107 +-
 N6Lib/n6lib/auth_db/conftest.py | 5 +
 N6Lib/n6lib/auth_db/fields.py | 136 +-
 N6Lib/n6lib/auth_db/models.py | 1192 +-
 N6Lib/n6lib/auth_db/scripts.py | 394 +-
 N6Lib/n6lib/auth_db/sqlalchemy_helpers.py | 157 +
 N6Lib/n6lib/auth_db/validators.py | 169 +-
 N6Lib/n6lib/auth_related_test_helpers.py | 26 +-
 N6Lib/n6lib/class_helpers.py | 173 +-
 N6Lib/n6lib/common_helpers.py | 1865 ++-
 N6Lib/n6lib/config.py | 807 +-
 N6Lib/n6lib/const.py | 21 +-
 N6Lib/n6lib/context_helpers.py | 157 +-
 N6Lib/n6lib/csv_helpers.py | 54 +-
 .../mail_notice__mfa_config_done__EN.txt | 28 +
 .../mail_notice__mfa_config_done__PL.txt | 29 +
 .../mail_notice__mfa_config_erased__EN.txt | 26 +
 .../mail_notice__mfa_config_erased__PL.txt | 27 +
 ...l_notice__new_org_and_user_created__EN.txt | 31 +
 ...l_notice__new_org_and_user_created__PL.txt | 32 +
 ..._notice__org_config_update_applied__EN.txt | 210 +
 ..._notice__org_config_update_applied__PL.txt | 211 +
 ...notice__org_config_update_rejected__EN.txt | 209 +
 ...notice__org_config_update_rejected__PL.txt | 210 +
 ...otice__org_config_update_requested__EN.txt | 211 +
 ...otice__org_config_update_requested__PL.txt | 211 +
 .../mail_notice__password_reset_done__EN.txt | 25 +
 .../mail_notice__password_reset_done__PL.txt | 25 +
 ...l_notice__password_reset_requested__EN.txt | 33 +
 ...l_notice__password_reset_requested__PL.txt | 33 +
 N6Lib/n6lib/data_backend_api.py | 1560 +-
 N6Lib/n6lib/data_spec/__init__.py | 2 -
 N6Lib/n6lib/data_spec/_data_spec.py | 83 +-
 N6Lib/n6lib/data_spec/fields.py | 66 +-
 N6Lib/n6lib/data_spec/typing_helpers.py | 12 +
 N6Lib/n6lib/datetime_helpers.py | 692 +-
 N6Lib/n6lib/db_events.py | 191 +-
 N6Lib/n6lib/db_filtering_abstractions.py | 58 +-
 N6Lib/n6lib/generate_test_events.py | 33 +-
 N6Lib/n6lib/http_helpers.py | 82 +-
 N6Lib/n6lib/jinja_helpers.py | 286 +
 N6Lib/n6lib/jwt_helpers.py | 122 +
 N6Lib/n6lib/ldap_api_replacement.py | 438 +-
 N6Lib/n6lib/ldap_related_test_helpers.py | 18 +-
 N6Lib/n6lib/log_helpers.py | 180 +-
 N6Lib/n6lib/mail_notices_api.py | 666 +
 N6Lib/n6lib/mail_sending_api.py | 569 +
 N6Lib/n6lib/pyramid_commons/__init__.py | 59 +-
 .../pyramid_commons/_config_converters.py | 123 +
 .../pyramid_commons/_generic_view_mixins.py | 394 +
 .../n6lib/pyramid_commons/_pyramid_commons.py | 1543 +-
 .../n6lib/pyramid_commons/_tween_factories.py | 157 +
 .../n6lib/pyramid_commons/data_spec_fields.py | 292 +
 N6Lib/n6lib/pyramid_commons/mfa_helpers.py | 90 +
 N6Lib/n6lib/pyramid_commons/renderers.py | 252 +-
 .../pyramid_commons/web_token_helpers.py | 28 +
 N6Lib/n6lib/record_dict.py | 189 +-
 N6Lib/n6lib/rt_client_api.py | 268 +
 .../n6lib/sqlalchemy_related_test_helpers.py | 10 +-
 .../structured_data_conversion/__init__.py | 16 +
 .../structured_data_conversion/converters.py | 1250 ++
 .../structured_data_conversion/exceptions.py | 179 +
 .../structured_data_conversion/interfaces.py | 601 +
 .../namespace_conversion_helpers.py | 328 +
 ...quicktest.py => auth_related_quicktest.py} | 564 +-
 ...t---n6-service-ca-00000000000000000018.pem | 31 +
 ...r---n6-service-ca-00000000000000000018.pem | 27 +
 ...y---n6-service-ca-00000000000000000018.pem | 51 +
 .../empty-ca-sect-config---n6-client-ca.cnf | 28 +
 ...ntCA_policy-sect-config---n6-client-ca.cnf | 28 +
 ...eCA_policy-sect-config---n6-service-ca.cnf | 31 +
 .../missing-ca-sect-config---n6-client-ca.cnf | 25 +
 ...-certificate-opt-config---n6-client-ca.cnf | 15 +
 ..._extensions-sect-config---n6-client-ca.cnf | 21 +
 ...ntCA_policy-sect-config---n6-client-ca.cnf | 24 +
 ...g-default_ca-opt-config---n6-client-ca.cnf | 15 +
 ...lt_crl_days-opt-config---n6-service-ca.cnf | 13 +
 ...default_days-opt-config---n6-client-ca.cnf | 27 +
 ...g-default_md-opt-config---n6-client-ca.cnf | 27 +
 ...-default_md-opt-config---n6-service-ca.cnf | 13 +
 .../missing-dir-opt-config---n6-client-ca.cnf | 15 +
 ...missing-identifier-keys---n6-client-ca.pem | 25 +
 ...ssing-policy-opt-config---n6-client-ca.cnf | 20 +
 ...eCA_policy-sect-config---n6-service-ca.cnf | 29 +
 .../test_for_n6_manage_admin.csr | 16 +
 .../test_for_n6_manage_client.csr | 16 +
 .../test_for_n6_manage_component-inner.csr | 17 +
 .../test_for_n6_manage_component-outer.csr | 17 +
 .../test_for_n6_manage_private.key | 27 +
 .../n6lib/tests/test_amqp_getters_pushers.py | 83 +-
 N6Lib/n6lib/tests/test_amqp_helpers.py | 29 +-
 N6Lib/n6lib/tests/test_argument_parser.py | 2 +-
 N6Lib/n6lib/tests/test_auth_api.py | 334 +-
 N6Lib/n6lib/tests/test_auth_db_models.py | 202 +-
 N6Lib/n6lib/tests/test_auth_db_validation.py | 343 +-
 N6Lib/n6lib/tests/test_class_helpers.py | 10 +-
 N6Lib/n6lib/tests/test_common_helpers.py | 227 +-
 N6Lib/n6lib/tests/test_config.py | 302 +-
 N6Lib/n6lib/tests/test_data_backend_api.py | 287 +-
 N6Lib/n6lib/tests/test_data_spec.py | 36 +-
 N6Lib/n6lib/tests/test_data_spec_fields.py | 224 +-
 N6Lib/n6lib/tests/test_db_events.py | 42 +-
 .../n6lib/tests/test_generate_test_events.py | 14 +-
 N6Lib/n6lib/tests/test_log_helpers.py | 81 +-
 N6Lib/n6lib/tests/test_pyramid_commons.py | 858 +-
 .../tests/test_pyramid_commons_renderers.py | 395 +-
 N6Lib/n6lib/tests/test_record_dict.py | 1059 +-
 N6Lib/n6lib/tests/test_unit_test_helpers.py | 5 +-
 N6Lib/n6lib/tests/test_unpacking_helpers.py | 77 +-
 N6Lib/n6lib/threaded_async.py | 667 +
 N6Lib/n6lib/transaction_helpers.py | 118 -
 N6Lib/n6lib/typing_helpers.py | 59 +-
 N6Lib/n6lib/unit_test_helpers.py | 289 +-
 N6Lib/n6lib/unpacking_helpers.py | 79 +-
 N6Lib/n6lib/url_helpers.py | 443 +-
 N6Lib/requirements | 40 +-
 N6Lib/setup.py | 71 +-
 N6Portal/development.ini | 609 +-
 N6Portal/gui/package-lock.json | 4004 +++--
 N6Portal/gui/package.json | 45 +-
 N6Portal/gui/src/App.vue | 108 +-
 N6Portal/gui/src/components/BaseCriterion.vue | 104 +
 N6Portal/gui/src/components/DashboardPage.vue | 524 +
 .../gui/src/components/EditConfigPage.vue | 14 +
 N6Portal/gui/src/components/ErrorPage.vue | 2 +-
 N6Portal/gui/src/components/FileCriterion.vue | 81 +
 N6Portal/gui/src/components/InfoPage.vue | 116 +
 .../gui/src/components/InputActionButton.vue | 62 +
 .../gui/src/components/InputActionButtons.vue | 27 +
 .../gui/src/components/InputAddButton.vue | 60 +
 .../gui/src/components/InputDeleteButton.vue | 57 +
 N6Portal/gui/src/components/InputLabel.vue | 69 +
 N6Portal/gui/src/components/LangControls.vue | 39 +
 N6Portal/gui/src/components/LoginPage.vue | 77 +-
 .../gui/src/components/MultiCriterion.vue | 24 +
 .../gui/src/components/MultiValueGroup.vue | 48 +
 .../gui/src/components/RadioCriterion.vue | 53 +
 N6Portal/gui/src/components/RegisterForm.vue | 285 +
 N6Portal/gui/src/components/RegisterPage.vue | 22 +
 .../gui/src/components/RegisterTermsBox.vue | 124 +
 N6Portal/gui/src/components/SearchForm.vue | 18 +-
 .../src/components/SearchPageSearchType.vue | 36 +-
 .../src/components/SearchResultsDisplay.vue | 9 +-
 N6Portal/gui/src/components/TheHeader.vue | 1 -
 .../src/components/TheHeaderNavigation.vue | 147 +-
 .../gui/src/components/UserConfigTable.vue | 274 +
 .../components/edit_config_form/BaseField.vue | 54 +
 .../edit_config_form/CheckboxField.vue | 107 +
 .../edit_config_form/DeletedInputsLabel.vue | 45 +
 .../edit_config_form/DeletedMultiCriteria.vue | 120 +
 .../edit_config_form/EditConfigForm.vue | 673 +
 .../InputDeleteFromStateButton.vue | 75 +
 .../edit_config_form/MultiField.vue | 54 +
 .../edit_config_form/MultiValueFieldGroup.vue | 135 +
 .../edit_config_form/RadioField.vue | 104 +
 .../edit_config_form/TextAreaField.vue | 54 +
 N6Portal/gui/src/config/config.json | 11 +-
 .../gui/src/config/dashboardCategories.js | 141 +
 N6Portal/gui/src/config/registerCriteria.js | 121 +
 N6Portal/gui/src/config/searchCriteria.js | 12 +-
 N6Portal/gui/src/config/userConfigCriteria.js | 99 +
 N6Portal/gui/src/helpers/constants.js | 20 +
 N6Portal/gui/src/helpers/lang.js | 39 +
 N6Portal/gui/src/helpers/validators.js | 41 +
 N6Portal/gui/src/images/lang_en.png | Bin 0 -> 599 bytes
 N6Portal/gui/src/images/lang_pl.png | Bin 0 -> 374 bytes
 .../locales/EN/edit_config_form_fields.json | 42 +
 .../gui/src/locales/EN/error_messages.json | 4 +
 .../gui/src/locales/EN/register_form.json | 11 +
 .../src/locales/EN/register_form_fields.json | 45 +
 .../gui/src/locales/EN/register_terms.json | 17 +
 .../gui/src/locales/EN/settings_form.json | 6 +
 .../locales/EN/validation_error_messages.json | 19 +
 .../locales/PL/edit_config_form_fields.json | 42 +
 .../gui/src/locales/PL/error_messages.json | 4 +
 .../gui/src/locales/PL/register_form.json | 9 +
 .../src/locales/PL/register_form_fields.json | 45 +
 .../gui/src/locales/PL/register_terms.json | 17 +
 .../gui/src/locales/PL/settings_form.json | 5 +
 .../locales/PL/validation_error_messages.json | 19 +
 N6Portal/gui/src/main.js | 9 +-
 .../gui/src/mixins/EditSettingsFormMixin.js | 40 +
 N6Portal/gui/src/mixins/LangSetMixin.js | 23 +
 .../gui/src/mixins/ValidationErrorMessages.js | 71 +-
 N6Portal/gui/src/router/index.js | 118 +-
 N6Portal/gui/src/store/dashboard.js | 90 +
 N6Portal/gui/src/store/form.js | 49 +
 N6Portal/gui/src/store/index.js | 8 +
 N6Portal/gui/src/store/lang.js | 48 +
 N6Portal/gui/src/store/session.js | 9 +
 N6Portal/gui/src/store/user.js | 171 +
 N6Portal/gui/src/styles/_values.scss | 18 +
 N6Portal/gui/src/styles/base.scss | 19 +
 N6Portal/gui/src/styles/flash-message.scss | 3 -
 N6Portal/n6portal/__init__.py | 195 +-
 N6Portal/n6portal/tests/__init__.py | 0
 N6Portal/n6portal/tests/test_n6portal.py | 10 +
 N6Portal/production.ini | 623 +-
 N6Portal/react_app/.env.json | 8 +
 N6Portal/react_app/README.md | 16 +
 N6Portal/react_app/config/config_app.js | 378 +
 N6Portal/react_app/config/locale/en/tos.json | 11 +
 N6Portal/react_app/config/locale/pl/tos.json | 11 +
 N6Portal/react_app/config/run_app_server.js | 139 +
 N6Portal/react_app/config/schema/config.json | 4 +
 N6Portal/react_app/config/schema/en/tos.json | 7 +
 N6Portal/react_app/config/schema/pl/tos.json | 7 +
 N6Portal/react_app/config/static/app.js | 124 +
 N6Portal/react_app/config/static/style.css | 119 +
 N6Portal/react_app/config/views/index.pug | 71 +
 N6Portal/react_app/config/views/terms.pug | 161 +
 N6Portal/react_app/deploy.sh | 22 +
 N6Portal/react_app/package.json | 157 +
 .../favicons/android-chrome-192x192.png | Bin 0 -> 1531 bytes
 .../favicons/android-chrome-512x512.png | Bin 0 -> 3566 bytes
 .../public/favicons/apple-touch-icon.png | Bin 0 -> 1442 bytes
 .../public/favicons/browserconfig.xml | 9 +
 .../public/favicons/favicon-16x16.png | Bin 0 -> 588 bytes
 .../public/favicons/favicon-32x32.png | Bin 0 -> 741 bytes
 .../react_app/public/favicons/favicon.ico | Bin 0 -> 7406 bytes
 .../public/favicons/mstile-150x150.png | Bin 0 -> 1370 bytes
 .../public/favicons/safari-pinned-tab.svg | 31 +
 .../public/favicons/site.webmanifest | 19 +
 N6Portal/react_app/public/index.html | 112 +
 N6Portal/react_app/public/robots.txt | 3 +
 N6Portal/react_app/src/App.tsx | 57 +
 N6Portal/react_app/src/api/auth/index.ts | 123 +
 N6Portal/react_app/src/api/auth/types.ts | 18 +
 N6Portal/react_app/src/api/index.ts | 39 +
 N6Portal/react_app/src/api/orgConfig/index.ts | 28 +
 N6Portal/react_app/src/api/orgConfig/types.ts | 27 +
 N6Portal/react_app/src/api/register/index.ts | 9 +
 .../src/api/services/dashboard/index.ts | 19 +
 .../src/api/services/dashboard/types.ts | 7 +
 .../react_app/src/api/services/globalTypes.ts | 122 +
 .../react_app/src/api/services/info/index.ts | 32 +
 .../react_app/src/api/services/info/types.ts | 35 +
 .../src/api/services/report/index.ts | 40 +
 .../src/api/services/search/index.ts | 19 +
 .../components/errors/ApiLoaderFallback.tsx | 16 +
 .../errors/ErrorBoundaryFallback.tsx | 24 +
 .../src/components/errors/ErrorPage.tsx | 43 +
 .../src/components/forms/FormCheckbox.tsx | 104 +
 .../src/components/forms/FormDevTools.tsx | 16 +
 .../src/components/forms/FormFeedback.tsx | 26 +
 .../src/components/forms/FormFileUpload.tsx | 97 +
 .../src/components/forms/FormFilterInput.tsx | 149 +
 .../src/components/forms/FormInput.tsx | 167 +
 .../components/forms/FormInputReadonly.tsx | 28 +
 .../src/components/forms/FormRadio.tsx | 109 +
 .../forms/FormRenderCharCounter.tsx | 29 +
 .../components/forms/FormRenderErrorMsg.tsx | 41 +
 .../forms/FormRenderSelectedFile.tsx | 30 +
 .../src/components/forms/FormSelect.tsx | 99 +
 .../forms/datePicker/DatePicker.tsx | 158 +
 .../forms/datePicker/DatePickerCalendar.tsx | 152 +
 .../components/forms/datePicker/TimeInput.tsx | 50 +
 .../react_app/src/components/forms/utils.ts | 53 +
 .../forms/validation/validationRegexp.ts | 23 +
 .../forms/validation/validationSchema.ts | 96 +
 .../forms/validation/validationTypes.ts | 25 +
 .../components/forms/validation/validators.ts | 194 +
 .../src/components/layout/Footer.tsx | 19 +
 .../src/components/layout/Header.tsx | 66 +
 .../src/components/loading/ApiLoader.tsx | 37 +
 .../src/components/loading/AsyncLoader.tsx | 7 +
 .../src/components/loading/Loader.tsx | 16 +
 .../navigation/MobileNavigation.tsx | 46 +
 .../navigation/UserMenuNavigation.tsx | 74 +
 .../src/components/pages/account/Account.tsx | 246 +
 .../pages/editSettings/EditSettings.tsx | 16 +
 .../editSettings/EditSettingsFieldArray.tsx | 134 +
 .../pages/editSettings/EditSettingsForm.tsx | 353 +
 .../components/pages/editSettings/utils.ts | 95 +
 .../pages/forgotPassword/ForgotPassword.tsx | 39 +
 .../forgotPassword/ForgotPasswordError.tsx | 31 +
 .../forgotPassword/ForgotPasswordForm.tsx | 102 +
 .../forgotPassword/ForgotPasswordSuccess.tsx | 31 +
 .../forgotPassword/ResetPasswordError.tsx | 33 +
 .../forgotPassword/ResetPasswordForm.tsx | 117 +
 .../forgotPassword/ResetPasswordSuccess.tsx | 35 +
 .../components/pages/forgotPassword/utils.ts | 20 +
 .../components/pages/incidents/Incidents.tsx | 250 +
 .../pages/incidents/IncidentsFilter.tsx | 99 +
 .../pages/incidents/IncidentsForm.tsx | 132 +
 .../pages/incidents/IncidentsNoResources.tsx | 15 +
 .../src/components/pages/incidents/utils.ts | 169 +
 .../src/components/pages/login/Login.tsx | 31 +
 .../pages/login/LoginConfigMfaError.tsx | 27 +
 .../pages/login/LoginConfigMfaForm.tsx | 110 +
 .../pages/login/LoginConfigMfaSuccess.tsx | 51 +
 .../src/components/pages/login/LoginForm.tsx | 114 +
 .../components/pages/login/LoginMfaError.tsx | 27 +
 .../components/pages/login/LoginMfaForm.tsx | 100 +
 .../components/pages/noAccess/NoAccess.tsx | 21 +
 .../components/pages/notFound/NotFound.tsx | 21 +
 .../pages/organization/Organization.tsx | 51 +
 .../pages/organization/OrganizationCard.tsx | 31 +
 .../pages/organization/OrganizationHeader.tsx | 57 +
 .../src/components/pages/signUp/SignUp.tsx | 50 +
 .../components/pages/signUp/SignUpButtons.tsx | 39 +
 .../pages/signUp/SignUpFieldArray.tsx | 63 +
 .../components/pages/signUp/SignUpStepOne.tsx | 89 +
 .../components/pages/signUp/SignUpStepTwo.tsx | 175 +
 .../components/pages/signUp/SignUpSuccess.tsx | 25 +
 .../components/pages/signUp/SignUpWizard.tsx | 12 +
 .../pages/userSettings/UserSettings.tsx | 18 +
 .../pages/userSettings/UserSettingsApiKey.tsx | 31 +
 .../userSettings/UserSettingsApiKeyForm.tsx | 131 +
 .../UserSettingsConfirmationModal.tsx | 43 +
 .../pages/userSettings/UserSettingsMfa.tsx | 17 +
 .../UserSettingsMfaConfiguration.tsx | 62 +
 .../userSettings/UserSettingsMfaEdit.tsx | 63 +
 .../UserSettingsMfaConfigError.tsx | 24 +
 .../UserSettingsMfaConfigForm.tsx | 117 +
 .../UserSettingsMfaConfigSuccess.tsx | 24 +
 .../src/components/shared/ColumnFilter.tsx | 68 +
 .../src/components/shared/CustomButton.tsx | 65 +
 .../src/components/shared/ExportCSV.tsx | 113 +
 .../src/components/shared/ExportJSON.tsx | 48 +
 .../src/components/shared/LanguagePicker.tsx | 38 +
 .../src/components/shared/LoadingSpinner.tsx | 31 +
 .../src/components/shared/MfaQRCode.tsx | 37 +
 .../react_app/src/components/shared/Table.tsx | 145 +
 .../src/components/shared/Tooltip.tsx | 43 +
 .../src/components/shared/TrimmedUrl.tsx | 41 +
 .../src/components/shared/VirtualizedList.tsx | 25 +
 .../shared/customSelect/Components.tsx | 28 +
 .../shared/customSelect/CustomSelect.tsx | 129 +
 .../react_app/src/context/AuthContext.tsx | 89 +
 .../src/context/ForgotPasswordContext.tsx | 48 +
 .../src/context/LanguageProvider.tsx | 50 +
 .../react_app/src/context/LoginContext.tsx | 50 +
 .../src/context/MatchMediaContext.tsx | 47 +
 .../src/context/UserSettingsMfaContext.tsx | 54 +
 N6Portal/react_app/src/dictionary/index.ts | 733 +
 N6Portal/react_app/src/images/api-error.svg | 13 +
 N6Portal/react_app/src/images/appointment.svg | 14 +
 N6Portal/react_app/src/images/arrow_ico.svg | 3 +
 N6Portal/react_app/src/images/avatar.svg | 24 +
 N6Portal/react_app/src/images/calendar.svg | 9 +
 N6Portal/react_app/src/images/check-ico.svg | 10 +
 N6Portal/react_app/src/images/chevron.svg | 5 +
 N6Portal/react_app/src/images/close.svg | 9 +
 .../react_app/src/images/compress-ico.svg | 1 +
 N6Portal/react_app/src/images/email.svg | 24 +
 N6Portal/react_app/src/images/en-icon.svg | 30 +
 N6Portal/react_app/src/images/error.svg | 1 +
 N6Portal/react_app/src/images/error_ico.svg | 1 +
 N6Portal/react_app/src/images/expand-ico.svg | 1 +
 N6Portal/react_app/src/images/hierarchy.svg | 5 +
 N6Portal/react_app/src/images/logo_n6.svg | 12 +
 .../react_app/src/images/no-access-icon.svg | 7 +
 .../react_app/src/images/no-resources.svg | 1 +
 .../react_app/src/images/not-found-icon.svg | 24 +
 N6Portal/react_app/src/images/ok.svg | 1 +
 N6Portal/react_app/src/images/pl-icon.svg | 9 +
 N6Portal/react_app/src/images/plus.svg | 6 +
 .../react_app/src/images/question_mark.svg | 4 +
 N6Portal/react_app/src/images/reset.svg | 5 +
 N6Portal/react_app/src/images/restore.svg | 3 +
 N6Portal/react_app/src/images/success_ico.svg | 1 +
 N6Portal/react_app/src/images/update.svg | 14 +
 .../src/images/user-settings-api-key.svg | 8 +
 .../src/images/user-settings-mfa.svg | 15 +
 N6Portal/react_app/src/images/user.svg | 6 +
 N6Portal/react_app/src/index.tsx | 19 +
 N6Portal/react_app/src/react-app-env.d.ts | 1 +
 N6Portal/react_app/src/reportWebVitals.ts | 15 +
 .../react_app/src/routes/PrivateRoute.tsx | 22 +
 N6Portal/react_app/src/routes/Routes.tsx | 58 +
 .../react_app/src/routes/RoutesProvider.tsx | 23 +
 N6Portal/react_app/src/routes/routeList.ts | 15 +
 N6Portal/react_app/src/routes/types.ts | 8 +
 N6Portal/react_app/src/setupTests.ts | 5 +
 N6Portal/react_app/src/styles/_account.scss | 70 +
 N6Portal/react_app/src/styles/_buttons.scss | 125 +
 .../react_app/src/styles/_columnFilter.scss | 39 +
 .../react_app/src/styles/_customCheckbox.scss | 144 +
 .../react_app/src/styles/_customSelect.scss | 140 +
 .../react_app/src/styles/_datePicker.scss | 152 +
 .../react_app/src/styles/_editSettings.scss | 246 +
 N6Portal/react_app/src/styles/_error.scss | 63 +
 N6Portal/react_app/src/styles/_footer.scss | 4 +
 .../react_app/src/styles/_formFields.scss | 334 +
 N6Portal/react_app/src/styles/_functions.scss | 3 +
 N6Portal/react_app/src/styles/_header.scss | 74 +
 N6Portal/react_app/src/styles/_incidents.scss | 326 +
 N6Portal/react_app/src/styles/_layout.scss | 193 +
 N6Portal/react_app/src/styles/_login.scss | 239 +
 N6Portal/react_app/src/styles/_mixins.scss | 39 +
 .../react_app/src/styles/_organization.scss | 143 +
 .../react_app/src/styles/_reactTable.scss | 220 +
 N6Portal/react_app/src/styles/_signup.scss | 203 +
 N6Portal/react_app/src/styles/_spinner.scss | 46 +
 N6Portal/react_app/src/styles/_tooltip.scss | 24 +
 .../react_app/src/styles/_userSettings.scss | 139 +
 N6Portal/react_app/src/styles/_variables.scss | 63 +
 N6Portal/react_app/src/styles/style.scss | 30 +
 .../src/types/react-table-config.d.ts | 88 +
 .../react_app/src/utils/convertFormData.ts | 15 +
 .../src/utils/copyTextToClipboard.ts | 10 +
 .../react_app/src/utils/getScrollbarWidth.ts | 9 +
 N6Portal/react_app/src/utils/isObject.ts | 3 +
 N6Portal/react_app/src/utils/language.ts | 13 +
 N6Portal/react_app/src/utils/noop.ts | 2 +
 .../react_app/src/utils/parseRegisterData.ts | 13 +
 .../react_app/src/utils/parseResponseData.ts | 22 +
 .../react_app/src/utils/storageAvailable.ts | 11 +
 N6Portal/react_app/src/utils/trimUrl.ts | 4 +
 N6Portal/react_app/tsconfig.json | 25 +
 N6Portal/react_app/yarn.lock | 12973 ++++++++++++++++
 N6Portal/setup.py | 52 +-
 N6RestApi/n6web/__init__.py | 39 +-
 .../n6web/tests/{test.py => test_n6web.py} | 120 +-
 N6RestApi/production.ini | 11 +
 N6RestApi/setup.py | 58 +-
 N6SDK-py2/.n6-version | 1 +
 N6SDK-py2/ACKNOWLEDGEMENTS.txt | 7 +
 N6SDK-py2/MANIFEST.in | 4 +
 N6SDK-py2/NEWS.rst | 661 +
 N6SDK-py2/README.rst | 117 +
 N6SDK-py2/docs/Makefile | 189 +
 N6SDK-py2/docs/source/_static/.gitkeep | 0
 N6SDK-py2/docs/source/_templates/.gitkeep | 0
 N6SDK-py2/docs/source/api_test_tool.rst | 111 +
 N6SDK-py2/docs/source/conf.py | 332 +
 N6SDK-py2/docs/source/front_matter.rst | 1 +
 N6SDK-py2/docs/source/index.rst | 61 +
 N6SDK-py2/docs/source/lib_basic/data_spec.rst | 5 +
 .../source/lib_basic/data_spec_fields.rst | 5 +
 .../docs/source/lib_basic/exceptions.rst | 6 +
 .../docs/source/lib_basic/pyramid_commons.rst | 5 +
 .../lib_basic/pyramid_commons_renderers.rst | 5 +
 .../docs/source/lib_helpers/addr_helpers.rst | 4 +
 .../docs/source/lib_helpers/class_helpers.rst | 4 +
 .../source/lib_helpers/datetime_helpers.rst | 4 +
 .../source/lib_helpers/encoding_helpers.rst | 4 +
 N6SDK-py2/docs/source/lib_helpers/regexes.rst | 4 +
 N6SDK-py2/docs/source/library_reference.rst | 43 +
 N6SDK-py2/docs/source/release_notes.rst | 5 +
 N6SDK-py2/docs/source/tutorial.rst | 3127 ++++
 N6SDK-py2/n6sdk/__init__.py | 17 +
 N6SDK-py2/n6sdk/_api_test_tool/__init__.py | 0
 .../n6sdk/_api_test_tool/api_test_tool.py | 308 +
 N6SDK-py2/n6sdk/_api_test_tool/client.py | 72 +
 .../n6sdk/_api_test_tool/config_base.ini | 27 +
 N6SDK-py2/n6sdk/_api_test_tool/data_test.py | 42 +
 N6SDK-py2/n6sdk/_api_test_tool/report.py | 67 +
 .../_api_test_tool/validator_exceptions.py | 9 +
 N6SDK-py2/n6sdk/addr_helpers.py | 45 +
 N6SDK-py2/n6sdk/class_helpers.py | 301 +
 N6SDK-py2/n6sdk/data_spec/__init__.py | 67 +
 N6SDK-py2/n6sdk/data_spec/_data_spec.py | 1101 ++
 N6SDK-py2/n6sdk/data_spec/fields.py | 1313 ++
 N6SDK-py2/n6sdk/data_spec/utils.py | 120 +
 N6SDK-py2/n6sdk/datetime_helpers.py | 595 +
 N6SDK-py2/n6sdk/encoding_helpers.py | 720 +
 N6SDK-py2/n6sdk/exceptions.py | 492 +
 N6SDK-py2/n6sdk/pyramid_commons/__init__.py | 63 +
 .../n6sdk/pyramid_commons/_pyramid_commons.py | 1132 ++
 N6SDK-py2/n6sdk/pyramid_commons/renderers.py | 273 +
 N6SDK-py2/n6sdk/regexes.py | 278 +
 N6SDK-py2/requirements | 8 +
 N6SDK-py2/setup.py | 86 +
 N6SDK/.n6-version | 1 +
 N6SDK/.n6sdk-version | 1 -
 N6SDK/MANIFEST.in | 5 +-
 N6SDK/NEWS.rst | 41 +-
 N6SDK/README.rst | 117 +
 N6SDK/docs/source/api_test_tool.rst | 21 +
 N6SDK/docs/source/conf.py | 46 +-
 N6SDK/docs/source/index.rst | 21 +
 N6SDK/docs/source/library_reference.rst | 20 +
 N6SDK/docs/source/tutorial.rst | 28 +-
 N6SDK/n6sdk/__init__.py | 7 +-
 N6SDK/n6sdk/_api_test_tool/api_test_tool.py | 56 +-
 N6SDK/n6sdk/_api_test_tool/client.py | 10 +-
 N6SDK/n6sdk/_api_test_tool/data_test.py | 4 +-
 N6SDK/n6sdk/_api_test_tool/report.py | 26 +-
 .../_api_test_tool/validator_exceptions.py | 3 +
 N6SDK/n6sdk/addr_helpers.py | 27 +-
 N6SDK/n6sdk/class_helpers.py | 54 +-
 N6SDK/n6sdk/data_spec/__init__.py | 4 +-
 N6SDK/n6sdk/data_spec/_data_spec.py | 106 +-
 N6SDK/n6sdk/data_spec/fields.py | 401 +-
 N6SDK/n6sdk/data_spec/utils.py | 103 +-
 N6SDK/n6sdk/datetime_helpers.py | 149 +-
 N6SDK/n6sdk/encoding_helpers.py | 658 +-
 N6SDK/n6sdk/exceptions.py | 172 +-
 N6SDK/n6sdk/pyramid_commons/__init__.py | 4 +-
 .../n6sdk/pyramid_commons/_pyramid_commons.py | 478 +-
 N6SDK/n6sdk/pyramid_commons/renderers.py | 50 +-
 N6SDK/n6sdk/regexes.py | 34 +-
 N6SDK/n6sdk/scaffolds/__init__.py | 12 -
 .../+package+/__init__.py_tmpl | 103 -
 .../+package+/data_backend_api.py_tmpl | 116 -
 .../+package+/data_spec.py_tmpl | 161 -
 .../basic_n6sdk_scaffold/MANIFEST.in_tmpl | 2 -
 .../basic_n6sdk_scaffold/development.ini_tmpl | 60 -
 .../basic_n6sdk_scaffold/production.ini_tmpl | 54 -
 .../basic_n6sdk_scaffold/setup.py_tmpl | 38 -
 N6SDK/n6sdk/tests/_generic_helpers.py | 29 +-
 N6SDK/n6sdk/tests/test_data_spec.py | 528 +-
 N6SDK/n6sdk/tests/test_data_spec_fields.py | 5265 +++++--
 N6SDK/n6sdk/tests/test_doctests.py | 22 +-
 N6SDK/n6sdk/tests/test_pyramid_commons.py | 169 +-
 N6SDK/n6sdk/tests/test_regexes.py | 590 +-
 N6SDK/requirements | 6 +-
 N6SDK/setup.py | 52 +-
 README.md | 19 +-
 do_setup.py | 204 +-
 docker-compose.yml | 62 +-
 docker/base/Dockerfile | 90 +-
 docker/base/entrypoint.sh | 2 +-
 docker/bin/wait-for-services-outside.sh | 42 -
 docker/bin/wait-for-services.sh | 6 +-
 docker/mailhog/Dockerfile | 2 +
 docker/mongo/Dockerfile | 2 +
 docker/mysql/Dockerfile | 3 +-
 docker/rabbit/Dockerfile | 3 +-
 docker/rabbit/entrypoint.sh | 4 -
 docker/redis/Dockerfile | 2 +
 docker/rt/Dockerfile | 2 +
 docker/web/Dockerfile | 28 +-
 docker/worker/Dockerfile | 10 +-
 docs/guides/intelmq/config.md | 84 +
 docs/guides/intelmq/index.md | 11 +
 docs/guides/intelmq/running.md | 25 +
 docs/guides/new_source/index.md | 3 +
 docs/installation/configuration.md | 43 +-
 docs/installation/n6_core.md | 34 +-
 docs/installation/n6_web.md | 9 +-
 docs/installation/supervisord.md | 1 -
 docs/installation/system.md | 41 +-
 .../sites-available/n6-adminpanel.conf | 4 +-
 etc/apache2/sites-available/n6-api.conf | 4 +-
 etc/apache2/sites-available/n6-portal.conf | 10 +-
 .../sites-available/n6-public-data.conf | 31 -
 etc/mysql/initdb/1_create_tables.sql | 14 +-
 etc/mysql/insertdb/insert_data_auth_db.sql | 78 +
 etc/n6/05_enrich.conf | 8 +
 etc/n6/07_aggregator.conf | 5 +
 etc/n6/09_auth_db.conf | 3 +
 etc/n6/09_manage.conf | 6 +
 etc/n6/21_recorder.conf | 14 +-
 etc/n6/70_abuse_ch.conf | 4 +-
 etc/n6/70_spam404.conf | 8 +
 etc/n6/70_zone_h.conf | 10 -
 etc/n6/admin_panel.conf | 307 +-
 etc/ssl/generate_certs.sh | 7 +-
 etc/ssl/openssl.cnf | 5 +-
 .../{n6enrich.conf.bak => n6enrich.conf} | 2 +-
 .../n6parser_abusechurlhausurls202001.conf | 15 -
 ...er_zonehrss.conf => n6parser_spam404.conf} | 4 +-
 etc/web/conf/api.ini | 31 +
 etc/web/conf/gui-config.json | 15 +-
 etc/web/conf/portal.ini | 620 +-
 etc/web/wsgi/adminpanel.wsgi | 4 +
 etc/web/wsgi/api.wsgi | 4 +
 etc/web/wsgi/portal.wsgi | 5 +
 test_do_setup.py | 548 +-
 869 files changed, 140244 insertions(+), 13505 deletions(-)
 create mode 100644 CHANGELOG.md
 create mode 100644 N6AdminPanel/n6adminpanel/mail_notices_helpers.py
 create mode 100644 N6AdminPanel/n6adminpanel/org_request_helpers.py
 create mode 100644 N6AdminPanel/n6adminpanel/tests/__init__.py
 create mode 100644 N6AdminPanel/n6adminpanel/tests/test_tools.py
 delete mode 100644 N6Core/n6/archiver/mysqldb_patch.py
 create mode 100644 N6Core/n6/data/conf/11_jinja_rendering.conf
 create mode 100644 N6Core/n6/data/conf/11_mailing.conf
 create mode 100644 N6Core/n6/data/conf/pipeline.conf
 create mode 100644 N6Core/n6/tests/parsers/conftest.py
 create mode 100644 N6Core/n6/utils/recorder_conf_generator.py
 create mode 120000 N6CoreLib/.n6-version
 create mode 100644 N6CoreLib/MANIFEST.in
 create mode 100644 N6CoreLib/n6corelib/__init__.py
 rename {N6Lib/n6lib => N6CoreLib/n6corelib}/concurrency_helpers.py (93%)
 rename {N6Lib/n6lib => N6CoreLib/n6corelib}/email_message.py (94%)
 rename {N6Lib/n6lib => N6CoreLib/n6corelib}/manage_api/__init__.py (50%)
 rename {N6Lib/n6lib => N6CoreLib/n6corelib}/manage_api/_ca_env.py (65%)
 rename {N6Lib/n6lib => N6CoreLib/n6corelib}/manage_api/_manage_api.py (97%)
 rename {N6Lib/n6lib => N6CoreLib/n6corelib}/pki_related_test_helpers.py (84%)
 create mode 100644 N6CoreLib/n6corelib/tests/__init__.py
 rename {N6Lib/n6lib => N6CoreLib/n6corelib}/tests/test_manage_api.py (87%)
 rename {N6Lib/n6lib => N6CoreLib/n6corelib}/timeout_callback_manager.py (96%)
 rename {N6Lib/n6lib => N6CoreLib/n6corelib}/x509_helpers.py (96%)
 create mode 100644 N6CoreLib/requirements
 create mode 100644 N6CoreLib/setup.py
 create mode 120000 N6DataPipeline/.n6-version
 create mode 100644 N6DataPipeline/MANIFEST.in
 create mode 100644 N6DataPipeline/console_scripts
 create mode 100644 N6DataPipeline/n6datapipeline/__init__.py
 create mode 100644 N6DataPipeline/n6datapipeline/aux/__init__.py
 create mode 100644 N6DataPipeline/n6datapipeline/base.py
 create mode 100644 N6DataPipeline/n6datapipeline/data/.gitkeep
 create mode 100644 N6DataPipeline/n6datapipeline/intelmq/__init__.py
 create mode 100644 N6DataPipeline/n6datapipeline/intelmq/bots_config.py
 create mode 100644 N6DataPipeline/n6datapipeline/intelmq/helpers.py
 create mode 100644 N6DataPipeline/n6datapipeline/intelmq/utils/__init__.py
 create mode 100644 N6DataPipeline/n6datapipeline/intelmq/utils/intelmq_adapter.py
 create mode 100644 N6DataPipeline/n6datapipeline/intelmq/utils/intelmq_converter.py
 create mode 100644 N6DataPipeline/n6datapipeline/intelmq/wrapper.py
 create mode 100644 N6DataPipeline/n6datapipeline/tests/__init__.py
 create mode 100644 N6DataPipeline/n6datapipeline/tests/test_stub.py
 create mode 100644 N6DataPipeline/setup.py
 create mode 120000 N6DataSources/.n6-version
 create mode 100644 N6DataSources/MANIFEST.in
 create mode 100644 N6DataSources/console_scripts
 create mode 100644 N6DataSources/n6datasources/__init__.py
 create mode 100644 N6DataSources/n6datasources/data/.gitkeep
 create mode 100644 N6DataSources/n6datasources/tests/__init__.py
 create mode 100644 N6DataSources/n6datasources/tests/test_stub.py
 create mode 100644 N6DataSources/setup.py
 create mode 120000 N6Lib-py2/.n6-version
 create mode 100644 N6Lib-py2/MANIFEST.in
 create mode 100644 N6Lib-py2/n6lib/__init__.py
 create mode 100644 N6Lib-py2/n6lib/_picklable_objs.py
 create mode 100644 N6Lib-py2/n6lib/amqp_getters_pushers.py
 create mode 100644 N6Lib-py2/n6lib/amqp_helpers.py
 create mode 100644 N6Lib-py2/n6lib/argument_parser.py
 create mode 100644 N6Lib-py2/n6lib/auth_api.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/__init__.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/_before_alembic/__init__.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/_before_alembic/legacy_models.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/_before_alembic/legacy_simplified_validators.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/_before_alembic/script_preparing_for_alembic.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/_ddl_naming_convention.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/alembic.ini
 create mode 100644 N6Lib-py2/n6lib/auth_db/alembic/README.md
 create mode 100644 N6Lib-py2/n6lib/auth_db/alembic/env.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/alembic/script.py.mako
 create mode 100644 N6Lib-py2/n6lib/auth_db/alembic/versions/2020_03_26__210c30b4fe6a__initial_alembic_migration.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/alembic/versions/2020_03_27__9327d279a219__official_entity_related_stuff_revamped_.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/alembic/versions/2020_05_28__896531976b41__registrationrequest_ticket_id_added.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/alembic/versions/2020_06_04__bed3681c9298__registrationrequest_orggroup_relation_.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/alembic/versions/2020_12_15__f61f4c188981__registrationrequest_id_type_changed.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/alembic/versions/2021_03_23__542ccb6fc926__orgconfigupdaterequest_added_related_.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/alembic/versions/2021_04_21__e6244c2249c9__registrationrequest_terms__version_.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/alembic/versions/2021_04_24__1ca02fb79db7__new_official_entity_fields__alert_email_.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/alembic/versions/2021_06_21__d3974815f709__user_api_key_id_api_key_id_modified_on_.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/alembic/versions/2021_06_23__2dc50c28cf4b__new_stuff_related_to_improved_logging_.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/alembic/versions/2021_07_10__ec342464057e__registrationrequest_csr_made_nullable.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/alembic/versions/2021_07_16__4f5d01396b61__table_user_token_replaced_with_web_token.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/api.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/audit_log.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/config.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/conftest.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/fields.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/models.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/scripts.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/sqlalchemy_helpers.py
 create mode 100644 N6Lib-py2/n6lib/auth_db/validators.py
 create mode 100644 N6Lib-py2/n6lib/class_helpers.py
 create mode 100644 N6Lib-py2/n6lib/common_helpers.py
 create mode 100644 N6Lib-py2/n6lib/config.py
 create mode 100644 N6Lib-py2/n6lib/const.py
 create mode 100644 N6Lib-py2/n6lib/context_helpers.py
 create mode 100644 N6Lib-py2/n6lib/csv_helpers.py
 create mode 100644 N6Lib-py2/n6lib/data/templates/mail_notice__mfa_config_done__EN.txt
 create mode 100644 N6Lib-py2/n6lib/data/templates/mail_notice__mfa_config_done__PL.txt
 create mode 100644 N6Lib-py2/n6lib/data/templates/mail_notice__mfa_config_erased__EN.txt
 create mode 100644 N6Lib-py2/n6lib/data/templates/mail_notice__mfa_config_erased__PL.txt
 create mode 100644 N6Lib-py2/n6lib/data/templates/mail_notice__new_org_and_user_created__EN.txt
 create mode 100644 N6Lib-py2/n6lib/data/templates/mail_notice__new_org_and_user_created__PL.txt
 create mode 100644 N6Lib-py2/n6lib/data/templates/mail_notice__org_config_update_applied__EN.txt
 create mode 100644 N6Lib-py2/n6lib/data/templates/mail_notice__org_config_update_applied__PL.txt
 create mode 100644 N6Lib-py2/n6lib/data/templates/mail_notice__org_config_update_rejected__EN.txt
 create mode 100644 N6Lib-py2/n6lib/data/templates/mail_notice__org_config_update_rejected__PL.txt
 create mode 100644 N6Lib-py2/n6lib/data/templates/mail_notice__org_config_update_requested__EN.txt
 create mode 100644 N6Lib-py2/n6lib/data/templates/mail_notice__org_config_update_requested__PL.txt
 create mode 100644 N6Lib-py2/n6lib/data/templates/mail_notice__password_reset_done__EN.txt
 create mode 100644 N6Lib-py2/n6lib/data/templates/mail_notice__password_reset_done__PL.txt
 create mode 100644 N6Lib-py2/n6lib/data/templates/mail_notice__password_reset_requested__EN.txt
 create mode 100644 N6Lib-py2/n6lib/data/templates/mail_notice__password_reset_requested__PL.txt
 create mode 100644 N6Lib-py2/n6lib/data_backend_api.py
 create mode 100644 N6Lib-py2/n6lib/data_spec/__init__.py
 create mode 100644 N6Lib-py2/n6lib/data_spec/_data_spec.py
 create mode 100644 N6Lib-py2/n6lib/data_spec/fields.py
 create mode 100644 N6Lib-py2/n6lib/data_spec/typing_helpers.py
 create mode 100644 N6Lib-py2/n6lib/datetime_helpers.py
 create mode 100644 N6Lib-py2/n6lib/db_events.py
 create mode 100644 N6Lib-py2/n6lib/db_filtering_abstractions.py
 create mode 100644 N6Lib-py2/n6lib/generate_test_events.py
 create mode 100644 N6Lib-py2/n6lib/http_helpers.py
 create mode 100644 N6Lib-py2/n6lib/jinja_helpers.py
 create mode 100644 N6Lib-py2/n6lib/jwt_helpers.py
 create mode 100644 N6Lib-py2/n6lib/ldap_api_replacement.py
 create mode 100644 N6Lib-py2/n6lib/ldap_related_test_helpers.py
 create mode 100644 N6Lib-py2/n6lib/log_helpers.py
 create mode 100644 N6Lib-py2/n6lib/mail_notices_api.py
 create mode 100644 N6Lib-py2/n6lib/mail_sending_api.py
 create mode 100644 N6Lib-py2/n6lib/pyramid_commons/__init__.py
 create mode 100644 N6Lib-py2/n6lib/pyramid_commons/_config_converters.py
 create mode 100644 N6Lib-py2/n6lib/pyramid_commons/_generic_view_mixins.py
 create mode 100644 N6Lib-py2/n6lib/pyramid_commons/_pyramid_commons.py
 create mode 100644 N6Lib-py2/n6lib/pyramid_commons/_tween_factories.py
 create mode 100644 N6Lib-py2/n6lib/pyramid_commons/data_spec_fields.py
 create mode 100644 N6Lib-py2/n6lib/pyramid_commons/mfa_helpers.py
 create mode 100644 N6Lib-py2/n6lib/pyramid_commons/renderers.py
 create mode 100644 N6Lib-py2/n6lib/pyramid_commons/web_token_helpers.py
 create mode 100644 N6Lib-py2/n6lib/record_dict.py
 create mode 100644 N6Lib-py2/n6lib/rt_client_api.py
 create mode 100644 N6Lib-py2/n6lib/sqlalchemy_related_test_helpers.py
 create mode 100644 N6Lib-py2/n6lib/structured_data_conversion/__init__.py
 create mode 100644 N6Lib-py2/n6lib/structured_data_conversion/converters.py
 create mode 100644 N6Lib-py2/n6lib/structured_data_conversion/exceptions.py
 create mode 100644 N6Lib-py2/n6lib/structured_data_conversion/interfaces.py
 create mode 100644 N6Lib-py2/n6lib/structured_data_conversion/namespace_conversion_helpers.py
 create mode 100644 N6Lib-py2/n6lib/tests/__init__.py
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/IMPORTANT.txt
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/admin-internal-cert---n6-service-ca-af2f68651a16f6567e07.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/admin-internal-csr---n6-service-ca-af2f68651a16f6567e07.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/admin-user-cert---n6-service-ca-765496b0d44901863497.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/admin-user-csr---n6-service-ca-765496b0d44901863497.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/ads-adm-cert---n6-service-ca-00000000000000000018.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/ads-adm-csr---n6-service-ca-00000000000000000018.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/ads-adm-key---n6-service-ca-00000000000000000018.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/app-user-cert---n6-client-ca-0000000000000000abcd.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/app-user-csr---n6-client-ca-0000000000000000abcd.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/app-user-key---n6-client-ca-0000000000000000abcd.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/app-user-wrong-cn-cert---n6-client-ca-000000000000000eabcd.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/app-user-wrong-cn-csr---n6-client-ca-000000000000000eabcd.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/ca-cert-n6-client-ca.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/ca-cert-n6-service-ca.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/ca-cert-root.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/ca-config-n6-client-ca.cnf
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/ca-config-n6-service-ca.cnf
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/ca-csr-n6-client-ca.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/ca-csr-n6-service-ca.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/ca-key-n6-client-ca.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/ca-key-n6-service-ca.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/ca-key-root.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/comp-not-matching-ou-cert---n6-service-ca-9956a34b77371f3931c1.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/comp-not-matching-ou-csr---n6-service-ca-9956a34b77371f3931c1.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/component-cert---n6-service-ca-f9962d93676e439cdcb5.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/component-csr---n6-service-ca-f9962d93676e439cdcb5.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/component-internal-cert---n6-service-ca-ce0c519c49fd5659271d.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/component-internal-csr---n6-service-ca-ce0c519c49fd5659271d.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/component-other-internal-cert---n6-service-ca-5b2637aaa005c88856d9.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/component-other-internal-csr---n6-service-ca-5b2637aaa005c88856d9.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/empty-ca-sect-config---n6-client-ca.cnf
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/empty-clientCA_policy-sect-config---n6-client-ca.cnf
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/empty-server_component_serviceCA_policy-sect-config---n6-service-ca.cnf
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/inv-app-user-with-wrong-user-cert---n6-client-ca-1234.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/inv-app-user-with-wrong-user-csr---n6-client-ca-1234.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/inv-app-user-with-wrong-user-key---n6-client-ca-1234.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/missing-ca-sect-config---n6-client-ca.cnf
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/missing-certificate-opt-config---n6-client-ca.cnf
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/missing-certificate_extensions-sect-config---n6-client-ca.cnf
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/missing-clientCA_policy-sect-config---n6-client-ca.cnf
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/missing-default_ca-opt-config---n6-client-ca.cnf
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/missing-default_crl_days-opt-config---n6-service-ca.cnf
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/missing-default_days-opt-config---n6-client-ca.cnf
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/missing-default_md-opt-config---n6-client-ca.cnf
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/missing-default_md-opt-config---n6-service-ca.cnf
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/missing-dir-opt-config---n6-client-ca.cnf
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/missing-identifier-keys---n6-client-ca.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/missing-policy-opt-config---n6-client-ca.cnf
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/missing-server_component_serviceCA_policy-sect-config---n6-service-ca.cnf
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/no-o-cert---n6-service-ca-fedcba12345678000000.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/no-o-csr---n6-service-ca-fedcba12345678000000.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/no-org-cert---n6-client-ca-00000000000000001200.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/no-org-csr---n6-client-ca-00000000000000001200.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/no-ou-cert---n6-service-ca-1a26b67f5df2e5ba3eba.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/no-ou-csr---n6-service-ca-1a26b67f5df2e5ba3eba.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/other-admin-cert---n6-service-ca-f908c0489127701717b4.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/other-admin-csr---n6-service-ca-f908c0489127701717b4.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/other-revocation-cert---n6-client-ca-c55fd65ffe0671c4ba19.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/other-revocation-csr---n6-client-ca-c55fd65ffe0671c4ba19.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/revocation-fields-cert---n6-client-ca-48a43f0059fbc1eb82b2.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/revocation-fields-csr---n6-client-ca-48a43f0059fbc1eb82b2.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/revoked-cert---n6-client-ca-e61753a2f8e887770288.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/revoked-csr---n6-client-ca-e61753a2f8e887770288.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/serv-comp-cert---n6-service-ca-00000000000000123456.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/serv-comp-csr---n6-service-ca-00000000000000123456.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/serv-comp-inv-org-csr---n6-service-ca-2a64f0eee4ce12a2bdc9.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/test_for_n6_manage_admin.csr
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/test_for_n6_manage_client.csr
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/test_for_n6_manage_component-inner.csr
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/test_for_n6_manage_component-outer.csr
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/test_for_n6_manage_private.key
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/user-not-matching-ou-cert---n6-service-ca-a1717cc76c11b4b84faf.pem
 create mode 100644 N6Lib-py2/n6lib/tests/certs_and_requests_for_testing/user-not-matching-ou-csr---n6-service-ca-a1717cc76c11b4b84faf.pem
 create mode 100644 N6Lib-py2/n6lib/threaded_async.py
 create mode 100644 N6Lib-py2/n6lib/typing_helpers.py
 create mode 100644 N6Lib-py2/n6lib/unit_test_helpers.py
 create mode 100644 N6Lib-py2/n6lib/unpacking_helpers.py
 create mode 100644 N6Lib-py2/n6lib/url_helpers.py
 create mode 100644 N6Lib-py2/requirements
 create mode 100644 N6Lib-py2/setup.py
 delete mode 120000 N6Lib/.n6sdk-version
 create mode 100644 N6Lib/n6lib/auth_db/_before_alembic/__init__.py
 create mode 100644 N6Lib/n6lib/auth_db/_before_alembic/legacy_models.py
 create mode 100644 N6Lib/n6lib/auth_db/_before_alembic/legacy_simplified_validators.py
 create mode 100644 N6Lib/n6lib/auth_db/_before_alembic/script_preparing_for_alembic.py
 create mode 100644 N6Lib/n6lib/auth_db/_ddl_naming_convention.py
 create mode 100644 N6Lib/n6lib/auth_db/alembic.ini
 create mode 100644 N6Lib/n6lib/auth_db/alembic/README.md
 create mode 100644 N6Lib/n6lib/auth_db/alembic/env.py
 create mode 100644 N6Lib/n6lib/auth_db/alembic/script.py.mako
 create mode 100644 N6Lib/n6lib/auth_db/alembic/versions/2020_03_26__210c30b4fe6a__initial_alembic_migration.py
 create mode 100644 N6Lib/n6lib/auth_db/alembic/versions/2020_03_27__9327d279a219__official_entity_related_stuff_revamped_.py
 create mode 100644 N6Lib/n6lib/auth_db/alembic/versions/2020_05_28__896531976b41__registrationrequest_ticket_id_added.py
 create mode 100644 N6Lib/n6lib/auth_db/alembic/versions/2020_06_04__bed3681c9298__registrationrequest_orggroup_relation_.py
 create mode 100644 N6Lib/n6lib/auth_db/alembic/versions/2020_12_15__f61f4c188981__registrationrequest_id_type_changed.py
 create mode 100644 N6Lib/n6lib/auth_db/alembic/versions/2021_03_23__542ccb6fc926__orgconfigupdaterequest_added_related_.py
 create mode 100644 N6Lib/n6lib/auth_db/alembic/versions/2021_04_21__e6244c2249c9__registrationrequest_terms__version_.py
 create mode 100644 N6Lib/n6lib/auth_db/alembic/versions/2021_04_24__1ca02fb79db7__new_official_entity_fields__alert_email_.py
 create mode 100644 N6Lib/n6lib/auth_db/alembic/versions/2021_06_21__d3974815f709__user_api_key_id_api_key_id_modified_on_.py
 create mode 100644 N6Lib/n6lib/auth_db/alembic/versions/2021_06_23__2dc50c28cf4b__new_stuff_related_to_improved_logging_.py
 create mode 100644 N6Lib/n6lib/auth_db/alembic/versions/2021_07_10__ec342464057e__registrationrequest_csr_made_nullable.py
 create mode 100644 N6Lib/n6lib/auth_db/alembic/versions/2021_07_16__4f5d01396b61__table_user_token_replaced_with_web_token.py
 create mode 100644 N6Lib/n6lib/auth_db/conftest.py
 create mode 100644 N6Lib/n6lib/auth_db/sqlalchemy_helpers.py
 create mode 100644 N6Lib/n6lib/data/templates/mail_notice__mfa_config_done__EN.txt
 create mode 100644 N6Lib/n6lib/data/templates/mail_notice__mfa_config_done__PL.txt
 create mode 100644 N6Lib/n6lib/data/templates/mail_notice__mfa_config_erased__EN.txt
 create mode 100644 N6Lib/n6lib/data/templates/mail_notice__mfa_config_erased__PL.txt
 create mode 100644 N6Lib/n6lib/data/templates/mail_notice__new_org_and_user_created__EN.txt
 create mode 100644 N6Lib/n6lib/data/templates/mail_notice__new_org_and_user_created__PL.txt
 create mode 100644 N6Lib/n6lib/data/templates/mail_notice__org_config_update_applied__EN.txt
 create mode 100644 N6Lib/n6lib/data/templates/mail_notice__org_config_update_applied__PL.txt
 create mode 100644 N6Lib/n6lib/data/templates/mail_notice__org_config_update_rejected__EN.txt
 create mode 100644 N6Lib/n6lib/data/templates/mail_notice__org_config_update_rejected__PL.txt
 create mode 100644 N6Lib/n6lib/data/templates/mail_notice__org_config_update_requested__EN.txt
 create mode 100644 N6Lib/n6lib/data/templates/mail_notice__org_config_update_requested__PL.txt
 create mode 100644 N6Lib/n6lib/data/templates/mail_notice__password_reset_done__EN.txt
 create mode 100644 N6Lib/n6lib/data/templates/mail_notice__password_reset_done__PL.txt
 create mode 100644 N6Lib/n6lib/data/templates/mail_notice__password_reset_requested__EN.txt
 create mode 100644 N6Lib/n6lib/data/templates/mail_notice__password_reset_requested__PL.txt
 create mode 100644 N6Lib/n6lib/data_spec/typing_helpers.py
 create mode 100644 N6Lib/n6lib/jinja_helpers.py
 create mode 100644 N6Lib/n6lib/jwt_helpers.py
 create mode 100644 N6Lib/n6lib/mail_notices_api.py
 create mode 100644 N6Lib/n6lib/mail_sending_api.py
 create mode 100644 N6Lib/n6lib/pyramid_commons/_config_converters.py
 create mode 100644 N6Lib/n6lib/pyramid_commons/_generic_view_mixins.py
 create mode 100644 N6Lib/n6lib/pyramid_commons/_tween_factories.py
 create mode 100644 N6Lib/n6lib/pyramid_commons/data_spec_fields.py
 create mode 100644 N6Lib/n6lib/pyramid_commons/mfa_helpers.py
 create mode 100644 N6Lib/n6lib/pyramid_commons/web_token_helpers.py
 create mode 100644 N6Lib/n6lib/rt_client_api.py
 create mode 100644 N6Lib/n6lib/structured_data_conversion/__init__.py
 create mode 100644 N6Lib/n6lib/structured_data_conversion/converters.py
 create mode 100644 N6Lib/n6lib/structured_data_conversion/exceptions.py
 create mode 100644 N6Lib/n6lib/structured_data_conversion/interfaces.py
 create mode 100644 N6Lib/n6lib/structured_data_conversion/namespace_conversion_helpers.py
 rename N6Lib/n6lib/tests/{auth_api_with_ldap_api_replacement_quicktest.py => auth_related_quicktest.py} (77%)
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/ads-adm-cert---n6-service-ca-00000000000000000018.pem
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/ads-adm-csr---n6-service-ca-00000000000000000018.pem
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/ads-adm-key---n6-service-ca-00000000000000000018.pem
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/empty-ca-sect-config---n6-client-ca.cnf
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/empty-clientCA_policy-sect-config---n6-client-ca.cnf
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/empty-server_component_serviceCA_policy-sect-config---n6-service-ca.cnf
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/missing-ca-sect-config---n6-client-ca.cnf
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/missing-certificate-opt-config---n6-client-ca.cnf
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/missing-certificate_extensions-sect-config---n6-client-ca.cnf
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/missing-clientCA_policy-sect-config---n6-client-ca.cnf
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/missing-default_ca-opt-config---n6-client-ca.cnf
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/missing-default_crl_days-opt-config---n6-service-ca.cnf
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/missing-default_days-opt-config---n6-client-ca.cnf
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/missing-default_md-opt-config---n6-client-ca.cnf
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/missing-default_md-opt-config---n6-service-ca.cnf
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/missing-dir-opt-config---n6-client-ca.cnf
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/missing-identifier-keys---n6-client-ca.pem
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/missing-policy-opt-config---n6-client-ca.cnf
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/missing-server_component_serviceCA_policy-sect-config---n6-service-ca.cnf
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/test_for_n6_manage_admin.csr
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/test_for_n6_manage_client.csr
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/test_for_n6_manage_component-inner.csr
 create mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/test_for_n6_manage_component-outer.csr
 create
mode 100644 N6Lib/n6lib/tests/certs_and_requests_for_testing/test_for_n6_manage_private.key create mode 100644 N6Lib/n6lib/threaded_async.py delete mode 100644 N6Lib/n6lib/transaction_helpers.py create mode 100644 N6Portal/gui/src/components/BaseCriterion.vue create mode 100644 N6Portal/gui/src/components/DashboardPage.vue create mode 100644 N6Portal/gui/src/components/EditConfigPage.vue create mode 100644 N6Portal/gui/src/components/FileCriterion.vue create mode 100644 N6Portal/gui/src/components/InfoPage.vue create mode 100644 N6Portal/gui/src/components/InputActionButton.vue create mode 100644 N6Portal/gui/src/components/InputActionButtons.vue create mode 100644 N6Portal/gui/src/components/InputAddButton.vue create mode 100644 N6Portal/gui/src/components/InputDeleteButton.vue create mode 100644 N6Portal/gui/src/components/InputLabel.vue create mode 100644 N6Portal/gui/src/components/LangControls.vue create mode 100644 N6Portal/gui/src/components/MultiCriterion.vue create mode 100644 N6Portal/gui/src/components/MultiValueGroup.vue create mode 100644 N6Portal/gui/src/components/RadioCriterion.vue create mode 100644 N6Portal/gui/src/components/RegisterForm.vue create mode 100644 N6Portal/gui/src/components/RegisterPage.vue create mode 100644 N6Portal/gui/src/components/RegisterTermsBox.vue create mode 100644 N6Portal/gui/src/components/UserConfigTable.vue create mode 100644 N6Portal/gui/src/components/edit_config_form/BaseField.vue create mode 100644 N6Portal/gui/src/components/edit_config_form/CheckboxField.vue create mode 100644 N6Portal/gui/src/components/edit_config_form/DeletedInputsLabel.vue create mode 100644 N6Portal/gui/src/components/edit_config_form/DeletedMultiCriteria.vue create mode 100644 N6Portal/gui/src/components/edit_config_form/EditConfigForm.vue create mode 100644 N6Portal/gui/src/components/edit_config_form/InputDeleteFromStateButton.vue create mode 100644 N6Portal/gui/src/components/edit_config_form/MultiField.vue create mode 100644 N6Portal/gui/src/components/edit_config_form/MultiValueFieldGroup.vue create mode 100644 N6Portal/gui/src/components/edit_config_form/RadioField.vue create mode 100644 N6Portal/gui/src/components/edit_config_form/TextAreaField.vue create mode 100644 N6Portal/gui/src/config/dashboardCategories.js create mode 100644 N6Portal/gui/src/config/registerCriteria.js create mode 100644 N6Portal/gui/src/config/userConfigCriteria.js create mode 100644 N6Portal/gui/src/helpers/constants.js create mode 100644 N6Portal/gui/src/helpers/lang.js create mode 100644 N6Portal/gui/src/images/lang_en.png create mode 100755 N6Portal/gui/src/images/lang_pl.png create mode 100644 N6Portal/gui/src/locales/EN/edit_config_form_fields.json create mode 100644 N6Portal/gui/src/locales/EN/error_messages.json create mode 100644 N6Portal/gui/src/locales/EN/register_form.json create mode 100644 N6Portal/gui/src/locales/EN/register_form_fields.json create mode 100644 N6Portal/gui/src/locales/EN/register_terms.json create mode 100644 N6Portal/gui/src/locales/EN/settings_form.json create mode 100644 N6Portal/gui/src/locales/EN/validation_error_messages.json create mode 100644 N6Portal/gui/src/locales/PL/edit_config_form_fields.json create mode 100644 N6Portal/gui/src/locales/PL/error_messages.json create mode 100644 N6Portal/gui/src/locales/PL/register_form.json create mode 100644 N6Portal/gui/src/locales/PL/register_form_fields.json create mode 100644 N6Portal/gui/src/locales/PL/register_terms.json create mode 100644 N6Portal/gui/src/locales/PL/settings_form.json create mode 
100644 N6Portal/gui/src/locales/PL/validation_error_messages.json create mode 100644 N6Portal/gui/src/mixins/EditSettingsFormMixin.js create mode 100644 N6Portal/gui/src/mixins/LangSetMixin.js create mode 100644 N6Portal/gui/src/store/dashboard.js create mode 100644 N6Portal/gui/src/store/form.js create mode 100644 N6Portal/gui/src/store/lang.js create mode 100644 N6Portal/gui/src/store/user.js delete mode 100644 N6Portal/gui/src/styles/flash-message.scss create mode 100644 N6Portal/n6portal/tests/__init__.py create mode 100644 N6Portal/n6portal/tests/test_n6portal.py create mode 100644 N6Portal/react_app/.env.json create mode 100644 N6Portal/react_app/README.md create mode 100644 N6Portal/react_app/config/config_app.js create mode 100644 N6Portal/react_app/config/locale/en/tos.json create mode 100644 N6Portal/react_app/config/locale/pl/tos.json create mode 100644 N6Portal/react_app/config/run_app_server.js create mode 100644 N6Portal/react_app/config/schema/config.json create mode 100644 N6Portal/react_app/config/schema/en/tos.json create mode 100644 N6Portal/react_app/config/schema/pl/tos.json create mode 100644 N6Portal/react_app/config/static/app.js create mode 100644 N6Portal/react_app/config/static/style.css create mode 100644 N6Portal/react_app/config/views/index.pug create mode 100644 N6Portal/react_app/config/views/terms.pug create mode 100644 N6Portal/react_app/deploy.sh create mode 100644 N6Portal/react_app/package.json create mode 100644 N6Portal/react_app/public/favicons/android-chrome-192x192.png create mode 100644 N6Portal/react_app/public/favicons/android-chrome-512x512.png create mode 100644 N6Portal/react_app/public/favicons/apple-touch-icon.png create mode 100644 N6Portal/react_app/public/favicons/browserconfig.xml create mode 100644 N6Portal/react_app/public/favicons/favicon-16x16.png create mode 100644 N6Portal/react_app/public/favicons/favicon-32x32.png create mode 100644 N6Portal/react_app/public/favicons/favicon.ico create mode 100644 N6Portal/react_app/public/favicons/mstile-150x150.png create mode 100644 N6Portal/react_app/public/favicons/safari-pinned-tab.svg create mode 100644 N6Portal/react_app/public/favicons/site.webmanifest create mode 100644 N6Portal/react_app/public/index.html create mode 100644 N6Portal/react_app/public/robots.txt create mode 100644 N6Portal/react_app/src/App.tsx create mode 100644 N6Portal/react_app/src/api/auth/index.ts create mode 100644 N6Portal/react_app/src/api/auth/types.ts create mode 100644 N6Portal/react_app/src/api/index.ts create mode 100644 N6Portal/react_app/src/api/orgConfig/index.ts create mode 100644 N6Portal/react_app/src/api/orgConfig/types.ts create mode 100644 N6Portal/react_app/src/api/register/index.ts create mode 100644 N6Portal/react_app/src/api/services/dashboard/index.ts create mode 100644 N6Portal/react_app/src/api/services/dashboard/types.ts create mode 100644 N6Portal/react_app/src/api/services/globalTypes.ts create mode 100644 N6Portal/react_app/src/api/services/info/index.ts create mode 100644 N6Portal/react_app/src/api/services/info/types.ts create mode 100644 N6Portal/react_app/src/api/services/report/index.ts create mode 100644 N6Portal/react_app/src/api/services/search/index.ts create mode 100644 N6Portal/react_app/src/components/errors/ApiLoaderFallback.tsx create mode 100644 N6Portal/react_app/src/components/errors/ErrorBoundaryFallback.tsx create mode 100644 N6Portal/react_app/src/components/errors/ErrorPage.tsx create mode 100644 N6Portal/react_app/src/components/forms/FormCheckbox.tsx create mode 
100644 N6Portal/react_app/src/components/forms/FormDevTools.tsx create mode 100644 N6Portal/react_app/src/components/forms/FormFeedback.tsx create mode 100644 N6Portal/react_app/src/components/forms/FormFileUpload.tsx create mode 100644 N6Portal/react_app/src/components/forms/FormFilterInput.tsx create mode 100644 N6Portal/react_app/src/components/forms/FormInput.tsx create mode 100644 N6Portal/react_app/src/components/forms/FormInputReadonly.tsx create mode 100644 N6Portal/react_app/src/components/forms/FormRadio.tsx create mode 100644 N6Portal/react_app/src/components/forms/FormRenderCharCounter.tsx create mode 100644 N6Portal/react_app/src/components/forms/FormRenderErrorMsg.tsx create mode 100644 N6Portal/react_app/src/components/forms/FormRenderSelectedFile.tsx create mode 100644 N6Portal/react_app/src/components/forms/FormSelect.tsx create mode 100644 N6Portal/react_app/src/components/forms/datePicker/DatePicker.tsx create mode 100644 N6Portal/react_app/src/components/forms/datePicker/DatePickerCalendar.tsx create mode 100644 N6Portal/react_app/src/components/forms/datePicker/TimeInput.tsx create mode 100644 N6Portal/react_app/src/components/forms/utils.ts create mode 100644 N6Portal/react_app/src/components/forms/validation/validationRegexp.ts create mode 100644 N6Portal/react_app/src/components/forms/validation/validationSchema.ts create mode 100644 N6Portal/react_app/src/components/forms/validation/validationTypes.ts create mode 100644 N6Portal/react_app/src/components/forms/validation/validators.ts create mode 100644 N6Portal/react_app/src/components/layout/Footer.tsx create mode 100644 N6Portal/react_app/src/components/layout/Header.tsx create mode 100644 N6Portal/react_app/src/components/loading/ApiLoader.tsx create mode 100644 N6Portal/react_app/src/components/loading/AsyncLoader.tsx create mode 100644 N6Portal/react_app/src/components/loading/Loader.tsx create mode 100644 N6Portal/react_app/src/components/navigation/MobileNavigation.tsx create mode 100644 N6Portal/react_app/src/components/navigation/UserMenuNavigation.tsx create mode 100644 N6Portal/react_app/src/components/pages/account/Account.tsx create mode 100644 N6Portal/react_app/src/components/pages/editSettings/EditSettings.tsx create mode 100644 N6Portal/react_app/src/components/pages/editSettings/EditSettingsFieldArray.tsx create mode 100644 N6Portal/react_app/src/components/pages/editSettings/EditSettingsForm.tsx create mode 100644 N6Portal/react_app/src/components/pages/editSettings/utils.ts create mode 100644 N6Portal/react_app/src/components/pages/forgotPassword/ForgotPassword.tsx create mode 100644 N6Portal/react_app/src/components/pages/forgotPassword/ForgotPasswordError.tsx create mode 100644 N6Portal/react_app/src/components/pages/forgotPassword/ForgotPasswordForm.tsx create mode 100644 N6Portal/react_app/src/components/pages/forgotPassword/ForgotPasswordSuccess.tsx create mode 100644 N6Portal/react_app/src/components/pages/forgotPassword/ResetPasswordError.tsx create mode 100644 N6Portal/react_app/src/components/pages/forgotPassword/ResetPasswordForm.tsx create mode 100644 N6Portal/react_app/src/components/pages/forgotPassword/ResetPasswordSuccess.tsx create mode 100644 N6Portal/react_app/src/components/pages/forgotPassword/utils.ts create mode 100644 N6Portal/react_app/src/components/pages/incidents/Incidents.tsx create mode 100644 N6Portal/react_app/src/components/pages/incidents/IncidentsFilter.tsx create mode 100644 N6Portal/react_app/src/components/pages/incidents/IncidentsForm.tsx create mode 100644 
N6Portal/react_app/src/components/pages/incidents/IncidentsNoResources.tsx create mode 100644 N6Portal/react_app/src/components/pages/incidents/utils.ts create mode 100644 N6Portal/react_app/src/components/pages/login/Login.tsx create mode 100644 N6Portal/react_app/src/components/pages/login/LoginConfigMfaError.tsx create mode 100644 N6Portal/react_app/src/components/pages/login/LoginConfigMfaForm.tsx create mode 100644 N6Portal/react_app/src/components/pages/login/LoginConfigMfaSuccess.tsx create mode 100644 N6Portal/react_app/src/components/pages/login/LoginForm.tsx create mode 100644 N6Portal/react_app/src/components/pages/login/LoginMfaError.tsx create mode 100644 N6Portal/react_app/src/components/pages/login/LoginMfaForm.tsx create mode 100644 N6Portal/react_app/src/components/pages/noAccess/NoAccess.tsx create mode 100644 N6Portal/react_app/src/components/pages/notFound/NotFound.tsx create mode 100644 N6Portal/react_app/src/components/pages/organization/Organization.tsx create mode 100644 N6Portal/react_app/src/components/pages/organization/OrganizationCard.tsx create mode 100644 N6Portal/react_app/src/components/pages/organization/OrganizationHeader.tsx create mode 100644 N6Portal/react_app/src/components/pages/signUp/SignUp.tsx create mode 100644 N6Portal/react_app/src/components/pages/signUp/SignUpButtons.tsx create mode 100644 N6Portal/react_app/src/components/pages/signUp/SignUpFieldArray.tsx create mode 100644 N6Portal/react_app/src/components/pages/signUp/SignUpStepOne.tsx create mode 100644 N6Portal/react_app/src/components/pages/signUp/SignUpStepTwo.tsx create mode 100644 N6Portal/react_app/src/components/pages/signUp/SignUpSuccess.tsx create mode 100644 N6Portal/react_app/src/components/pages/signUp/SignUpWizard.tsx create mode 100644 N6Portal/react_app/src/components/pages/userSettings/UserSettings.tsx create mode 100644 N6Portal/react_app/src/components/pages/userSettings/UserSettingsApiKey.tsx create mode 100644 N6Portal/react_app/src/components/pages/userSettings/UserSettingsApiKeyForm.tsx create mode 100644 N6Portal/react_app/src/components/pages/userSettings/UserSettingsConfirmationModal.tsx create mode 100644 N6Portal/react_app/src/components/pages/userSettings/UserSettingsMfa.tsx create mode 100644 N6Portal/react_app/src/components/pages/userSettings/UserSettingsMfaConfiguration.tsx create mode 100644 N6Portal/react_app/src/components/pages/userSettings/UserSettingsMfaEdit.tsx create mode 100644 N6Portal/react_app/src/components/pages/userSettingsMfaConfig/UserSettingsMfaConfigError.tsx create mode 100644 N6Portal/react_app/src/components/pages/userSettingsMfaConfig/UserSettingsMfaConfigForm.tsx create mode 100644 N6Portal/react_app/src/components/pages/userSettingsMfaConfig/UserSettingsMfaConfigSuccess.tsx create mode 100644 N6Portal/react_app/src/components/shared/ColumnFilter.tsx create mode 100644 N6Portal/react_app/src/components/shared/CustomButton.tsx create mode 100644 N6Portal/react_app/src/components/shared/ExportCSV.tsx create mode 100644 N6Portal/react_app/src/components/shared/ExportJSON.tsx create mode 100644 N6Portal/react_app/src/components/shared/LanguagePicker.tsx create mode 100644 N6Portal/react_app/src/components/shared/LoadingSpinner.tsx create mode 100644 N6Portal/react_app/src/components/shared/MfaQRCode.tsx create mode 100644 N6Portal/react_app/src/components/shared/Table.tsx create mode 100644 N6Portal/react_app/src/components/shared/Tooltip.tsx create mode 100644 N6Portal/react_app/src/components/shared/TrimmedUrl.tsx create mode 100644 
N6Portal/react_app/src/components/shared/VirtualizedList.tsx create mode 100644 N6Portal/react_app/src/components/shared/customSelect/Components.tsx create mode 100644 N6Portal/react_app/src/components/shared/customSelect/CustomSelect.tsx create mode 100644 N6Portal/react_app/src/context/AuthContext.tsx create mode 100644 N6Portal/react_app/src/context/ForgotPasswordContext.tsx create mode 100644 N6Portal/react_app/src/context/LanguageProvider.tsx create mode 100644 N6Portal/react_app/src/context/LoginContext.tsx create mode 100644 N6Portal/react_app/src/context/MatchMediaContext.tsx create mode 100644 N6Portal/react_app/src/context/UserSettingsMfaContext.tsx create mode 100644 N6Portal/react_app/src/dictionary/index.ts create mode 100644 N6Portal/react_app/src/images/api-error.svg create mode 100644 N6Portal/react_app/src/images/appointment.svg create mode 100644 N6Portal/react_app/src/images/arrow_ico.svg create mode 100644 N6Portal/react_app/src/images/avatar.svg create mode 100644 N6Portal/react_app/src/images/calendar.svg create mode 100644 N6Portal/react_app/src/images/check-ico.svg create mode 100644 N6Portal/react_app/src/images/chevron.svg create mode 100644 N6Portal/react_app/src/images/close.svg create mode 100644 N6Portal/react_app/src/images/compress-ico.svg create mode 100644 N6Portal/react_app/src/images/email.svg create mode 100644 N6Portal/react_app/src/images/en-icon.svg create mode 100644 N6Portal/react_app/src/images/error.svg create mode 100644 N6Portal/react_app/src/images/error_ico.svg create mode 100644 N6Portal/react_app/src/images/expand-ico.svg create mode 100644 N6Portal/react_app/src/images/hierarchy.svg create mode 100644 N6Portal/react_app/src/images/logo_n6.svg create mode 100644 N6Portal/react_app/src/images/no-access-icon.svg create mode 100644 N6Portal/react_app/src/images/no-resources.svg create mode 100644 N6Portal/react_app/src/images/not-found-icon.svg create mode 100644 N6Portal/react_app/src/images/ok.svg create mode 100644 N6Portal/react_app/src/images/pl-icon.svg create mode 100644 N6Portal/react_app/src/images/plus.svg create mode 100644 N6Portal/react_app/src/images/question_mark.svg create mode 100644 N6Portal/react_app/src/images/reset.svg create mode 100644 N6Portal/react_app/src/images/restore.svg create mode 100644 N6Portal/react_app/src/images/success_ico.svg create mode 100644 N6Portal/react_app/src/images/update.svg create mode 100644 N6Portal/react_app/src/images/user-settings-api-key.svg create mode 100644 N6Portal/react_app/src/images/user-settings-mfa.svg create mode 100644 N6Portal/react_app/src/images/user.svg create mode 100644 N6Portal/react_app/src/index.tsx create mode 100644 N6Portal/react_app/src/react-app-env.d.ts create mode 100644 N6Portal/react_app/src/reportWebVitals.ts create mode 100644 N6Portal/react_app/src/routes/PrivateRoute.tsx create mode 100644 N6Portal/react_app/src/routes/Routes.tsx create mode 100644 N6Portal/react_app/src/routes/RoutesProvider.tsx create mode 100644 N6Portal/react_app/src/routes/routeList.ts create mode 100644 N6Portal/react_app/src/routes/types.ts create mode 100644 N6Portal/react_app/src/setupTests.ts create mode 100644 N6Portal/react_app/src/styles/_account.scss create mode 100644 N6Portal/react_app/src/styles/_buttons.scss create mode 100644 N6Portal/react_app/src/styles/_columnFilter.scss create mode 100644 N6Portal/react_app/src/styles/_customCheckbox.scss create mode 100644 N6Portal/react_app/src/styles/_customSelect.scss create mode 100644 
N6Portal/react_app/src/styles/_datePicker.scss create mode 100644 N6Portal/react_app/src/styles/_editSettings.scss create mode 100644 N6Portal/react_app/src/styles/_error.scss create mode 100644 N6Portal/react_app/src/styles/_footer.scss create mode 100644 N6Portal/react_app/src/styles/_formFields.scss create mode 100644 N6Portal/react_app/src/styles/_functions.scss create mode 100644 N6Portal/react_app/src/styles/_header.scss create mode 100644 N6Portal/react_app/src/styles/_incidents.scss create mode 100644 N6Portal/react_app/src/styles/_layout.scss create mode 100644 N6Portal/react_app/src/styles/_login.scss create mode 100644 N6Portal/react_app/src/styles/_mixins.scss create mode 100644 N6Portal/react_app/src/styles/_organization.scss create mode 100644 N6Portal/react_app/src/styles/_reactTable.scss create mode 100644 N6Portal/react_app/src/styles/_signup.scss create mode 100644 N6Portal/react_app/src/styles/_spinner.scss create mode 100644 N6Portal/react_app/src/styles/_tooltip.scss create mode 100644 N6Portal/react_app/src/styles/_userSettings.scss create mode 100644 N6Portal/react_app/src/styles/_variables.scss create mode 100644 N6Portal/react_app/src/styles/style.scss create mode 100644 N6Portal/react_app/src/types/react-table-config.d.ts create mode 100644 N6Portal/react_app/src/utils/convertFormData.ts create mode 100644 N6Portal/react_app/src/utils/copyTextToClipboard.ts create mode 100644 N6Portal/react_app/src/utils/getScrollbarWidth.ts create mode 100644 N6Portal/react_app/src/utils/isObject.ts create mode 100644 N6Portal/react_app/src/utils/language.ts create mode 100644 N6Portal/react_app/src/utils/noop.ts create mode 100644 N6Portal/react_app/src/utils/parseRegisterData.ts create mode 100644 N6Portal/react_app/src/utils/parseResponseData.ts create mode 100644 N6Portal/react_app/src/utils/storageAvailable.ts create mode 100644 N6Portal/react_app/src/utils/trimUrl.ts create mode 100644 N6Portal/react_app/tsconfig.json create mode 100644 N6Portal/react_app/yarn.lock rename N6RestApi/n6web/tests/{test.py => test_n6web.py} (89%) create mode 120000 N6SDK-py2/.n6-version create mode 100644 N6SDK-py2/ACKNOWLEDGEMENTS.txt create mode 100644 N6SDK-py2/MANIFEST.in create mode 100644 N6SDK-py2/NEWS.rst create mode 100644 N6SDK-py2/README.rst create mode 100644 N6SDK-py2/docs/Makefile create mode 100644 N6SDK-py2/docs/source/_static/.gitkeep create mode 100644 N6SDK-py2/docs/source/_templates/.gitkeep create mode 100644 N6SDK-py2/docs/source/api_test_tool.rst create mode 100644 N6SDK-py2/docs/source/conf.py create mode 100644 N6SDK-py2/docs/source/front_matter.rst create mode 100644 N6SDK-py2/docs/source/index.rst create mode 100644 N6SDK-py2/docs/source/lib_basic/data_spec.rst create mode 100644 N6SDK-py2/docs/source/lib_basic/data_spec_fields.rst create mode 100644 N6SDK-py2/docs/source/lib_basic/exceptions.rst create mode 100644 N6SDK-py2/docs/source/lib_basic/pyramid_commons.rst create mode 100644 N6SDK-py2/docs/source/lib_basic/pyramid_commons_renderers.rst create mode 100644 N6SDK-py2/docs/source/lib_helpers/addr_helpers.rst create mode 100644 N6SDK-py2/docs/source/lib_helpers/class_helpers.rst create mode 100644 N6SDK-py2/docs/source/lib_helpers/datetime_helpers.rst create mode 100644 N6SDK-py2/docs/source/lib_helpers/encoding_helpers.rst create mode 100644 N6SDK-py2/docs/source/lib_helpers/regexes.rst create mode 100644 N6SDK-py2/docs/source/library_reference.rst create mode 100644 N6SDK-py2/docs/source/release_notes.rst create mode 100644 N6SDK-py2/docs/source/tutorial.rst 
create mode 100644 N6SDK-py2/n6sdk/__init__.py create mode 100644 N6SDK-py2/n6sdk/_api_test_tool/__init__.py create mode 100644 N6SDK-py2/n6sdk/_api_test_tool/api_test_tool.py create mode 100644 N6SDK-py2/n6sdk/_api_test_tool/client.py create mode 100644 N6SDK-py2/n6sdk/_api_test_tool/config_base.ini create mode 100644 N6SDK-py2/n6sdk/_api_test_tool/data_test.py create mode 100644 N6SDK-py2/n6sdk/_api_test_tool/report.py create mode 100644 N6SDK-py2/n6sdk/_api_test_tool/validator_exceptions.py create mode 100644 N6SDK-py2/n6sdk/addr_helpers.py create mode 100644 N6SDK-py2/n6sdk/class_helpers.py create mode 100644 N6SDK-py2/n6sdk/data_spec/__init__.py create mode 100644 N6SDK-py2/n6sdk/data_spec/_data_spec.py create mode 100644 N6SDK-py2/n6sdk/data_spec/fields.py create mode 100644 N6SDK-py2/n6sdk/data_spec/utils.py create mode 100644 N6SDK-py2/n6sdk/datetime_helpers.py create mode 100644 N6SDK-py2/n6sdk/encoding_helpers.py create mode 100644 N6SDK-py2/n6sdk/exceptions.py create mode 100644 N6SDK-py2/n6sdk/pyramid_commons/__init__.py create mode 100644 N6SDK-py2/n6sdk/pyramid_commons/_pyramid_commons.py create mode 100644 N6SDK-py2/n6sdk/pyramid_commons/renderers.py create mode 100644 N6SDK-py2/n6sdk/regexes.py create mode 100644 N6SDK-py2/requirements create mode 100644 N6SDK-py2/setup.py create mode 120000 N6SDK/.n6-version delete mode 100644 N6SDK/.n6sdk-version create mode 100644 N6SDK/README.rst delete mode 100644 N6SDK/n6sdk/scaffolds/__init__.py delete mode 100644 N6SDK/n6sdk/scaffolds/basic_n6sdk_scaffold/+package+/__init__.py_tmpl delete mode 100644 N6SDK/n6sdk/scaffolds/basic_n6sdk_scaffold/+package+/data_backend_api.py_tmpl delete mode 100644 N6SDK/n6sdk/scaffolds/basic_n6sdk_scaffold/+package+/data_spec.py_tmpl delete mode 100644 N6SDK/n6sdk/scaffolds/basic_n6sdk_scaffold/MANIFEST.in_tmpl delete mode 100644 N6SDK/n6sdk/scaffolds/basic_n6sdk_scaffold/development.ini_tmpl delete mode 100644 N6SDK/n6sdk/scaffolds/basic_n6sdk_scaffold/production.ini_tmpl delete mode 100644 N6SDK/n6sdk/scaffolds/basic_n6sdk_scaffold/setup.py_tmpl delete mode 100755 docker/bin/wait-for-services-outside.sh create mode 100644 docker/mailhog/Dockerfile create mode 100644 docker/mongo/Dockerfile create mode 100644 docker/redis/Dockerfile create mode 100644 docker/rt/Dockerfile create mode 100644 docs/guides/intelmq/config.md create mode 100644 docs/guides/intelmq/index.md create mode 100644 docs/guides/intelmq/running.md delete mode 100644 etc/apache2/sites-available/n6-public-data.conf create mode 100644 etc/mysql/insertdb/insert_data_auth_db.sql create mode 100644 etc/n6/09_manage.conf create mode 100644 etc/n6/70_spam404.conf delete mode 100644 etc/n6/70_zone_h.conf rename etc/supervisord/programs/{n6enrich.conf.bak => n6enrich.conf} (89%) delete mode 100644 etc/supervisord/programs/n6parser_abusechurlhausurls202001.conf rename etc/supervisord/programs/{n6parser_zonehrss.conf => n6parser_spam404.conf} (86%) diff --git a/.gitignore b/.gitignore index fb3f8a0..6abaffc 100644 --- a/.gitignore +++ b/.gitignore @@ -91,5 +91,9 @@ pids /N6*/dist/ /N6*/docs/build/ +# docs builds +docs/_build/ +site/ + # n6 data on docker runtime volume diff --git a/.n6-version b/.n6-version index 36af449..a1513ac 100644 --- a/.n6-version +++ b/.n6-version @@ -1 +1 @@ -2.0.6a1 +3.0.0b1 \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..88dc02a --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,12 @@ +# Changelog + +Starting with 3.x.x release series, all notable changes applied to 
the +code of _n6_ are continuously documented in this file. + +The format of this file is based, to a large extent, on +[Keep a Changelog](https://keepachangelog.com/). + + +## 3.0.0b1 - 2021-10-13 + +TBD diff --git a/N6AdminPanel/adminpanel.wsgi b/N6AdminPanel/adminpanel.wsgi index b851c8c..06ad317 100644 --- a/N6AdminPanel/adminpanel.wsgi +++ b/N6AdminPanel/adminpanel.wsgi @@ -1,3 +1,7 @@ #!/usr/bin/python + +# Let's apply n6-specific monkey-patching as early as possible. +import n6lib # noqa + from n6adminpanel.app import get_app application = get_app() diff --git a/N6AdminPanel/n6adminpanel/__init__.py b/N6AdminPanel/n6adminpanel/__init__.py index e69de29..c8eb264 100644 --- a/N6AdminPanel/n6adminpanel/__init__.py +++ b/N6AdminPanel/n6adminpanel/__init__.py @@ -0,0 +1,3 @@ +# Ensure all monkey-patching provided by `n6lib` +# and `n6sdk` is applied as early as possible. +import n6lib # noqa \ No newline at end of file diff --git a/N6AdminPanel/n6adminpanel/admin_panel.conf b/N6AdminPanel/n6adminpanel/admin_panel.conf index 63bc07b..9027561 100644 --- a/N6AdminPanel/n6adminpanel/admin_panel.conf +++ b/N6AdminPanel/n6adminpanel/admin_panel.conf @@ -3,6 +3,8 @@ + + [admin_panel] ## the value of `app_secret_key` must be set to some unpredictable @@ -12,10 +14,13 @@ + + ## IMPORTANT: the following 3 config sections should be uncommented ## and adjusted *ONLY* if the n6 Admin Panel application does *NOT* -## have access to the 09_auth_db.conf (being part of the N6Core -## configuration) which already contains these sections! +## have access to the `09_auth_db.conf` file (being, typically, a part +## of the N6Core/N6Pipeline configuration) which (typically) already +## contains these sections! # #[auth_db] # @@ -52,3 +57,301 @@ #pool_timeout = 20 #pool_size = 15 #max_overflow = 12 + + + + + +## IMPORTANT: the following 3 config sections should be kept here +## uncommented *ONLY* if the n6 Admin Panel application does *NOT* have +## access to the `11_mailing.conf` and `11_jinja_rendering.conf` files +## which, if they exist (as a part of the N6Core/N6Pipeline configuration), +## typically already contain these sections! + + +# Note: the *mail notices* feature engages the following 3 configuration sections: +# +# * the `[mail_notices_api]` section (see below) +# -- directly related to `MailNoticesAPI` from `n6lib.mail_notices_api`, +# +# * the `[mail_sending_api]` section (see below) +# -- directly related to `MailSendingAPI` from `n6lib.mail_sending_api`, +# +# * the `[jinja_template_based_renderer]` section +# -- directly related to `JinjaTemplateBasedRenderer.from_predefined()` +# from `n6lib.jinja_helpers`. + + +[mail_notices_api] + +# Should mail notices be dispatched at all? If this option is +# false then any invocations of a dispatcher obtained from a +# context manager returned by the `MailNoticesAPI.dispatcher()` +# method do nothing, and *no* other options from this section or +# from the `[mail_sending_api]`/`[jinja_template_based_renderer]` +# sections (which normally are also engaged) are used by the +# `MailNoticesAPI` stuff. +active = false + +# The value of the following option, if not left empty, should +# be a Python dict literal representing a dict that maps *notice +# keys* (str, e.g.: 'org_config_update_requested') to dicts that +# map 2-character codes of a supported *language* (such as 'EN' +# or 'PL') to dicts specifying the following mail components: +# *body*, *subject*, *sender* and (optionally) *misc headers* +# (which stands for *miscellaneous mail headers*).
+# +# Lack of a certain *notice key* means that the mail notices +# stuff is not active for that *notice key* (meaning that any +# invocations of a dispatcher obtained from a context manager +# returned by any `MailNoticesAPI.dispatcher()` +# call do nothing). +# +# Each of the *mail components* dicts (i.e., the dicts mentioned +# above as those specifying mail components) contains some or +# all of the following items: +# +# * 'body' -- a *string value* (required), +# +# * 'subject' -- a *string value* (required), +# +# * 'sender' -- a *string value* (required if the value of +# the `default_sender` option [see below] is left empty, +# otherwise optional), +# +# * 'misc_headers' -- a dict that maps any mail header names +# to their values, specified as *string values* (optional); +# +# **Important note:** each of the *string values* mentioned +# above shall be a string which is: +# +# * (1) **either** a Jinja template name preceded with a `$:` +# (*dollar sign* followed by *colon*) marker, +# +# * (2) **or** any other string -- which *literally* specifies +# the item's value (**without** any HTML/XML escaping!). +# +# Ad (1): those Jinja templates will be used by an instance of +# `JinjaTemplateBasedRenderer` (see `n6lib.jinja_helpers` and +# the `[jinja_template_based_renderer]` config section) as the +# basis for rendering of actual values -- with the *rendering +# context* containing the `data_dict` variable being a deep copy +# of the `notice_data` dict passed in to the dispatcher [where +# *dispatcher* is a callable object obtained as the `as` target +# (`__enter__()`'s return value) of a context manager returned +# by `MailNoticesAPI.dispatcher()`]. +# +# **Beware** that HTML/XML escaping is applied **only** if the +# template name has a `.html`, `.htm` or `.xml` suffix (checked +# in a case-insensitive manner). +# +# For example templates -- see the template files in the +# `data/templates` subdirectory of the `n6lib` package source +# tree. +# +# The default value of this option seems to be quite sensible +# for most important use cases. The basic versions of the +# Jinja templates it refers to are already defined in the +# `data/templates` subdirectory of the `n6lib` package; note: +# you can customize them by creating your own template files -- +# named the same but placed in (an)other location(s) (specified +# with the `template_locations` configuration option in the +# section `[jinja_template_based_renderer]`). 
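To make the semantics described above concrete, here is a minimal sketch of how application code might drive such a dispatcher. It is an illustration only: the recipient argument, the `notice_data` keyword and the no-argument constructor call are assumed shapes, not details specified by this patch.

    from n6lib.mail_notices_api import MailNoticesAPI

    # Constructor arguments (settings/configuration), if any,
    # are omitted here -- an assumption for illustration.
    mail_notices_api = MailNoticesAPI()

    # `dispatcher()` returns a context manager; its `as` target (the
    # `__enter__()` return value) is the dispatcher callable referred
    # to in the comments above. With `active = false`, each call of
    # the dispatcher does nothing.
    with mail_notices_api.dispatcher('mfa_config_erased') as dispatch:
        # A deep copy of `notice_data` becomes the `data_dict`
        # variable of the Jinja rendering context; the recipient
        # argument and the `lang` keyword are assumptions here.
        dispatch('user@example.org',
                 notice_data={'user_login': 'user@example.org'},
                 lang='EN')

(The commented-out value just below is the option's default, shown in full.)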
;notice_key_to_lang_to_mail_components = +; { +; 'mfa_config_done': { +; 'EN': { +; 'subject': +; 'New configuration of multi-factor authentication', +; 'body': '$:mail_notice__mfa_config_done__EN.txt', +; }, +; 'PL': { +; 'subject': +; u'Nowa konfiguracja uwierzytelniania wielosk\u0142adnikowego', +; 'body': '$:mail_notice__mfa_config_done__PL.txt', +; }, +; }, +; 'mfa_config_erased': { +; 'EN': { +; 'subject': +; 'Deleted configuration of multi-factor authentication', +; 'body': '$:mail_notice__mfa_config_erased__EN.txt', +; }, +; 'PL': { +; 'subject': +; u'Usuni\u0119ta konfiguracja uwierzytelniania wielosk\u0142adnikowego', +; 'body': '$:mail_notice__mfa_config_erased__PL.txt', +; }, +; }, +; +; 'new_org_and_user_created': { +; 'EN': { +; 'subject': +; 'Welcome to the n6 system', +; 'body': '$:mail_notice__new_org_and_user_created__EN.txt', +; }, +; 'PL': { +; 'subject': +; u'Witamy w systemie n6', +; 'body': '$:mail_notice__new_org_and_user_created__PL.txt', +; }, +; }, +; +; 'org_config_update_requested': { +; 'EN': { +; 'subject': +; 'A new request to update the organization configuration', +; 'body': '$:mail_notice__org_config_update_requested__EN.txt', +; }, +; 'PL': { +; 'subject': +; 'Nowa propozycja zmian w konfiguracji Twojej organizacji', +; 'body': '$:mail_notice__org_config_update_requested__PL.txt', +; }, +; }, +; 'org_config_update_applied': { +; 'EN': { +; 'subject': +; 'Acceptance of the requested update of the organization configuration', +; 'body': '$:mail_notice__org_config_update_applied__EN.txt', +; }, +; 'PL': { +; 'subject': +; 'Akceptacja zmian w konfiguracji Twojej organizacji', +; 'body': '$:mail_notice__org_config_update_applied__PL.txt', +; }, +; }, +; 'org_config_update_rejected': { +; 'EN': { +; 'subject': +; 'Rejection of the requested update of the organization configuration', +; 'body': '$:mail_notice__org_config_update_rejected__EN.txt', +; }, +; 'PL': { +; 'subject': +; 'Odmowa wprowadzenia zmian w konfiguracji Twojej organizacji', +; 'body': '$:mail_notice__org_config_update_rejected__PL.txt', +; }, +; }, +; +; 'password_reset_done': { +; 'EN': { +; 'subject': +; 'New log-in password', +; 'body': '$:mail_notice__password_reset_done__EN.txt', +; }, +; 'PL': { +; 'subject': +; u'Nowe has\u0142o logowania', +; 'body': '$:mail_notice__password_reset_done__PL.txt', +; }, +; }, +; 'password_reset_requested': { +; 'EN': { +; 'subject': +; 'Setting new log-in password', +; 'body': '$:mail_notice__password_reset_requested__EN.txt', +; }, +; 'PL': { +; 'subject': +; u'Ustawianie nowego has\u0142a logowania', +; 'body': '$:mail_notice__password_reset_requested__PL.txt', +; }, +; }, +; } + +# The following option specifies (using a 2-character string) +# the *default language* -- to be used when *neither* of the +# `MailNoticesAPI.dispatcher()` and `<the dispatcher>()` +# invocations has included the `lang` argument (specifying the +# desired mail notice language variant); but also when it has +# been included but its value is missing from the *notice key*- +# specific subdict of the `notice_key_to_lang_to_mail_components` +# dict (see its description above). +;default_lang = EN + +# The value of the following option, if not left empty, should +# be a text to be used as the default value of the 'sender' +# item of subdicts that define mail components (see the above +# description of the `notice_key_to_lang_to_mail_components` +# option; the remarks about `$:`-prepended *template names* +# and HTML/XML escaping apply also here).
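Since the `$:` marker convention recurs throughout the descriptions above, the following sketch shows what it boils down to. The `renderer.render(...)` interface stands in for a `JinjaTemplateBasedRenderer`-like object and is an assumed shape, not code from this patch:

    import copy

    TEMPLATE_NAME_MARKER = '$:'

    def resolve_mail_component(value, renderer, notice_data):
        # Case (1) described above: a `$:`-prepended Jinja template
        # name -- render the template, with the rendering context's
        # `data_dict` variable being a deep copy of the `notice_data`
        # dict passed in to the dispatcher.
        if value.startswith(TEMPLATE_NAME_MARKER):
            template_name = value[len(TEMPLATE_NAME_MARKER):]
            return renderer.render(template_name,
                                   {'data_dict': copy.deepcopy(notice_data)})
        # Case (2): any other string specifies the value literally
        # (without any HTML/XML escaping).
        return value

Remember that, per the comments above, HTML/XML escaping is applied only if a template name has a `.html`, `.htm` or `.xml` suffix. The `default_sender` value set just below is an example of case (2).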
+default_sender = n6notices@example.org + +# The value of the following option, if not left empty, should +# be a Python dict literal that defines additional mail headers, +# to be used to complement (but never overwrite) the items of +# each 'misc_headers' dict (ad 'misc_headers' -- see the above +# description of the `notice_key_to_lang_to_mail_components` +# option; the remarks about `$:`-prepended *template names* and +# HTML/XML escaping apply also here). +;common_misc_headers = + + +[mail_sending_api] + +smtp_host=localhost +smtp_port=25 +;smtp_login= +;smtp_password= + + +[jinja_template_based_renderer] +############################################################################ +# This configuration section is needed only if the `from_predefined()` # +# constructor provided by `n6lib.jinja_helpers.JinjaTemplateBasedRenderer` # +# is used (note: this is also the case when `MailNoticesAPI` from the # +# `n6lib.mail_notices_api` module and/or `MailMessageBuilder` from the # +# `n6lib.mail_sending_api` module are in use). Other constructors provided # +# by `JinjaTemplateBasedRenderer` do not need any configuration at all. # +############################################################################ + +# The value of the following option should consist of (one or +# more) comma-separated template locations that will be tried, +# in the specified order, by Jinja template loaders when +# searching for templates. +# +# Each of these locations should be: +# +# * An *absolute* path of a directory (aka folder); if it makes +# use of a tilde-based home directory placeholder prefix, such +# as `~` or `~username`, the placeholder will be automatically +# expanded. +# Examples: +# /etc/n6/templates +# ~/my-own-n6-stuff/jinja-related +# ~dataman/.n6/our-custom-fancy-templates +# +# *OR* +# +# * A specification in the following format: +# @<package name>:<package subdirectory path> +# where: +# * <package name> is a Python package name +# (see also: the docs of the `jinja2.PackageLoader`'s +# parameter `package_name`); +# * <package subdirectory path> is a *relative* path of +# a directory (folder) in that package's source tree +# (see also: the docs of the `jinja2.PackageLoader`'s +# parameter `package_path`). +# Examples: +# @n6lib:data/templates +# @my.own.package:some-dir/sub-dir/sub-sub-dir +template_locations = @n6lib:data/templates + +# The default value ("utf-8") of the following option should be +# OK in nearly all cases. +;template_encoding = utf-8 + +# The following option is relevant *only* to template locations +# specified as absolute paths of directories (*not* to those in +# the `@<package name>:<package subdirectory path>` format). +;follow_symlinks = False + +# The value of the following option should consist of (zero or +# more) comma-separated *import names* of Jinja extensions (see: +# https://jinja.palletsprojects.com/extensions/). Typically, it +# should contain, at the minimum, the "jinja2.ext.do" name -- at +# least, as long as any of the default templates (those bundled +# with *n6*) are in use. +;jinja_extensions = jinja2.ext.do diff --git a/N6AdminPanel/n6adminpanel/app.py b/N6AdminPanel/n6adminpanel/app.py index 89ca8f9..840f2b0 100644 --- a/N6AdminPanel/n6adminpanel/app.py +++ b/N6AdminPanel/n6adminpanel/app.py @@ -1,10 +1,14 @@ -# Copyright (c) 2013-2020 NASK. All rights reserved. +# Copyright (c) 2013-2021 NASK. All rights reserved.
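Before moving on through this file's hunks, it is worth making the `@<package name>:<package subdirectory path>` location format from the `[jinja_template_based_renderer]` section above concrete: it maps naturally onto Jinja's standard loaders. A sketch of such a mapping follows (an illustration only; how `JinjaTemplateBasedRenderer.from_predefined()` actually builds its loaders is not shown in this patch):

    import os.path

    from jinja2 import ChoiceLoader, FileSystemLoader, PackageLoader

    def make_template_loader(template_locations):
        # `template_locations` is the comma-separated option value.
        loaders = []
        for location in (loc.strip() for loc in template_locations.split(',')):
            if location.startswith('@'):
                # The `@<package name>:<package subdirectory path>` format.
                package_name, _, package_path = location[1:].partition(':')
                loaders.append(PackageLoader(package_name, package_path))
            else:
                # An *absolute* directory path; `~`/`~username` prefixes
                # are expanded (the `follow_symlinks` option described
                # above corresponds to `FileSystemLoader`'s `followlinks`).
                loaders.append(FileSystemLoader(os.path.expanduser(location)))
        # Locations are tried in the specified order.
        return ChoiceLoader(loaders)

With the default `template_locations = @n6lib:data/templates`, this amounts to, roughly, `PackageLoader('n6lib', 'data/templates')`.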
+import ast import os +import uuid from flask import ( Flask, + flash, request, + g, ) from flask_admin import ( Admin, @@ -13,8 +17,8 @@ ) from flask_admin.actions import ActionsMixin from flask_admin.contrib.sqla import ( - form, ModelView, + form as fa_sqla_form, ) from flask_admin.form import ( TimeField, @@ -31,42 +35,57 @@ sessionmaker, ) from wtforms import ( - PasswordField, BooleanField, + PasswordField, + StringField, ) from wtforms.fields import Field from wtforms.widgets import PasswordInput +from n6adminpanel import org_request_helpers +from n6adminpanel.mail_notices_helpers import MailNoticesMixin from n6adminpanel.patches import ( PatchedInlineModelConverter, get_patched_get_form, get_patched_init_actions, patched_populate_obj, ) +from n6lib.auth_db.api import AuthManageAPI from n6lib.auth_db.audit_log import AuditLog from n6lib.auth_db.config import SQLAuthDBConfigMixin from n6lib.auth_db.models import ( CACert, Cert, Component, - ContactPoint, CriteriaASN, CriteriaCC, CriteriaContainer, CriteriaIPNetwork, CriteriaName, + DependantEntity, EMailNotificationAddress, EMailNotificationTime, - EntityType, - ExtraId, - ExtraIdType, + Entity, + EntityASN, + EntityContactPoint, + EntityContactPointPhone, + EntityExtraId, + EntityExtraIdType, + EntityFQDN, + EntityIPNetwork, + EntitySector, InsideFilterASN, InsideFilterCC, InsideFilterFQDN, InsideFilterIPNetwork, InsideFilterURL, - LocationType, Org, + OrgConfigUpdateRequest, + OrgConfigUpdateRequestASN, + OrgConfigUpdateRequestEMailNotificationAddress, + OrgConfigUpdateRequestEMailNotificationTime, + OrgConfigUpdateRequestFQDN, + OrgConfigUpdateRequestIPNetwork, OrgGroup, RegistrationRequest, RegistrationRequestASN, @@ -79,6 +98,7 @@ SystemGroup, User, ) +from n6lib.class_helpers import attr_required from n6lib.common_helpers import ThreadLocalNamespace from n6lib.config import ConfigMixin from n6lib.const import ( @@ -89,6 +109,7 @@ get_logger, logging_configured, ) +from n6lib.mail_notices_api import MailNoticesAPI LOGGER = get_logger(__name__) @@ -185,18 +206,90 @@ def populate_obj(self, obj, name): class _PasswordFieldHandlerMixin(object): - form_extra_fields = { - 'password': CustomPasswordField(), - 'delete_password': BooleanField('Delete Password') - } + @property + def form_extra_fields(self): + sup = super(_PasswordFieldHandlerMixin, self) + from_super = getattr(sup, 'form_extra_fields', None) or {} + return dict(from_super, + password=CustomPasswordField(), + delete_password=BooleanField('Delete Password')) def on_model_change(self, form, model, is_created): + # noinspection PyUnresolvedReferences + super(_PasswordFieldHandlerMixin, self).on_model_change(form, model, is_created) if hasattr(form, 'delete_password') and form.delete_password and form.delete_password.data: model.password = None elif hasattr(form, 'password') and form.password and form.password.data: model.password = model.get_password_hash_or_none(form.password.data) +class _APIKeyFieldHandlerMixin(object): + + @property + def form_extra_fields(self): + sup = super(_APIKeyFieldHandlerMixin, self) + from_super = getattr(sup, 'form_extra_fields', None) or {} + return dict(from_super, + api_key_id=StringField(), + delete_api_key_id=BooleanField('Delete Api Key Id'), + generate_new_api_key_id=BooleanField('Generate New Api Key Id')) + + @property + def form_widget_args(self): + sup = super(_APIKeyFieldHandlerMixin, self) + from_super = getattr(sup, 'form_widget_args', None) or {} + return dict(from_super, + api_key_id={'readonly': True}, + 
api_key_id_modified_on={'disabled': True}) + + def on_model_change(self, form, model, is_created): + # noinspection PyUnresolvedReferences + super(_APIKeyFieldHandlerMixin, self).on_model_change(form, model, is_created) + if hasattr(form, 'generate_new_api_key_id') and (form.generate_new_api_key_id + and form.generate_new_api_key_id.data): + model.api_key_id = str(uuid.uuid4()) + elif hasattr(form, 'delete_api_key_id') and (form.delete_api_key_id + and form.delete_api_key_id.data): + model.api_key_id = None + + +class _MFAKeyBaseFieldHandlerMixin(MailNoticesMixin): + + @property + def form_extra_fields(self): + sup = super(_MFAKeyBaseFieldHandlerMixin, self) + from_super = getattr(sup, 'form_extra_fields', None) or {} + return dict(from_super, + mfa_key_base=StringField(), + delete_mfa_key_base=BooleanField('Delete Mfa Key Base')) + + @property + def form_widget_args(self): + sup = super(_MFAKeyBaseFieldHandlerMixin, self) + from_super = getattr(sup, 'form_widget_args', None) or {} + return dict(from_super, + mfa_key_base={'readonly': True}, + mfa_key_base_modified_on={'disabled': True}) + + def on_model_change(self, form, model, is_created): + # noinspection PyUnresolvedReferences + super(_MFAKeyBaseFieldHandlerMixin, self).on_model_change(form, model, is_created) + if hasattr(form, 'delete_mfa_key_base') and (form.delete_mfa_key_base + and form.delete_mfa_key_base.data): + model.mfa_key_base = None + if isinstance(model, User) and not is_created: + g.n6_user_mfa_key_base_erased = True + + def after_model_change(self, form, model, is_created): + if g.n6_user_mfa_key_base_erased: + assert isinstance(model, User) + self.try_to_send_mail_notices( + notice_key='mfa_config_erased', + user_login=model.login) + # noinspection PyUnresolvedReferences + super(_MFAKeyBaseFieldHandlerMixin, self).after_model_change(form, model, is_created) + + class _ExtraCSSMixin(object): def render(self, *args, **kwargs): @@ -205,7 +298,10 @@ def render(self, *args, **kwargs): return super(_ExtraCSSMixin, self).render(*args, **kwargs) -class UserInlineFormAdmin(_PasswordFieldHandlerMixin, InlineFormAdmin): +class UserInlineFormAdmin(_PasswordFieldHandlerMixin, + _APIKeyFieldHandlerMixin, + _MFAKeyBaseFieldHandlerMixin, + InlineFormAdmin): column_display_pk = False column_descriptions = { @@ -213,9 +309,22 @@ class UserInlineFormAdmin(_PasswordFieldHandlerMixin, InlineFormAdmin): } form_columns = [ 'id', + + 'is_blocked', 'login', + 'password', 'delete_password', + + 'mfa_key_base', + 'mfa_key_base_modified_on', + 'delete_mfa_key_base', + + 'api_key_id', + 'api_key_id_modified_on', + 'delete_api_key_id', + 'generate_new_api_key_id', + 'system_groups', 'created_certs', @@ -224,20 +333,6 @@ class UserInlineFormAdmin(_PasswordFieldHandlerMixin, InlineFormAdmin): ] -class ContactPointInlineFormAdmin(InlineFormAdmin): - - column_display_pk = False - form_columns = [ - 'id', - - 'title', - 'name', - 'surname', - 'email', - 'phone', - ] - - class NotificationTimeInlineFormAdmin(InlineFormAdmin): form_args = { @@ -314,7 +409,7 @@ class ShortTimeField(TimeField): widget = ShortTimePickerWidget() -class OrgModelConverter(form.AdminModelConverter): +class ModelWithShortTimeFieldConverter(fa_sqla_form.AdminModelConverter): @converts('Time') def convert_time(self, field_args, **extra): @@ -325,7 +420,7 @@ class OrgView(CustomWithInlineFormsModelView): # create_modal = True # edit_modal = True - model_form_converter = OrgModelConverter + model_form_converter = ModelWithShortTimeFieldConverter column_descriptions = { 'org_id': 
"Organization's identifier (domain name).", } @@ -343,14 +438,13 @@ class OrgView(CustomWithInlineFormsModelView): 'access_to_inside', 'access_to_threats', 'access_to_search', - 'public_entity', - 'verified', ] form_columns = [ 'org_id', 'actual_name', 'org_groups', 'users', + 'entity', 'full_access', 'stream_api_enabled', # authorization: @@ -381,16 +475,6 @@ class OrgView(CustomWithInlineFormsModelView): 'inside_filter_fqdns', 'inside_filter_ip_networks', 'inside_filter_urls', - # official data: - 'public_entity', - 'verified', - 'entity_type', - 'location_type', - 'location', - 'location_coords', - 'address', - 'extra_ids', - 'contact_points', ] form_rules = [ rules.Header('Organization basic data'), @@ -398,9 +482,9 @@ class OrgView(CustomWithInlineFormsModelView): rules.Field('actual_name'), rules.Field('full_access'), rules.Field('stream_api_enabled'), - rules.Field('org_groups'), - rules.Header('Users'), + rules.Header('Groups and users'), + rules.Field('org_groups'), rules.Field('users'), rules.Header('"Inside" access zone'), @@ -438,18 +522,8 @@ class OrgView(CustomWithInlineFormsModelView): rules.Field('inside_filter_ip_networks'), rules.Field('inside_filter_urls'), - rules.Header('Official data'), - rules.Field('public_entity'), - rules.Field('verified'), - rules.Field('entity_type'), - rules.Field('location_type'), - rules.Field('location'), - rules.Field('location_coords'), - rules.Field('address'), - rules.Field('extra_ids'), - - rules.Header('Official contact points'), - rules.Field('contact_points'), + rules.Header('Related entity'), + rules.Field('entity'), ] inline_models = [ UserInlineFormAdmin(User), @@ -460,22 +534,46 @@ class OrgView(CustomWithInlineFormsModelView): InsideFilterFQDN, InsideFilterIPNetwork, InsideFilterURL, - ExtraId, - ContactPointInlineFormAdmin(ContactPoint), ] -class RegistrationRequestView(CustomWithInlineFormsModelView): +class OrgRequestViewMixin(object): + can_create = False can_view_details = True + # essential to display PK column in the "list" view column_display_pk = True - column_searchable_list = ['status', 'org_id', 'actual_name', 'email'] + + # to be set in subclasses to one of the handler kits + # defined in `n6adminpanel.org_request_helpers` + org_request_handler_kit = None + + @attr_required('org_request_handler_kit') + def on_model_change(self, form, model, is_created): + assert not is_created, "isn't `can_create` set to False?!" + # (The handler called here makes use of `ACTIONS_FIELD...`) + self.org_request_handler_kit.just_before_commit(form, model) + # noinspection PyUnresolvedReferences + return super(OrgRequestViewMixin, self).on_model_change(form, model, is_created) + + @attr_required('org_request_handler_kit') + def after_model_change(self, form, model, is_created): + assert not is_created, "isn't `can_create` set to False?!" 
+ self.org_request_handler_kit.just_after_commit(model) + # noinspection PyUnresolvedReferences + return super(OrgRequestViewMixin, self).after_model_change(form, model, is_created) + + +class RegistrationRequestView(OrgRequestViewMixin, CustomWithInlineFormsModelView): + + column_searchable_list = ['status', 'ticket_id', 'org_id', 'actual_name', 'email'] column_list = [ 'id', 'submitted_on', 'modified_on', 'status', + 'ticket_id', 'org_id', 'actual_name', @@ -484,15 +582,27 @@ 'email_notification_language', ] + column_descriptions = { + 'terms_version': 'The version of the legal terms accepted by the client.', + 'terms_lang': 'The language variant of the legal terms accepted by the client.', + } + + form_extra_fields = { + org_request_helpers.ACTIONS_FIELD_NAME: + org_request_helpers.ACTIONS_FIELD_FOR_REGISTRATION, + } form_columns = [ 'id', 'submitted_on', 'modified_on', 'status', + 'ticket_id', + 'org_group', + org_request_helpers.ACTIONS_FIELD_NAME, + 'org_id', 'actual_name', - 'email', 'submitter_title', 'submitter_firstname_and_surname', @@ -504,14 +614,32 @@ 'asns', 'fqdns', 'ip_networks', + + 'terms_version', + 'terms_lang', ] + form_widget_args = { + # Let it be visible but inactive. (State changes + # can be made only with the custom buttons which + # fill out the target-status-dedicated invisible + # input; that input and those buttons are provided + # by `org_request_helpers.ACTIONS_FIELD...` + # -- see `form_extra_fields` above.) + 'status': {'disabled': True}, + + 'terms_version': {'readonly': True}, + 'terms_lang': {'readonly': True}, + } form_rules = [ - rules.Header('Basic data'), + rules.Header('Registration request consideration'), rules.Field('status'), + rules.Field('ticket_id'), + rules.Field('org_group'), + rules.Field(org_request_helpers.ACTIONS_FIELD_NAME), + + rules.Header('Basic and access-related data'), rules.Field('org_id'), rules.Field('actual_name'), - - rules.Header('Contact data and access-related stuff'), rules.Field('email'), rules.Field('submitter_title'), rules.Field('submitter_firstname_and_surname'), @@ -525,7 +653,12 @@ rules.Header('E-mail notifications preferences'), rules.Field('email_notification_language'), rules.Field('email_notification_addresses'), + + rules.Header('Legal information'), + rules.Field('terms_version'), + rules.Field('terms_lang'), ] + org_request_handler_kit = org_request_helpers.registration_request_handler_kit inline_models = [ RegistrationRequestEMailNotificationAddress, RegistrationRequestASN, @@ -534,19 +667,140 @@ ] +class OrgConfigUpdateRequestView(OrgRequestViewMixin, CustomWithInlineFormsModelView): + + column_searchable_list = ['status', 'ticket_id', 'org_id'] + column_list = [ + 'id', + 'submitted_on', + 'modified_on', + 'status', + 'ticket_id', + 'org_id', + ] + + form_extra_fields = { + org_request_helpers.ACTIONS_FIELD_NAME: + org_request_helpers.ACTIONS_FIELD_FOR_ORG_CONFIG_UPDATE, + } + form_columns = [ + 'id', + 'submitted_on', + 'modified_on', + + 'status', + 'ticket_id', + 'org_id', + 'requesting_user_login', + 'additional_comment', + org_request_helpers.ACTIONS_FIELD_NAME, -class UserView(_PasswordFieldHandlerMixin, N6ModelView): + 'actual_name_upd', + 'actual_name', + + 'email_notification_enabled_upd', + 'email_notification_enabled', + 
'email_notification_language_upd', + 'email_notification_language', + + 'email_notification_addresses_upd', + 'email_notification_addresses', + + 'email_notification_times_upd', + 'email_notification_times', + + 'asns_upd', + 'asns', + + 'fqdns_upd', + 'fqdns', + + 'ip_networks_upd', + 'ip_networks', + ] + form_widget_args = { + # Let it be visible but inactive. (State changes + # can be made only with the custom buttons which + # fill out the target-status-dedicated invisible + # input; that input and those buttons are provided + # by `org_request_helpers.ACTIONS_FIELD...` + # -- see `form_extra_fields` below.) + 'status': {'disabled': True}, + + 'org_id': {'readonly': True}, + 'requesting_user_login': {'readonly': True}, + 'additional_comment': {'readonly': True}, + } + form_rules = [ + rules.Header('Org config update request consideration'), + rules.Field('status'), + rules.Field('ticket_id'), + rules.Field('org_id'), + rules.Field('requesting_user_login'), + rules.Field('additional_comment'), + rules.Field(org_request_helpers.ACTIONS_FIELD_NAME), + + rules.Header('Updates of basic data'), + rules.Field('actual_name_upd'), + rules.Field('actual_name'), + + rules.Header('Updates of "Inside" event criteria'), + rules.Field('asns_upd'), + rules.Field('asns'), + rules.Field('fqdns_upd'), + rules.Field('fqdns'), + rules.Field('ip_networks_upd'), + rules.Field('ip_networks'), + + rules.Header('Updates of e-mail notifications preferences'), + rules.Field('email_notification_enabled_upd'), + rules.Field('email_notification_enabled'), + rules.Field('email_notification_language_upd'), + rules.Field('email_notification_language'), + rules.Field('email_notification_addresses_upd'), + rules.Field('email_notification_addresses'), + rules.Field('email_notification_times_upd'), + rules.Field('email_notification_times'), + ] + org_request_handler_kit = org_request_helpers.org_config_update_request_handler_kit + inline_models = [ + OrgConfigUpdateRequestEMailNotificationAddress, + OrgConfigUpdateRequestEMailNotificationTime, + OrgConfigUpdateRequestASN, + OrgConfigUpdateRequestFQDN, + OrgConfigUpdateRequestIPNetwork, + ] + + +class UserView(_PasswordFieldHandlerMixin, + _APIKeyFieldHandlerMixin, + _MFAKeyBaseFieldHandlerMixin, + N6ModelView): column_descriptions = { 'login': 'User\'s login (e-mail address).', } column_list = ['login', 'org', 'system_groups'] form_columns = [ + 'is_blocked', 'login', + 'password', 'delete_password', + + 'mfa_key_base', + 'mfa_key_base_modified_on', + 'delete_mfa_key_base', + + 'api_key_id', + 'api_key_id_modified_on', + 'delete_api_key_id', + 'generate_new_api_key_id', + 'org', 'system_groups', + 'created_certs', 'owned_certs', 'revoked_certs', @@ -657,6 +911,114 @@ class CACertView(_ExtraCSSMixin, CustomColumnListView): ] +class EntityContactPointPhoneInlineFormAdmin(InlineFormAdmin): + + form_columns = [ + 'id', + 'phone_number', + 'availability', + ] + + +class EntityContactPointInlineFormAdmin(CustomWithInlineFormsModelView): + + form_columns = [ + 'id', + + 'name', + 'position', + 'email', + + 'phones', + + 'external_placement', + 'external_entity_name', + 'external_entity_address', + ] + inline_models = [ + EntityContactPointPhoneInlineFormAdmin(EntityContactPointPhone), + ] + + +class EntityView(CustomWithInlineFormsModelView): + + model_form_converter = ModelWithShortTimeFieldConverter + can_view_details = True + # essential to display PK column in the "list" view + column_display_pk = True + column_searchable_list = [ + 'full_name', 'short_name', 'email', 'city', 
'sector_label', 'ticket_id', + ] + column_list = [ + 'full_name', 'short_name', 'email', 'city', 'sector_label', 'ticket_id', + ] + form_columns = [ + # official data: + 'id', + 'full_name', + 'short_name', + 'verified', + 'email', + 'address', + 'city', + 'postal_code', + 'public_essential_service', + 'sector', + 'ticket_id', + 'internal_id', + 'extra_ids', + 'additional_information', + 'asns', + 'fqdns', + 'ip_networks', + 'alert_email', + 'contact_points', + 'dependant_entities', + 'org', + ] + form_rules = [ + rules.Header('Basic data'), + rules.Field('full_name'), + rules.Field('short_name'), + rules.Field('verified'), + rules.Field('email'), + rules.Field('address'), + rules.Field('city'), + rules.Field('postal_code'), + + rules.Header('Supplementary data'), + rules.Field('public_essential_service'), + rules.Field('sector'), + rules.Field('ticket_id'), + rules.Field('internal_id'), + rules.Field('extra_ids'), + rules.Field('additional_information'), + + rules.Header('Own network data'), + rules.Field('asns'), + rules.Field('fqdns'), + rules.Field('ip_networks'), + + rules.Header('Contact data'), + rules.Field('alert_email'), + rules.Field('contact_points'), + + rules.Header('Dependant entities'), + rules.Field('dependant_entities'), + + rules.Header('Related n6 client Org'), + rules.Field('org'), + ] + inline_models = [ + EntityExtraId, + EntityASN, + EntityFQDN, + EntityIPNetwork, + EntityContactPointInlineFormAdmin(EntityContactPoint, db_session), + DependantEntity, + ] + + class CustomIndexView(AdminIndexView): @expose('/') @@ -664,6 +1026,27 @@ def index(self): return self.render('home.html') +class _AuthManageAPIAdapter(AuthManageAPI): + + class _DBConnectorReplacement(object): + def get_current_session(self): + return db_session() + def __enter__(self): pass + def __exit__(self, exc_type, exc_value, tb): pass + def set_audit_log_external_meta_items(self, **_): + raise AssertionError('method invocation not expected') + + # noinspection PyMissingConstructor + def __init__(self): + self._db_connector = self._DBConnectorReplacement() + + def _try_to_get_client_error(self, *args, **kwargs): + err = super(_AuthManageAPIAdapter, self)._try_to_get_client_error(*args, **kwargs) + if err is not None: + flash(err.public_message, 'error') + return err + + class AdminPanel(ConfigMixin): config_spec = ''' @@ -675,6 +1058,7 @@ class AdminPanel(ConfigMixin): engine_config_prefix = '' table_views = [ (Org, OrgView), + (OrgConfigUpdateRequest, OrgConfigUpdateRequestView), (User, UserView), (Source, SourceView), (Subsource, CustomColumnAutoPKView), @@ -685,13 +1069,15 @@ class AdminPanel(ConfigMixin): (Component, ComponentView), (CACert, CACertView), (Cert, CertView), - (EntityType, CustomColumnListView), - (LocationType, CustomColumnListView), - (ExtraIdType, CustomColumnListView), (RegistrationRequest, RegistrationRequestView), + (Entity, EntityView), + (EntitySector, CustomColumnListView), + (EntityExtraIdType, CustomColumnListView), ] def __init__(self, engine): + self._mail_notices_api = MailNoticesAPI() + self._auth_manage_api_adapter = _AuthManageAPIAdapter() self.app_config = self.get_config_section() self.app = Flask(__name__) self.app.secret_key = self.app_config['app_secret_key'] @@ -725,6 +1111,12 @@ def _before_request(self): ('request_user_id', request.environ.get(WSGI_SSL_USER_ID_FIELD)), ] if value is not None} + # Attributes used by the `org_request_helpers` and/or `mail_notices_helpers` stuff: + g.n6_mail_notices_api = self._mail_notices_api + g.n6_auth_manage_api_adapter = 
self._auth_manage_api_adapter + g.n6_org_config_info = None + # Attributes used by the `_MFAKeyBaseFieldHandlerMixin` stuff: + g.n6_user_mfa_key_base_erased = False @staticmethod def _teardown_request(exception=None): @@ -736,7 +1128,7 @@ def _populate_views(self): def monkey_patch_flask_admin(): - setattr(form, 'get_form', get_patched_get_form(form.get_form)) + setattr(fa_sqla_form, 'get_form', get_patched_get_form(fa_sqla_form.get_form)) setattr(Field, 'populate_obj', patched_populate_obj) setattr(ActionsMixin, 'init_actions', get_patched_init_actions(ActionsMixin.init_actions)) @@ -756,10 +1148,22 @@ def get_app(): return admin_panel.app -if __name__ == '__main__': - # run admin panel on development server - # (note: you can set FLASK_ENV to '' -- to turn off the - # development-specific stuff, such as debug messages...) +def dev_server_main(): + """ + Run the n6 Admin Panel using the Flask development server. + + (*Not* for production!) + """ + # (note: you can set the FLASK_ENV environment variable to '' -- to + # turn off the development-specific stuff, such as debug messages...) os.environ.setdefault('FLASK_ENV', 'development') a = get_app() - a.run() + # (note: you can set the N6_ADMIN_PANEL_DEV_RUN_KWARGS environment + # variable to customize the keyword arguments to be passed to the + # application's `run()` method, e.g.: '{"host": "0.0.0.0"}') + run_kwargs = ast.literal_eval(os.environ.get('N6_ADMIN_PANEL_DEV_RUN_KWARGS', '{}')) + a.run(**run_kwargs) + + +if __name__ == '__main__': + dev_server_main() diff --git a/N6AdminPanel/n6adminpanel/mail_notices_helpers.py b/N6AdminPanel/n6adminpanel/mail_notices_helpers.py new file mode 100644 index 0000000..43874a4 --- /dev/null +++ b/N6AdminPanel/n6adminpanel/mail_notices_helpers.py @@ -0,0 +1,60 @@ +# Copyright (c) 2021 NASK. All rights reserved. + +from typing import ( + Iterable, + Union, +) + +from flask import ( + flash, + g, +) + +from n6lib.common_helpers import ascii_str + + +class MailNoticesMixin(object): + + def try_to_send_mail_notices(self, notice_key, **get_notice_data_kwargs): + if not g.n6_mail_notices_api.is_active(notice_key): + msg = ('No e-mail notices sent as they are not configured ' + 'for notice_key={!a}.'.format(ascii_str(notice_key))) + flash(msg, 'warning') + return + notice_data = self.get_notice_data(**get_notice_data_kwargs) + notice_lang = self.get_notice_lang(notice_data) + assert notice_lang is None or isinstance(notice_lang, str) and len(notice_lang) == 2 + notice_recipients = list(self.get_notice_recipients(notice_data)) + gathered_ok_recipients = [] + with g.n6_mail_notices_api.dispatcher(notice_key, + suppress_and_log_smtp_exc=True) as dispatch: + for email in notice_recipients: + ok_recipients, _ = dispatch(email, notice_data, notice_lang) + if ok_recipients: + gathered_ok_recipients.extend(ok_recipients) + else: + msg = 'Failed to send an e-mail notice to {}!'.format(ascii_str(email)) + flash(msg, 'warning') + if gathered_ok_recipients: + recipients_str = ', '.join(map(ascii_str, gathered_ok_recipients)) + flash('E-mail notices sent to: {}.'.format(recipients_str)) + else: + flash('No e-mail notices could be sent!', 'error') + + # (The following hooks can be overridden in subclasses.) + + def get_notice_data(self, user_login): + # type: (...) 
-> dict + with g.n6_auth_manage_api_adapter as api: + user_and_org_basic_info = api.get_user_and_org_basic_info(user_login) + return dict( + user_and_org_basic_info, + user_login=user_login) + + def get_notice_lang(self, notice_data): + # type: (dict) -> Union[str, None] + return notice_data['lang'] + + def get_notice_recipients(self, notice_data): + # type: (dict) -> Iterable[str] + return [notice_data['user_login']] diff --git a/N6AdminPanel/n6adminpanel/org_request_helpers.py b/N6AdminPanel/n6adminpanel/org_request_helpers.py new file mode 100644 index 0000000..05b7ad5 --- /dev/null +++ b/N6AdminPanel/n6adminpanel/org_request_helpers.py @@ -0,0 +1,629 @@ +# Copyright (c) 2020-2021 NASK. All rights reserved. + +import html +import re +import string +from typing import Union + +from flask import ( + flash, + g, +) +from sqlalchemy import inspect as sqla_inspect +from wtforms import StringField +from wtforms.widgets import ( + HTMLString, + TextInput, +) + +import n6lib.auth_db.models as models +from n6adminpanel.mail_notices_helpers import MailNoticesMixin +from n6lib.auth_db import ( + ORG_REQUEST_STATUS_ACCEPTED as _STATUS_ACCEPTED, + ORG_REQUEST_STATUS_DISCARDED as _STATUS_DISCARDED, + ORG_REQUEST_STATUS_BEING_PROCESSED as _STATUS_BEING_PROCESSED, + ORG_REQUEST_STATUS_NEW as _STATUS_NEW, +) +from n6lib.common_helpers import ascii_str +from n6lib.log_helpers import get_logger + + +# +# Non-public stuff (to be used in this module only) +# + +LOGGER = get_logger(__name__) + + +_ID_NAME_OF_TARGET_STATUS_INPUT = 'org_request_form_input_target_status' + +_ID_NAME_OF_ACCEPT_BUTTON = 'org_request_form_button_accept' +_ID_NAME_OF_PROC_BUTTON = 'org_request_form_button_proc' +_ID_NAME_OF_DISCARD_BUTTON = 'org_request_form_button_discard' + + +_HTML_AND_JS_SAFE_CONSTANTS = { + constant_name: globals()[constant_name] + for constant_name in [ + '_ID_NAME_OF_TARGET_STATUS_INPUT', + '_ID_NAME_OF_ACCEPT_BUTTON', + '_ID_NAME_OF_PROC_BUTTON', + '_ID_NAME_OF_DISCARD_BUTTON', + '_STATUS_ACCEPTED', + '_STATUS_DISCARDED', + '_STATUS_BEING_PROCESSED', + '_STATUS_NEW', + ]} + +_OBVIOUSLY_SAFE_CHARACTERS_SUBSET = frozenset(string.ascii_letters + '_') +for _constant_value in _HTML_AND_JS_SAFE_CONSTANTS.values(): + # To make some details of the implementation simpler, + # we guarantee that these string constants consist only + # of HTML-and-JS-safe characters -- so that there will + # be no need to worry about escaping-related matters. + assert _constant_value == html.escape(_constant_value) + assert _OBVIOUSLY_SAFE_CHARACTERS_SUBSET.issuperset(_constant_value) + + +class _OrgRequestActionsWidget(TextInput): + + # Note: we consciously use `TextInput`, and *not* `HiddenInput`, + # as the base class. One reason is that, although our text input + # needs to be invisible, some other visible elements (buttons) + # are needed; another reason is that widgets whose `input_type` is + # "hidden" do not cooperate well with `rules.Field`: the problem + # with them is that the `render_form_fields` Jinja macro (from the + # Bootstrap template provided by Flask-Admin) renders such fields + # twice (redundantly): once near the beginning of the form and + # then within a `div` corresponding to the `rules.Field` instance + # (such redundancy seems a bug). That's why we prefer to base on a + # widget whose `input_type` is "text", and simply to place the input + # text element in a `div` with the `display:none` style property. 
+    #
+    # The whole rendered HTML includes also other necessary elements:
+    # the "Accept...", "Mark as being processed" and "Mark as
+    # discarded" buttons, and the JS script that sets things up.
+
+    __PATTERN_OF_BUTTON_HTML = '''
+        <button
+            id="{id_and_name}"
+            name="{id_and_name}"
+            class="btn {button_class}"
+            type="submit">
+                {button_text}
+        </button>
+        '''
+
+    __PATTERN_OF_INVISIBLE_DIV_HTML = '''
+        <div style="display:none">
+            {}
+        </div>
+        '''
+
+    __SCRIPT_HTML = '''
+        <script>
+            (function () {
+                var statusInput = document.getElementById(
+                    '%(_ID_NAME_OF_TARGET_STATUS_INPUT)s');
+                var bindButton = function (buttonIdAndName, targetStatus) {
+                    document.getElementById(buttonIdAndName).addEventListener(
+                        'click',
+                        function () { statusInput.value = targetStatus; });
+                };
+                bindButton('%(_ID_NAME_OF_ACCEPT_BUTTON)s', '%(_STATUS_ACCEPTED)s');
+                bindButton('%(_ID_NAME_OF_PROC_BUTTON)s', '%(_STATUS_BEING_PROCESSED)s');
+                bindButton('%(_ID_NAME_OF_DISCARD_BUTTON)s', '%(_STATUS_DISCARDED)s');
+            })();
+        </script>
+        ''' % _HTML_AND_JS_SAFE_CONSTANTS
+
+    def __init__(self, *args, **kwargs):
+        self.__accept_button_text = kwargs.pop('accept_button_text')
+        self.__proc_button_text = kwargs.pop('proc_button_text', 'Mark as being processed')
+        self.__discard_button_text = kwargs.pop('discard_button_text', 'Mark as discarded')
+        super(_OrgRequestActionsWidget, self).__init__(*args, **kwargs)
+
+    def __call__(self, *args, **kwargs):
+        target_status_input_html = super(_OrgRequestActionsWidget, self).__call__(*args, **kwargs)
+        self.__verify_id_and_name_have_supported_values(target_status_input_html)
+        all_html = self.__assemble_all_html(
+            target_status_input_html,
+            accept_button_html=self.__make_accept_button_html(),
+            proc_button_html=self.__make_proc_button_html(),
+            discard_button_html=self.__make_discard_button_html())
+        return HTMLString(all_html)
+
+    def __verify_id_and_name_have_supported_values(self, target_status_input_html):
+        # The `id` and `name` of the target-status-dedicated
+        # text input are required to be equal to the constant
+        # `_ID_NAME_OF_TARGET_STATUS_INPUT`. We constrain them
+        # in this way just for simplicity of the implementation
+        # (something more general is not needed here).
+        id_values = self.__extract_html_attr_values(target_status_input_html, 'id')
+        name_values = self.__extract_html_attr_values(target_status_input_html, 'name')
+        all_ok = (id_values and all(v == _ID_NAME_OF_TARGET_STATUS_INPUT for v in id_values) and
+                  name_values and all(v == _ID_NAME_OF_TARGET_STATUS_INPUT for v in name_values))
+        if not all_ok:
+            raise AssertionError(
+                'both `id` and `name` of the text input rendered with '
+                '{} are expected to be equal to {!a} (detected values '
+                '- `id`: {}; `name`: {})'.format(
+                    self.__class__.__qualname__,
+                    _ID_NAME_OF_TARGET_STATUS_INPUT,
+                    ', '.join(map(ascii, id_values)) or '<none>',
+                    ', '.join(map(ascii, name_values)) or '<none>'))
+
+    def __extract_html_attr_values(self, element_html, attr_name):
+        """
+        >>> inst = _OrgRequestActionsWidget(accept_button_text='Accept...')
+        >>> this_method = inst._OrgRequestActionsWidget__extract_html_attr_values
+        >>> element_html = '''
+        ...     <input id="foo" name="42" spam="ham">
+        ...     <textarea id="Good" name="y">Ham</textarea>
+        ...     '''
+        >>> this_method(element_html, attr_name='id')
+        ['foo', 'Good']
+        >>> this_method(element_html, attr_name='name')
+        ['42', 'y']
+        >>> this_method(element_html, attr_name='spam')
+        ['ham']
+        >>> this_method(element_html, attr_name='foo')
+        []
+        """
+        regex_pattern = r'\b{0}="([^"]*)"'.format(attr_name)
+        return re.findall(regex_pattern, element_html, re.ASCII)
+
+    def __make_accept_button_html(self):
+        return self.__format_button_html(
+            id_and_name=_ID_NAME_OF_ACCEPT_BUTTON,
+            button_text=self.__accept_button_text,
+            button_class='btn-primary')
+
+    def __make_proc_button_html(self):
+        return self.__format_button_html(
+            id_and_name=_ID_NAME_OF_PROC_BUTTON,
+            button_text=self.__proc_button_text,
+            button_class='btn-default')
+
+    def __make_discard_button_html(self):
+        return self.__format_button_html(
+            id_and_name=_ID_NAME_OF_DISCARD_BUTTON,
+            button_text=self.__discard_button_text,
+            button_class='btn-danger')
+
+    def __format_button_html(self, **kwargs):
+        return self.__PATTERN_OF_BUTTON_HTML.format(**{
+            key: html.escape(value)
+            for key, value in kwargs.items()})
+
+    def __assemble_all_html(self,
+                            target_status_input_html,
+                            accept_button_html,
+                            proc_button_html,
+                            discard_button_html):
+        return (accept_button_html +
+                proc_button_html +
+                discard_button_html +
+                self.__PATTERN_OF_INVISIBLE_DIV_HTML.format(target_status_input_html) +
+                self.__SCRIPT_HTML)
+
+
+class _BaseStatusTransitionHandlerKit(MailNoticesMixin):
+
+    #
+    # Public methods
+
+    def just_before_commit(self, form, org_request):
+        assert isinstance(org_request, (models.RegistrationRequest,
+                                        models.OrgConfigUpdateRequest))
+
+        org_request._successful_status_transition = None
+
+        # Note: we get the old value of the 'status' attribute from the
+        # backend (using the SQLAlchemy's *history* facility) because
+        # we *must not* trust the old value provided by the frontend
+        # (even though, typically, they are the same, because the
+        # visible 'status' input element of the form is non-editable).
+        old_status = self._get_old_value_of_scalar_attr(org_request, 'status')
+        assert old_status, "isn't view's `can_create` set to False?!"
+
+        # Note: we get the new (target) value of 'status' from the form's
+        # invisible input element whose name is defined by the constant
+        # `_ID_NAME_OF_TARGET_STATUS_INPUT`, *not* from the visible input
+        # element whose name is 'status' (the latter typically keeps the
+        # old value that we do *not* use -- see the comment above...).
+        target_status = form[_ID_NAME_OF_TARGET_STATUS_INPUT].data
+
+        if target_status:
+            # Status transition is to be performed...
+            self._before_status_transition(org_request, old_status, target_status)
+            org_request.status = target_status
+            org_request._successful_status_transition = (old_status,
+                                                         target_status,
+                                                         org_request.org_id)
+        else:
+            # The old status is to be kept. To be sure that it is *not*
+            # overwritten with a different value from the frontend (that is,
+            # from the visible 'status' input element of the form -- see the
+            # comments above) let us explicitly set the real old value here.
+ org_request.status = old_status + + # noinspection PyProtectedMember + def just_after_commit(self, org_request): + assert isinstance(org_request, (models.RegistrationRequest, + models.OrgConfigUpdateRequest)) + if org_request._successful_status_transition: + (old_status, + target_status, + concerned_org_id) = org_request._successful_status_transition + self._after_status_transition(org_request, + old_status, + target_status, + concerned_org_id) + + # + # Non-public methods + + def __setattr__(self, name, value): + raise TypeError( + '{!a} should be treated as an immutable object (cannot set ' + 'the `{!a}` attribute to `{!a}'.format(self, name, value)) + + def _get_sqla_session(self, org_request): + return sqla_inspect(org_request).session + + def _get_old_value_of_scalar_attr(self, org_request, attr_name): + deleted_or_unchanged = sqla_inspect(org_request).attrs[attr_name].history.non_added() + if deleted_or_unchanged: + [old_value] = deleted_or_unchanged + return old_value + return None + + def _before_status_transition(self, org_request, old_status, target_status): + _TARGET_STATUS_TO_HANDLER = { + _STATUS_NEW: self._before_status_transition_to_new, + _STATUS_BEING_PROCESSED: self._before_status_transition_to_being_processed, + _STATUS_DISCARDED: self._before_status_transition_to_discarded, + _STATUS_ACCEPTED: self._before_status_transition_to_accepted, + } + try: + handler = _TARGET_STATUS_TO_HANDLER[target_status] + except KeyError: + raise ValueError('Illegal status tag: "{}".'.format(target_status)) + else: + # noinspection PyArgumentList + handler(org_request, old_status, target_status) + + # noinspection PyUnusedLocal + def _before_status_transition_to_new(self, org_request, old_status, target_status): + assert target_status == _STATUS_NEW + self._validate_status_transition(old_status, target_status, + legal_old_statuses=()) # (yes, it'll always fail here) + + # noinspection PyUnusedLocal + def _before_status_transition_to_being_processed(self, org_request, old_status, target_status): + assert target_status == _STATUS_BEING_PROCESSED + self._validate_status_transition(old_status, target_status, + legal_old_statuses=(_STATUS_NEW, _STATUS_DISCARDED)) + + # noinspection PyUnusedLocal + def _before_status_transition_to_discarded(self, org_request, old_status, target_status): + assert target_status == _STATUS_DISCARDED + self._validate_status_transition(old_status, target_status, + legal_old_statuses=(_STATUS_NEW, _STATUS_BEING_PROCESSED)) + + # noinspection PyUnusedLocal + def _before_status_transition_to_accepted(self, org_request, old_status, target_status): + assert target_status == _STATUS_ACCEPTED + self._validate_status_transition(old_status, target_status, + legal_old_statuses=(_STATUS_NEW, _STATUS_BEING_PROCESSED)) + + def _validate_status_transition(self, old_status, target_status, legal_old_statuses): + if old_status not in legal_old_statuses: + raise ValueError( + 'Changing status from "{}" to "{}" is not allowed.'.format( + old_status, + target_status)) + if __debug__: + # Assertions regarding conditions whose veracity has already been guaranteed: + assert old_status in (_STATUS_NEW, _STATUS_BEING_PROCESSED, _STATUS_DISCARDED) + assert target_status in (_STATUS_BEING_PROCESSED, _STATUS_DISCARDED, _STATUS_ACCEPTED) + if old_status == _STATUS_NEW: + assert target_status in (_STATUS_BEING_PROCESSED, _STATUS_DISCARDED, _STATUS_ACCEPTED) + elif old_status == _STATUS_BEING_PROCESSED: + assert target_status in (_STATUS_DISCARDED, _STATUS_ACCEPTED) + elif old_status == 
_STATUS_DISCARDED: + assert target_status == _STATUS_BEING_PROCESSED + + def _after_status_transition(self, + org_request, + old_status, + target_status, + concerned_org_id): + if target_status == _STATUS_ACCEPTED: + # Note: for the sake of strictness, we consciously + # use `concerned_org_id`, *not* the current value of + # `org_request.org_id` -- because we want to be sure we + # report the value it obtained within the transaction + # (which has already finished), and the current value + # may not be the same (even though typically is). + self._after_status_transition_to_accepted( + org_request, + old_status, + target_status, + concerned_org_id) + else: + self._after_status_transition_to_other( + org_request, + old_status, + target_status, + concerned_org_id) + + def _after_status_transition_to_accepted(self, + org_request, + old_status, + target_status, + concerned_org_id): + raise NotImplementedError + + def _after_status_transition_to_other(self, + org_request, + old_status, + target_status, + concerned_org_id): + raise NotImplementedError + + +class _RegistrationRequestStatusTransitionHandlerKit(_BaseStatusTransitionHandlerKit): + + def _before_status_transition_to_accepted(self, org_request, old_status, target_status): + assert isinstance(org_request, models.RegistrationRequest) + super(_RegistrationRequestStatusTransitionHandlerKit, + self)._before_status_transition_to_accepted(org_request, old_status, target_status) + session = self._get_sqla_session(org_request) + self._verify_org_group_specified(org_request) + self._verify_org_does_not_exist(session, org_request) + self._create_org_according_to_request(session, org_request) + + def _verify_org_group_specified(self, org_request): + assert isinstance(org_request, models.RegistrationRequest) + if org_request.org_group is None: + raise ValueError( + 'Acceptation of a registration request cannot be ' + 'done when its `Org Group` field is unspecified.') + assert isinstance(org_request.org_group, models.OrgGroup) + assert sqla_inspect(org_request.org_group).persistent + + def _verify_org_does_not_exist(self, session, org_request): + assert isinstance(org_request, models.RegistrationRequest) + if session.query(models.Org).get(org_request.org_id) is not None: + raise ValueError( + 'Organization "{}" already exists.'.format(org_request.org_id)) + + def _create_org_according_to_request(self, session, org_request): + assert isinstance(org_request, models.RegistrationRequest) + assert org_request.id is not None + assert org_request.org_id is not None + with g.n6_auth_manage_api_adapter as api: + api.create_org_and_user_according_to_registration_request(req_id=org_request.id) + created_org = session.query(models.Org).get(org_request.org_id) + assert isinstance(created_org, models.Org) + assert (len(created_org.users) == 1 + and isinstance(created_org.users[0], models.User)) + assert created_org.org_id == org_request.org_id + assert created_org.users[0].login == org_request.email + + def _after_status_transition_to_accepted(self, + org_request, + old_status, + target_status, + concerned_org_id): + assert isinstance(org_request, models.RegistrationRequest) + flash('Registration request accepted. Organization ' + '"{}" created.'.format(concerned_org_id)) + LOGGER.info('Successfully changed status of %a - from %a to %a. 
' + 'Successfully added organization %a.', + org_request, + ascii_str(old_status), + ascii_str(target_status), + ascii_str(concerned_org_id)) + self.try_to_send_mail_notices(notice_key='new_org_and_user_created', + user_login=org_request.email) + + def _after_status_transition_to_other(self, + org_request, + old_status, + target_status, + concerned_org_id): + assert isinstance(org_request, models.RegistrationRequest) + flash('Status of the registration request changed from ' + '"{}" to "{}".'.format(old_status, target_status)) + LOGGER.info('Successfully changed status of %a - from %a to %a', + org_request, + ascii_str(old_status), + ascii_str(target_status)) + + +class _OrgConfigUpdateRequestStatusTransitionHandlerKit(_BaseStatusTransitionHandlerKit): + + def _before_status_transition_to_being_processed(self, org_request, old_status, target_status): + assert isinstance(org_request, models.OrgConfigUpdateRequest) + super(_OrgConfigUpdateRequestStatusTransitionHandlerKit, + self)._before_status_transition_to_being_processed(org_request, + old_status, + target_status) + self._verify_no_other_pending_update_request(org_request) + + def _verify_no_other_pending_update_request(self, org_request): + assert isinstance(org_request, models.OrgConfigUpdateRequest) + pending_update_request = org_request.org.pending_config_update_request + if pending_update_request is not None and pending_update_request is not org_request: + raise ValueError( + 'An organization config update request cannot be ' + 'made the pending one (by switching its status to ' + '"being processed") when another config update ' + 'request related to the same organization is already ' + 'the pending one (i.e., has its status set to "new" ' + 'or "being processed").') + + def _before_status_transition_to_discarded(self, org_request, old_status, target_status): + assert isinstance(org_request, models.OrgConfigUpdateRequest) + super(_OrgConfigUpdateRequestStatusTransitionHandlerKit, + self)._before_status_transition_to_discarded(org_request, + old_status, + target_status) + self._verify_update_request_is_the_pending_one(org_request) + self._remember_org_config_info(org_request) + + def _before_status_transition_to_accepted(self, org_request, old_status, target_status): + assert isinstance(org_request, models.OrgConfigUpdateRequest) + super(_OrgConfigUpdateRequestStatusTransitionHandlerKit, + self)._before_status_transition_to_accepted(org_request, + old_status, + target_status) + self._verify_update_request_is_the_pending_one(org_request) + self._remember_org_config_info(org_request) + self._update_org_according_to_request(org_request) + + def _verify_update_request_is_the_pending_one(self, org_request): + assert isinstance(org_request, models.OrgConfigUpdateRequest) + if org_request.org.pending_config_update_request is not org_request: + raise AssertionError( + 'The active organization config update request {!a} ' + 'is not the currently pending update request of the ' + '{!a} organization!'.format(org_request, + org_request.org)) + + def _remember_org_config_info(self, org_request): + assert isinstance(org_request, models.OrgConfigUpdateRequest) + with g.n6_auth_manage_api_adapter as api: + g.n6_org_config_info = oc_info = api.get_org_config_info(org_id=org_request.org_id) + assert oc_info.get('update_info') is not None + + def _update_org_according_to_request(self, org_request): + assert isinstance(org_request, models.OrgConfigUpdateRequest) + assert org_request.id is not None + with g.n6_auth_manage_api_adapter as api: + 
api.update_org_according_to_org_config_update_request(req_id=org_request.id) + + def _after_status_transition_to_accepted(self, + org_request, + old_status, + target_status, + concerned_org_id): + assert isinstance(org_request, models.OrgConfigUpdateRequest) + flash('Organization config update request accepted. ' + 'Organization "{}" updated.'.format(concerned_org_id)) + LOGGER.info('Successfully changed status of %a - from %a to %a. ' + 'Successfully updated organization %a.', + org_request, + ascii_str(old_status), + ascii_str(target_status), + ascii_str(concerned_org_id)) + assert org_request.id is not None + assert g.n6_org_config_info is not None + assert g.n6_org_config_info['org_id'] == concerned_org_id + self.try_to_send_mail_notices(notice_key='org_config_update_applied', + req_id=org_request.id) + + def _after_status_transition_to_other(self, + org_request, + old_status, + target_status, + concerned_org_id): + assert isinstance(org_request, models.OrgConfigUpdateRequest) + flash('Status of the organization config update request changed ' + 'from "{}" to "{}".'.format(old_status, target_status)) + LOGGER.info('Successfully changed status of %a - from %a to %a', + org_request, + ascii_str(old_status), + ascii_str(target_status)) + if target_status == _STATUS_DISCARDED: + assert org_request.id is not None + assert g.n6_org_config_info is not None + assert g.n6_org_config_info['org_id'] == concerned_org_id + self.try_to_send_mail_notices(notice_key='org_config_update_rejected', + req_id=org_request.id) + + def get_notice_data(self, req_id): + # type: (...) -> dict + notice_data = g.n6_org_config_info + notice_data['update_info']['update_request_id'] = req_id + return notice_data + + def get_notice_lang(self, notice_data): + # type: (dict) -> Union[str, None] + return notice_data['notification_language'] # TODO?: separate per-user setting?... + + def get_notice_recipients(self, notice_data): + with g.n6_auth_manage_api_adapter as api: + return api.get_org_user_logins(org_id=notice_data['org_id']) + + +# +# Public stuff (used in `n6adminpanel.app`) +# + +ACTIONS_FIELD_NAME = _ID_NAME_OF_TARGET_STATUS_INPUT + +ACTIONS_FIELD_FOR_REGISTRATION = StringField( + label='', + id=_ID_NAME_OF_TARGET_STATUS_INPUT, + widget=_OrgRequestActionsWidget( + accept_button_text='Accept and create the organization')) + +ACTIONS_FIELD_FOR_ORG_CONFIG_UPDATE = StringField( + label='', + id=_ID_NAME_OF_TARGET_STATUS_INPUT, + widget=_OrgRequestActionsWidget( + accept_button_text='Accept and apply the organization config update', + discard_button_text='Reject the organization config update')) + +registration_request_handler_kit = _RegistrationRequestStatusTransitionHandlerKit() +org_config_update_request_handler_kit = _OrgConfigUpdateRequestStatusTransitionHandlerKit() diff --git a/N6AdminPanel/n6adminpanel/patches.py b/N6AdminPanel/n6adminpanel/patches.py index 0fa4e4d..f339bcc 100644 --- a/N6AdminPanel/n6adminpanel/patches.py +++ b/N6AdminPanel/n6adminpanel/patches.py @@ -1,8 +1,7 @@ -# Copyright (c) 2013-2018 NASK. All rights reserved. +# Copyright (c) 2018-2021 NASK. All rights reserved. 
-from collections import MutableSequence
+from collections.abc import MutableSequence
 
-from flask_admin._compat import iteritems
 from flask_admin.contrib.sqla import form
 from flask_admin.model.fields import InlineModelFormField
 from sqlalchemy import inspect
@@ -25,7 +24,7 @@ class _PatchedInlineModelFormField(InlineModelFormField):
     hidden_field_type = 'HiddenField'
 
     def populate_obj(self, obj, name):
-        for name, field in iteritems(self.form._fields):
+        for name, field in self.form._fields.items():
             if field.type != self.hidden_field_type:
                 field.populate_obj(obj, name)
 
@@ -87,7 +86,7 @@ def patched_populate_obj(self, obj, name):
     """
 
     # treating empty or whitespace-only text as NULL
-    to_be_set = (None if (isinstance(self.data, basestring)
+    to_be_set = (None if (isinstance(self.data, str)
                           and not self.data.strip())
                  else self.data)
 
@@ -129,6 +128,6 @@ def get_patched_init_actions(original_meth):
     def patched_meth(self):
         original_meth(self)
         new_actions_data = {key: (_get_action_meth_wrapper(val[0]), val[1], val[2]) for
-                            key, val in self._actions_data.iteritems()}
+                            key, val in self._actions_data.items()}
         setattr(self, '_actions_data', new_actions_data)
     return patched_meth
diff --git a/N6AdminPanel/n6adminpanel/static/logo.png b/N6AdminPanel/n6adminpanel/static/logo.png
index 9b60233768c104da2dc787976716bb52f42cc9a1..f5f6e736d443c2604acc6a24988d57d96a037e38 100644
GIT binary patch
literal 7689
[base85-encoded PNG image data omitted]

literal 11724
[base85-encoded PNG image data omitted]
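The recorder.py changes that follow add, among other things, a setter of MySQL session variables which is executed automatically for every newly established low-level database connection. The snippet below is a minimal, self-contained sketch of that mechanism, based on SQLAlchemy's `connect` event; the engine URL and the `wait_timeout` variable/value are illustrative assumptions, not values taken from this patch:

    # Illustrative sketch only -- not part of the patch.
    from sqlalchemy import create_engine, event, text

    engine = create_engine('mysql+mysqldb://user:password@localhost/n6')  # assumed URL

    # As the patched docstring warns, the statement is executed "as is",
    # so variable names/values must come from *trusted* configuration.
    SETTER_SQL = 'SET SESSION wait_timeout = 28800'  # example variable and value

    @event.listens_for(engine, 'connect')
    def set_session_variables(dbapi_connection, connection_record):
        # Invoked by SQLAlchemy for each newly established low-level
        # DBAPI connection in the pool.
        with dbapi_connection.cursor() as cursor:
            cursor.execute(SETTER_SQL)

    with engine.connect() as conn:
        print(conn.execute(text('SELECT @@session.wait_timeout')).scalar())

Installing the setter at the `connect`-event level (rather than issuing `SET SESSION` per query) ensures that pooled connections re-created after a server-side timeout get the same settings.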
zxK=`9BB9G#&y)2(f5y%0O3TU^eW5wvw!vh8oV+~No58LwaTj2pgKGe)#Kg!bX-f+{ zp0u{pagoQH78DUl0{#sV4CDHI-bl{Qjssi(w2&jp^;;6JmE7I=h9@R0Z=3*c+yqAB zCg=#vvn~z~N4)=1Qo`y29HSqTlj6YemQzzBaCUN{3&nQKU8X6k-FpT$r6sFz$ zK{Q0!EmTWvlVxz4AYb3?SEch;mXy%J^5TcRlhV`W(C}y!0pf&fussjO2NJV09@sfN z761`lz%B*%uA`$v;jU9}MIaP#-zMEj2z&e}2(*&nVS94I<0 zw+%J#%0LozKF-WIsf8BsJ4Gw)5*HU2(6qqRR8o-doSuL5V`dqdm_n!2 z5PPNp*CR%&zkh#7OCtk_$iTxxG&MElwOf>GY+>=?QEP&511SGIaJTg9m#UuLNSh-l z*PFvhe3d7sr;>p7^SCz5KLcY}CtkoC_1ezzeB2ShMqb-3&KUqS2{7W1vt?R5^8*6| z!TGHEtsXd0Brkq=c^KraA(nYqS!MS}mm8H>BYQ^EdANY`C@d_@Ei6Rs{fKvTtWPDS zW@TsB2Nnq%;^gjb5ln0VMg&`q629AutR;jMa2?Blq@fK+Vl-X(6|rD@TIh zw1YmzDa9pnZUXWR@UjPej@pZ-UIK{per1L1_o}|O_J?o@sG^TGACRW4*2iu&HC=Xg zw%1;1F{k^UM!8`-1qwQDU)=;W3e=7DR3m>R@sH-2uLJHGHCr!)6>hsl0E!C=3-?S+ z-~kg8KLSpH@(HYHhc7wcy$j9GfXA|}Zfya&``aGf04hKh4a@posC9L0F)18@~KD_e*kWn}ch78DznyM-!ARqupL^@zwtB9VUPQQHr{(W@z-2E~r zZ)_>+RRMSGe^xptc2sSsb(fUEAQAOrO)GnC{s7dRJUs`4pF9=EFp=eZH%f*Nqa%sp^1X5b zxcrEDSAe+@fTso2%uO2)%$9(S_81gUNp#W66`6alPBtFD%=BcvpA70N2%&ra{0W%1 zd6W_zcNcnRM=P}KT9v5us30Y}!G#{N>EKrj5unD=%S5!aagXbm<99(0C=DcC5zWWP zr~O6g-u@#fEVX)qbRtFeX-T^6|+)CDBkOT_&%3^qyX^TpTq?K z=}ZqcI9RwoK-qxujs#HyQ$Gw`Tm+BV@op^Rk?t^Zb0_M=xOFct(|CAz0M|^U<9dq{ zNOrncukg6j%X@21i(lVi4#v@1BAx@x1d>xR-~iF^6wLf+*O)%>I7EB^br_Vu?q;~F zr>6`c7N^k1FI{^Cc7XbiEw_)j)IETgkgF*$Bp5GLn^2$tg%bPoO+IU64Y97C9?u#o zzda%}?)5zxNSi*et(~_<$z#%;fUu`v{~Q;G2V#o^0isp{q|{uc!R?T1GzA~lBi{me z_Slg~Na)tCb@1;t=yc6JRc*aBFlMu(+HQbVtxT`;;iz6R=A zbHD@iRG%;+l)grHQ%&#g(F5DvE#)FmOyfiXy!=C4T-^BV=-3zzc-&l8ECAxsLWUe9 zX5hXc5Ij6Qpodns#xfuG$K$$5_j!ONP*ueTGFx6={!wP*Q1To9thHFEv&6%C`V?uh zGy=zr? zfX1_oerFVL#|L<=v7=)g7{2oTJIo^Jbxa2k6%eb{_4VMfOo5G~fCrIAyPx@WFDol6 zAzhm(3ZS3`+@I1u@(n=GAc0_@j9ch0xJk2xy2eIn6BBx{zBMMjXyv*MSZ|7zKfoDG zm@2@i)?;fLFtUD7pCSO3?u&7GP<+p=xSEAdD&1ZlTH4qTlOqPVCrPR#Gh_4jzs2rN zcTgGp-t$H?aE1#ju$4@B+8>E{pIL#FQDE%)3bpX~eJxy;1HCIPQ=u)cG}!GNtWdB# zTu}p^N>I(d<^DH~YL<6p6Fj004`L1m!nO|=E1=u-6bgl=2|68bE-<%J$)$a = , SESSION = , ..." + to set the specified variables. + + To be called automatically whenever a new low-level + connection to the database is established. + + WARNING: for simplicity, the variable names and values are + inserted "as is", *without* any escaping -- we assume we + can treat them as *trusted* data. + """ + with dbapi_connection.cursor() as cursor: + cursor.execute(setter_sql) + + @classmethod + def get_arg_parser(cls): + parser = super(Recorder, cls).get_arg_parser() + parser.add_argument("--n6recorder-blacklist", type=SourceField().clean_result_value, + help="the identifier of a blacklist source (in the " + "format: 'source-label.source-channel'); if given, " + "this recorder instance will consume and store " + "*only* events from this blacklist source") + parser.add_argument("--n6recorder-non-blacklist", action="store_true", + help="if given, this recorder instance will consume " + "and store *only* events from *all* non-blacklist " + "sources (note: then the '--n6recorder-blacklist' " + "option, if given, is just ignored)") + return parser + def ping_connection(self): """ Required to maintain the connection to MySQL. Perform ping before each query to the database. OperationalError if an exception occurs, remove sessions, and connects again. - Set the wait_timeout(Mysql session variable) for the session on self.wait_timeout. 
""" try: - self.session_db.execute("SELECT 1") + self.session_db.execute(sqla_text("SELECT 1")) except OperationalError as exc: # OperationalError: (2006, 'MySQL server has gone away') LOGGER.warning("Database server went away: %r", exc) LOGGER.info("Reconnect to server") self.session_db.remove() - self.set_session_wait_timeout() - - def set_session_wait_timeout(self): - """set session wait_timeout in mysql SESSION VARIABLES""" - self.session_db.execute(Recorder.SQL_WAIT_TIMEOUT.format(wait=self.wait_timeout)) + try: + self.session_db.execute(sqla_text("SELECT 1")) + except SQLAlchemyError as exc: + LOGGER.error( + "Could not reconnect to the MySQL database: %s", + make_exc_ascii_str(exc)) + sys.exit(1) @staticmethod def get_truncated_rk(rk, parts): @@ -207,6 +325,7 @@ def insert_new_event(self, items, with_transact=True, recorded=False): with transact: self.session_db.add_all(items) else: + assert transact.is_entered self.session_db.add_all(items) except IntegrityError as exc: str_exc = make_exc_ascii_str(exc) @@ -429,6 +548,12 @@ def suppressed_update(self): def main(): + parser = Recorder.get_arg_parser() + args = Recorder.parse_only_n6_args(parser) + if args.n6recorder_non_blacklist: + monkey_patch_non_bl_recorder() + elif args.n6recorder_blacklist is not None: + monkey_patch_bl_recorder(args.n6recorder_blacklist) with logging_configured(): if os.environ.get('n6integration_test'): # for debugging only @@ -441,5 +566,34 @@ def main(): d.stop() +def monkey_patch_non_bl_recorder(): + Recorder.input_queue = { + "exchange": "event", + "exchange_type": "topic", + "queue_name": 'zbd-non-blacklist', + "binding_keys": [ + 'event.filtered.*.*', + 'suppressed.filtered.*.*', + ] + } + + +def monkey_patch_bl_recorder(source): + Recorder.input_queue = { + "exchange": "event", + "exchange_type": "topic", + "queue_name": 'zbd-bl-{}'.format(source.replace(".", "-")), + "binding_keys": [ + x.format(source) for x in [ + 'bl-new.filtered.{}', + 'bl-change.filtered.{}', + 'bl-delist.filtered.{}', + 'bl-expire.filtered.{}', + 'bl-update.filtered.{}', + ] + ] + } + + if __name__ == "__main__": main() diff --git a/N6Core/n6/base/config.py b/N6Core/n6/base/config.py index d394878..cb408c5 100644 --- a/N6Core/n6/base/config.py +++ b/N6Core/n6/base/config.py @@ -3,6 +3,8 @@ # Copyright (c) 2013-2018 NASK. All rights reserved. +# NOTE: more of the config-related stuff is in n6lib.config + import os import os.path import shutil @@ -11,9 +13,6 @@ from n6lib.const import USER_DIR, ETC_DIR -# NOTE: more of the config-related stuff is in n6lib.config - - def install_default_config(): """ Copy default N6Core conf files to '/etc/n6' or '~/.n6'. diff --git a/N6Core/n6/base/queue.py b/N6Core/n6/base/queue.py index 64ef4f7..356ed2c 100644 --- a/N6Core/n6/base/queue.py +++ b/N6Core/n6/base/queue.py @@ -1,12 +1,14 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2013-2019 NASK. All rights reserved. +# Copyright (c) 2013-2021 NASK. All rights reserved. # Note, however, that some parts of the QueuedBase class are patterned # after some examples from the docs of a 3rd-party library: `pika`; and # some of the docstrings are taken from or contain fragments of the # docs of the `pika` library. 
+from __future__ import print_function
+from future.utils import raise_
 
 import collections
 import contextlib
 import copy
@@ -15,16 +17,20 @@
 import re
 import sys
 import time
-import traceback
-import types
 
 try:
     import pika
     import pika.credentials
 except ImportError:
-    print >>sys.stderr, "Warning: pika is required to run AMQP components"
-
-from n6lib.amqp_helpers import get_amqp_connection_params_dict
+    print("Warning: pika is required to run AMQP components", file=sys.stderr)
+
+from n6corelib.timeout_callback_manager import TimeoutCallbackManager
+from n6lib.amqp_helpers import (
+    PIPELINE_OPTIONAL_COMPONENTS,
+    PIPELINE_OPTIONAL_GROUPS,
+    get_amqp_connection_params_dict,
+    get_pipeline_binding_states,
+)
 from n6lib.argument_parser import N6ArgumentParser
 from n6lib.auth_api import AuthAPICommunicationError
 from n6lib.common_helpers import (
@@ -33,7 +39,6 @@
     make_exc_ascii_str,
 )
 from n6lib.log_helpers import get_logger
-from n6lib.timeout_callback_manager import TimeoutCallbackManager
 
 
 LOGGER = get_logger(__name__)
 
@@ -66,7 +71,7 @@ class QueuedBase(object):
 
             "exchange": "<name of the exchange to connect to>",
             "exchange_type": "<type of the exchange>",
             "queue_name": "<name of the queue to connect to>",
-            "binding_keys": <list of binding keys>,
+            "accepted_event_types": <list of event types>,
             "queue_exclusive": True|False,  # is queue exclusive (optional)
         }
 
@@ -139,7 +144,7 @@ def __new__(cls, **kwargs):
 
         Normally, this special method should not be overridden in
         subclasses. (If you really need that please *extend* it by
-        overridding and calling with super()).
+        overriding and calling with super()).
 
         The method causes that immediately after creating of a
         QueuedBase-derived class instance -- before calling __init__()
@@ -167,7 +172,7 @@ def __new__(cls, **kwargs):
         4) the preinit_hook() method is called (see its docs...).
         """
         # some unit tests are over-zealous about patching super()
-        from __builtin__ import super
+        from builtins import super
 
         self = super(QueuedBase, cls).__new__(cls, **kwargs)
 
@@ -194,6 +199,27 @@ def parse_cmdline_args(self):
         """
         Parse commandline arguments (taken from sys.argv[1:]).
 
+        Returns:
+            An argparse.Namespace instance containing parsed commandline
+            arguments.
+
+        For more information about the parsing, see the documentation
+        of the `parse_only_n6_args` method.
+        """
+        arg_parser = self.get_arg_parser()
+        return self.parse_only_n6_args(arg_parser)
+
+    @classmethod
+    def parse_only_n6_args(cls, arg_parser):
+        """
+        Parse commandline arguments (taken from sys.argv[1:])
+        using the provided argument parser.
+
+        Args/kwargs:
+            `arg_parser`:
+                An `N6ArgumentParser` instance used to parse
+                the commandline arguments.
+
         Returns:
             An argparse.Namespace instance containing parsed commandline
             arguments.
@@ -210,7 +236,6 @@ def parse_cmdline_args(self):
         This method *should not* be overridden completely; instead,
         it can be *extended* (overridden + called with super()).
         """
-        arg_parser = self.get_arg_parser()
         cmdline_args, unknown = arg_parser.parse_known_args()
         illegal_n6_args = [arg for arg in unknown if re.match(r'\-+n6', arg)]
         if illegal_n6_args:
             arg_parser.error('illegal n6-specific arguments: {}'.format(
                           ', '.join(illegal_n6_args)))
         return cmdline_args
 
-    def get_arg_parser(self):
+    @classmethod
+    def get_arg_parser(cls):
         """
         Make and configure argument parser.
@@ -262,7 +288,7 @@ def get_arg_parser(self):
                                 metavar='SUFFIX',
                                 help=('add the specified suffix to all '
                                       'output AMQP exchange/queue names'))
-        if self.supports_n6recovery:   # <- True by default
+        if cls.supports_n6recovery:   # <- True by default
             arg_parser.add_argument('--n6recovery',
                                     action='store_true',
                                     help=('add the "_recovery" suffix to '
@@ -333,10 +359,167 @@ def __init__(self, **kwargs):
         LOGGER.debug('output_queue: %r', self.output_queue)
 
         self.clear_amqp_communication_state_attributes()
+        self.configure_pipeline()
         self._conn_params_dict = self.get_connection_params_dict()
         self._amqp_setup_timeout_callback_manager = \
             self._make_timeout_callback_manager('AMQP_SETUP_TIMEOUT')
 
+    def configure_pipeline(self):
+        """
+        Place the component inside the pipeline by creating binding
+        keys, which are used to determine which components' output
+        messages should be bound to the input queue of the currently
+        initialized component.
+
+        If the component is configured in the pipeline config,
+        special keywords called "states" will be used to create
+        unique binding keys, joining one component's output
+        to another component's input. If no configuration
+        can be found, the method will look for a "hard-coded"
+        list of binding keys in the `input_queue` attribute.
+
+        Usually, each binding key is composed of four sections,
+        where the first section defines the accepted types of
+        events, the second section is the "state", and the last two
+        contain wildcards, matching all events originally routed
+        with keys consisting of a `source` and a `channel` part,
+        separated by a dot.
+
+        Previously, the `input_queue` dict - a class attribute
+        (which has been transformed to an instance attribute) of
+        the component - contained the `binding_keys` key, a fixed
+        list of the input queue's binding keys of the component.
+        Now, the `binding_keys` list is generated using
+        the `accepted_event_types` list and the list of "binding
+        states" defined in the pipeline configuration.
+
+        A different binding key is created for every "state".
+        Sample binding key: *.{state}.*.*, where {state} is
+        a "state" defined in the pipeline config. The behaviour
+        differs for such components as collectors, parsers
+        or DBarchiver - see their docstrings for more details.
+
+        If the `accepted_event_types` item of the `input_queue`
+        attribute is defined, it specifies the types of events
+        the component should bind to. Otherwise, the component
+        will bind to all event types (the asterisk wildcard).
+
+        The pipeline config should be defined in the `pipeline`
+        section by default. Each component is configured through
+        the option that is the component's lowercase class name;
+        its value should be a list of "binding states", separated
+        by commas, that will be used to create the component's
+        binding keys; e.g., for the Enricher the config will look like:
+
+            [pipeline]
+            enricher = somestate, otherstate
+
+        Considering that the Enricher has the following list of
+        `accepted_event_types`:
+
+            ['event', 'bl', 'bl-update', 'suppressed']
+
+        the list of resulting binding keys for the Enricher
+        will be:
+
+            ['event.somestate.*.*', 'bl.somestate.*.*',
+             'bl-update.somestate.*.*', 'suppressed.somestate.*.*',
+             'event.otherstate.*.*', 'bl.otherstate.*.*',
+             'bl-update.otherstate.*.*', 'suppressed.otherstate.*.*']
+
+        These "binding states" are parts of routing keys of messages
+        received by the component, that identify the component
+        which sent them. E.g., parsers send their messages with
+        routing keys using the format:
+        <event type>.parsed.<source label>.<source channel>
+        The second part of such a routing key -- "parsed" -- is
+        characteristic of parsers, being their "binding state".
+        If you want another component to receive messages from
+        parsers, then "parsed" should be on the list of values bound
+        to the option being that component's ID. So, for the Enricher
+        to receive messages of this type, it should have a
+        configuration like:
+
+            [pipeline]
+            enricher = parsed
+
+        Each component is also bound to some group of components,
+        the `utils` group by default. Collectors are bound to the
+        `collectors` group and parsers -- to the `parsers` group.
+        The "binding states" can be defined for a whole group.
+        The group's config will be used only if there is no config
+        for the component itself: a component-specific config option
+        takes priority over the group's option.
+
+        SEE: the configuration template file with the "pipeline"
+        section for more examples.
+        """
+        if self.input_queue is not None:
+            self.set_queue_name()
+            pipeline_group, pipeline_name = self.get_component_group_and_id()
+            binding_states = get_pipeline_binding_states(pipeline_group, pipeline_name)
+            if binding_states:
+                assert isinstance(binding_states, list)
+                accepted_event_types = self.input_queue.get('accepted_event_types')
+                if (accepted_event_types is not None and
+                        not isinstance(accepted_event_types, list)):
+                    raise TypeError('The `accepted_event_types` key of the `input_queue` dict, '
+                                    'if present and set, should be a list')
+                self.make_binding_keys(binding_states, accepted_event_types)
+            # if there is no pipeline configuration for the component,
+            # check if binding keys have been manually set in
+            # the `input_queue` attribute
+            elif ('binding_keys' in self.input_queue and
+                  self.input_queue['binding_keys'] is not None):
+                if not isinstance(self.input_queue['binding_keys'], list):
+                    raise TypeError('The `binding_keys` item of the `input_queue` attribute, '
+                                    'if manually set, has to be a list')
+            elif (pipeline_name not in PIPELINE_OPTIONAL_COMPONENTS and
+                  pipeline_group not in PIPELINE_OPTIONAL_GROUPS):
+                LOGGER.warning('The component `%s` is not configured in the pipeline '
+                               'config and the list of binding keys is not defined '
+                               'in the `input_queue` attribute. If the `input_queue` '
+                               'attribute is set, the list of binding keys should be '
+                               'defined', pipeline_name)
+                self.input_queue['binding_keys'] = []
+
+    def set_queue_name(self):
+        """
+        A hook that should be implemented by those subclasses which do
+        not have an explicitly defined input queue name, such as
+        IntelMQ bots.
+        """
+
+    def get_component_group_and_id(self):
+        """
+        Get the component's group name and its ID. These values are
+        used by the pipeline configuration mechanism.
+
+        Pipeline configuration-related methods search for these names
+        among the options of the `pipeline` config section.
+
+        If the component's ID or its group name is found in
+        the section, the list of its values will be used as the
+        component's "binding states". Then these "binding states"
+        are used to generate binding keys for the component's
+        input queue.
+
+        The method should be overridden in subclasses of components
+        belonging to different groups, such as collectors or parsers.
+
+        Returns:
+            A tuple of the component's group name and its ID.
+        """
+        return 'utils', self.__class__.__name__.lower()
+
+    def make_binding_keys(self, binding_states, accepted_event_types):
+        if not accepted_event_types:
+            accepted_event_types = ['*']
+        self.input_queue['binding_keys'] = []
+        for state in binding_states:
+            for event_type in accepted_event_types:
+                self.input_queue['binding_keys'].append(
+                    '{type}.{state}.*.*'.format(type=event_type, state=state))
+
     def clear_amqp_communication_state_attributes(self):
         self._connection = None
         self._channel_in = None
@@ -431,7 +614,7 @@ def inner_stop(self):
 
     def _make_timeout_callback_manager(self, timeout_attribute_name):
         timeout = getattr(self, timeout_attribute_name)
-        timeout_expiry_msg = '{}.{}={!r} expired!'.format(self.__class__.__name__,
+        timeout_expiry_msg = '{}.{}={!r} expired!'.format(self.__class__.__name__,  # TODO (Py3): `__name__` -> `__qualname__`
                                                           timeout_attribute_name,
                                                           timeout)
         return TimeoutCallbackManager(timeout, sys.exit, timeout_expiry_msg)
@@ -476,8 +659,8 @@ def close_connection(self):
     def on_connection_error_open(self, connection, error_message=''):
         error_message = ascii_str(error_message)
         # in case logging via AMQP does not work...
-        print >>sys.stderr, ('Could not connect to RabbitMQ. '
-                             'Reason: {}.'.format(error_message))
+        print('Could not connect to RabbitMQ. Reason: {}.'.format(error_message),
+              file=sys.stderr)
         LOGGER.critical('Could not connect to RabbitMQ. Reason: %s', error_message)
         sys.exit(1)
@@ -519,8 +702,9 @@ def on_connection_closed(self, connection, reply_code, reply_text):
         else:
             # in case logging via AMQP does not work, let's additionally
             # print this error message to the standard error output
-            print >>sys.stderr, ('Error: AMQP connection has been closed with code: {}. '
-                                 'Reason: {}.'.format(reply_code, reply_text))
+            print('Error: AMQP connection has been closed with code: {}. '
+                  'Reason: {}.'.format(reply_code, reply_text),
+                  file=sys.stderr)
             LOGGER.error('AMQP connection has been closed with code: %s. Reason: %s',
                          reply_code, reply_text)
             sys.exit(1)
@@ -736,15 +920,19 @@ def on_queue_declared(self, method_frame):
 
         Args:
             method_frame: The Queue.DeclareOk frame
-        LOGGER.debug('Binding %r to %r with %r',
-                     self.input_queue["exchange"],
-                     self.input_queue["queue_name"],
-                     self.input_queue["binding_keys"])
-        for binding_key in self.input_queue["binding_keys"]:
-            self._channel_in.queue_bind(self.on_bindok,
-                                        self.input_queue["queue_name"],
-                                        self.input_queue["exchange"],
-                                        binding_key)
+        if not self.input_queue['binding_keys']:
+            LOGGER.warning('The list of binding keys is empty for the queue %r.',
+                           self.input_queue['queue_name'])
+        else:
+            LOGGER.debug('Binding %r to %r with %r',
+                         self.input_queue["exchange"],
+                         self.input_queue["queue_name"],
+                         self.input_queue["binding_keys"])
+            for binding_key in self.input_queue["binding_keys"]:
+                self._channel_in.queue_bind(self.on_bindok,
+                                            self.input_queue["queue_name"],
+                                            self.input_queue["exchange"],
+                                            binding_key)
 
     def on_bindok(self, unused_frame):
         """
@@ -887,6 +1075,13 @@ def on_message(self, channel, basic_deliver, properties, body):
         exc_info = None
         delivery_tag = basic_deliver.delivery_tag
         routing_key = basic_deliver.routing_key
+        if not self._is_output_ready_or_none():
+            LOGGER.warning('Message received from the input_queue while the '
+                           'output queue has not yet been set up. '
+                           '[%s:%s] The message will be requeued.',
+                           delivery_tag,
+                           routing_key)
+
         try:
             LOGGER.debug('Received message #%r routed with key %r)',
                          delivery_tag, routing_key)
@@ -921,7 +1116,7 @@ def on_message(self, channel, basic_deliver, properties, body):
             self.nacknowledge_message(delivery_tag,
                                       '{0!r} in {1!r}'.format(exc_info[1], self),
                                       requeue=True)
             # now we can re-raise the original exception
-            raise exc_info[0], exc_info[1], exc_info[2]
+            raise_(exc_info[0], exc_info[1], exc_info[2])
         else:
             self.acknowledge_message(delivery_tag)
         finally:
@@ -967,7 +1162,9 @@ def setting_error_event_info(rid_or_record_dict):
 
                 with self.setting_error_event_info(event_record_dict):
         """
-        if isinstance(rid_or_record_dict, (basestring, types.NoneType)):
+        if rid_or_record_dict is None:
+            event_rid = event_id = None
+        elif isinstance(rid_or_record_dict, str):
             event_rid = rid_or_record_dict
             event_id = None
         else:
@@ -1466,7 +1663,7 @@ def log_disruptive_exc():
         finally:
             if is_present(to_be_raised_exc_info):
                 exc_type, exc_value, tb = to_be_raised_exc_info
-                raise exc_type, exc_value, tb
+                raise_(exc_type, exc_value, tb)
         finally:
             # (breaking traceback-related reference cycles, if any)
             # noinspection PyUnusedLocal
@@ -1482,6 +1679,7 @@ def _do_publish_iteratively(self):
         concrete_publishing_generator = self.publish_iteratively()
         try:
             try:
+                # TODO: analyze whether time.time() should be replaced e.g. with time.monotonic().
                 yield_time = time.time()
                 for marker in concrete_publishing_generator:
                     if marker not in (self.FLUSH_OUT, None):
diff --git a/N6Core/n6/collectors/abuse_ch.py b/N6Core/n6/collectors/abuse_ch.py
index b43ddb8..b4159b6 100644
--- a/N6Core/n6/collectors/abuse_ch.py
+++ b/N6Core/n6/collectors/abuse_ch.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2020 NASK. All rights reserved.
+# Copyright (c) 2013-2021 NASK. All rights reserved.
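As to the `TODO` added in `_do_publish_iteratively()` above: the point of `time.monotonic()` (available on Python 3) is that, unlike `time.time()`, it can never jump backwards on system clock adjustments, so deltas computed from it are safe for interval measurement. A minimal illustration (an editor's sketch, not part of the patch):

    import time

    start = time.monotonic()
    time.sleep(0.25)                    # stand-in for one publishing step
    elapsed = time.monotonic() - start  # always >= 0; time.time() deltas may not be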
""" Collectors: abuse-ch.spyeye-doms, abuse-ch.spyeye-ips, @@ -39,12 +39,16 @@ from n6lib.common_helpers import ( make_exc_ascii_str, read_file, - reduce_indent, ) -from n6lib.csv_helpers import split_csv_row +from n6lib.config import join_config_specs +from n6lib.csv_helpers import ( + extract_field_from_csv_row, + split_csv_row, +) from n6lib.datetime_helpers import parse_iso_datetime_to_utc from n6lib.http_helpers import RequestPerformer from n6lib.log_helpers import get_logger +from n6lib.unpacking_helpers import iter_unzip_from_bytes @@ -407,9 +411,8 @@ def load_state(self): } return state - def extract_raw_row_time(self, row): - fields = split_csv_row(row) - return fields[self.time_field_index].strip() + def pick_raw_row_time(self, row): + return extract_field_from_csv_row(row, column_index=self.time_field_index).strip() def clean_row_time(self, raw_row_time): return self.normalize_row_time(raw_row_time) @@ -429,13 +432,25 @@ def get_source_channel(self, **processed_data): class AbuseChFeodoTrackerCollector(_BaseAbuseChDownloadingTimeOrderedRowsCollector): - raw_format_version_tag = '201908' + raw_format_version_tag = '202110' time_field_index = 0 def get_source_channel(self, **processed_data): return 'feodotracker' + def split_orig_data_into_rows(self, orig_data): + return reversed(orig_data.split('\n')) + + def should_row_be_used(self, row): + if not row.strip() or row.startswith('#'): + return False + try: + self.normalize_row_time(extract_field_from_csv_row(row, column_index=self.time_field_index)) + return True + except ValueError: + return False + class AbuseChSSLBlacklistCollector(_BaseAbuseChDownloadingTimeOrderedRowsCollector): # Note that, contrary to its name, it is an *event-based* source @@ -454,18 +469,27 @@ class AbuseChUrlhausUrlsCollector(_BaseAbuseChDownloadingTimeOrderedRowsCollecto raw_format_version_tag = '202001' type = 'stream' - config_spec_pattern = ( - reduce_indent(BaseDownloadingTimeOrderedRowsCollector.config_spec_pattern) + - reduce_indent(''' + config_spec_pattern = join_config_specs( + BaseDownloadingTimeOrderedRowsCollector.config_spec_pattern, + ''' api_url :: str api_retries = 3 :: int - ''')) + ''') time_field_index = 1 + CSV_FILENAME = 'csv.txt' + def get_source_channel(self, **processed_data): return 'urlhaus-urls' + # note that since Apr 2020 AbuseCh changed input format for this + # source - now it is .zip file with .txt inside + def obtain_orig_data(self): + data = self.download(self.config['url']) + [(_, all_rows)] = iter_unzip_from_bytes(data, filenames=[self.CSV_FILENAME]) + return all_rows + def prepare_selected_data(self, fresh_rows): abuse_info_dicts = [self._make_abuse_info_dict(row) for row in fresh_rows] return json.dumps(abuse_info_dicts) @@ -501,9 +525,18 @@ class AbuseChUrlhausPayloadsUrlsCollector(_BaseAbuseChDownloadingTimeOrderedRows time_field_index = 0 + CSV_FILENAME = 'payload.txt' + def get_source_channel(self, **processed_data): return 'urlhaus-payloads-urls' + # note that since Apr 2020 AbuseCh changed input format for this + # source - now it is .zip file with .txt inside + def obtain_orig_data(self): + data = self.download(self.config['url']) + [(_, all_rows)] = iter_unzip_from_bytes(data, filenames=[self.CSV_FILENAME]) + return all_rows + def prepare_selected_data(self, fresh_rows): return fresh_rows @@ -540,13 +573,12 @@ class AbuseChUrlhausPayloadsCollector(CollectorWithStateMixin, @property def custom_converters(self): return { - 'zip_filename': self._zip_filename_from_config, - 'list_of_zip_filenames': 
-                self._zip_filename_from_config),
+            'zip_filename': self._conv_zip_filename_from_config,
+            'list_of_zip_filenames': self.make_list_converter(self._conv_zip_filename_from_config),
         }
 
     @classmethod
-    def _zip_filename_from_config(cls, zip_filename):
+    def _conv_zip_filename_from_config(cls, zip_filename):
         if cls.VALID_ZIP_FILENAME_REGEX.search(zip_filename):
             return zip_filename
         if zip_filename == '':
diff --git a/N6Core/n6/collectors/generic.py b/N6Core/n6/collectors/generic.py
index 528dbc1..bc72845 100644
--- a/N6Core/n6/collectors/generic.py
+++ b/N6Core/n6/collectors/generic.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 
-# Copyright (c) 2013-2020 NASK. All rights reserved.
+# Copyright (c) 2013-2021 NASK. All rights reserved.
 
 """
 Collector base classes + auxiliary tools.
@@ -15,6 +15,7 @@
 import time
 import urllib
 import urllib2
+from math import trunc
 
 import lxml.etree
 import lxml.html
@@ -29,8 +30,11 @@
     all_subclasses,
     attr_required,
 )
-from n6lib.common_helpers import make_exc_ascii_str
-from n6lib.email_message import EmailMessage
+from n6lib.common_helpers import (
+    AtomicallySavedFile,
+    make_exc_ascii_str,
+)
+from n6corelib.email_message import ReceivedEmailMessage
 from n6lib.http_helpers import RequestPerformer
 from n6lib.log_helpers import (
     get_logger,
@@ -97,11 +101,13 @@ def _save_last_state(self):
         try:
             if not os.path.isdir(os.path.expanduser(self.config['cache_dir'])):
                 os.makedirs(os.path.expanduser(self.config['cache_dir']))
+
-            with open(self.cache_file_path, "w") as f:
+            with AtomicallySavedFile(self.cache_file_path, 'w') as f:
                 f.write(str(self._current_state))
         except (IOError, OSError):
             LOGGER.warning("Cannot save state to cache '%s'. ", self.cache_file_path)
+
     def get_cache_file_name(self):
         return self.config['source'] + ".txt"
@@ -167,8 +173,9 @@ def save_state(self, state):
                 os.makedirs(cache_dir, 0700)
         except OSError:
             pass
-        with open(self._cache_file_path, 'wb') as cache_file:
-            cPickle.dump(state, cache_file, cPickle.HIGHEST_PROTOCOL)
+
+        with AtomicallySavedFile(self._cache_file_path, 'wb') as f:
+            cPickle.dump(state, f, cPickle.HIGHEST_PROTOCOL)
         LOGGER.info("Saved state: %r", state)
 
     def get_cache_file_name(self):
@@ -278,6 +285,42 @@ def get_script_init_kwargs(cls):
         """
         return {}
 
+    def get_component_group_and_id(self):
+        return 'collectors', self.__class__.__name__
+
+    def make_binding_keys(self, binding_keys, *args):
+        """
+        Make binding keys for the collector using values from
+        the pipeline config, if the collector accepts input messages
+        (i.e., it has its `input_queue` class attribute implemented).
+
+        Unlike in the case of standard components (e.g., those in the
+        'utils' group), the collector's values in the pipeline config
+        are treated as target binding keys, not as binding states.
+
+        Each value from the config becomes a new binding key.
+
+        Use the collector's lowercase class name as the associated
+        option in the pipeline config, or its group name:
+        'collectors'.
+
+        Args:
+            `binding_keys`:
+                New binding keys as a list.
+        """
+        self.input_queue['binding_keys'] = binding_keys
+        self.set_queue_name()
+
+    def set_queue_name(self):
+        """
+        If the collector's `input_queue` dict does not have
+        the `queue_name` key set, its queue name defaults
+        to the lowercase name of its class.
+
+        The method may be called only for non-standard collectors
+        accepting input messages.
+ """ + if 'queue_name' not in self.input_queue or not self.input_queue['queue_name']: + self.input_queue['queue_name'] = self.__class__.__name__.lower() + def _validate_type(self): """Validate type of message, should be one of: 'stream', 'file', 'blacklist.""" if self.type not in self.limits_type_of: @@ -516,7 +559,7 @@ def get_output_prop_kwargs(self, source, output_data_body, *extended* in subclasses (with cooperative super()). """ - created_timestamp = int(time.time()) + created_timestamp = trunc(time.time()) message_id = self.get_output_message_id( source=source, created_timestamp=created_timestamp, @@ -552,7 +595,7 @@ def get_output_message_id(self, source, created_timestamp, The output AMQP message body (a string) as returned by the get_output_data_body() method. `created_timestamp`: - Message creation timestamp as a float number. + Message creation timestamp as an int number. : Processed data (as returned by the process_input_data() method) passed as keyword arguments (the default @@ -620,15 +663,15 @@ def get_script_init_kwargs(cls): return {'input_data': {'raw_email': sys.stdin.read()}} def process_input_data(self, raw_email): - return {'email_msg': EmailMessage.from_string(raw_email)} + return {'email_msg': ReceivedEmailMessage.from_string(raw_email)} def get_output_data_body(self, email_msg, **kwargs): """ - Extract the data body, typically from the given EmailMessage instance. + Extract the data body, typically from the given ReceivedEmailMessage instance. Kwargs: `email_msg`: - An n6lib.email_message.EmailMessage instance. + An n6corelib.email_message.ReceivedEmailMessage instance. : See: BaseCollector.get_output_data_body. Typically, concrete implementations will ignore them. @@ -911,7 +954,131 @@ def _save_last_rss_feed(self): class BaseTimeOrderedRowsCollector(CollectorWithStateMixin, BaseCollector): """ - TODO + The base class for "row-like" data collectors. + + + Implementation/overriding of methods and attributes: + + * required: + * `obtain_orig_data()` + -- see its docs, + * `pick_raw_row_time()` + -- see its docs (and the docs of `extract_row_time()`), + * `clean_row_time()` + -- see its docs (and the docs of `extract_row_time()`); + + * optional: see the attributes and methods defined within the body + of this class below the "Stuff that can be overridden..." comment. + + + Original data (as returned by `obtain_orig_data()`) should consist + of rows that can be singled out (see: `split_orig_data_into_rows()`), + selected (see: `get_fresh_rows_only()` and the methods it calls) + and joined after all (see: `prepare_selected_data()`). + + Rows (those for whom `should_row_be_used()` returns true) should + contain the time/order field; its values are to be extracted by + the `extract_row_time()` method; or -- let's be more specific -- + by certain methods called by it, namely: `pick_raw_row_time()` + (which picks the raw time/order value from the given row) and + `clean_row_time()` (which validates, converts and normalizes that + time/order value). + + For example, for rows such as: + + '"123", "2019-07-18 14:29:05", "sample", "data"\n' + '"987", "2019-07-17 15:13:13", "other", "data"\n' + + ...the `pick_raw_row_time()` should pick the values from the second + column (for an example implementation -- see the docstring of the + `pick_raw_row_time()` method). 
+
+    Values returned by `clean_row_time()` can have any form and type
+    -- provided that a **newer** one always sorts as **greater than**
+    an older one, and values representing the **same** time are always
+    **equal**. An important related requirement is that the value returned
+    by the `get_oldest_possible_row_time()` method **must always** sort
+    as **less than** any value returned by `clean_row_time()`.
+
+    It is important to highlight that the original data (rows) are
+    expected to be already sorted **descendingly** (from newest to oldest)
+    by the time/order field (as extracted with `extract_row_time()`,
+    described above). If not, that must be enforced by your
+    implementation, e.g., in the following way:
+
+        def split_orig_data_into_rows(self, orig_data):
+            all_rows = super(..., self).split_orig_data_into_rows(orig_data)
+            return sorted(all_rows, key=self._row_sort_key, reverse=True)
+
+        def _row_sort_key(self, row):
+            sort_key = self.extract_row_time(row)
+            if sort_key is None:
+                sort_key = self.get_oldest_possible_row_time()
+            return sort_key
+
+    ***
+
+    An even more important requirement, concerning the data source
+    itself, is that the values of the *time/order* field of any **new**
+    (fresh) rows encountered by the collector **must** be **greater than
+    or equal to** the *time/order* field's values of all rows collected
+    during any previous runs of the collector.
+
+    If the **data source does not satisfy** the requirement described
+    above, then **some rows will be lost** (i.e., will **not** be
+    collected at all).
+
+    For example, let's assume that a certain data source provided
+    the following data:
+
+        '"3", "2019-07-19 02:00:00", "sample", "data"\n'
+        '"2", "2019-07-18 01:00:00", "sample_data", "data"\n'
+        '"1", "2019-07-17 00:00:00", "other_data", "data"\n'
+
+    Assuming that our imaginary collector treats the second column
+    as the *time/order* field and that we have just run our collector,
+    all those rows have been collected, and the collector's saved
+    state points at the `3`-rd row as the most recent one.
+
+    Now, let's imagine that the source added three new rows -- so that
+    the data provided by the source looks like this:
+
+        '"6", "2019-07-20 02:00:00", "sample_2", "data"\n'
+        '"5", "2019-07-18 02:00:00", "sample_1", "data"\n'
+        '"4", "2019-07-21 02:00:00", "sample_3", "data"\n'
+        '"3", "2019-07-19 02:00:00", "sample", "data"\n'
+        '"2", "2019-07-18 01:00:00", "sample_data", "data"\n'
+        '"1", "2019-07-17 00:00:00", "other_data", "data"\n'
+
+    If we run our collector now, it will collect the `6`-th row, but it
+    will **not** collect the `4`-th and `5`-th rows, because it treats
+    the `5`-th one as a row *from the past* (its *time/order* value is
+    less, i.e. older, than that of the `3`-rd row, previously saved as
+    the most recent one).
+
+    Note that, in such a case, making our collector sort these rows
+    by the *time/order* field would **not** help much:
+
+        '"4", "2019-07-21 02:00:00", "sample_3", "data"\n'
+        '"6", "2019-07-20 02:00:00", "sample_2", "data"\n'
+        '"3", "2019-07-19 02:00:00", "sample", "data"\n'
+        '"5", "2019-07-18 02:00:00", "sample_1", "data"\n'
+        '"2", "2019-07-18 01:00:00", "sample_data", "data"\n'
+        '"1", "2019-07-17 00:00:00", "other_data", "data"\n'
+
+    Even though the `4`-th and `6`-th rows would be collected, the
+    `5`-th one **would not** -- as it would (still) be considered a row
+    *from the past*. Indeed, the main problem is with the data source
+    itself: it does not satisfy the requirement described above.
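A minimal sketch of a concrete subclass satisfying the contract described above (a hypothetical example, not code from this patch; note that normalized 'YYYY-MM-DD hh:mm:ss' strings -- assuming no fractional seconds -- compare correctly as strings, and the default `get_oldest_possible_row_time()` result, `''`, sorts below all of them):

    from n6lib.csv_helpers import extract_field_from_csv_row
    from n6lib.datetime_helpers import parse_iso_datetime_to_utc
    from n6lib.http_helpers import RequestPerformer

    class MyRowsCollector(BaseTimeOrderedRowsCollector):

        def obtain_orig_data(self):
            return RequestPerformer.fetch(method='GET',
                                          url=self.config['url'],
                                          retries=self.config['download_retries'])

        def pick_raw_row_time(self, row):
            # the time/order field is the second CSV column
            return extract_field_from_csv_row(row, column_index=1).strip()

        def clean_row_time(self, raw_row_time):
            return str(parse_iso_datetime_to_utc(raw_row_time))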
+
+    ***
+
+    One more thing concerning the original input data: while it is OK
+    to have several rows with the exact same values of the time/order
+    field, whole rows should not be the same (unless you do not care
+    that such duplicates may be detected as already seen and,
+    consequently, omitted).
+    """
 
     config_required = ('source', 'cache_dir')
@@ -955,7 +1122,7 @@ def get_output_data_body(self, selected_data, **kwargs):
 
     #
     # Stuff that can be overridden in subclasses (only if needed,
     # as sensible defaults are provided -- *except* for the three
-    # abstract methods: `obtain_orig_data()`, `extract_raw_row_time()`
+    # abstract methods: `obtain_orig_data()`, `pick_raw_row_time()`
     # and `clean_row_time()`)
 
     # * basic raw event attributes:
@@ -982,7 +1149,7 @@ def get_oldest_possible_row_time(self):
         or `0`...
 
         See also: the docs of the method `clean_row_time()` and the
-        description of the return value the method `extract_row_time()`
+        description of the return value of the method `extract_row_time()`
         (in its docs).
         """
         return ''
@@ -1000,10 +1167,11 @@ def obtain_orig_data(self):
                 retries=self.config['download_retries'])
 
         (Though, in practice -- when it comes to obtaining original
-        data with the `RequestPerformer` stuff -- you will want to use
-        the `BaseDownloadingTimeOrderedRowsCollector` class rather than
-        to implement `RequestPerformer`-based `obtain_orig_data(`) by
-        your own.)
+        data with the `RequestPerformer` stuff -- you will more likely
+        want to use the `BaseDownloadingTimeOrderedRowsCollector` class
+        rather than implement a `RequestPerformer`-based
+        `obtain_orig_data()` on your own.)
+
         """
         raise NotImplementedError
 
@@ -1050,16 +1218,16 @@ def get_fresh_rows_only(self, all_rows):
             if newest_row_time is None:
                 # this is the first (newest) actual (not blank/commented)
                 # row in the downloaded file -- so here we have the *newest*
-                # abuse time
+                # row time
                 newest_row_time = row_time
 
             if row_time == newest_row_time:
-                # this row is amongst those with the *newest* abuse time
+                # this row is amongst those with the *newest* row time
                 newest_rows.add(row)
 
             if row in prev_newest_rows:
                 # this row is amongst those with the *previously newest*
-                # abuse time, *and* we know that it has already been
+                # row time, *and* we know that it has already been
                 # collected -> so we *skip* it
                 assert row_time == prev_newest_row_time
                 continue
@@ -1074,7 +1242,7 @@ def get_fresh_rows_only(self, all_rows):
 
             # sanity assertions
             fresh_newest_rows = newest_rows - prev_newest_rows
-            assert newest_row_time and fresh_newest_rows
+            assert newest_row_time is not None and fresh_newest_rows
         else:
             # sanity assertions
             assert (newest_row_time is None and not newest_rows
@@ -1112,23 +1280,23 @@ def extract_row_time(self, row):
           but, of course, can be overridden/extended in your subclass
           if needed) -- takes the given `row` and returns a boolean
          value; if a false value is returned, the result of the whole
-          `extract_row_time()` call will be `None`, and *no* calls of
-          the further methods (that is, `extract_raw_row_time()` and
-          `clean_row_time()`) will be made;
+          `extract_row_time()` call will be `None`, and calls of the
+          further methods (that is, of `pick_raw_row_time()` and
+          `clean_row_time()`) will *not* be made;
 
-        * `extract_raw_row_time()` (must be implemented in subclasses)
+        * `pick_raw_row_time()` (must be implemented in subclasses)
          -- takes the given `row` and extracts the raw value of its
          date-or-timestamp field, and then returns that raw value
          (typically, as a string); alternatively it can return `None`
          (to indicate that the whole row should be ignored) -- then
          the result of the whole `extract_row_time()` call will also
-          be `None`, and the call of `clean_row_time()` will not be
+          be `None`, and the call of `clean_row_time()` will *not* be
           made;
 
         * `clean_row_time()` (must be implemented in subclasses) --
-          takes the value just returned by `extract_raw_row_time()` and
+          takes the value just returned by `pick_raw_row_time()` and
           cleans it (i.e., validates and normalizes -- in particular,
-          converts to some target type, if needed), ant then returns
+          converts to some target type, if needed), and then returns
           the cleaned value; alternatively it can return `None` (to
           indicate that the whole row should be ignored); the returned
           value will become the result of the whole `extract_row_time()`
@@ -1136,7 +1304,7 @@ def extract_row_time(self, row):
         """
         if not self.should_row_be_used(row):
             return None
-        raw_row_time = self.extract_raw_row_time(row)
+        raw_row_time = self.pick_raw_row_time(row)
         if raw_row_time is None:
             return None
         return self.clean_row_time(raw_row_time)
@@ -1152,31 +1320,29 @@ def should_row_be_used(self, row):
         """
         return row.strip() and not row.startswith('#')
 
-    def extract_raw_row_time(self, row):
+    def pick_raw_row_time(self, row):
         """
         Abstract method; see the docs of `extract_row_time()`.
 
         Below we present an implementation for a case when data rows
-        are expected be formatted according to the following pattern:
-        `"<id>", "<time>", <other fields>`.
+        are expected to be formatted according to the following pattern:
+        `"<id>","<time>",<other fields>`.
 
-            def extract_raw_row_time(self, row):
-                # (here we use `split_csv_row()` --
+            def pick_raw_row_time(self, row):
+                # (here we use `extract_field_from_csv_row()` --
                 # imported from `n6lib.csv_helpers`)
-                fields = split_csv_row()
-                return fields[1].strip()
+                return extract_field_from_csv_row(row, column_index=1)
 
         An alternative version of the above example:
 
-            def extract_raw_row_time(self, row):
+            def pick_raw_row_time(self, row):
                 # Here we return `None` if an error occurs when trying
                 # to parse the row -- because:
                 # * we assume that (for our particular data source)
                 #   some wrongly formatted rows may appear,
                 # * and we want to skip such rows.
                try:
-                    fields = split_csv_row()
-                    return fields[1].strip()
+                    return extract_field_from_csv_row(row, column_index=1)
                 except Exception as exc:
                     LOGGER.warning(
                         'Cannot extract the time field from the %r row '
@@ -1308,13 +1474,16 @@ def obtain_orig_data(self):
 
+#
+# Script/entry point factories
+
 def generate_collector_main(collector_class):
-    def parser_main():
+    def collector_main():
         with logging_configured():
             init_kwargs = collector_class.get_script_init_kwargs()
             collector = collector_class(**init_kwargs)
             collector.run_handling()
-    return parser_main
+    return collector_main
 
 def entry_point_factory(module):
@@ -1323,4 +1492,3 @@ def entry_point_factory(module):
                 not collector_class.__name__.startswith('_')):
             setattr(module, "%s_main" % collector_class.__name__,
                     generate_collector_main(collector_class))
-
diff --git a/N6Core/n6/data/conf/05_enrich.conf b/N6Core/n6/data/conf/05_enrich.conf
index 10937b7..3f988ea 100644
--- a/N6Core/n6/data/conf/05_enrich.conf
+++ b/N6Core/n6/data/conf/05_enrich.conf
@@ -1,7 +1,15 @@
 [enrich]
 dnshost=8.8.8.8
 dnsport=53
-#geoippath=/usr/share/GeoIP ; required
-#asndatabasefilename=GeoLite2-ASN.mmdb ; required
-#citydatabasefilename=GeoLite2-City.mmdb ; required
+
+# The options below are optional; if they are provided, IP addresses
+# in the 'address' field of processed data will be looked up in one
+# or both of the GeoIP databases (specified by the
+# 'asndatabasefilename' and 'citydatabasefilename' options).
+geoippath=
+asndatabasefilename=
+citydatabasefilename=
+
+# Optional: a list of IP addresses/networks to be excluded
+# from enriched data.
 #excluded_ips=0.0.0.0, 255.255.255.255,127.0.0.0/8
diff --git a/N6Core/n6/data/conf/07_aggregator.conf b/N6Core/n6/data/conf/07_aggregator.conf
index b1c18ea..61af786 100644
--- a/N6Core/n6/data/conf/07_aggregator.conf
+++ b/N6Core/n6/data/conf/07_aggregator.conf
@@ -8,3 +8,8 @@ dbpath=~/.n6aggregator/aggregator_db.pickle
 ## time interval (in seconds) within which non-monotonic times of
 ## events are tolerated
 time_tolerance=600
+
+## a time interval like `time_tolerance`, but defined per specific source
+## (if it is not defined for the current source,
+## `time_tolerance` is used)
+time_tolerance_per_source={}
diff --git a/N6Core/n6/data/conf/09_manage.conf b/N6Core/n6/data/conf/09_manage.conf
index 49ed9fa..d301ce8 100644
--- a/N6Core/n6/data/conf/09_manage.conf
+++ b/N6Core/n6/data/conf/09_manage.conf
@@ -26,24 +26,19 @@ server_component_ou_regex_pattern = \AMain[ ]Unit\Z
 ca_key_client_1 = /home/someuser/clientCA/private/cakey.pem
 ca_key_service_1 = /home/someuser/serviceCA/private/cakey.pem
 
-## NOTE [about an experimental feature]:
+## NOTE [about an experimental feature; OpenSSL 1.1+ only]:
 #
-# For a key stored on a PKCS#11-compliant cryptografic token, the value
+# For a key stored on a PKCS#11-compliant cryptographic token, the value
 # of the appropriate `ca_key_...` option should be:
 #
-#   "pkcs11:<dynamic path>:<module path>:<additional openssl arguments>"
+#   "pkcs11:<module path>:<additional openssl arguments>"
 #
 # -- where:
 #
-# * <dynamic path> is the intended value of the `dynamic_path` option
-#   in the `[pkcs11_section]` of the OpenSSL configuration
-#
-#   -- e.g.: "/usr/lib/engines/engine_pkcs11.so"
-#
 # * <module path> is the intended value of the `MODULE_PATH` option in
 #   the `[pkcs11_section]` of the OpenSSL configuration
 #
-#   -- e.g.: "/usr/lib/x86_64-linux-gnu/opensc-pkcs11.so"
+#   -- e.g.: "opensc-pkcs11.so"
 #
 # * <additional openssl arguments> is a bunch of additional `openssl`
 #   commandline arguments (whitespace-separated), typically including
@@ -56,11 +51,11 @@ ca_key_service_1 = /home/someuser/serviceCA/private/cakey.pem
 #   automatically set to the value of the
"default_ca" option of the # "[ca]" section of the OpenSSL configuration) # -# (the OpenSSL configuration, containing (among others) the first two -# of the options mentioned above, will be generated automatically by -# the Manage API machinery, based (among others) on the SSL config +# (the OpenSSL configuration, containing (among others) the +# `MODULE_PATH` option mentioned above, will be generated automatically +# by the Manage API machinery, based (among others) on the SSL config # stored in the appropriate record of the Auth DB). # # An example of a PKCS#11-token-dedicated setting: # -# ca_key_client_2 = pkcs11:/usr/lib/engines/engine_pkcs11.so:/usr/lib/x86_64-linux-gnu/opensc-pkcs11.so:-keyfile foo:bar -keyform spam +# ca_key_client_2 = pkcs11:opensc-pkcs11.so:-keyform engine -keyfile slot_0-id_2 diff --git a/N6Core/n6/data/conf/11_jinja_rendering.conf b/N6Core/n6/data/conf/11_jinja_rendering.conf new file mode 100644 index 0000000..75a84cb --- /dev/null +++ b/N6Core/n6/data/conf/11_jinja_rendering.conf @@ -0,0 +1,59 @@ +[jinja_template_based_renderer] +############################################################################ +# This configuration section is needed only if the `from_predefined()` # +# constructor provided by `n6lib.jinja_helpers.JinjaTemplateBasedRenderer` # +# is used (note: this is also the case when `MailNoticesAPI` from the # +# `n6lib.mail_notices_api` module and/or `MailMessageBuilder` from the # +# `n6lib.mail_sending_api` module are in use). Other constructors provided # +# by `JinjaTemplateBasedRenderer` do not need any configuration at all. # +############################################################################ + +# The value of the following option should consist of (one or +# more) comma-separated template locations that will be tried, +# in the specified order, by Jinja template loaders when +# searching for templates. +# +# Each of these locations should be: +# +# * An *absolute* path of a directory (aka folder); if it makes +# use of a tilde-based home directory placeholder prefix, such +# as `~` or `~username`, the placeholder will be automatically +# expanded. +# Examples: +# /etc/n6/templates +# ~/my-own-n6-stuff/jinja-related +# ~dataman/.n6/our-custom-fancy-templates +# +# *OR* +# +# * A specification in the following format: +# @: +# where: +# * is a Python package name +# (see also: the docs of the `jinja2.PackageLoader`'s +# parameter `package_name`); +# * is a *relative* path of +# a directory (folder) in that package's source tree +# (see also: the docs of the `jinja2.PackageLoader`'s +# parameter `package_path`). +# Examples: +# @n6lib:data/templates +# @my.own.package:some-dir/sub-dir/sub-sub-dir +template_locations = ~/.n6/templates, @n6:data/templates, @n6lib:data/templates + +# The default value ("utf-8") of the following option, should be +# OK in nearly all cases. +;template_encoding = utf-8 + +# The following option is relevant *only* to template locations +# specified as absolute paths of directories (*not* to those in +# the `@:` format). +;follow_symlinks = False + +# The value of the following option should consist of (zero or +# more) comma-separated *import names* of Jinja extensions (see: +# https://jinja.palletsprojects.com/extensions/). Typically, it +# should contain, at the minimum, the "jinja2.ext.do" name -- at +# least, as long as any of the default templates (those bundled +# with *n6*) are in use. 
+;jinja_extensions = jinja2.ext.do
diff --git a/N6Core/n6/data/conf/11_mailing.conf b/N6Core/n6/data/conf/11_mailing.conf
new file mode 100644
index 0000000..9898769
--- /dev/null
+++ b/N6Core/n6/data/conf/11_mailing.conf
@@ -0,0 +1,242 @@
+# Note: the *mail notices* feature engages three configuration sections:
+#
+# * the `[mail_notices_api]` section (see below)
+#   -- directly related to `MailNoticesAPI` from `n6lib.mail_notices_api`,
+#
+# * the `[mail_sending_api]` section (see below)
+#   -- directly related to `MailSendingAPI` from `n6lib.mail_sending_api`,
+#
+# * the `[jinja_template_based_renderer]` section
+#   (see a separate file; typically it is `11_jinja_rendering.conf`)
+#   -- directly related to `JinjaTemplateBasedRenderer.from_predefined()`
+#   from `n6lib.jinja_helpers`.
+#
+# The `MailSendingAPI` and/or `JinjaTemplateBasedRenderer` tools, though
+# somewhat lower-level ones, can also be used on their own (then only
+# the section directly related to the particular tool is relevant).
+
+
+
+
+[mail_notices_api]
+
+# Should mail notices be dispatched at all? If this option is
+# false then any invocations of a dispatcher obtained from a
+# context manager returned by the `MailNoticesAPI.dispatcher()`
+# method do nothing, and *no* other options from this section or
+# from the `[mail_sending_api]`/`[jinja_template_based_renderer]`
+# sections (which normally are also engaged) are used by the
+# `MailNoticesAPI` stuff.
+active = false
+
+# The value of the following option, if not left empty, should
+# be a Python dict literal representing a dict that maps *notice
+# keys* (str, e.g.: 'org_config_update_requested') to dicts that
+# map 2-character codes of a supported *language* (such as 'EN'
+# or 'PL') to dicts specifying the following mail components:
+# *body*, *subject*, *sender* and (optionally) *misc headers*
+# (which stands for *miscellaneous mail headers*).
+#
+# Lack of a certain *notice key* means that the mail notices
+# stuff is not active for that *notice key* (meaning that any
+# invocations of a dispatcher obtained from a context manager
+# returned by any `MailNoticesAPI.dispatcher(<that notice key>)`
+# call do nothing).
+#
+# Each of the *mail components* dicts (i.e., the dicts mentioned
+# above as those specifying mail components) contains some or
+# all of the following items:
+#
+# * 'body' -- a *string value* (required),
+#
+# * 'subject' -- a *string value* (required),
+#
+# * 'sender' -- a *string value* (required if the value of
+#   the `default_sender` option [see below] is left empty,
+#   otherwise optional),
+#
+# * 'misc_headers' -- a dict that maps any mail header names
+#   to their values, specified as *string values* (optional);
+#
+# **Important note:** each of the *string values* mentioned
+# above shall be a string which is:
+#
+# * (1) **either** a Jinja template name preceded with a `$:`
+#   (*dollar sign* followed by *colon*) marker,
+#
+# * (2) **or** any other string -- which *literally* specifies
+#   the item's value (**without** any HTML/XML escaping!).
+# +# Ad (1): those Jinja templates will be used by an instance of +# `JinjaTemplateBasedRenderer` (see `n6lib.jinja_helpers` and +# the `[jinja_template_based_renderer]` config section) as the +# basis for rendering of actual values -- with the *rendering +# context* containing the `data_dict` variable being a deep copy +# of the `notice_data` dict passed in to the dispatcher [where +# *dispatcher* is a callable object obtained as the `as` target +# (`__enter__()`'s return value) of a context manager returned +# by `MailNoticesAPI.dispatcher()`]. +# +# **Beware** that HTML/XML escaping is applied **only** if the +# template name has a `.html`, `.htm` or `.xml` suffix (checked +# in a case-insensitive manner). +# +# For example templates -- see the template files in the +# `data/templates` subdirectory of the `n6lib` package source +# tree. +# +# The default value of this option seems to be quite sensible +# for most important use cases. The basic versions of the +# Jinja templates it refers to are already defined in the +# `data/templates` subdirectory of the `n6lib` package; note: +# you can customize them by creating your own template files -- +# named the same but placed in (an)other location(s) (specified +# with the `template_locations` configuration option in the +# section `[jinja_template_based_renderer]`). +;notice_key_to_lang_to_mail_components = +; { +; 'mfa_config_done': { +; 'EN': { +; 'subject': +; 'New configuration of multi-factor authentication', +; 'body': '$:mail_notice__mfa_config_done__EN.txt', +; }, +; 'PL': { +; 'subject': +; u'Nowa konfiguracja uwierzytelniania wielosk\u0142adnikowego', +; 'body': '$:mail_notice__mfa_config_done__PL.txt', +; }, +; }, +; 'mfa_config_erased': { +; 'EN': { +; 'subject': +; 'Deleted configuration of multi-factor authentication', +; 'body': '$:mail_notice__mfa_config_erased__EN.txt', +; }, +; 'PL': { +; 'subject': +; u'Usuni\u0119ta konfiguracja uwierzytelniania wielosk\u0142adnikowego', +; 'body': '$:mail_notice__mfa_config_erased__PL.txt', +; }, +; }, +; +; 'new_org_and_user_created': { +; 'EN': { +; 'subject': +; 'Welcome to the n6 system', +; 'body': '$:mail_notice__new_org_and_user_created__EN.txt', +; }, +; 'PL': { +; 'subject': +; u'Witamy w systemie n6', +; 'body': '$:mail_notice__new_org_and_user_created__PL.txt', +; }, +; }, +; +; 'org_config_update_requested': { +; 'EN': { +; 'subject': +; 'A new request to update the organization configuration', +; 'body': '$:mail_notice__org_config_update_requested__EN.txt', +; }, +; 'PL': { +; 'subject': +; 'Nowa propozycja zmian w konfiguracji Twojej organizacji', +; 'body': '$:mail_notice__org_config_update_requested__PL.txt', +; }, +; }, +; 'org_config_update_applied': { +; 'EN': { +; 'subject': +; 'Acceptance of the requested update of the organization configuration', +; 'body': '$:mail_notice__org_config_update_applied__EN.txt', +; }, +; 'PL': { +; 'subject': +; 'Akceptacja zmian w konfiguracji Twojej organizacji', +; 'body': '$:mail_notice__org_config_update_applied__PL.txt', +; }, +; }, +; 'org_config_update_rejected': { +; 'EN': { +; 'subject': +; 'Rejection of the requested update of the organization configuration', +; 'body': '$:mail_notice__org_config_update_rejected__EN.txt', +; }, +; 'PL': { +; 'subject': +; 'Odmowa wprowadzenia zmian w konfiguracji Twojej organizacji', +; 'body': '$:mail_notice__org_config_update_rejected__PL.txt', +; }, +; }, +; +; 'password_reset_done': { +; 'EN': { +; 'subject': +; 'New log-in password', +; 'body': 
'$:mail_notice__password_reset_done__EN.txt',
+;             },
+;             'PL': {
+;                 'subject':
+;                     u'Nowe has\u0142o logowania',
+;                 'body': '$:mail_notice__password_reset_done__PL.txt',
+;             },
+;         },
+;         'password_reset_requested': {
+;             'EN': {
+;                 'subject':
+;                     'Setting new log-in password',
+;                 'body': '$:mail_notice__password_reset_requested__EN.txt',
+;             },
+;             'PL': {
+;                 'subject':
+;                     u'Ustawianie nowego has\u0142a logowania',
+;                 'body': '$:mail_notice__password_reset_requested__PL.txt',
+;             },
+;         },
+;     }
+
+# The following option specifies (using a 2-character string)
+# the *default language* -- to be used when *neither* of the
+# `MailNoticesAPI.dispatcher()` and `<dispatcher>()`
+# invocations has included the `lang` argument (specifying the
+# desired mail notice language variant); but also when it has
+# been included but its value is missing from the *notice key*-
+# specific subdict of the `notice_key_to_lang_to_mail_components`
+# dict (see its description above).
+;default_lang = EN
+
+# The value of the following option, if not left empty, should
+# be a text to be used as the default value of the 'sender'
+# item of subdicts that define mail components (see the above
+# description of the `notice_key_to_lang_to_mail_components`
+# option; the remarks about `$:`-prepended *template names*
+# and HTML/XML escaping apply also here).
+default_sender = n6notices@example.org
+
+# The value of the following option, if not left empty, should
+# be a Python dict literal that defines additional mail headers,
+# to be used to complement (but never overwrite) the items of
+# each 'misc_headers' dict (ad 'misc_headers' -- see the above
+# description of the `notice_key_to_lang_to_mail_components`
+# option; the remarks about `$:`-prepended *template names* and
+# HTML/XML escaping apply also here).
+;common_misc_headers =
+
+
+
+
+[mail_sending_api]
+
+smtp_host=localhost
+smtp_port=25
+;smtp_login=
+;smtp_password=
+
+
+
+
+# Note: if you make use of `n6lib.mail_notices_api.MailNoticesAPI`
+# and/or `n6lib.mail_sending_api.MailMessageBuilder`, you must also pay
+# attention to the `[jinja_template_based_renderer]` configuration
+# section (typically, placed in the `11_jinja_rendering.conf` file).
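To see how the pieces configured above fit together, here is a usage sketch of the *mail notices* machinery; the call shapes follow the comments in this file, but the exact signatures are assumptions (not verified against `n6lib.mail_notices_api`) and all values are illustrative:

    from n6lib.mail_notices_api import MailNoticesAPI

    mail_notices_api = MailNoticesAPI()
    with mail_notices_api.dispatcher('org_config_update_requested') as dispatch:
        # `notice_data` becomes the `data_dict` variable in the Jinja
        # rendering context; `lang` selects the 'EN'/'PL' variant
        dispatch(notice_data={'org_id': 'example.org'}, lang='EN')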
diff --git a/N6Core/n6/data/conf/21_recorder.conf b/N6Core/n6/data/conf/21_recorder.conf
index 7978e6a..d195212 100644
--- a/N6Core/n6/data/conf/21_recorder.conf
+++ b/N6Core/n6/data/conf/21_recorder.conf
@@ -1,7 +1,12 @@
 [recorder]
-## DO NOT remove the `charset=utf8` and `use_unicode=1` options from the `uri` value
-## DO NOT change the `mysql://` prefix of the `uri` value
-## (see also: http://docs.sqlalchemy.org/en/rel_0_9/core/engines.html)
-#uri = mysql://user:password@host/dbname?unix_socket=/tmp/mysql.sock&charset=utf8&use_unicode=1
-echo = 0
-wait_timeout = 28800
+
+# Uncomment and adjust this option but DO NOT change the `mysql://` prefix:
+;uri = mysql://dbuser:dbpassword@dbhost/dbname
+
+# DO NOT change this option unless you have also adjusted your database accordingly:
+;connect_charset = utf8
+
+# see: https://docs.sqlalchemy.org/en/13/core/engines.html#more-on-the-echo-flag
+;echo = 0
+
+;wait_timeout = 28800
diff --git a/N6Core/n6/data/conf/70_abuse_ch.conf b/N6Core/n6/data/conf/70_abuse_ch.conf
index e953562..8ffa234 100644
--- a/N6Core/n6/data/conf/70_abuse_ch.conf
+++ b/N6Core/n6/data/conf/70_abuse_ch.conf
@@ -24,6 +24,7 @@ download_retries=10
 api_url=https://urlhaus-api.abuse.ch/v1/urlid/
 api_retries=3
 
+
 [abusech_urlhaus_payloads_urls]
 source=abuse-ch
@@ -31,6 +32,7 @@ url=https://urlhaus.abuse.ch/downloads/payloads/
 cache_dir=~/.n6cache
 download_retries=10
 
+
 [abusech_urlhaus_payloads]
 source=abuse-ch
diff --git a/N6Core/n6/data/conf/pipeline.conf b/N6Core/n6/data/conf/pipeline.conf
new file mode 100644
index 0000000..422e3d7
--- /dev/null
+++ b/N6Core/n6/data/conf/pipeline.conf
@@ -0,0 +1,27 @@
+# The n6 components use the 'pipeline' section to configure their
+# "place" in the RabbitMQ pipeline. To configure a component, create
+# an option whose name is the component's lowercase class name. Each
+# option can be assigned a comma-separated list of string values.
+# These values, called "routing states" here, are then used to
+# generate the component's binding keys -- the keys that bind
+# messages sent by other components within the same exchange
+# to the component's input queue.
+#
+# Routing states that components' output messages are sent with:
+# * Parsers: parsed
+# * Aggregator: aggregated
+# * Enricher: enriched
+# * Comparator: compared
+# * Filter: filtered
+# * Recorder: recorded
+#
+# The values in this configuration template create the default order
+# of components in the n6 pipeline.
+
+[pipeline]
+aggregator = parsed
+enricher = parsed, aggregated
+comparator = enriched
+filter = enriched, compared
+anonymizer = filtered
+recorder = filtered
diff --git a/N6Core/n6/parsers/abuse_ch.py b/N6Core/n6/parsers/abuse_ch.py
index 74816d6..c7e58e6 100644
--- a/N6Core/n6/parsers/abuse_ch.py
+++ b/N6Core/n6/parsers/abuse_ch.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 
-# Copyright (c) 2013-2020 NASK. All rights reserved.
+# Copyright (c) 2013-2021 NASK. All rights reserved.
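To make the semantics of the `[pipeline]` template above concrete, here is the computation performed by `QueuedBase.make_binding_keys()` (shown earlier in this patch), inlined for the `enricher = parsed, aggregated` entry, with the Enricher's `accepted_event_types` taken from the `configure_pipeline()` docs:

    binding_states = ['parsed', 'aggregated']   # from: enricher = parsed, aggregated
    accepted_event_types = ['event', 'bl', 'bl-update', 'suppressed']
    binding_keys = ['{type}.{state}.*.*'.format(type=event_type, state=state)
                    for state in binding_states
                    for event_type in accepted_event_types]
    # -> ['event.parsed.*.*', 'bl.parsed.*.*', 'bl-update.parsed.*.*',
    #     'suppressed.parsed.*.*', 'event.aggregated.*.*', 'bl.aggregated.*.*',
    #     'bl-update.aggregated.*.*', 'suppressed.aggregated.*.*']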
import csv import datetime @@ -286,6 +286,32 @@ def parse(self, data): yield parsed +class AbuseChFeodoTracker202110Parser(BaseParser): + + default_binding_key = "abuse-ch.feodotracker.202110" + + constant_items = { + "restriction": "public", + "confidence": "medium", + "category": "cnc", + } + + def parse(self, data): + rows = csv.reader(StringIO(data['raw']), delimiter=',', quotechar='"') + for row in rows: + # SOURCE FIELDS FORMAT: + # first_seen_utc,dst_ip,dst_port,c2_status,last_online,malware + t, ip, dport, _, _, name = row + with self.new_record_dict(data) as parsed: + parsed.update({ + 'time': t, + 'address': {'ip': ip}, + 'dport': dport, + 'name': name, + }) + yield parsed + + class AbuseChRansomwareTrackerParser(BaseParser): default_binding_key = 'abuse-ch.ransomware' diff --git a/N6Core/n6/parsers/generic.py b/N6Core/n6/parsers/generic.py index 6534f41..d0374b5 100644 --- a/N6Core/n6/parsers/generic.py +++ b/N6Core/n6/parsers/generic.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2013-2018 NASK. All rights reserved. +# Copyright (c) 2013-2021 NASK. All rights reserved. """ Parser base classes + auxiliary tools. @@ -76,7 +76,7 @@ class BaseParser(ConfigMixin, QueuedBase): # specify the `[{parser_class_name}]` section including the # `prefetch_count` option with the `int` converter [hint: # the attribute can be easily extended using the - # n6lib.common_helpers.concat_reducing_indent() helper] + # n6lib.config.join_config_specs() helper] config_spec_pattern = ''' [{parser_class_name}] prefetch_count = 1 :: int @@ -144,24 +144,30 @@ def preinit_hook(self): if self.default_binding_key is not None: # (not for an abstract class) assert 'input_queue' in vars(self) # ensured by QueuedBase.__new__() - self.input_queue["queue_name"] = self.default_binding_key - self.input_queue["binding_keys"] = self.make_binding_keys() + self.input_queue['queue_name'] = self.default_binding_key super(BaseParser, self).preinit_hook() - def make_binding_keys(self): + def configure_pipeline(self): """ - Get binding keys (called by __new__()-triggered preinit_hook()). + The default binding keys, set in `default_binding_key` + attribute, may be overridden in the pipeline configuration. + """ + self.input_queue['binding_keys'] = [self.default_binding_key] + super(BaseParser, self).configure_pipeline() - Returns: - A list of input queue binding keys (to be set as - input_queue["binding_keys"] after instance creation). + def get_component_group_and_id(self): + return 'parsers', self.__class__.__name__.lower() - The default implementation of this method returns a single-element - list containing a string being the value of the `default_binding_key` - attribute. + def make_binding_keys(self, binding_keys, *args, **kwargs): """ - return [self.default_binding_key] + If the `default_binding_key` attribute is not set in parser's + subclass, try to obtain binding keys from the pipeline config. + Args: + `binding_keys`: + The list of new binding keys. 
+        """
+        self.input_queue['binding_keys'] = binding_keys
 
     #
     # Utility static method extensions
diff --git a/N6Core/n6/parsers/misp.py b/N6Core/n6/parsers/misp.py
index e410bd1..b49422b 100644
--- a/N6Core/n6/parsers/misp.py
+++ b/N6Core/n6/parsers/misp.py
@@ -8,6 +8,7 @@
 import re
 import sys
 from collections import MutableMapping
+from math import trunc
 
 from n6lib.log_helpers import get_logger
 from n6.parsers.generic import (
@@ -129,7 +130,7 @@ def get_event(self, misp_attribute, misp_category, misp_event_type):
         if not n6event_dict:
             return None
         n6event_dict['category'] = n6_category
-        n6event_dict['time'] = self.get_time(int(misp_attribute['timestamp']))
+        n6event_dict['time'] = self.get_time(misp_attribute['timestamp'])
         return n6event_dict
 
     @staticmethod
@@ -141,7 +142,18 @@ def get_dport(comment, n6event_dict):
 
     @staticmethod
     def get_time(misp_ts):
-        return datetime.datetime.utcfromtimestamp(misp_ts).replace(microsecond=0)
+        # XXX: what is the actual type of `misp_ts`? (This check and one of
+        # the following branches may be unnecessary, i.e. possibly never used...)
+        if isinstance(misp_ts, basestring):
+            # If it's a string, let's parse it as an integer.
+            misp_ts = int(misp_ts)  # type: int
+        else:
+            # If it's a number, let's be explicit that we truncate, rather
+            # than round (see the fragment "Conversion from floating point
+            # to integer may round or truncate as in C" of the document:
+            # https://docs.python.org/3/library/stdtypes.html#numeric-types-int-float-complex)
+            misp_ts = trunc(misp_ts)  # type: int
+        return datetime.datetime.utcfromtimestamp(misp_ts)
 
     def get_restriction(self, misp_event, min_restriction=None):
         initial_restriction = self._get_initial_restriction(misp_event)
diff --git a/N6Core/n6/parsers/packetmail.py b/N6Core/n6/parsers/packetmail.py
index 8133a9a..27f82b4 100644
--- a/N6Core/n6/parsers/packetmail.py
+++ b/N6Core/n6/parsers/packetmail.py
@@ -1,16 +1,18 @@
 # -*- coding: utf-8 -*-
 
-# Copyright (c) 2013-2019 NASK. All rights reserved.
+# Copyright (c) 2013-2020 NASK. All rights reserved.
 
 import sys
 
-import pytz
-
 from n6.parsers.generic import (
     TabDataParser,
     entry_point_factory,
 )
-from n6lib.datetime_helpers import parse_iso_datetime
+from n6lib.datetime_helpers import (
+    ReactionToProblematicTime,
+    parse_iso_datetime,
+    datetime_with_tz_to_utc,
+)
 from n6lib.log_helpers import get_logger
 
@@ -29,8 +31,11 @@ def process_row_fields(self, data, parsed, ip, timestamp, *rest):
 
     @staticmethod
     def _convert_cet_to_utc(cet_date):
-        offset = pytz.timezone('Europe/Berlin').localize(cet_date).utcoffset()
-        return cet_date - offset
+        return datetime_with_tz_to_utc(
+            cet_date,
+            'Europe/Berlin',
+            on_ambiguous_time=ReactionToProblematicTime.PICK_THE_LATER,
+            on_non_existent_time=ReactionToProblematicTime.PICK_THE_LATER)
 
 
 class PacketmailScanningParser(_PacketmailBaseParser):
diff --git a/N6Core/n6/tests/collectors/_collectors_test_helpers.py b/N6Core/n6/tests/collectors/_collectors_test_helpers.py
index 7dad115..a9048aa 100644
--- a/N6Core/n6/tests/collectors/_collectors_test_helpers.py
+++ b/N6Core/n6/tests/collectors/_collectors_test_helpers.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2019 NASK. All rights reserved.
+# Copyright (c) 2013-2021 NASK. All rights reserved.
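A quick illustration of the truncation point made in `get_time()` in the MISP parser above (plain Python; for positive floats `trunc()` and `int()` behave the same -- the change is about making the intent explicit):

    from math import trunc

    assert trunc(1633108245.9) == 1633108245  # fractional part dropped, never rounded up
    assert int(1633108245.9) == 1633108245    # int() truncates too; trunc() just says so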
import collections import datetime @@ -15,8 +15,9 @@ import mock +from n6lib.class_helpers import FalseIfOwnerClassNameMatchesRegex from n6lib.common_helpers import ( - SimpleNamespace, + PlainNamespace, reduce_indent, ) from n6lib.config import ( @@ -30,6 +31,10 @@ class _BaseCollectorTestCase(TestCaseMixin, unittest.TestCase): + # Prevent pytest *from treating* those subclasses of this class that + # are base (abstract) classes *as concrete test classes*. + __test__ = FalseIfOwnerClassNameMatchesRegex(r'\A_.*Base') + # These flags make it possible to turn on/off patching # of particular groups of stuff... patch_cmdline_args = True @@ -104,7 +109,7 @@ def do_patching(self, if self.patch_config: fake_of__RawConfigParser_read = self.make_fake_of__RawConfigParser_read(config_content) stub_of__Config__get_config_file_paths = self.make_stub_of__Config__get_config_file_paths() - self.patch('ConfigParser.RawConfigParser.read', + self.patch('configparser.RawConfigParser.read', fake_of__RawConfigParser_read) self.patch('n6lib.config.Config._get_config_file_paths', stub_of__Config__get_config_file_paths) @@ -178,13 +183,13 @@ def make_fake_of__RawConfigParser_read(self, config_content=None): adjusted_config_content = reduce_indent(config_content or '') - def fake_of__RawConfigParser_read(self, filenames): + def fake_of__RawConfigParser_read(self, filenames, encoding=None): if isinstance(filenames, basestring): filenames = [filenames] fp = StringIO(adjusted_config_content) for name in filenames: # (only for the first of filenames `fp` will offer any content) - self.readfp(fp, name) + self.read_file(fp, name) read_ok = list(filenames) return read_ok @@ -212,8 +217,8 @@ def stub_of__QueuedBase___init__(self, **_): self.clear_amqp_communication_state_attributes() def stub_of__QueuedBase_run(self): - self._connection = SimpleNamespace(add_timeout=_add_timeout, - outbound_buffer=collections.deque()) + self._connection = PlainNamespace(add_timeout=_add_timeout, + outbound_buffer=collections.deque()) self.output_ready = True try: try: @@ -364,9 +369,13 @@ class _TestMailCollectorsBaseClass(TestCaseMixin, unittest.TestCase): appended at the end of headers section, before the actual content. """ - _COLLECTOR_SOURCE = 'test' + # Prevent pytest *from treating* those subclasses of this class that + # are base (abstract) classes *as concrete test classes*. + __test__ = FalseIfOwnerClassNameMatchesRegex(r'\A_.*Base') + _COLLECTOR_SOURCE = 'test' + # * required common stuff (must be provided for each case) collector_class = None expected_source_channel = None @@ -543,6 +552,6 @@ def get_expected_mail_subject(self): else: # Variant II expected_mail_subject = self.email_subject - # As in EmailMessage.get_subject() + # As in ReceivedEmailMessage.get_subject() expected_mail_subject = ' '.join(expected_mail_subject.split()) return expected_mail_subject diff --git a/N6Core/n6/tests/collectors/test_abuse_ch.py b/N6Core/n6/tests/collectors/test_abuse_ch.py index b62232c..538ebb2 100644 --- a/N6Core/n6/tests/collectors/test_abuse_ch.py +++ b/N6Core/n6/tests/collectors/test_abuse_ch.py @@ -1,13 +1,16 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2019-2020 NASK. All rights reserved. +# Copyright (c) 2019-2021 NASK. All rights reserved. 
+import datetime +import json import unittest from bson.json_util import loads from mock import ( ANY, Mock, + MagicMock, call, patch, sentinel, @@ -23,13 +26,22 @@ AbuseChRansomwareTrackerCollector, AbuseChFeodoTrackerCollector, AbuseChSSLBlacklistCollector, + AbuseChUrlhausUrlsCollector, AbuseChUrlhausPayloadsUrlsCollector, AbuseChSSLBlacklistDyreCollector, NoNewDataException, ) +from n6lib.unit_test_helpers import zip_data_in_memory from n6.tests.collectors.test_generic import _BaseCollectorTestCase +def _make_request_performer_mock(response_content, mock_spec=None, last_modified_dt=None): + performer_mock = MagicMock(spec=mock_spec) + performer_mock.__enter__.return_value = performer_mock + performer_mock.get_dt_header.return_value = last_modified_dt + performer_mock.response.content = response_content + return performer_mock + class _TestAbuseChDownloadingTimeOrderedRowsCollectorBase(_BaseCollectorTestCase): @@ -97,7 +109,7 @@ def cases(): 'message_id': ANY, 'type': 'file', 'content_type': 'text/csv', - 'headers': {} + 'headers': {}, }, ), ], @@ -219,13 +231,13 @@ def cases(): expected_publish_output_calls=[ call( # routing_key - 'abuse-ch.feodotracker.201908', + 'abuse-ch.feodotracker.202110', # body ( - '2019-08-20 03:00:00,5.5.5.5,447,2019-08-20,ExampleName5\n' - '2019-08-20 03:00:00,4.4.4.4,447,2019-08-20,ExampleName4\n' - '2019-08-20 02:00:00,3.3.3.3,447,2019-08-20,ExampleName3' + '2019-08-20 03:00:00,5.5.5.5,447,online,2019-08-20,ExampleName5\n' + '2019-08-20 03:00:00,4.4.4.4,447,online,2019-08-20,ExampleName4\n' + '2019-08-20 02:00:00,3.3.3.3,447,online,2019-08-20,ExampleName3' ), # prop_kwargs @@ -234,15 +246,15 @@ def cases(): 'message_id': ANY, 'type': 'file', 'content_type': 'text/csv', - 'headers': {} + 'headers': {}, }, ), ], expected_saved_state={ 'newest_row_time': '2019-08-20 03:00:00', 'newest_rows': { - '2019-08-20 03:00:00,5.5.5.5,447,2019-08-20,ExampleName5', - '2019-08-20 03:00:00,4.4.4.4,447,2019-08-20,ExampleName4' + '2019-08-20 03:00:00,5.5.5.5,447,online,2019-08-20,ExampleName5', + '2019-08-20 03:00:00,4.4.4.4,447,online,2019-08-20,ExampleName4' }, }, ) @@ -253,18 +265,19 @@ def initial_state_and_orig_data_variants(): initial_state={ 'newest_row_time': '2019-08-20 01:00:00', 'newest_rows': { - '2019-08-20 01:00:00,1.1.1.1,447,2019-08-20,ExampleName1', - '2019-08-20 01:00:00,2.2.2.2,447,2019-08-20,ExampleName2' + '2019-08-20 01:00:00,1.1.1.1,447,online,2019-08-20,ExampleName1', + '2019-08-20 01:00:00,2.2.2.2,447,online,2019-08-20,ExampleName2', }, }, orig_data=( '# row which should be ignored by collector\n' - '2019-08-20 03:00:00,5.5.5.5,447,2019-08-20,ExampleName5\n' - '2019-08-20 03:00:00,4.4.4.4,447,2019-08-20,ExampleName4\n' - '2019-08-20 02:00:00,3.3.3.3,447,2019-08-20,ExampleName3\n' - '2019-08-20 01:00:00,2.2.2.2,447,2019-08-20,ExampleName2\n' - '2019-08-20 01:00:00,1.1.1.1,447,2019-08-20,ExampleName1\n' - '2019-08-20 00:00:00,0.0.0.0,447,2019-08-20,ExampleName0' + '2019-08-20 00:00:00,0.0.0.0,447,online,2019-08-20,ExampleName0\n' + '2019-08-20 01:00:00,1.1.1.1,447,online,2019-08-20,ExampleName1\n' + '2019-08-20 01:00:00,2.2.2.2,447,online,2019-08-20,ExampleName2\n' + '2019-08-20 02:00:00,3.3.3.3,447,online,2019-08-20,ExampleName3\n' + '2019-08-20 03:00:00,4.4.4.4,447,online,2019-08-20,ExampleName4\n' + '2019-08-20 03:00:00,5.5.5.5,447,online,2019-08-20,ExampleName5' + ), ) @@ -272,9 +285,9 @@ def initial_state_and_orig_data_variants(): initial_state=sentinel.NO_STATE, orig_data=( '# row which should be ignored by collector\n' - '2019-08-20 
03:00:00,5.5.5.5,447,2019-08-20,ExampleName5\n' - '2019-08-20 03:00:00,4.4.4.4,447,2019-08-20,ExampleName4\n' - '2019-08-20 02:00:00,3.3.3.3,447,2019-08-20,ExampleName3\n' + '2019-08-20 02:00:00,3.3.3.3,447,online,2019-08-20,ExampleName3\n' + '2019-08-20 03:00:00,4.4.4.4,447,online,2019-08-20,ExampleName4\n' + '2019-08-20 03:00:00,5.5.5.5,447,online,2019-08-20,ExampleName5\n' ), ) @@ -317,7 +330,7 @@ def cases(): 'message_id': ANY, 'type': 'file', 'content_type': 'text/csv', - 'headers': {} + 'headers': {}, }, ), ], @@ -384,20 +397,231 @@ def test(self, **kwargs): @expand -class TestAbuseChUrlhausPayloadsUrlsCollector(_TestAbuseChDownloadingTimeOrderedRowsCollectorBase): +class TestAbuseChUrlhausUrlsCollector(_BaseCollectorTestCase): + + CONFIG_CONTENT = ''' + [abusech_urlhaus_urls] + source=abuse-ch + api_url=https://www.example2.com + url=https://www.example1.com + api_retries=3 + cache_dir=~/.n6cache + download_retries=10 + ''' + + DEFAULT_EXPECTED_OUTPUT_ROUTING_KEY = 'abuse-ch.urlhaus-urls.202001' + + DEFAULT_ABUSE_ROWS = ( + '"111111", "2020-01-01 01:00:00", "http://example_1.com", "XXX1", "YYY1", "ZZZ1",' + '"https://urlhaus.abuse.ch/url/111111/", "Example_Nick_1"\n' + '"000000", "2020-01-01 00:00:00", "http://example_0.com", "XXX0", "YYY0", "ZZZ0",' + '"https://urlhaus.abuse.ch/url/000000/", "Example_Nick_0"') + + DEFAULT_INFO_PAGES = [ + json.dumps( + { + "urlhaus_reference": "https://urlhaus.abuse.ch/url/000000/", + "threat": "malware_download", + "larted": "true", + "reporter": "ExampleNick_1", + "url": "https://example_1.com", + "tags": [ + "elf", + "Mozi", + ], + "blacklists": { + "surbl": "not listed", + "gsb": "not listed", + "spamhaus_dbl": "not listed", + }, + "id": "111111", + "host": "1.1.1.1", + "payloads": [ + { + "urlhaus_download": "https://urlhaus-api.abuse.ch/v1/download/a00a00aa0aa0a0a00aaa0a00a0a00a00a00a000a0a00000a0a0a0a00a0aaa0a0/", + "file_type": "elf", + "filename": "", + "response_md5": "1a111111a1aa11a111111aa11a111aa1", + "response_sha256": "a11a11aa1aa1a1a11aaa1a11a1a11a11a11a111a1a11111a1a1a1a11a1aaa1a1", + "response_size": "95268", + "signature": "", + "firstseen": "2020-01-20", + "virustotal": { + "link": "https://www.virustotal.com/file/a11a11aa1aa1a1a11aaa1a11a1a11a11a11a111a1a11111a1a1a1a11a1aaa1a1/analysis/111111111111/", + "percent": "61.02", + "result": "36 / 59", + }, + }, + ], + "url_status": "online", + "takedown_time_seconds": "", + "date_added": "2020-01-01 00:00:00 UTC", + "query_status": "ok", + }, + ), + json.dumps( + { + "urlhaus_reference": "https://urlhaus.abuse.ch/url/000000/", + "threat": "malware_download", + "larted": "true", + "reporter": "ExampleNick_1", + "url": "https://example_1.com", + "tags": [ + "elf", + "Mozi", + ], + "blacklists": { + "surbl": "not listed", + "gsb": "not listed", + "spamhaus_dbl": "not listed", + }, + "id": "111111", + "host": "1.1.1.1", + "payloads": [ + { + "urlhaus_download": "https://urlhaus-api.abuse.ch/v1/download/a00a00aa0aa0a0a00aaa0a00a0a00a00a00a000a0a00000a0a0a0a00a0aaa0a0/", + "file_type": "elf", + "filename": "", + "response_md5": "1a111111a1aa11a111111aa11a111aa1", + "response_sha256": "a11a11aa1aa1a1a11aaa1a11a1a11a11a11a111a1a11111a1a1a1a11a1aaa1a1", + "response_size": "95268", + "signature": "", + "firstseen": "2020-01-20", + "virustotal": { + "link": "https://www.virustotal.com/file/a11a11aa1aa1a1a11aaa1a11a1a11a11a11a111a1a11111a1a1a1a11a1aaa1a1/analysis/111111111111/", + "percent": "61.02", + "result": "36 / 59", + }, + }, + ], + "url_status": "online", + "takedown_time_seconds": "", 
+ "date_added": "2020-01-01 00:00:00 UTC", + "query_status": "ok", + }, + ), + ] + + @paramseq + def cases(self): + yield param( + config_content=self.CONFIG_CONTENT, + url_info_pages=self.DEFAULT_INFO_PAGES, + expected_publish_output_calls=[ + call('abuse-ch.urlhaus-urls.202001', + ('[{"dateadded": "2020-01-01 01:00:00", "threat": "YYY1", "url_status": "XXX1", ' + '"tags": "ZZZ1", "url": "http://example_1.com", "urlhaus_link": ' + '"https://urlhaus.abuse.ch/url/111111/", "url_id": "111111", ' + '"url_info_from_api": {"urlhaus_reference": ' + '"https://urlhaus.abuse.ch/url/000000/", "url_status": "online", "larted": ' + '"true", "reporter": "ExampleNick_1", "url": "https://example_1.com", "tags": ' + '["elf", "Mozi"], "blacklists": {"surbl": "not listed", "gsb": "not listed", ' + '"spamhaus_dbl": "not listed"}, "query_status": "ok", "host": "1.1.1.1", ' + '"payloads": [{"virustotal": {"link": ' + '"https://www.virustotal.com/file/a11a11aa1aa1a1a11aaa1a11a1a11a11a11a111a1a11111a1a1a1a11a1aaa1a1/analysis/111111111111/", ' + '"percent": "61.02", "result": "36 / 59"}, "file_type": "elf", "filename": ' + '"", "response_md5": "1a111111a1aa11a111111aa11a111aa1", "response_sha256": ' + '"a11a11aa1aa1a1a11aaa1a11a1a11a11a11a111a1a11111a1a1a1a11a1aaa1a1", ' + '"response_size": "95268", "signature": "", "firstseen": "2020-01-20", ' + '"urlhaus_download": ' + '"https://urlhaus-api.abuse.ch/v1/download/a00a00aa0aa0a0a00aaa0a00a0a00a00a00a000a0a00000a0a0a0a00a0aaa0a0/"}], ' + '"threat": "malware_download", "takedown_time_seconds": "", "date_added": ' + '"2020-01-01 00:00:00 UTC", "id": "111111"}, "reporter": "Example_Nick_1"}, ' + + '{"dateadded": "2020-01-01 00:00:00", "threat": "YYY0", "url_status": "XXX0", ' + '"tags": "ZZZ0", "url": "http://example_0.com", "urlhaus_link": ' + '"https://urlhaus.abuse.ch/url/000000/", "url_id": "000000", ' + '"url_info_from_api": {"urlhaus_reference": ' + '"https://urlhaus.abuse.ch/url/000000/", "url_status": "online", "larted": ' + '"true", "reporter": "ExampleNick_1", "url": "https://example_1.com", "tags": ' + '["elf", "Mozi"], "blacklists": {"surbl": "not listed", "gsb": "not listed", ' + '"spamhaus_dbl": "not listed"}, "query_status": "ok", "host": "1.1.1.1", ' + '"payloads": [{"virustotal": {"link": ' + '"https://www.virustotal.com/file/a11a11aa1aa1a1a11aaa1a11a1a11a11a11a111a1a11111a1a1a1a11a1aaa1a1/analysis/111111111111/", ' + '"percent": "61.02", "result": "36 / 59"}, "file_type": "elf", "filename": ' + '"", "response_md5": "1a111111a1aa11a111111aa11a111aa1", "response_sha256": ' + '"a11a11aa1aa1a1a11aaa1a11a1a11a11a11a111a1a11111a1a1a1a11a1aaa1a1", ' + '"response_size": "95268", "signature": "", "firstseen": "2020-01-20", ' + '"urlhaus_download": ' + '"https://urlhaus-api.abuse.ch/v1/download/a00a00aa0aa0a0a00aaa0a00a0a00a00a00a000a0a00000a0a0a0a00a0aaa0a0/"}], ' + '"threat": "malware_download", "takedown_time_seconds": "", "date_added": ' + '"2020-01-01 00:00:00 UTC", "id": "111111"}, "reporter": "Example_Nick_0"}]'), + { + 'timestamp': ANY, + 'message_id': ANY, + 'type': 'stream', + 'headers': { + 'meta': { + 'http_last_modified': '2020-12-12 01:01:01', + }, + }, + }, + ), + ], + expected_saved_state={ + 'newest_row_time': '2020-01-01 01:00:00', + 'newest_rows': { + '"111111","2020-01-01 01:00:00","http://example_1.com","XXX1","YYY1","ZZZ1",' + '"https://urlhaus.abuse.ch/url/111111/","Example_Nick_1"' + }, + }, + ) + + @paramseq + def initial_state_and_orig_data_variants(): + yield param( + initial_state=sentinel.NO_STATE, + orig_data=zip_data_in_memory( + 
filename='csv.txt', + data=( + '# row which should be ignored by collector\n' + '"111111","2020-01-01 01:00:00","http://example_1.com","XXX1","YYY1","ZZZ1",' + '"https://urlhaus.abuse.ch/url/111111/","Example_Nick_1"\n' + '"000000","2020-01-01 00:00:00","http://example_0.com","XXX0","YYY0","ZZZ0",' + '"https://urlhaus.abuse.ch/url/000000/","Example_Nick_0"\n' + ), + ), + ) + + @foreach(cases) + @foreach(initial_state_and_orig_data_variants) + def test(self, + config_content, + initial_state, + orig_data, + url_info_pages, + expected_publish_output_calls, + expected_saved_state): + collector = self.prepare_collector(AbuseChUrlhausUrlsCollector, + config_content=config_content, + initial_state=initial_state) + self.patch( + 'n6.collectors.generic.RequestPerformer', + side_effect=[_make_request_performer_mock( + response_content=orig_data, + mock_spec=['__enter__', '__exit__', 'response', 'get_dt_header'], + last_modified_dt=datetime.datetime(2020, 12, 12, 01, 01, 01), + )], + spec=True) + self.patch( + 'n6.collectors.abuse_ch.RequestPerformer.fetch', + side_effect=url_info_pages) + collector.run_handling() + self.assertEqual(self.publish_output_mock.mock_calls, expected_publish_output_calls) + self.assertEqual(self.saved_state, expected_saved_state) - COLLECTOR_CLASS = AbuseChUrlhausPayloadsUrlsCollector + +@expand +class TestAbuseChUrlhausPayloadsUrlsCollector(_BaseCollectorTestCase): DEFAULT_PROP_KWARGS = { 'timestamp': ANY, 'message_id': ANY, 'type': 'file', 'content_type': 'text/csv', - 'headers': {} + 'headers': ANY, } - - @paramseq def cases(): yield param( @@ -435,7 +659,11 @@ def cases(): 'message_id': ANY, 'type': 'file', 'content_type': 'text/csv', - 'headers': {} + 'headers': { + 'meta': { + 'http_last_modified': '2020-12-12 01:01:01', + }, + }, }, ), ], @@ -449,7 +677,7 @@ def cases(): '"2019-08-20 03:00:00","http://www.example4.com","XX4",' '"444d4444d444dd4d44d44444dd444444",' '"4d4d44dd44d4d44dd44d4444444dddddddddddddd444444d4d4d4d4d4d4d4d44",' - '"ExampleNick4"' + '"ExampleNick4"', }, }, ) @@ -470,7 +698,9 @@ def initial_state_and_orig_data_variants(): '"ExampleNick1"' }, }, - orig_data=( + orig_data=zip_data_in_memory( + filename='payload.txt', + data=( '# row which should be ignored by collector\n' '"2019-08-20 03:00:00","http://www.example5.com","XX5",' '"555e5555e555ee5e55e55555ee555555",' @@ -496,12 +726,15 @@ def initial_state_and_orig_data_variants(): '"000a0000a000aa0a00a00000aa000000",' '"0a0a00aa00a0a00aa00a0000000aaaaaaaaaaaaaa000000a0a0a0a0a0a0a0a00",' '"ExampleNick0"' + ), ), ) yield param( initial_state=sentinel.NO_STATE, - orig_data=( + orig_data=zip_data_in_memory( + filename='payload.txt', + data=( '# row which should be ignored by collector\n' '"2019-08-20 03:00:00","http://www.example5.com","XX5",' '"555e5555e555ee5e55e55555ee555555",' @@ -515,13 +748,32 @@ def initial_state_and_orig_data_variants(): '"333c3333c333cc3c33c33333cc333333",' '"3c3c33cc33c3c33cc33c3333333cccccccccccccc333333c3c3c3c3c3c3c3c33",' '"ExampleNick3"\n' + ), ), ) @foreach(cases) @foreach(initial_state_and_orig_data_variants) - def test(self, **kwargs): - self._perform_test(**kwargs) + def test(self, + config_content, + initial_state, + orig_data, + expected_publish_output_calls, + expected_saved_state): + collector = self.prepare_collector(AbuseChUrlhausPayloadsUrlsCollector, + config_content=config_content, + initial_state=initial_state) + self.patch( + 'n6.collectors.generic.RequestPerformer', + side_effect=[_make_request_performer_mock( + response_content=orig_data, + 
mock_spec=['__enter__', '__exit__', 'response', 'get_dt_header'], + last_modified_dt=datetime.datetime(2020, 12, 12, 01, 01, 01), + )], + spec=True) + collector.run_handling() + self.assertEqual(self.publish_output_mock.mock_calls, expected_publish_output_calls) + self.assertEqual(self.saved_state, expected_saved_state) @expand diff --git a/N6Core/n6/tests/collectors/test_generic.py b/N6Core/n6/tests/collectors/test_generic.py index c6e1a44..735fc57 100644 --- a/N6Core/n6/tests/collectors/test_generic.py +++ b/N6Core/n6/tests/collectors/test_generic.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2013-2019 NASK. All rights reserved. +# Copyright (c) 2013-2021 NASK. All rights reserved. import datetime import hashlib @@ -17,24 +17,22 @@ expand, foreach, param, + paramseq, ) -from n6lib.common_helpers import SimpleNamespace +from n6corelib.email_message import ReceivedEmailMessage +from n6lib.common_helpers import PlainNamespace from n6lib.config import ( ConfigError, ConfigSection, ) -from n6lib.csv_helpers import split_csv_row -from n6lib.email_message import EmailMessage -from n6lib.unit_test_helpers import ( - AnyDictIncluding, - patch_always, -) +from n6lib.csv_helpers import extract_field_from_csv_row +from n6lib.unit_test_helpers import patch_always from n6.base.queue import QueuedBase from n6.collectors.generic import ( BaseCollector, - BaseOneShotCollector, BaseEmailSourceCollector, + BaseOneShotCollector, BaseTimeOrderedRowsCollector, BaseUrlDownloaderCollector, ) @@ -59,7 +57,7 @@ 'some_opt': '[{"a": "bcd"}]', }, } -MOCKED_SUPER_CLS = SimpleNamespace(__init__=Mock()) +MOCKED_SUPER_CLS = PlainNamespace(__init__=Mock()) SAMPLE_EMAIL_MESSAGE = sentinel.email_msg SAMPLE_INPUT_DATA = sentinel.input_data SAMPLE_MESSAGE_ID = sentinel.message_id @@ -441,7 +439,7 @@ def test__get_output_prop_kwargs_content_type_not_set(self, source_type): def test__get_output_message_id(self): source = 'my_src_label.my_src_channel' - created_timestamp = int(1234.56789098765432) + created_timestamp = 1234 created_timestamp_str = '1234' output_data_body = '1234' mock = Mock(__class__=BaseCollector) @@ -463,7 +461,7 @@ def test_basics(self): self.assertTrue(issubclass(BaseOneShotCollector, BaseCollector)) def test__init(self): - super_cls_mock = SimpleNamespace(__init__=Mock()) + super_cls_mock = PlainNamespace(__init__=Mock()) with patch_always('n6.collectors.generic.super', return_value=super_cls_mock) as super_mock: # instantiation @@ -524,7 +522,7 @@ def test__get_script_init_kwargs(self, stdin_mock): {'input_data': {'raw_email': sentinel.stdin_read_result}}) - @patch.object(EmailMessage, 'from_string', + @patch.object(ReceivedEmailMessage, 'from_string', return_value=SAMPLE_EMAIL_MESSAGE) def test__process_input_data(self, EM_from_string_mock): mock = Mock(__class__=BaseEmailSourceCollector) @@ -606,7 +604,8 @@ def test(self, headers, expected__http_last_modified): expected__http_last_modified) -class TestXXX(_BaseCollectorTestCase): +@expand +class TestBaseTimeOrderedRowsCollector(_BaseCollectorTestCase): class ExampleTimeOrderedRowsCollector(BaseTimeOrderedRowsCollector): @@ -616,89 +615,711 @@ class ExampleTimeOrderedRowsCollector(BaseTimeOrderedRowsCollector): cache_dir :: str ''' - example_orig_data = None # to be set on instance by test code... 
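The rewritten `ExampleTimeOrderedRowsCollector` above, together with the parametrized cases that follow, exercises the state-based deduplication of `BaseTimeOrderedRowsCollector`: rows strictly newer than the saved `newest_row_time` are published, and rows equal to it only when absent from the saved `newest_rows` set. A self-contained sketch of that selection rule under assumed names (the real base-class logic in `n6.collectors.generic` is richer -- it also validates row order and raises `ValueError` on unsorted input, as the final case of this test class expects):

    import csv

    def row_time(row):
        # as in the example collector: time sits in the second CSV column
        return next(csv.reader([row]))[1]

    def select_fresh_rows(rows, state):
        fresh = []
        for row in rows:
            if not row.strip() or row.startswith('#'):
                continue  # blank lines and comment rows are ignored
            t = row_time(row)
            if state is None or t > state['newest_row_time'] or (
                    t == state['newest_row_time']
                    and row not in state['newest_rows']):
                fresh.append(row)
        return fresh

    state = {'newest_row_time': '2019-07-10',
             'newest_rows': {'"zzz","2019-07-10"'}}
    rows = ['"ham","2019-07-13"', '"spam","2019-07-11"', '"zzz","2019-07-10"']
    assert select_fresh_rows(rows, state) == rows[:2]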
+ example_orig_data = None # to be set on instance by test code def obtain_orig_data(self): return self.example_orig_data - def clean_row_time(self, raw_row_time): - return raw_row_time.strip().strip('"') + def pick_raw_row_time(self, row): + return extract_field_from_csv_row(row, column_index=1) - def extract_raw_row_time(self, row): - fields = split_csv_row(row) - return fields[1].strip() + def clean_row_time(self, raw_row_time): + return raw_row_time def get_source_channel(self, **kwargs): return 'my-channel' - # XXX: to be removed (rather...) - def get_output_prop_kwargs(self, **kwargs): - output_prop_kwargs = super(TestXXX.ExampleTimeOrderedRowsCollector, - self).get_output_prop_kwargs(**kwargs) - output_prop_kwargs['headers'].setdefault('meta', {})['proba'] = '123' - return output_prop_kwargs + @paramseq + def cases(): + yield param( + # Initial state (one row) + # and expected saved state (one row) + config_content=''' + [xyz_my_channel] + source = xyz + cache_dir = /who/cares + ''', + initial_state={ + 'newest_row_time': '2019-07-10', + 'newest_rows': {'"zzz","2019-07-10"'}, + }, + orig_data=( + '# halo,mówię...\n' + '"ham","2019-07-13"\n' + '\t\n' + '"spam","2019-07-11"\n' + '"zzz","2019-07-10"\n' + '"egg","2019-07-02"\n' + '"sss","2019-07-02"\n' + '\n' + '"bar","2019-07-01"\n' + '"foo","2019-06-30"\n' + ), + expected_publish_output_calls=[ + call( + # routing_key + 'xyz.my-channel', + + # body + ( + '"ham","2019-07-13"\n' + '"spam","2019-07-11"' + ), + + # prop_kwargs + { + 'timestamp': ANY, + 'message_id': ANY, + 'type': 'file', + 'content_type': 'text/csv', + 'headers': ANY, + }, + ), + ], + expected_saved_state={ + 'newest_row_time': '2019-07-13', + 'newest_rows': {'"ham","2019-07-13"'}, + } + ) + yield param( + # Mostly the same as the first test case, but instead + # of `date/time-based` order we have ids (just to show that + # it might work in the same way as with `date/time-based` + # order) + # --- + # Initial state (one row) + # and expected saved state (one row) + config_content=''' + [xyz_my_channel] + source = xyz + cache_dir = /who/cares + ''', + initial_state={ + 'newest_row_time': '5', + 'newest_rows': {'"zzz","5"'}, + }, + orig_data=( + '# halo,mówię...\n' + '"ham","7"\n' + '\t\n' + '"spam","6"\n' + '"zzz","5"\n' + '"egg","4"\n' + '"sss","3"\n' + '\n' + '"bar","2"\n' + '"foo","1"\n' + ), + expected_publish_output_calls=[ + call( + # routing_key + 'xyz.my-channel', + + # body + ( + '"ham","7"\n' + '"spam","6"' + ), + + # prop_kwargs + { + 'timestamp': ANY, + 'message_id': ANY, + 'type': 'file', + 'content_type': 'text/csv', + 'headers': ANY, + }, + ), + ], + expected_saved_state={ + 'newest_row_time': '7', + 'newest_rows': {'"ham","7"'}, + } + ) - def test(self): - config_content = ''' - [xyz_my_channel] - source = xyz - cache_dir = /who/cares - ''' - initial_state = { - 'newest_row_time': '2019-07-02', - 'newest_rows': {'"sss", "2019-07-02"'}, - } - orig_data = ( - '# halo, mówię...\n' - '"ham", "2019-07-11"\n' - '"spam", "2019-07-11"\n' - '\t\n' - '"zzz", "2019-07-10"\n' - '"egg", "2019-07-02"\n' - '"sss", "2019-07-02"\n' - '\n' - '"bar", "2019-07-01"\n' - '"foo", "2019-06-30"\n' + yield param( + # Initial state (one row) and + # expected saved state (two rows) + config_content=''' + [xyz_my_channel] + source = xyz + cache_dir = /who/cares + ''', + initial_state={ + 'newest_row_time': '2019-07-02', + 'newest_rows': {'"sss","2019-07-02"'}, + }, + orig_data=( + '# halo,mówię...\n' + '"ham","2019-07-11"\n' + '\t\n' + '"spam","2019-07-11"\n' + '"zzz","2019-07-10"\n' 
+ '"egg","2019-07-02"\n' + '"sss","2019-07-02"\n' + '\n' + '"bar","2019-07-01"\n' + '"foo","2019-06-30"\n' + ), + expected_publish_output_calls=[ + call( + # routing_key + 'xyz.my-channel', + + # body + ( + '"ham","2019-07-11"\n' + '"spam","2019-07-11"\n' + '"zzz","2019-07-10"\n' + '"egg","2019-07-02"' + ), + + # prop_kwargs + { + 'timestamp': ANY, + 'message_id': ANY, + 'type': 'file', + 'content_type': 'text/csv', + 'headers': ANY, + }, + ), + ], + expected_saved_state={ + 'newest_row_time': '2019-07-11', + 'newest_rows': { + '"ham","2019-07-11"', + '"spam","2019-07-11"' + }, + } + ) + + yield param( + # Initial state (one row) but without expected saved state + # (no new data) + config_content=''' + [xyz_my_channel] + source = xyz + cache_dir = /who/cares + ''', + initial_state={ + 'newest_row_time': '2019-07-10', + 'newest_rows': {'"zzz","2019-07-10"'}, + }, + orig_data=( + '"zzz","2019-07-10"\n' + '"egg","2019-07-02"\n' + '"sss","2019-07-02"\n' + '\n' + '"bar","2019-07-01"\n' + '"foo","2019-06-30"\n' + ), + expected_publish_output_calls=[], + expected_saved_state=sentinel.NO_STATE) + + yield param( + # Initial state (two rows) + # and expected saved state (one row) + config_content=''' + [xyz_my_channel] + source = xyz + cache_dir = /who/cares + ''', + initial_state={ + 'newest_row_time': '2019-07-10', + 'newest_rows': { + '"spam","2019-07-10"', + '"zzz","2019-07-10"' + }, + }, + orig_data=( + '"ham","2019-07-11"\n' + '"spam","2019-07-10"\n' + '"zzz","2019-07-10"\n' + '\t\n' + '"egg","2019-07-02"\n' + '"sss","2019-07-02"\n' + '\n' + '"bar","2019-07-01"\n' + '"foo","2019-06-30"\n' + ), + expected_publish_output_calls=[ + call( + # routing_key + 'xyz.my-channel', + + # body + ( + '"ham","2019-07-11"' + ), + + # prop_kwargs + { + 'timestamp': ANY, + 'message_id': ANY, + 'type': 'file', + 'content_type': 'text/csv', + 'headers': ANY, + }, + ), + ], + expected_saved_state={ + 'newest_row_time': '2019-07-11', + 'newest_rows': { + '"ham","2019-07-11"' + }, + } + ) + + yield param( + # Initial state (two rows) and + # expected saved state (also two rows) + config_content=''' + [xyz_my_channel] + source = xyz + cache_dir = /who/cares + ''', + initial_state={ + 'newest_row_time': '2019-07-02', + 'newest_rows': { + '"sss","2019-07-02"', + '"egg","2019-07-02"' + }, + }, + orig_data=( + '# halo,mówię...\n' + '"ham","2019-07-11"\n' + '"spam","2019-07-11"\n' + '\t\n' + '"zzz","2019-07-02"\n' + '"egg","2019-07-02"\n' + '"sss","2019-07-02"\n' + '\n' + '"bar","2019-07-01"\n' + '"foo","2019-06-30"\n' + ), + expected_publish_output_calls=[ + call( + # routing_key + 'xyz.my-channel', + + # body + ( + '"ham","2019-07-11"\n' + '"spam","2019-07-11"\n' + '"zzz","2019-07-02"' + ), + + # prop_kwargs + { + 'timestamp': ANY, + 'message_id': ANY, + 'type': 'file', + 'content_type': 'text/csv', + 'headers': ANY, + }, + ), + ], + expected_saved_state={ + 'newest_row_time': '2019-07-11', + 'newest_rows': { + '"ham","2019-07-11"', + '"spam","2019-07-11"' + }, + } + ) + + yield param( + # Initial state (two rows) + # but without expected saved state + # (no new data) + config_content=''' + [xyz_my_channel] + source = xyz + cache_dir = /who/cares + ''', + initial_state={ + 'newest_row_time': '2019-07-02', + 'newest_rows': { + '"sss","2019-07-02"', + '"egg","2019-07-02"' + }, + }, + orig_data=( + '"egg","2019-07-02"\n' + '"sss","2019-07-02"\n' + '\n' + '"bar","2019-07-01"\n' + '"foo","2019-06-30"\n' + ), + expected_publish_output_calls=[], + expected_saved_state=sentinel.NO_STATE) + + yield param( + # Without initial 
state but with expected saved state + # (e.g.first run) - one row + config_content=''' + [xyz_my_channel] + source = xyz + cache_dir = /who/cares + ''', + initial_state=sentinel.NO_STATE, + orig_data=( + '"zzz","2019-07-10"\n' + '"egg","2019-07-02"\n' + '"sss","2019-07-02"\n' + '"bar","2019-07-01"\n' + '"foo","2019-06-30"\n' + ), + expected_publish_output_calls=[ + call( + # routing_key + 'xyz.my-channel', + + # body + ( + '"zzz","2019-07-10"\n' + '"egg","2019-07-02"\n' + '"sss","2019-07-02"\n' + '"bar","2019-07-01"\n' + '"foo","2019-06-30"' + ), + + # prop_kwargs + { + 'timestamp': ANY, + 'message_id': ANY, + 'type': 'file', + 'content_type': 'text/csv', + 'headers': ANY, + }, + ), + ], + expected_saved_state={ + 'newest_row_time': '2019-07-10', + 'newest_rows': {'"zzz","2019-07-10"'}, + } ) - expected_publish_output_calls = [ - call( - # routing_key - 'xyz.my-channel', - - # body - ( - '"ham", "2019-07-11"\n' - '"spam", "2019-07-11"\n' - '"zzz", "2019-07-10"\n' - '"egg", "2019-07-02"' + + yield param( + # Without initial state but with expected saved state + # (e.g.first run) - two rows + config_content=''' + [xyz_my_channel] + source = xyz + cache_dir = /who/cares + ''', + initial_state=sentinel.NO_STATE, + orig_data=( + '"zzz","2019-07-10"\n' + '"egg","2019-07-10"\n' + '"sss","2019-07-02"\n' + '"bar","2019-07-01"\n' + '"foo","2019-06-30"\n' + ), + expected_publish_output_calls=[ + call( + # routing_key + 'xyz.my-channel', + + # body + ( + '"zzz","2019-07-10"\n' + '"egg","2019-07-10"\n' + '"sss","2019-07-02"\n' + '"bar","2019-07-01"\n' + '"foo","2019-06-30"' + ), + + # prop_kwargs + { + 'timestamp': ANY, + 'message_id': ANY, + 'type': 'file', + 'content_type': 'text/csv', + 'headers': ANY, + }, ), + ], + expected_saved_state={ + 'newest_row_time': '2019-07-10', + 'newest_rows': { + '"zzz","2019-07-10"', + '"egg","2019-07-10"' + }, + } + ) - # prop_kwargs - { - 'timestamp': ANY, - 'message_id': ANY, - 'type': 'file', - 'content_type': 'text/csv', - 'headers': AnyDictIncluding(**{ - 'meta': { - 'proba': '123', - } - }), + yield param( + # Without initial state (e.g. 
first run) and without + # expected saved state (no data at all - just empty string) + config_content=''' + [xyz_my_channel] + source = xyz + cache_dir = /who/cares + ''', + initial_state=sentinel.NO_STATE, + orig_data='', + expected_publish_output_calls=[], + expected_saved_state=sentinel.NO_STATE) + + yield param( + # Initial state one row, another row with the same date + # in orig data - we expect to get this row + # Expected saved state - old row + new row + config_content=''' + [xyz_my_channel] + source = xyz + cache_dir = /who/cares + ''', + initial_state={ + 'newest_row_time': '2019-07-02', + 'newest_rows': {'"sss","2019-07-02"'}, + }, + orig_data=( + '# halo,mówię...\n' + '"egg","2019-07-02"\n' + '"sss","2019-07-02"\n' + '\n' + '"bar","2019-07-01"\n' + '"foo","2019-06-30"\n' + ), + expected_publish_output_calls=[ + call( + # routing_key + 'xyz.my-channel', + + # body + ( + '"egg","2019-07-02"' + ), + + # prop_kwargs + { + 'timestamp': ANY, + 'message_id': ANY, + 'type': 'file', + 'content_type': 'text/csv', + 'headers': ANY, + }, + ), + ], + expected_saved_state={ + 'newest_row_time': '2019-07-02', + 'newest_rows': { + '"egg","2019-07-02"', + '"sss","2019-07-02"' }, + } + ) + + yield param( + # Initial state one row, orig data consists of two + # additional (new) rows with the same date as "state row" + # - we expect to get only these two new rows + # Expected saved state - old row + new row + config_content=''' + [xyz_my_channel] + source = xyz + cache_dir = /who/cares + ''', + initial_state={ + 'newest_row_time': '2019-07-02', + 'newest_rows': {'"sss","2019-07-02"'}, + }, + orig_data=( + '# halo,mówię...\n' + '"ham","2019-07-02"\n' + '"egg","2019-07-02"\n' + '"sss","2019-07-02"\n' + '\n' + '"bar","2019-07-01"\n' + '"foo","2019-06-30"\n' ), - ] - expected_saved_state = { - 'newest_row_time': '2019-07-11', - 'newest_rows': {'"ham", "2019-07-11"', '"spam", "2019-07-11"'}, - } + expected_publish_output_calls=[ + call( + # routing_key + 'xyz.my-channel', + + # body + ( + '"ham","2019-07-02"\n' + '"egg","2019-07-02"' + ), + + # prop_kwargs + { + 'timestamp': ANY, + 'message_id': ANY, + 'type': 'file', + 'content_type': 'text/csv', + 'headers': ANY, + }, + ), + ], + expected_saved_state={ + 'newest_row_time': '2019-07-02', + 'newest_rows': { + '"egg","2019-07-02"', + '"sss","2019-07-02"', + '"ham","2019-07-02"' + }, + } + ) + + yield param( + # Initial state two rows, orig data consists of one + # additional (new) row with the same date as "state row" + # - we expect to get only this new row + # Expected saved state - old rows + new row + config_content=''' + [xyz_my_channel] + source = xyz + cache_dir = /who/cares + ''', + initial_state={ + 'newest_row_time': '2019-07-02', + 'newest_rows': { + '"egg","2019-07-02"', + '"sss","2019-07-02"' + }, + }, + orig_data=( + '# halo,mówię...\n' + '"ham","2019-07-02"\n' + '"egg","2019-07-02"\n' + '"sss","2019-07-02"\n' + '\n' + '"bar","2019-07-01"\n' + '"foo","2019-06-30"\n' + ), + expected_publish_output_calls=[ + call( + # routing_key + 'xyz.my-channel', + + # body + ( + '"ham","2019-07-02"' + ), + + # prop_kwargs + { + 'timestamp': ANY, + 'message_id': ANY, + 'type': 'file', + 'content_type': 'text/csv', + 'headers': ANY, + }, + ), + ], + expected_saved_state={ + 'newest_row_time': '2019-07-02', + 'newest_rows': { + '"egg","2019-07-02"', + '"sss","2019-07-02"', + '"ham","2019-07-02"' + }, + } + ) + yield param( + # Initial state one row, another row with the same date + # in orig data - we expect to get this row + # Expected state: new row 
(different, later date) + config_content=''' + [xyz_my_channel] + source = xyz + cache_dir = /who/cares + ''', + initial_state={ + 'newest_row_time': '2019-07-02', + 'newest_rows': {'"sss","2019-07-02"'}, + }, + orig_data=( + '# halo,mówię...\n' + '"zzz","2019-07-10"\n' + '"egg","2019-07-02"\n' + '"sss","2019-07-02"\n' + '\n' + '"bar","2019-07-01"\n' + '"foo","2019-06-30"\n' + ), + expected_publish_output_calls=[ + call( + # routing_key + 'xyz.my-channel', + + # body + ( + '"zzz","2019-07-10"\n' + '"egg","2019-07-02"' + ), + + # prop_kwargs + { + 'timestamp': ANY, + 'message_id': ANY, + 'type': 'file', + 'content_type': 'text/csv', + 'headers': ANY, + }, + ), + ], + expected_saved_state={ + 'newest_row_time': '2019-07-10', + 'newest_rows': {'"zzz","2019-07-10"'}, + } + ) + + yield param( + # Order of rows does not satisfy our requirements (data + # from source is not sorted, `older` rows are mixed with + # newer -- see `BaseTimeOrderedRowsCollector`'s + # documentation, for more details. + # We expect to obtain ValueError. + config_content=''' + [xyz_my_channel] + source = xyz + cache_dir = /who/cares + ''', + initial_state={ + 'newest_row_time': '2019-07-10', + 'newest_rows': {'"zzz","2019-07-10"'}, + }, + orig_data=( + '# halo,mówię...\n' + '"spam","2019-07-11"\n' + '"ham","2019-07-13"\n' + '\t\n' + '"zzz","2019-07-10"\n' + '"egg","2019-07-02"\n' + '"sss","2019-07-02"\n' + '\n' + '"bar","2019-07-01"\n' + '"foo","2019-06-30"\n' + ), + expected_publish_output_calls=None, + expected_saved_state=None, + expected_error=ValueError + ) + + @foreach(cases) + def test(self, + config_content, + initial_state, + orig_data, + expected_publish_output_calls, + expected_saved_state, + expected_error=None): collector = self.prepare_collector(self.ExampleTimeOrderedRowsCollector, config_content=config_content, initial_state=initial_state) collector.example_orig_data = orig_data - - collector.run_handling() - - self.assertEqual(self.publish_output_mock.mock_calls, expected_publish_output_calls) - self.assertEqual(self.saved_state, expected_saved_state) + if expected_error: + with self.assertRaises(expected_error): + collector.run_handling() + else: + collector.run_handling() + self.assertEqual(self.publish_output_mock.mock_calls, expected_publish_output_calls) + self.assertEqual(self.saved_state, expected_saved_state) diff --git a/N6Core/n6/tests/parsers/_parser_test_mixin.py b/N6Core/n6/tests/parsers/_parser_test_mixin.py index e6fa091..8a7e46f 100644 --- a/N6Core/n6/tests/parsers/_parser_test_mixin.py +++ b/N6Core/n6/tests/parsers/_parser_test_mixin.py @@ -13,6 +13,7 @@ AggregatedEventParser, BlackListParser, ) +from n6lib.class_helpers import FalseIfOwnerClassNameMatchesRegex from n6lib.record_dict import RecordDict from n6lib.unit_test_helpers import TestCaseMixin @@ -22,6 +23,11 @@ class ParserTestMixIn(TestCaseMixin): + # Prevent pytest *from treating* those subclasses of this class that + # are base/mixin (abstract) classes *as concrete test classes*. + __test__ = FalseIfOwnerClassNameMatchesRegex(r'\A(_.*Base|.*ParserTestMix[Ii]n\Z)') + + MESSAGE_TIMESTAMP = 1389348840 # '2014-01-10 10:14:00' message_created = str(datetime.datetime.utcfromtimestamp(MESSAGE_TIMESTAMP)) diff --git a/N6Core/n6/tests/parsers/conftest.py b/N6Core/n6/tests/parsers/conftest.py new file mode 100644 index 0000000..b7c786f --- /dev/null +++ b/N6Core/n6/tests/parsers/conftest.py @@ -0,0 +1,6 @@ +# Copyright (c) 2021 NASK. All rights reserved. 
+ +# Prevent *pytest* from trying to collect tests from a file that is +# a template rather than a real module (that would cause unnecessary +# exceptions...). +collect_ignore = ["_parser_test_template.py"] diff --git a/N6Core/n6/tests/parsers/test_abuse_ch.py b/N6Core/n6/tests/parsers/test_abuse_ch.py index a3f7820..c89936a 100644 --- a/N6Core/n6/tests/parsers/test_abuse_ch.py +++ b/N6Core/n6/tests/parsers/test_abuse_ch.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2013-2020 NASK. All rights reserved. +# Copyright (c) 2013-2021 NASK. All rights reserved. import datetime import unittest @@ -25,6 +25,7 @@ AbuseChZeusTrackerParser, AbuseChFeodoTrackerParser, AbuseChFeodoTracker201908Parser, + AbuseChFeodoTracker202110Parser, AbuseChRansomwareTrackerParser, _AbuseChSSLBlacklistBaseParser, AbuseChSSLBlacklistDyreParser, @@ -526,6 +527,51 @@ def cases(self): ) +class TestAbuseChFeodotracker202110Parser(ParserTestMixIn, unittest.TestCase): + + PARSER_SOURCE = 'abuse-ch.feodotracker' + PARSER_CLASS = AbuseChFeodoTracker202110Parser + PARSER_BASE_CLASS = BaseParser + PARSER_CONSTANT_ITEMS = { + 'restriction': 'public', + 'confidence': 'medium', + 'category': 'cnc' + } + + def cases(self): + yield ( + '2019-05-27 13:36:27,0.0.0.0,447,online,2019-05-28,TrickBot\n' + 'this, is, one, very, wrong, line\n' + '2019-05-25 01:30:36,0.0.0.0,443,online,2019-05-27,Heodo\n' + '2019-05-16 19:43:27,0.0.0.0,8080,online,2019-05-22,Heodo\n', + [ + { + 'name': 'trickbot', + 'address': [{'ip': '0.0.0.0'}], + 'dport': 447, + 'time': '2019-05-27 13:36:27', + }, + { + 'name': 'heodo', + 'address': [{'ip': '0.0.0.0'}], + 'dport': 443, + 'time': '2019-05-25 01:30:36', + }, + { + 'name': 'heodo', + 'address': [{'ip': '0.0.0.0'}], + 'dport': 8080, + 'time': '2019-05-16 19:43:27', + }, + ] + ) + + yield ( + "INVALID_DATA", + ValueError + ) + + class TestAbuseChRansomwareTrackerParser(ParserTestMixIn, unittest.TestCase): PARSER_SOURCE = 'abuse-ch.ransomware' PARSER_CLASS = AbuseChRansomwareTrackerParser diff --git a/N6Core/n6/tests/parsers/test_generic.py b/N6Core/n6/tests/parsers/test_generic.py index 294bee0..c6f4558 100644 --- a/N6Core/n6/tests/parsers/test_generic.py +++ b/N6Core/n6/tests/parsers/test_generic.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2013-2019 NASK. All rights reserved. +# Copyright (c) 2013-2021 NASK. All rights reserved. 
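The `TestAbuseChFeodotracker202110Parser` cases above pin down the new feed format: `time,ip,port,status,last_online,malware_name` rows map to events carrying `time`, `address`, `dport` and a lowercased `name`; malformed rows are skipped, and input yielding no events at all is an error. A condensed sketch of that row handling (an assumed helper, not the actual `AbuseChFeodoTracker202110Parser` code, which goes through the `BaseParser`/`RecordDict` machinery):

    import csv

    def parse_feodotracker_202110_rows(raw):
        events = []
        for fields in csv.reader(raw.splitlines()):
            try:
                time, ip, dport, _status, _last_online, name = fields
                events.append({
                    'time': time,
                    'address': [{'ip': ip}],
                    'dport': int(dport),
                    'name': name.lower(),
                })
            except ValueError:
                # malformed row, e.g. 'this, is, one, very, wrong, line'
                continue
        if not events:
            raise ValueError('no valid rows in input data')
        return events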
import hashlib import json @@ -22,12 +22,12 @@ from n6lib.common_helpers import ( FilePagedSequence, - SimpleNamespace, - concat_reducing_indent, + PlainNamespace, ) from n6lib.config import ( Config, ConfigSection, + join_config_specs, parse_config_spec, ) from n6lib.record_dict import ( @@ -66,7 +66,6 @@ def _asserts_of_proper_preinit_hook_instance_adjustment(self, instance, 'exchange': 'raw', 'exchange_type': 'topic', 'queue_name': binding_key, - 'binding_keys': [binding_key], }) self.assertEqual(BaseParser.input_queue, { 'exchange': 'raw', @@ -143,7 +142,7 @@ class SomeParser(BaseParser): expected_config_full=Config.make({'SomeParser': {'prefetch_count': 42}}), ), param( - custom_config_spec_pattern=concat_reducing_indent( + custom_config_spec_pattern=join_config_specs( BaseParser.config_spec_pattern, ''' some_opt = [-3, null] :: json @@ -196,7 +195,7 @@ class SomeParser(BaseParser): self._asserts_of_proper__new__instance_adjustment(unready_instance) self._asserts_of_proper_preinit_hook_instance_adjustment(unready_instance, binding_key) - super_cls_mock = SimpleNamespace(__init__=Mock()) + super_cls_mock = PlainNamespace(__init__=Mock()) with patch_always('n6.parsers.generic.super', return_value=super_cls_mock) as super_mock, \ patch('n6.parsers.generic.Config._load_n6_config_files', @@ -213,16 +212,27 @@ class SomeParser(BaseParser): expected_config, expected_config_full) + # TODO: full process of the pipeline configuration should + # be tested def test__make_binding_keys(self): self.mock.default_binding_key = 'fooo.barr' - binding_keys = self.meth.make_binding_keys() - self.assertEqual(binding_keys, ['fooo.barr']) + self.mock.input_queue = {} + self.meth.make_binding_keys([self.mock.default_binding_key]) + self.assertEqual(self.mock.input_queue['binding_keys'], ['fooo.barr']) self.assertEqual(self.mock.mock_calls, []) def test__make_binding_keys_with_raw_format_version_tag(self): self.mock.default_binding_key = 'fooo.barr.33' - binding_keys = self.meth.make_binding_keys() - self.assertEqual(binding_keys, ['fooo.barr.33']) + self.mock.input_queue = {} + self.meth.make_binding_keys([self.mock.default_binding_key]) + self.assertEqual(self.mock.input_queue['binding_keys'], ['fooo.barr.33']) + self.assertEqual(self.mock.mock_calls, []) + + def test__make_binding_keys__override_default_key(self): + self.mock.default_binding_key = 'fooo.barr' + self.mock.input_queue = {} + self.meth.make_binding_keys(['overridden.key']) + self.assertEqual(self.mock.input_queue['binding_keys'], ['overridden.key']) self.assertEqual(self.mock.mock_calls, []) def test__get_script_init_kwargs(self): @@ -285,10 +295,10 @@ def test__prepare_data(self): data = self.meth.prepare_data( routing_key='ham.spam', body=sentinel.body, - properties=SimpleNamespace(foo=sentinel.foo, - bar=sentinel.bar, - timestamp=1389348840, - headers={'a': sentinel.a})) + properties=PlainNamespace(foo=sentinel.foo, + bar=sentinel.bar, + timestamp=1389348840, + headers={'a': sentinel.a})) self.assertEqual(data, { 'a': sentinel.a, 'properties.foo': sentinel.foo, @@ -303,10 +313,10 @@ def test__prepare_data__rk__with_raw_format_version_tag(self): data = self.meth.prepare_data( routing_key='ham.spam.33', body=sentinel.body, - properties=SimpleNamespace(foo=sentinel.foo, - bar=sentinel.bar, - timestamp=1389348840, - headers={'a': sentinel.a})) + properties=PlainNamespace(foo=sentinel.foo, + bar=sentinel.bar, + timestamp=1389348840, + headers={'a': sentinel.a})) self.assertEqual(data, { 'a': sentinel.a, 'properties.foo': sentinel.foo, diff --git 
a/N6Core/n6/tests/parsers/test_packetmail.py b/N6Core/n6/tests/parsers/test_packetmail.py index dd98137..f732f11 100644 --- a/N6Core/n6/tests/parsers/test_packetmail.py +++ b/N6Core/n6/tests/parsers/test_packetmail.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2013-2019 NASK. All rights reserved. +# Copyright (c) 2013-2020 NASK. All rights reserved. import unittest @@ -35,7 +35,16 @@ def cases(self): '2.2.2.2; 2016-10-17 10:02:48; Honeypot hits in 3600 hash-collection seconds:' ' 1; Cumulative honeypot hits for IP over all days: 5\n' '3.3.3.3; 2016-11-17 10:05:38; Honeypot hits in 3600 hash-collection seconds:' + ' 1; Cumulative honeypot hits for IP over all days: 1\n' + + # DST-related corner case: impossible (non-existent) time + '4.4.4.4; 2020-03-29 02:45:01; Honeypot hits in 3600 hash-collection seconds:' + ' 1; Cumulative honeypot hits for IP over all days: 1\n' + + # DST-related corner case: ambiguous time + '5.5.5.5; 2020-10-25 02:38:02; Honeypot hits in 3600 hash-collection seconds:' ' 1; Cumulative honeypot hits for IP over all days: 1\n', + [ dict( address=[{'ip': '1.1.1.1'}], @@ -49,6 +58,14 @@ def cases(self): address=[{'ip': '3.3.3.3'}], time='2016-11-17 09:05:38', ), + dict( + address=[{'ip': '4.4.4.4'}], + time='2020-03-29 01:45:01', + ), + dict( + address=[{'ip': '5.5.5.5'}], + time='2020-10-25 01:38:02', + ), ] ) @@ -71,7 +88,14 @@ def cases(self): '# IP; last_seen; context\n' '1.1.1.1; 2016-10-18 08:35:46; ignores RFC 5321 MAIL FROM/RCPT TO greeting delay values\n' '2.2.2.2; 2016-10-18 08:36:19; ignores RFC 5321 MAIL FROM/RCPT TO greeting delay values\n' - '3.3.3.3; 2016-11-18 08:54:43; ignores RFC 5321 MAIL FROM/RCPT TO greeting delay values\n', + '3.3.3.3; 2016-11-18 08:54:43; ignores RFC 5321 MAIL FROM/RCPT TO greeting delay values\n' + + # DST-related corner case: impossible (non-existent) time + '4.4.4.4; 2020-03-29 02:45:01; ignores RFC 5321 MAIL FROM/RCPT TO greeting delay values\n' + + # DST-related corner case: ambiguous time + '5.5.5.5; 2020-10-25 02:38:02; ignores RFC 5321 MAIL FROM/RCPT TO greeting delay values\n', + [ dict( address=[{'ip': '1.1.1.1'}], @@ -85,6 +109,14 @@ def cases(self): address=[{'ip': '3.3.3.3'}], time='2016-11-18 07:54:43', ), + dict( + address=[{'ip': '4.4.4.4'}], + time='2020-03-29 01:45:01', + ), + dict( + address=[{'ip': '5.5.5.5'}], + time='2020-10-25 01:38:02', + ), ] ) @@ -106,9 +138,16 @@ def cases(self): '#\n' '\n' '# This list was last updated on Thu Oct 20 03:05:05 CDT 2016\n' - '3 11111111111 2016-10-06 07:34:34 11111111111 2016-10-06 14:20:28 1.1.1.1\n' - '4 11111111111 2016-10-06 01:30:10 11111111111 2016-10-06 15:36:10 2.2.2.2\n' - '3 11111111111 2016-10-06 00:58:29 11111111111 2016-11-06 15:36:10 3.3.3.3\n', + '3\t11111111111\t2016-10-06 07:34:34\t11111111111\t2016-10-06 14:20:28\t1.1.1.1\n' + '4\t11111111111\t2016-10-06 01:30:10\t11111111111\t2016-10-06 15:36:10\t2.2.2.2\n' + '3\t11111111111\t2016-10-06 00:58:29\t11111111111\t2016-11-06 15:36:10\t3.3.3.3\n' + + # DST-related corner case: impossible (non-existent) time + '3\t11111111111\txxxx-xx-xx xx:xx:xx\t11111111111\t2020-03-29 02:45:01\t4.4.4.4\n' + + # DST-related corner case: ambiguous time + '3\t11111111111\txxxx-xx-xx xx:xx:xx\t11111111111\t2020-10-25 02:38:02\t5.5.5.5\n', + [ dict( address=[{'ip': '1.1.1.1'}], @@ -122,5 +161,13 @@ def cases(self): address=[{'ip': '3.3.3.3'}], time='2016-11-06 14:36:10', ), + dict( + address=[{'ip': '4.4.4.4'}], + time='2020-03-29 01:45:01', + ), + dict( + address=[{'ip': '5.5.5.5'}], + time='2020-10-25 01:38:02', + 
), ] ) diff --git a/N6Core/n6/tests/utils/test_aggregator.py b/N6Core/n6/tests/utils/test_aggregator.py index 59da7e0..904a106 100644 --- a/N6Core/n6/tests/utils/test_aggregator.py +++ b/N6Core/n6/tests/utils/test_aggregator.py @@ -4,12 +4,13 @@ import datetime import json +import os +import tempfile import unittest from collections import namedtuple from mock import ( MagicMock, - call, patch, ) from unittest_expander import ( @@ -26,7 +27,6 @@ AggregatorDataWrapper, HiFreqEventData, SourceData, - DEFAULT_TIME_TOLERANCE, ) from n6lib.unit_test_helpers import TestCaseMixin @@ -35,8 +35,12 @@ @expand class TestAggregator(TestCaseMixin, unittest.TestCase): + sample_routing_key = "testsource.testchannel" + sample_dbpath = "/tmp/sample_dbfile" sample_time_tolerance = 600 - sample_routing_key = 'testsource.testchannel' + sample_time_tolerance_per_source = { + 'anothersource.andchannel': 1200, + } starting_datetime = datetime.datetime(2017, 6, 1, 10) mocked_utcnow = datetime.datetime(2017, 7, 1, 7, 0, 0) input_callback_proper_msg = ( @@ -54,6 +58,13 @@ class TestAggregator(TestCaseMixin, unittest.TestCase): '"time": "2017-06-01 10:00:00"' '}' ) + mocked_config = { + "aggregator": { + "dbpath": sample_dbpath, + "time_tolerance": str(sample_time_tolerance), + "time_tolerance_per_source": json.dumps(sample_time_tolerance_per_source), + } + } @paramseq @@ -81,9 +92,9 @@ def _ordered_data_to_process(cls): }, ], expected_ids_to_single_events=[ - 'c4ca4238a0b923820dcc509a6f75849b', - 'c81e728d9d4c2f636f067f89cc14862c', - 'eccbc87e4b5ce2fe28308fd9f2a7baf3' + "c4ca4238a0b923820dcc509a6f75849b", + "c81e728d9d4c2f636f067f89cc14862c", + "eccbc87e4b5ce2fe28308fd9f2a7baf3" ], ) @@ -138,28 +149,28 @@ def _ordered_data_to_process(cls): }, ], expected_ids_to_single_events=[ - 'c4ca4238a0b923820dcc509a6f75849b', - 'c81e728d9d4c2f636f067f89cc14862c', - 'd41d8cd98f00b204e9800998ecf8426f', - 'd41d8cd98f00b204e9800998ecf8427d' + "c4ca4238a0b923820dcc509a6f75849b", + "c81e728d9d4c2f636f067f89cc14862c", + "d41d8cd98f00b204e9800998ecf8426f", + "d41d8cd98f00b204e9800998ecf8427d" ], expected_ids_to_suppressed_events={ - 'c4ca4238a0b923820dcc509a6f75849b': { + "c4ca4238a0b923820dcc509a6f75849b": { '_first_time': str(cls.starting_datetime), # the 'until' value is the time of the # excluding the event that triggered # publishing of aggregated events - 'until': str(cls.starting_datetime + datetime.timedelta(hours=2)), + "until": str(cls.starting_datetime + datetime.timedelta(hours=2)), # the event that triggered publishing # of aggregated events is not included # in the count, it will be published # with next group of aggregated events - 'count': 3, + "count": 3, }, - 'c81e728d9d4c2f636f067f89cc14862c': { - '_first_time': str(cls.starting_datetime), - 'until': str(cls.starting_datetime + datetime.timedelta(hours=1)), - 'count': 2, + "c81e728d9d4c2f636f067f89cc14862c": { + "_first_time": str(cls.starting_datetime), + "until": str(cls.starting_datetime + datetime.timedelta(hours=1)), + "count": 2, }, }, ) @@ -470,6 +481,42 @@ def _ordered_data_to_process(cls): }, ) + # The second and fourth event is older than the current time, + # but fits the time tolerance for specific source, so it is + # still aggregated. 
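The `time_tolerance_per_source` setting introduced in these tests lets a single source override the global `time_tolerance` (both expressed in seconds), which is exactly what the case below relies on ('anothersource.andchannel' tolerates 1200 s instead of 600 s). A minimal sketch of the lookup, under assumed names:

    import datetime

    def tolerance_for(source, time_tolerance, time_tolerance_per_source):
        # per-source override in seconds, falling back to the global value
        return time_tolerance_per_source.get(source, time_tolerance)

    def within_tolerance(event_time, source_time, tolerance_seconds):
        return event_time >= source_time - datetime.timedelta(seconds=tolerance_seconds)

    tol = tolerance_for('anothersource.andchannel', 600,
                        {'anothersource.andchannel': 1200})
    assert within_tolerance(datetime.datetime(2017, 6, 1, 10, 40),
                            datetime.datetime(2017, 6, 1, 11, 0),
                            tol)  # 20 minutes back: inside the 1200 s window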
+ yield param( + input_data=[ + { + "id": "1", + "source": "testsource.testchannel", + "_group": "group1", + "time": "2017-06-01 10:00:00", + }, + { + "id": "2", + "source": "testsource.testchannel", + "_group": "group1", + "time": "2017-06-01 09:51:00", # within time tolerance + }, + { + "id": "3", + "source": "anothersource.andchannel", + "_group": "group1", + "time": '2017-06-01 11:00:00', + }, + { + "id": "4", + "source": "anothersource.andchannel", + "_group": "group1", + "time": '2017-06-01 10:40:00', # within time tolerance + }, + ], + expected_ids_to_single_events=[ + "1", + "3", + ], + ) + # The newest event, which triggers publishing of suppressed # events, has next day's date, but it also has to be # greater than the time of a checked group's last @@ -650,6 +697,141 @@ def _unordered_data_to_process(cls): expected_last_event_dt_updates=2, ) + @paramseq + def _unordered_data_to_process_event__buffer_may_contain_suppressed_event_1(cls): + # The first, second, and third event are published. The last event is unique. + # The first has a new group. The second with the same group achieved aggregated time. + # The third event has a new group. The last event is older than the time of the source, + # but it fits in the tolerance range. There is not a high-frequency event of 'group1' + # in the groups dict, but it still remains in the buffer. Because of it, the event is + # neither being published nor aggregated, but the count attribute of related high-frequency + # event in the buffer is incremented. + yield param( + input_data=[ + { + "id": "d41d8cd98f00b204e9800998hg351", + "source": "testsource.testchannel", + "_group": "group1", + "time": "2020-01-01 00:00:00", + }, + { + "id": "d41d8cd98f00b204e9800998hg352", + "source": "testsource.testchannel", + "_group": "group1", + "time": "2020-01-01 23:51:00", + }, + { + "id": "d41d8cd98f00b204e9800998hg353", + "source": "testsource.testchannel", + "_group": "group2", + "time": "2020-01-02 00:01:00", + }, + { + "id": "d41d8cd98f00b204e9800998hg354", + "source": "testsource.testchannel", + "_group": "group1", + "time": "2020-01-02 00:00:00", + }, + ], + expected_ids_to_single_events=[ + "d41d8cd98f00b204e9800998hg351", + "d41d8cd98f00b204e9800998hg352", + "d41d8cd98f00b204e9800998hg353", + ], + expected_last_event_dt_updates=3, + ) + + @paramseq + def _unordered_data_to_process_event__buffer_may_contain_suppressed_event_2(cls): + # The first and third event are published. The second is aggregated. The last event is + # unique. The first has a new group. The second with the same group fits in the aggregated + # range time. The third event has a new group so published. + # The last event is older than the time of the source, but it fits in the tolerance range. + # There is not a high-frequency event of 'group1' in the groups dict, but it still remains + # in the buffer. Because of it, the event is neither being published nor aggregated, + # but the count attribute of related high-frequency event in the buffer is incremented. 
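The comments in these `_unordered_data_to_process_event__buffer_may_contain_suppressed_event_*` variants describe three possible outcomes for an incoming event: aggregate it into an active group, silently count it against a suppressed group that still sits in the buffer, or publish it as a new unique event. A compact sketch of that decision, with assumed names and deliberately simplified state handling (the real logic lives in `n6.utils.aggregator` and also covers buffer expiry and out-of-order errors):

    import datetime

    def _time_of(event):
        return datetime.datetime.strptime(event['time'], '%Y-%m-%d %H:%M:%S')

    class HiFreqSketch(object):
        def __init__(self, event):
            self.payload = event
            self.first = self.until = _time_of(event)
            self.count = 1

    def handle_event(event, groups, buf, source_time, tolerance):
        # `groups`/`buf`: dicts keyed by group name; `tolerance`: timedelta
        group = event['_group']
        if group in groups:                       # active group: aggregate
            hifreq = groups[group]
            hifreq.count += 1
            hifreq.until = max(hifreq.until, _time_of(event))
            return None
        hifreq = buf.get(group)
        if hifreq is not None and _time_of(event) >= source_time - tolerance:
            hifreq.count += 1                     # late duplicate of a suppressed group
            return None
        groups[group] = HiFreqSketch(event)       # new group: publish as unique
        return ('event', event)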
+ yield param( + input_data=[ + { + "id": "d41d8cd98f00b204e9800998hg351", + "source": "testsource.testchannel", + "_group": "group1", + "time": "2020-01-01 22:00:00", + }, + { + "id": "d41d8cd98f00b204e9800998hg352", + "source": "testsource.testchannel", + "_group": "group1", + "time": "2020-01-01 23:51:00", + }, + { + "id": "d41d8cd98f00b204e9800998hg353", + "source": "testsource.testchannel", + "_group": "group2", + "time": "2020-01-02 00:01:00", + }, + { + "id": "d41d8cd98f00b204e9800998hg354", + "source": "testsource.testchannel", + "_group": "group1", + "time": "2020-01-02 00:00:00", + }, + ], + expected_ids_to_single_events=[ + "d41d8cd98f00b204e9800998hg351", + "d41d8cd98f00b204e9800998hg353", + ], + expected_last_event_dt_updates=3, + ) + + @paramseq + def _unordered_data_to_process_event__buffer_may_contain_suppressed_event_3(cls): + # All events are published. The first has a new group. The second with the same group + # achieved aggregated time. The third event has a new group so published. + # The last event has a new group and is older than the time of the source, but it fits + # in the tolerance range. The difference between the case and other two similar + # cases is that it does not fulfill the condition, that a 'group1' hi-freq + # event still remains in the buffer - the buffer has been cleared, because + # the difference between the source time and 'until' time of the last event + # of 'group1' exceeds the tolerance range. So instead of suppressing the + # last 'group1' event and incrementing the hi-freq event's counter, + # the new event is being published. + + yield param( + input_data=[ + { + "id": "d41d8cd98f00b204e9800998hg351", + "source": "testsource.testchannel", + "_group": "group1", + "time": "2020-01-01 00:00:00", + }, + { + "id": "d41d8cd98f00b204e9800998hg352", + "source": "testsource.testchannel", + "_group": "group1", + "time": "2020-01-01 20:51:00", + }, + { + "id": "d41d8cd98f00b204e9800998hg353", + "source": "testsource.testchannel", + "_group": "group2", + "time": "2020-01-02 22:01:00", + }, + { + "id": "d41d8cd98f00b204e9800998hg354", + "source": "testsource.testchannel", + "_group": "group1", + "time": "2020-01-02 22:00:00", + }, + ], + expected_ids_to_single_events=[ + "d41d8cd98f00b204e9800998hg351", + "d41d8cd98f00b204e9800998hg352", + "d41d8cd98f00b204e9800998hg353", + "d41d8cd98f00b204e9800998hg354", + ], + expected_last_event_dt_updates=4, + ) def setUp(self): self._published_events = [] @@ -657,16 +839,20 @@ def setUp(self): aggr_data_wrapper = AggregatorDataWrapper.__new__(AggregatorDataWrapper) aggr_data_wrapper.aggr_data = AggregatorData() aggr_data_wrapper.time_tolerance = self.sample_time_tolerance + aggr_data_wrapper.time_tolerance_per_source = self.sample_time_tolerance_per_source self._mocked_datetime_counter = 0 self._aggregator.db = aggr_data_wrapper - @foreach(_ordered_data_to_process) - def test_processing_ordered_events(self, - input_data, - expected_ids_to_single_events=None, - expected_ids_to_suppressed_events=None, - expected_last_event_dt_updates=None): + @foreach(_ordered_data_to_process + + _unordered_data_to_process_event__buffer_may_contain_suppressed_event_1 + + _unordered_data_to_process_event__buffer_may_contain_suppressed_event_2 + + _unordered_data_to_process_event__buffer_may_contain_suppressed_event_3) + def test_processing_events(self, + input_data, + expected_ids_to_single_events=None, + expected_ids_to_suppressed_events=None, + expected_last_event_dt_updates=None): if expected_last_event_dt_updates is None: 
expected_last_event_dt_updates = len(input_data) @@ -687,55 +873,54 @@ def test_processing_unordered_events(self, if expected_last_event_dt_updates is None: expected_last_event_dt_updates = len(input_data) - with self.assertRaisesRegexp(n6QueueProcessingException, r'\bEvent out of order\b'): + with self.assertRaisesRegexp(n6QueueProcessingException, r"\bEvent out of order\b"): self._test_process_event(input_data, expected_ids_to_single_events, expected_ids_to_suppressed_events, expected_last_event_dt_updates) - @foreach([ param( count=32767, expected_body_content={ - 'source': 'ham.spam', - 'type': 'foobar', - 'count': 32767, + "source": "ham.spam", + "type": "foobar", + "count": 32767, }, - ).label('count not over limit'), + ).label("count not over limit"), param( count=32768, expected_body_content={ - 'source': 'ham.spam', - 'type': 'foobar', - 'count': 32767, - 'count_actual': 32768, + "source": "ham.spam", + "type": "foobar", + "count": 32767, + "count_actual": 32768, }, - ).label('count over limit'), + ).label("count over limit"), ]) def test_publish_event(self, count, expected_body_content): - type_ = 'foobar' + type_ = "foobar" payload = { - 'source': 'ham.spam', - '_group': 'something', - 'count': count, + "source": "ham.spam", + "_group": "something", + "count": count, } data = type_, payload - expected_routing_key = 'foobar.aggregated.ham.spam' + expected_routing_key = "foobar.aggregated.ham.spam" self._aggregator.publish_output = MagicMock() self._aggregator.publish_event(data) self.assertEqual(len(self._aggregator.publish_output.mock_calls), 1) publish_output_kwargs = self._aggregator.publish_output.mock_calls[0][-1] - self.assertEqual(set(publish_output_kwargs.iterkeys()), {'routing_key', 'body'}) - self.assertEqual(publish_output_kwargs['routing_key'], expected_routing_key) - self.assertJsonEqual(publish_output_kwargs['body'], expected_body_content) + self.assertEqual(set(publish_output_kwargs.iterkeys()), {"routing_key", "body"}) + self.assertEqual(publish_output_kwargs["routing_key"], expected_routing_key) + self.assertJsonEqual(publish_output_kwargs["body"], expected_body_content) def test_input_callback(self): - with patch.object(Aggregator, 'process_event') as process_event_mock: - self._aggregator.input_callback('testsource.testchannel', + with patch.object(Aggregator, "process_event") as process_event_mock: + self._aggregator.input_callback("testsource.testchannel", self.input_callback_proper_msg, self.sample_routing_key) process_event_mock.assert_called_with(json.loads(self.input_callback_proper_msg)) @@ -743,11 +928,35 @@ def test_input_callback(self): def test_input_callback_with__group_missing(self): with self.assertRaisesRegexp(n6QueueProcessingException, r"\bmissing '_group' field\b"): - with patch.object(Aggregator, 'process_event'): - self._aggregator.input_callback('testsource.testchannel', + with patch.object(Aggregator, "process_event"): + self._aggregator.input_callback("testsource.testchannel", self.input_callback_msg_no__group, self.sample_routing_key) + @patch("n6.base.queue.QueuedBase.__init__", autospec=True) + @patch("n6lib.config.Config._load_n6_config_files", return_value=mocked_config) + def test_init_class(self, config_mock, init_mock): + with tempfile.NamedTemporaryFile() as fp: + config_mock.return_value["aggregator"]["dbpath"] = fp.name + self._aggregator.__init__() + + # store dir does not exist + with tempfile.NamedTemporaryFile() as fp, \ + self.assertRaisesRegexp(Exception, r"store dir does not exist, stop aggregator"): + 
config_mock.return_value["aggregator"]["dbpath"] = os.path.join(fp.name, + "nonexistent_file") + self._aggregator.__init__() + + # store directory exists, but it has no rights to write + with tempfile.NamedTemporaryFile() as fp, \ + patch("os.access", return_value=None), \ + self.assertRaisesRegexp(Exception, + r"stop aggregator, remember to set the rights for user, " + r"which runs aggregator"): + config_mock.return_value["aggregator"]["dbpath"] = fp.name + self._aggregator.__init__() + + def _mocked_utcnow_method(self): """ Helper method used as a side effect of a mocked @@ -773,8 +982,8 @@ def _test_process_event(self, """ expected_events = [] - with patch('n6.utils.aggregator.datetime') as datetime_mock,\ - patch.object(Aggregator, 'publish_output') as publish_output_mock: + with patch("n6.utils.aggregator.datetime") as datetime_mock,\ + patch.object(Aggregator, "publish_output") as publish_output_mock: datetime_mock.datetime.utcnow.side_effect = self._mocked_utcnow_method datetime_mock.datetime.side_effect = (lambda *args, **kw: datetime.datetime(*args, **kw)) @@ -783,15 +992,15 @@ def _test_process_event(self, datetime_mock.timedelta.side_effect = (lambda *args, **kw: datetime.timedelta(*args, **kw)) for event in input_data: - if expected_ids_to_single_events and event['id'] in expected_ids_to_single_events: - expected_events.append(self._get_expected_event_from_input_data(event.copy(), - 'event')) - if (expected_ids_to_suppressed_events and - event['id'] in expected_ids_to_suppressed_events): + if expected_ids_to_single_events and event["id"] in expected_ids_to_single_events: + expected_events.append( + self._get_expected_event_from_input_data(event.copy(), "event")) + if (expected_ids_to_suppressed_events + and event["id"] in expected_ids_to_suppressed_events): new_suppressed = event.copy() - new_suppressed.update(expected_ids_to_suppressed_events[event['id']]) - expected_events.append(self._get_expected_event_from_input_data(new_suppressed, - 'suppressed')) + new_suppressed.update(expected_ids_to_suppressed_events[event["id"]]) + expected_events.append( + self._get_expected_event_from_input_data(new_suppressed, "suppressed")) self._aggregator.process_event(event) events_from_calls = self._get_events_from_calls(publish_output_mock.call_args_list) self.assertItemsEqual(expected_events, events_from_calls) @@ -818,12 +1027,12 @@ def _get_expected_event_from_input_data(input_data, type_): an event-like dict, that is expected to be created during the call to `process_event()`. 
""" - input_data.update({'type': type_}) + input_data.update({"type": type_}) # final events do not contain field `_group` - del input_data['_group'] + del input_data["_group"] return { - 'body': input_data, - 'routing_key': '{type}.aggregated.testsource.testchannel'.format(type=type_), + "body": input_data, + "routing_key": "{}.aggregated.{}".format(type_, input_data['source']), } @@ -836,21 +1045,22 @@ def _get_events_from_calls(call_args_list): """ events_from_calls = [] for _, call_args in call_args_list: - events_from_calls.append({'body': json.loads(call_args['body']), - 'routing_key': call_args['routing_key']}) + events_from_calls.append({"body": json.loads(call_args["body"]), + "routing_key": call_args["routing_key"]}) return events_from_calls - @expand class TestAggregatorDataWrapper(unittest.TestCase): + tested_source_channel = "testsource.testchannel" + other_source_channel = "othersource.otherchannel" + sample_db_path = "/tmp/example.pickle" sample_time_tolerance = 600 - sample_db_path = '/tmp/example.pickle' + sample_time_tolerance_per_source = { + other_source_channel: 1200, + } mocked_utcnow = datetime.datetime(2017, 7, 1, 12, 0, 0) - - tested_source_channel = 'testsource.testchannel' - other_source_channel = 'othersource.otherchannel' sources_tested_for_inactivity = [tested_source_channel, other_source_channel] group1_expected_suppressed_payload = dict( @@ -863,7 +1073,7 @@ class TestAggregatorDataWrapper(unittest.TestCase): until="2017-06-01 09:00:00", ) group1_expected_suppressed_event = ( - 'suppressed', + "suppressed", group1_expected_suppressed_payload, ) group2_expected_suppressed_payload = dict( @@ -876,11 +1086,11 @@ class TestAggregatorDataWrapper(unittest.TestCase): until="2017-06-01 10:00:00", ) group2_expected_suppressed_event = ( - 'suppressed', + "suppressed", group2_expected_suppressed_payload, ) group3_expected_suppressed_event = ( - 'suppressed', + "suppressed", None, ) @@ -897,8 +1107,8 @@ class TestAggregatorDataWrapper(unittest.TestCase): # 'msg_index_to_payload': an index of element in the `messages` # param, a dict, that is expected to be equal to a 'payload' # attribute of the `HiFreqEventData` instance. - ExpectedHiFreqData = namedtuple('ExpectedHiFreqData', ('name', 'until', 'first', 'count', - 'msg_index_to_payload')) + ExpectedHiFreqData = namedtuple( + "ExpectedHiFreqData", ("name", "until", "first", "count", "msg_index_to_payload")) @paramseq @@ -907,7 +1117,7 @@ def _test_process_new_message_data(cls): messages=[ { "id": "c4ca4238a0b923820dcc509a6f75849b", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group1", "time": "2017-06-01 10:00:00", }, @@ -915,7 +1125,7 @@ def _test_process_new_message_data(cls): expected_source_time=datetime.datetime(2017, 6, 1, 10), expected_groups=[ cls.ExpectedHiFreqData( - name='group1', + name="group1", until=datetime.datetime(2017, 6, 1, 10), first=datetime.datetime(2017, 6, 1, 10), count=1, @@ -924,17 +1134,46 @@ def _test_process_new_message_data(cls): ], ) + # Second message fits to specific `time_tolerance` parameter + # for the source. 
yield param( messages=[ { "id": "c4ca4238a0b923820dcc509a6f75849b", - "source": "testsource.testchannel", + "source": cls.other_source_channel, "_group": "group1", "time": "2017-06-01 10:00:00", }, { "id": "c4ca4238a0b923820dcc509a6f75850c", - "source": "testsource.testchannel", + "source": cls.other_source_channel, + "_group": "group1", + "time": "2017-06-01 09:40:00", + }, + ], + expected_source_time=datetime.datetime(2017, 6, 1, 10), + expected_groups=[ + cls.ExpectedHiFreqData( + name="group1", + until=datetime.datetime(2017, 6, 1, 10), + first=datetime.datetime(2017, 6, 1, 10), + count=2, + msg_index_to_payload=0, + ), + ], + ) + + yield param( + messages=[ + { + "id": "c4ca4238a0b923820dcc509a6f75849b", + "source": cls.tested_source_channel, + "_group": "group1", + "time": "2017-06-01 10:00:00", + }, + { + "id": "c4ca4238a0b923820dcc509a6f75850c", + "source": cls.tested_source_channel, "_group": "group2", "time": "2017-06-01 12:00:00", }, @@ -942,14 +1181,14 @@ def _test_process_new_message_data(cls): expected_source_time=datetime.datetime(2017, 6, 1, 12), expected_groups=[ cls.ExpectedHiFreqData( - name='group1', + name="group1", until=datetime.datetime(2017, 6, 1, 10), first=datetime.datetime(2017, 6, 1, 10), count=1, msg_index_to_payload=0, ), cls.ExpectedHiFreqData( - name='group2', + name="group2", until=datetime.datetime(2017, 6, 1, 12), first=datetime.datetime(2017, 6, 1, 12), count=1, @@ -962,31 +1201,31 @@ def _test_process_new_message_data(cls): messages=[ { "id": "c4ca4238a0b923820dcc509a6f75849b", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group1", "time": "2017-06-01 10:00:00", }, { "id": "c4ca4238a0b923820dcc509a6f75850b", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group1", "time": "2017-06-01 11:00:00", }, { "id": "c4ca4238a0b923820dcc509a6f75850c", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group2", "time": "2017-06-01 12:00:00", }, { "id": "c4ca4238a0b923820dcc509a6f75851c", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group2", "time": "2017-06-01 13:00:00", }, { "id": "c4ca4238a0b923820dcc509a6f75852b", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group1", "time": "2017-06-01 14:00:00", }, @@ -994,14 +1233,14 @@ def _test_process_new_message_data(cls): expected_source_time=datetime.datetime(2017, 6, 1, 14), expected_groups=[ cls.ExpectedHiFreqData( - name='group1', + name="group1", until=datetime.datetime(2017, 6, 1, 14), first=datetime.datetime(2017, 6, 1, 10), count=3, msg_index_to_payload=0, ), cls.ExpectedHiFreqData( - name='group2', + name="group2", until=datetime.datetime(2017, 6, 1, 13), first=datetime.datetime(2017, 6, 1, 12), count=2, @@ -1022,43 +1261,43 @@ def _test_process_new_message_data(cls): messages=[ { "id": "c4ca4238a0b923820dcc509a6f75849b", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group1", "time": "2017-06-01 10:00:00", }, { "id": "c4ca4238a0b923820dcc509a6f75851b", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group2", "time": "2017-06-01 10:15:00", }, { "id": "c4ca4238a0b923820dcc509a6f75751c", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group2", "time": "2017-06-01 10:30:00", }, { "id": "c4ca4238a0b923820dcc509a6f75850b", - "source": "testsource.testchannel", + "source": 
cls.tested_source_channel, "_group": "group1", "time": "2017-06-01 11:00:00", }, { "id": "c4ca4238a0b923820dcc509a6f75850c", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group1", "time": "2017-06-01 12:00:00", }, { "id": "c4ca4238a0b923820dcc509a6f75851c", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group1", "time": "2017-06-01 13:00:00", }, { "id": "c4ca4238a0b923820dcc509a6f75852b", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group1", "time": "2017-06-02 14:00:00", }, @@ -1066,14 +1305,14 @@ def _test_process_new_message_data(cls): expected_source_time=datetime.datetime(2017, 6, 2, 14), expected_groups=[ cls.ExpectedHiFreqData( - name='group1', + name="group1", until=datetime.datetime(2017, 6, 2, 14), first=datetime.datetime(2017, 6, 2, 14), count=1, msg_index_to_payload=6, ), cls.ExpectedHiFreqData( - name='group2', + name="group2", until=datetime.datetime(2017, 6, 1, 10, 30), first=datetime.datetime(2017, 6, 1, 10, 15), count=2, @@ -1082,7 +1321,7 @@ def _test_process_new_message_data(cls): ], expected_buffers=[ cls.ExpectedHiFreqData( - name='group1', + name="group1", until=datetime.datetime(2017, 6, 1, 13), first=datetime.datetime(2017, 6, 1, 10), count=4, @@ -1093,7 +1332,7 @@ def _test_process_new_message_data(cls): # Messages of the "group1" are aggregated until the message # newer by more than 12 hours (by default) is processed. - # It triggers publishing of aggregated + # It triggers publishing of aggregated # messages, and a `HiFreqEventData` for "group1" events # is replaced by the new instance. # *Important*: aggregated messages of different groups @@ -1104,43 +1343,43 @@ def _test_process_new_message_data(cls): messages=[ { "id": "c4ca4238a0b923820dcc509a6f75849b", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group1", "time": "2017-06-01 07:00:00", }, { "id": "c4ca4238a0b923820dcc509a6f75850b", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group1", "time": "2017-06-01 08:00:00", }, { "id": "c4ca4238a0b923820dcc509a6f75751b", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group2", "time": "2017-06-01 08:10:00", }, { "id": "c4ca4238a0b923820dcc509a6f75851b", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group2", "time": "2017-06-01 08:30:00", }, { "id": "c4ca4238a0b923820dcc509a6f75850c", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group1", "time": "2017-06-01 09:00:00", }, { "id": "c4ca4238a0b923820dcc509a6f75851c", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group1", "time": "2017-06-01 10:00:00", }, { "id": "c4ca4238a0b923820dcc509a6f75852b", - "source": "testsource.testchannel", + "source": cls.tested_source_channel, "_group": "group1", "time": "2017-06-01 22:00:01", }, @@ -1148,14 +1387,14 @@ def _test_process_new_message_data(cls): expected_source_time=datetime.datetime(2017, 6, 1, 22, 0, 1), expected_groups=[ cls.ExpectedHiFreqData( - name='group1', + name="group1", until=datetime.datetime(2017, 6, 1, 22, 0, 1), first=datetime.datetime(2017, 6, 1, 22, 0, 1), count=1, msg_index_to_payload=6, ), cls.ExpectedHiFreqData( - name='group2', + name="group2", until=datetime.datetime(2017, 6, 1, 8, 30), first=datetime.datetime(2017, 6, 1, 8, 10), count=2, @@ -1164,7 +1403,7 @@ def 
_test_process_new_message_data(cls):
             ],
             expected_buffers=[
                 cls.ExpectedHiFreqData(
-                    name='group1',
+                    name="group1",
                     until=datetime.datetime(2017, 6, 1, 10),
                     first=datetime.datetime(2017, 6, 1, 7),
                     count=4,
@@ -1260,13 +1499,64 @@ def _test_generate_suppressed_events_after_timeout_data(cls):
             expected_inactive_sources=[],
         )

     def setUp(self):
-        self._aggregator_data_wrapper = AggregatorDataWrapper.__new__(AggregatorDataWrapper)
-        self._aggregator_data_wrapper.time_tolerance = self.sample_time_tolerance
-        self._aggregator_data_wrapper.dbpath = self.sample_db_path
-        self._aggregator_data_wrapper.aggr_data = AggregatorData()
+        self._adw = AggregatorDataWrapper.__new__(AggregatorDataWrapper)
+        self._adw.time_tolerance = self.sample_time_tolerance
+        self._adw.time_tolerance_per_source = self.sample_time_tolerance_per_source
+        self._adw.dbpath = self.sample_db_path
+        self._adw.aggr_data = AggregatorData()
+
+    def test_store_restore_state(self):
+        """
+        Check that the state pickled and saved to a temporary file is
+        valid, by comparing it with the state restored from that file.
+        """
+        message = {
+            "id": "c4ca4238a0b923820dcc509a6f75852b",
+            "source": self.tested_source_channel,
+            "_group": "group1",
+            "time": "2017-06-01 22:10:00",
+        }
+
+        expected_stored_message = {
+            "id": "c4ca4238a0b923820dcc509a6f75852b",
+            "source": self.tested_source_channel,
+            "_group": "group1",
+            "time": "2017-06-01 22:10:00",
+        }
+        self._adw.process_new_message(message)
+        with tempfile.NamedTemporaryFile() as fp:
+            self._adw.dbpath = fp.name
+            # store the state
+            self._adw.store_state()
+            # delete the attribute holding the stored sources
+            del self._adw.aggr_data
+            # check the state restored from the existing file
+            self._adw.restore_state()
+            self.assertDictEqual(
+                self._adw.aggr_data.sources[self.tested_source_channel].groups[
+                    message["_group"]].payload,
+                expected_stored_message)
+            # assert the given path exists
+            self.assertTrue(self._adw.dbpath)
+        # assert an exception is raised when trying to store the state
+        # while there is no write access to the given path; first, make
+        # sure there actually is no such access
+        tmp_db_path = "/root/example.pickle"
+        if not os.access(tmp_db_path, os.W_OK):
+            with patch.object(self._adw, "dbpath", tmp_db_path):
+                self.assertRaises(IOError, self._adw.store_state)
+        # assert an exception is raised when trying to restore the
+        # state from a nonexistent file; first, safely create a
+        # temporary file, then let it be closed and removed, so the
+        # path most likely does not exist
+        with tempfile.NamedTemporaryFile() as fp:
+            tmp_db_path = fp.name
+        if not os.path.exists(tmp_db_path):
+            with patch.object(self._adw, "dbpath", tmp_db_path), \
+                    self.assertRaisesRegexp(IOError, r"No such file or directory"):
+                self._adw.restore_state()

     @foreach(_test_process_new_message_data)
     def test_process_new_message(self, messages, expected_source_time,
@@ -1276,7 +1566,8 @@ def test_process_new_message(self, messages, expected_source_time,
         and `buffer` attributes after processing of consecutive messages.
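         (Roughly, as exercised by the parametrized cases above: `groups`
         is expected to hold the currently aggregated `HiFreqEventData`
         instance of each group, while `buffer` is expected to hold a
         group's previous, already closed generation once the aggregation
         timeout has been exceeded.)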
""" - with patch('n6.utils.aggregator.datetime') as datetime_mock: + test_sources = [] + with patch("n6.utils.aggregator.datetime") as datetime_mock: datetime_mock.datetime.utcnow.return_value = self.mocked_utcnow datetime_mock.datetime.side_effect = (lambda *args, **kw: datetime.datetime(*args, **kw)) @@ -1287,40 +1578,38 @@ def test_process_new_message(self, messages, expected_source_time, # actual calls for msg in messages: - self._aggregator_data_wrapper.process_new_message(msg) - - self.assertIn( - 'testsource.testchannel', - self._aggregator_data_wrapper.aggr_data.sources) - - # assertions for the source - created_source = self._aggregator_data_wrapper.aggr_data.sources[ - 'testsource.testchannel'] - self.assertEqual(created_source.last_event, self.mocked_utcnow) - self.assertEqual(created_source.time, expected_source_time) - self.assertEqual(len(expected_groups), len(created_source.groups)) - - # assertions for groups - for expected_group in expected_groups: - self.assertIn(expected_group.name, created_source.groups) - created_group = created_source.groups[expected_group.name] - self.assertIsInstance(created_group, HiFreqEventData) - self.assertEqual(expected_group.until, created_group.until) - self.assertEqual(expected_group.first, created_group.first) - self.assertEqual(expected_group.count, created_group.count) - self.assertEqual( - messages[expected_group.msg_index_to_payload], - created_group.payload) - # assertions for potential buffers - if expected_buffers: - for expected_buffer in expected_buffers: - created_buffer = created_source.buffer[expected_buffer.name] - self.assertEqual(expected_buffer.until, created_buffer.until) - self.assertEqual(expected_buffer.first, created_buffer.first) - self.assertEqual(expected_buffer.count, created_buffer.count) - self.assertEqual( - messages[expected_buffer.msg_index_to_payload], - created_buffer.payload) + self._adw.process_new_message(msg) + if msg["source"] not in test_sources: + test_sources.append(msg["source"]) + + for test_source in test_sources: + # assertions for the source + created_source = self._adw.aggr_data.sources[test_source] + self.assertEqual(created_source.last_event, self.mocked_utcnow) + self.assertEqual(created_source.time, expected_source_time) + self.assertEqual(len(expected_groups), len(created_source.groups)) + + # assertions for groups + for expected_group in expected_groups: + self.assertIn(expected_group.name, created_source.groups) + created_group = created_source.groups[expected_group.name] + self.assertIsInstance(created_group, HiFreqEventData) + self.assertEqual(expected_group.until, created_group.until) + self.assertEqual(expected_group.first, created_group.first) + self.assertEqual(expected_group.count, created_group.count) + self.assertEqual( + messages[expected_group.msg_index_to_payload], + created_group.payload) + # assertions for potential buffers + if expected_buffers: + for expected_buffer in expected_buffers: + created_buffer = created_source.buffer[expected_buffer.name] + self.assertEqual(expected_buffer.until, created_buffer.until) + self.assertEqual(expected_buffer.first, created_buffer.first) + self.assertEqual(expected_buffer.count, created_buffer.count) + self.assertEqual( + messages[expected_buffer.msg_index_to_payload], + created_buffer.payload) @foreach(_test_generate_suppressed_events_for_source_data) @@ -1336,44 +1625,41 @@ def test_generate_suppressed_events_for_source(self, new_message, expected_resul another_source_data = self._get_source_data_for_suppressed_events_tests( 
self.other_source_channel) hifreq_new_data = HiFreqEventData(new_message) - tested_source_data.groups['group1'] = hifreq_new_data + tested_source_data.groups["group1"] = hifreq_new_data # `time` attribute should be equal to last message's tested_source_data.time = datetime.datetime.strptime( - new_message['time'], '%Y-%m-%d %H:%M:%S') + new_message["time"], "%Y-%m-%d %H:%M:%S") another_source_data.time = datetime.datetime(2017, 6, 1, 10) # `last_event` attribute is not relevant for the test tested_source_data.last_event = datetime.datetime(2017, 6, 2, 20) another_source_data.last_event = datetime.datetime(2017, 6, 2, 20) - self._aggregator_data_wrapper.aggr_data.sources[ - self.tested_source_channel] = tested_source_data - self._aggregator_data_wrapper.aggr_data.sources[ - 'othersource.otherchannel'] = another_source_data + self._adw.aggr_data.sources[self.tested_source_channel] = tested_source_data + self._adw.aggr_data.sources[self.other_source_channel] = another_source_data - generated_events = list( - self._aggregator_data_wrapper.generate_suppresed_events_for_source(new_message)) + generated_events = list(self._adw.generate_suppresed_events_for_source(new_message)) self.assertItemsEqual(expected_results, generated_events) # new `HiFreqEventData` object of the "group1" should be # in `groups` attribute, but not in `buffer` - suppressed # event of the "group1" should have been generated - self.assertIn('group1', self._aggregator_data_wrapper.aggr_data.sources[ - self.tested_source_channel].groups) - self.assertNotIn('group1', self._aggregator_data_wrapper.aggr_data.sources[ - self.tested_source_channel].buffer) + self.assertIn( + "group1", self._adw.aggr_data.sources[self.tested_source_channel].groups) + self.assertNotIn( + "group1", self._adw.aggr_data.sources[self.tested_source_channel].buffer) # if aggregated events of the "group2" were generated, then # there should not be any `HiFreqEventData` objects of this # group in `groups` nor `buffer` attribute if self.group2_expected_suppressed_event in expected_results: - self.assertNotIn('group2', self._aggregator_data_wrapper.aggr_data.sources[ - self.tested_source_channel].groups) - self.assertNotIn('group2', self._aggregator_data_wrapper.aggr_data.sources[ - self.tested_source_channel].buffer) + self.assertNotIn( + "group2", self._adw.aggr_data.sources[self.tested_source_channel].groups) + self.assertNotIn( + "group2", self._adw.aggr_data.sources[self.tested_source_channel].buffer) # check if the other source's elements, for which suppressed # events were not generated, are unchanged - self.assertIn('group2', self._aggregator_data_wrapper.aggr_data.sources[ - 'othersource.otherchannel'].groups) - self.assertIn('group1', self._aggregator_data_wrapper.aggr_data.sources[ - 'othersource.otherchannel'].buffer) + self.assertIn( + "group2", self._adw.aggr_data.sources[self.other_source_channel].groups) + self.assertIn( + "group1", self._adw.aggr_data.sources[self.other_source_channel].buffer) @foreach(_test_generate_suppressed_events_after_timeout_data) @@ -1394,14 +1680,12 @@ def test_generate_suppressed_events_after_timeout(self, another_source_data.time = datetime.datetime(2017, 6, 1, 10) tested_source_data.last_event = datetime.datetime(2017, 6, 1, 14) another_source_data.last_event = datetime.datetime(2017, 6, 1, 20) - self._aggregator_data_wrapper.aggr_data.sources[ - self.tested_source_channel] = tested_source_data - self._aggregator_data_wrapper.aggr_data.sources[ - 'othersource.otherchannel'] = another_source_data + 
self._adw.aggr_data.sources[self.tested_source_channel] = tested_source_data + self._adw.aggr_data.sources[self.other_source_channel] = another_source_data source_to_expected_events = self._get_source_to_expected_events_mapping() - with patch('n6.utils.aggregator.datetime') as datetime_mock: + with patch("n6.utils.aggregator.datetime") as datetime_mock: datetime_mock.datetime.utcnow.return_value = mocked_utcnow datetime_mock.datetime.side_effect = (lambda *args, **kw: datetime.datetime(*args, **kw)) @@ -1410,30 +1694,28 @@ def test_generate_suppressed_events_after_timeout(self, datetime_mock.timedelta.side_effect = (lambda *args, **kw: datetime.timedelta(*args, **kw)) # actual call - generated_events = list( - self._aggregator_data_wrapper.generate_suppresed_events_after_timeout()) - expected_events = [event for source, vals in source_to_expected_events.iteritems() if - source in expected_inactive_sources for event in vals] + generated_events = list(self._adw.generate_suppresed_events_after_timeout()) + expected_events = [event for source, vals in source_to_expected_events.iteritems() + if source in expected_inactive_sources for event in vals] self.assertEqual(expected_events, generated_events) for source in self.sources_tested_for_inactivity: # check if `groups` and `buffers` were cleared # for inactive sources if source in expected_inactive_sources: - self.assertFalse( - self._aggregator_data_wrapper.aggr_data.sources[source].groups) - self.assertFalse( - self._aggregator_data_wrapper.aggr_data.sources[source].buffer) + self.assertFalse(self._adw.aggr_data.sources[source].groups) + self.assertFalse(self._adw.aggr_data.sources[source].buffer) # make sure `groups` and `buffers` were intact # for still active sources else: - self.assertTrue(self._aggregator_data_wrapper.aggr_data.sources[source].groups) - self.assertTrue(self._aggregator_data_wrapper.aggr_data.sources[source].buffer) + self.assertTrue(self._adw.aggr_data.sources[source].groups) + self.assertTrue(self._adw.aggr_data.sources[source].buffer) # helper methods def _get_source_data_for_suppressed_events_tests(self, source_name): - source_data = SourceData(self.sample_time_tolerance) + source_data = SourceData(self._get_time_tolerance_from_source(source_name)) + group1_hifreq_buffered_data = HiFreqEventData.__new__(HiFreqEventData) group1_hifreq_buffered_data.payload = { "id": "c4ca4238a0b923820dcc509a6f75849b", @@ -1444,7 +1726,8 @@ def _get_source_data_for_suppressed_events_tests(self, source_name): group1_hifreq_buffered_data.first = datetime.datetime(2017, 6, 1, 7) group1_hifreq_buffered_data.until = datetime.datetime(2017, 6, 1, 9) group1_hifreq_buffered_data.count = 5 - source_data.buffer['group1'] = group1_hifreq_buffered_data + source_data.buffer["group1"] = group1_hifreq_buffered_data + group2_hifreq_data = HiFreqEventData.__new__(HiFreqEventData) group2_hifreq_data.payload = { "id": "c4ca4238a0b923820dcc509a6f75849c", @@ -1455,7 +1738,8 @@ def _get_source_data_for_suppressed_events_tests(self, source_name): group2_hifreq_data.until = datetime.datetime(2017, 6, 1, 10) group2_hifreq_data.first = datetime.datetime(2017, 6, 1, 8) group2_hifreq_data.count = 4 - source_data.groups['group2'] = group2_hifreq_data + source_data.groups["group2"] = group2_hifreq_data + group3_payload = { "id": "c4ca4238a0b923820dcc509a6f75849d", "source": source_name, @@ -1463,17 +1747,17 @@ def _get_source_data_for_suppressed_events_tests(self, source_name): "time": "2017-06-01 07:30:00", } group3_hifreq_data = HiFreqEventData(group3_payload) - 
source_data.groups['group3'] = group3_hifreq_data - return source_data + source_data.groups["group3"] = group3_hifreq_data + return source_data def _get_source_to_expected_events_mapping(self): group1_other_source_payload = self.group1_expected_suppressed_payload.copy() - group1_other_source_payload['source'] = self.other_source_channel - group1_other_source_event = ('suppressed', group1_other_source_payload) + group1_other_source_payload["source"] = self.other_source_channel + group1_other_source_event = ("suppressed", group1_other_source_payload) group2_other_source_payload = self.group2_expected_suppressed_payload.copy() - group2_other_source_payload['source'] = self.other_source_channel - group2_other_source_event = ('suppressed', group2_other_source_payload) + group2_other_source_payload["source"] = self.other_source_channel + group2_other_source_event = ("suppressed", group2_other_source_payload) group3_other_source_event = self.group3_expected_suppressed_event return { self.tested_source_channel: [ @@ -1488,19 +1772,26 @@ def _get_source_to_expected_events_mapping(self): ], } + def _get_time_tolerance_from_source(self, source): + return self.sample_time_tolerance_per_source.get(source) or self.sample_time_tolerance class TestAggregatorData(unittest.TestCase): - sample_source = 'testsource.testchannel' - sample_other_source = 'othersource.otherchannel' + sample_source = "testsource.testchannel" + sample_other_source = "othersource.otherchannel" + sample_group = "group1" + sample_other_group = "group2" sample_time_tolerance = 500 + sample_time_tolerance_per_source = { + sample_other_source: 1000, + } groups_hifreq_data = HiFreqEventData( { "id": "c4ca4238a0b923820dcc509a6f75849c", "source": sample_source, - "_group": "group1", + "_group": sample_group, "time": "2017-06-02 12:00:00", } ) @@ -1508,61 +1799,62 @@ class TestAggregatorData(unittest.TestCase): { "id": "c4ca4238a0b923820dcc509a6f75849b", "source": sample_source, - "_group": "group1", + "_group": sample_group, "time": "2017-06-01 10:00:00", } ) - buffer_hifreq_data.count = 4 - new_event_new_source_payload = { - "id": "c4ca4238a0b923820dcc509a6f75851d", - "source": sample_other_source, - "_group": "group1", - "time": "2017-05-01 12:00:00", - } - new_event_existing_source_payload = { - "id": "c4ca4238a0b923820dcc509a6f75860f", - "source": sample_source, - "_group": "group2", - "time": "2017-05-01 12:00:00", - } - def setUp(self): self._aggregator_data = AggregatorData() - self._sample_source_data = SourceData(500) + self._sample_source_data = SourceData(self.sample_time_tolerance) self._sample_source_data.time = datetime.datetime(2017, 6, 2, 12) self._sample_source_data.last_event = datetime.datetime(2017, 6, 2, 13) - self._sample_source_data.groups['group1'] = self.groups_hifreq_data - self._sample_source_data.buffer['group1'] = self.buffer_hifreq_data + self._sample_source_data.groups[self.sample_group] = self.groups_hifreq_data + self._sample_source_data.buffer[self.sample_group] = self.buffer_hifreq_data self._aggregator_data.sources[self.sample_source] = self._sample_source_data - def test_create_new_source_data(self): source_data = self._aggregator_data.get_or_create_sourcedata( - self.new_event_new_source_payload) + { + "id": "c4ca4238a0b923820dcc509a6f75851d", + "source": self.sample_other_source, + "_group": self.sample_group, + "time": "2017-05-01 12:00:00", + }, + self._get_time_tolerance_from_source(self.sample_other_source)) self.assertIsInstance(source_data, SourceData) self.assertEqual(source_data.time, None) 
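        # (a freshly created SourceData is expected to start out empty:
        # no source time, no last event, no groups, no buffered data)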
self.assertEqual(source_data.last_event, None) self.assertFalse(source_data.groups) self.assertFalse(source_data.buffer) - self.assertEqual(source_data.time_tolerance, - datetime.timedelta(seconds=DEFAULT_TIME_TOLERANCE)) + self.assertEqual( + source_data.time_tolerance, + datetime.timedelta(seconds=self._get_time_tolerance_from_source( + self.sample_other_source))) self.assertIs(source_data, self._aggregator_data.sources[self.sample_other_source]) - def test_get_existing_source_data(self): source_data = self._aggregator_data.get_or_create_sourcedata( - self.new_event_existing_source_payload, - time_tolerance=self.sample_time_tolerance) + { + "id": "c4ca4238a0b923820dcc509a6f75860f", + "source": self.sample_source, + "_group": self.sample_other_group, + "time": "2017-05-01 12:00:00", + }, + self._get_time_tolerance_from_source(self.sample_other_source)) self.assertIsInstance(source_data, SourceData) self.assertEqual(source_data.time, self._sample_source_data.time) self.assertEqual(source_data.last_event, self._sample_source_data.last_event) - self.assertEqual(source_data.time_tolerance, - datetime.timedelta(seconds=self.sample_time_tolerance)) - self.assertIn('group1', source_data.groups) - self.assertIn('group1', source_data.buffer) + self.assertEqual( + source_data.time_tolerance, + datetime.timedelta(seconds=self._get_time_tolerance_from_source(self.sample_source))) + self.assertIn(self.sample_group, source_data.groups) + self.assertIn(self.sample_group, source_data.buffer) self.assertEqual(1, len(source_data.groups)) self.assertEqual(1, len(source_data.buffer)) - self.assertEqual(self.groups_hifreq_data, source_data.groups['group1']) - self.assertEqual(self.buffer_hifreq_data, source_data.buffer['group1']) + self.assertEqual(self.groups_hifreq_data, source_data.groups[self.sample_group]) + self.assertEqual(self.buffer_hifreq_data, source_data.buffer[self.sample_group]) self.assertIs(source_data, self._aggregator_data.sources[self.sample_source]) + + def _get_time_tolerance_from_source(self, source): + return self.sample_time_tolerance_per_source.get(source) or self.sample_time_tolerance diff --git a/N6Core/n6/tests/utils/test_enrich.py b/N6Core/n6/tests/utils/test_enrich.py index 1b2f85a..07e4295 100644 --- a/N6Core/n6/tests/utils/test_enrich.py +++ b/N6Core/n6/tests/utils/test_enrich.py @@ -1,9 +1,10 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2013-2018 NASK. All rights reserved. +# Copyright (c) 2013-2020 NASK. All rights reserved. 
 import datetime
 import hashlib
+import os
 import unittest

 import iptools
@@ -16,32 +17,47 @@ from n6lib.unit_test_helpers import TestCaseMixin


-class MockConfig(object):
-
-    config = {
-        'enrich': {
-            'dnshost': '8.8.8.8',
-            'dnsport': '53',
-            'geoippath': '/usr/share/GeoIP',
-            'asndatabasefilename': 'GeoLite2-ASN.mmdb',
-            'citydatabasefilename': 'GeoLite2-City.mmdb',
-        },
-        'rabbitmq': {
-            'host': 'localhost',
-            'port': 5671,
-            'ssl': 0,
-            'heartbeat_interval': 10,
-        },
-    }
+DEFAULT_GEO_IP_DB_PATH = '/usr/share/GeoIP'
+DEFAULT_ASN_DB_FILENAME = 'GeoLite2-ASN.mmdb'
+DEFAULT_CC_DB_FILENAME = 'GeoLite2-City.mmdb'
+
+
+class MockASNReader(mock.Mock):
+
+    asn = mock.Mock(return_value=mock.MagicMock(autonomous_system_number="1234"))
+
+
+class MockCCReader(mock.Mock):
+
+    city = mock.Mock(return_value=mock.MagicMock(country=mock.MagicMock(iso_code="PL")))
+

-    def __init__(self, *args, **kwargs):
-        pass
+class MockReader(object):

-    def __getitem__(self, key):
-        return self.config[key]
+    def __new__(cls, fileish, *args, **kwargs):
+        filename = os.path.basename(fileish)
+        if filename == DEFAULT_ASN_DB_FILENAME:
+            return MockASNReader()
+        elif filename == DEFAULT_CC_DB_FILENAME:
+            return MockCCReader()
+        raise ValueError('Unrecognized name of GeoIP database file: {!r}. '
+                         'Should be one of: {!r}, {!r}'.format(filename,
+                                                               DEFAULT_ASN_DB_FILENAME,
+                                                               DEFAULT_CC_DB_FILENAME))


-class TestEnricher(TestCaseMixin, unittest.TestCase):
+class _BaseTestEnricher(TestCaseMixin):
+
+    """
+    This class defines methods that return input test data.
+    Concrete subclasses are supposed to extend these methods by
+    taking the returned data and asserting it against the expected
+    data.
+
+    These tests are meant to be common to all the cases of testing
+    the Enricher: with all the GeoIP databases enabled, with only
+    the ASN database, with only the CC database, or with none of
+    them.
+    """

     COMMON_DATA = {
         "category": "other",
@@ -52,22 +68,210 @@ class TestEnricher(TestCaseMixin, unittest.TestCase):
         "id": hashlib.md5("test").hexdigest(),
         "rid": hashlib.md5("test").hexdigest(),
     }
+    MOCK_CONFIG = NotImplemented

     @mock.patch('n6.base.queue.QueuedBase.get_connection_params_dict')
-    @mock.patch('n6.utils.enrich.Config', MockConfig)
-    def setUp(self, *args):
+    @mock.patch('n6.utils.enrich.database.Reader', MockReader)
+    @mock.patch('n6.utils.enrich.ConfigMixin.get_config_section')
+    def setUp(self, config_mock, *args):
+        config_mock.return_value = self.MOCK_CONFIG
         Enricher._setup_dnsresolver = mock.MagicMock()
-        Enricher._setup_geodb = mock.MagicMock()
         self.enricher = Enricher()
         self.enricher._resolver = mock.MagicMock()
         self.enricher._resolver.query = mock.MagicMock(return_value=["127.0.0.1"])
-        self.enricher.gi_asn = mock.Mock(
-            asn=mock.Mock(return_value=mock.MagicMock(
-                autonomous_system_number="1234")))
-        self.enricher.gi_cc = mock.Mock(
-            city=mock.Mock(return_value=mock.MagicMock(
-                country=mock.Mock(
-                    iso_code="PL"))))
+
+    def test__ip_to_asn__called_or_not(self):
+        """
+        Prepare data for checking whether the `ip_to_asn()` method
+        was called for all IP addresses, or not called at all.
+        """
+        self.enricher.ip_to_asn = mock.MagicMock(return_value="")
+        data = self._make_actions_to_call_geoip_method_get_data()
+        return data
+
+    def test__ip_to_cc__called_or_not(self):
+        """
+        Prepare data for checking whether the `ip_to_cc()` method
+        was called for all IP addresses, or not called at all.
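+        (Each concrete subclass decides, based on its `MOCK_CONFIG`,
+        whether to assert that the calls happened or that they did not.)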
+ """ + self.enricher.ip_to_cc = mock.MagicMock(return_value="") + data = self._make_actions_to_call_geoip_method_get_data() + return data + + def test__enrich__with_fqdn_given(self): + data = self.enricher.enrich(RecordDict({"fqdn": "cert.pl"})) + self.enricher._resolver.query.assert_called_once_with("cert.pl", "A") + return data + + def test__enrich__with_fqdn_given__resolved_to_various_ips_with_duplicates(self): + self.enricher._resolver.query.return_value = [ + '2.2.2.2', + '127.0.0.1', + '13.1.2.3', + '1.1.1.1', + '127.0.0.1', # duplicate + '13.1.2.3', # duplicate + '12.11.10.9', + '13.1.2.3', # duplicate + '1.0.1.1', + ] + data = self.enricher.enrich(RecordDict({"fqdn": "cert.pl"})) + self.enricher._resolver.query.assert_called_once_with("cert.pl", "A") + return data + + def test__enrich__with_url_given(self): + data = self.enricher.enrich(RecordDict({"url": "http://www.nask.pl/asd"})) + self.enricher._resolver.query.assert_called_once_with("www.nask.pl", "A") + return data + + def test__enrich__with_ip_url_given(self): + return self.enricher.enrich(RecordDict({"url": "http://192.168.0.1/asd"})) + + def test__enrich__with_ip_url_given__with_nodns_flag(self): + return self.enricher.enrich(RecordDict({ + "url": "http://192.168.0.1/asd", + "_do_not_resolve_fqdn_to_ip": True})) + + def test__enrich__with_fqdn_and_url_given(self): + data = self.enricher.enrich(RecordDict({"fqdn": "cert.pl", + "url": "http://www.nask.pl/asd"})) + self.enricher._resolver.query.assert_called_once_with("cert.pl", "A") + return data + + def test__enrich__with_fqdn_and_ip_url_given(self): + data = self.enricher.enrich(RecordDict({ + "fqdn": "cert.pl", + "url": "http://192.168.0.1/asd"})) + self.enricher._resolver.query.assert_called_once_with("cert.pl", "A") + return data + + def test__enrich__with_address_and_fqdn_given(self): + return self.enricher.enrich(RecordDict({ + "fqdn": "cert.pl", + "address": [{"ip": "10.20.30.40"}]})) + + def test__enrich__with_address_and_fqdn_given__with_nodns_flag(self): + return self.enricher.enrich(RecordDict({ + "fqdn": "cert.pl", + "address": [{"ip": "10.20.30.40"}], + "_do_not_resolve_fqdn_to_ip": True})) + + def test__enrich__with_address_and_url_given(self): + return self.enricher.enrich(RecordDict({ + "url": "http://www.nask.pl/asd", + "address": [{"ip": "10.20.30.40"}]})) + + def test__enrich__with_address_and_ip_url_given(self): + return self.enricher.enrich(RecordDict({ + "url": "http://192.168.0.3/asd", + "address": [{"ip": "10.20.30.40"}]})) + + def test__enrich__with_address_and_fqdn_and_url_given(self): + return self.enricher.enrich(RecordDict({ + "fqdn": "cert.pl", + "url": "http://www.nask.pl/asd", + "address": [{"ip": "10.20.30.40"}]})) + + def test__enrich__with_address_and_fqdn_and_ip_url_given(self): + return self.enricher.enrich(RecordDict({ + "fqdn": "cert.pl", + "url": "http://192.168.0.1/asd", + "address": [{"ip": "10.20.30.40"}]})) + + def test__enrich__with_excluded_ips_config__without_any_ip_to_exclude(self): + self._prepare_config_for_excluded_ips(['2.2.2.2', '3.3.3.3']) + self.enricher.excluded_ips = self.enricher._get_excluded_ips() + data = self.enricher.enrich(RecordDict({"url": "http://www.nask.pl/asd"})) + self.enricher._resolver.query.assert_called_once_with("www.nask.pl", "A") + return data + + # helper methods + def _prepare_config_for_excluded_ips(self, list_of_ips): + self.enricher._enrich_config = {'excluded_ips': list_of_ips} + + @staticmethod + def _get_actual_data_for_adding_asn_cc_if_possible(): + return RecordDict({ + "address": 
[{"ip": "127.0.0.1"}, + {"ip": "192.187.0.1"}, + {"ip": "10.15.1.255"}]}) + + @staticmethod + def _get_actual_data_for_existing_asn_cc_always_dropped_and_new_ones_added_if_possible(): + return RecordDict({ + "address": [{"ip": "127.0.0.1", "cc": "JP"}, + {"ip": "192.187.0.1", "cc": "US", "asn": 424242}, + {"ip": "10.15.1.255", "asn": 434343}]}) + + def _enricher_execution_helper(self, data_init, expected_num_of_warnings=None): + data = data_init + data.update(self.COMMON_DATA) + self.enricher.enrich(data) + self.expected_num_of_warnings = expected_num_of_warnings + return data + + def _set_asn_db_return_value_if_enabled(self, returned_asn): + if self.enricher.gi_asn is not None: + self.assertTrue(self.enricher.is_geodb_enabled) + self.enricher.gi_asn = mock.Mock() + self.enricher.gi_asn.asn = mock.Mock( + return_value=mock.MagicMock(autonomous_system_number=returned_asn)) + + def _set_asn_db_side_effect_if_enabled(self, side_effect): + if self.enricher.gi_asn is not None: + self.assertTrue(self.enricher.is_geodb_enabled) + self.enricher.gi_asn = mock.Mock() + self.enricher.gi_asn.asn = mock.MagicMock(side_effect=side_effect) + + def _set_cc_db_return_value_if_enabled(self, returned_cc): + if self.enricher.gi_cc is not None: + self.assertTrue(self.enricher.is_geodb_enabled) + self.enricher.gi_cc = mock.Mock() + self.enricher.gi_cc.city = mock.Mock( + return_value=mock.Mock(country=mock.Mock(iso_code=returned_cc))) + + def _set_cc_db_side_effect_if_enabled(self, side_effect): + if self.enricher.gi_cc is not None: + self.assertTrue(self.enricher.is_geodb_enabled) + self.enricher.gi_cc = mock.Mock() + self.enricher.gi_cc.city = mock.MagicMock(side_effect=side_effect) + + def _make_actions_to_call_geoip_method_get_data(self): + data = RecordDict({ + "address": [{"ip": "127.0.0.1"}, + {"ip": "192.187.0.1"}, + {"ip": "10.15.1.255"}]}) + data.update(self.COMMON_DATA) + self.enricher.enrich(data) + return data + + def _assert_geoip_method_called(self, meth, data): + for addr in data["address"]: + meth.assert_any_call(addr["ip"]) + self.assertEqual(len(data["address"]), meth.call_count) + + def _assert_geoip_method_not_called(self, meth): + self.assertFalse(meth.called) + + +class TestEnricherWithFullConfig(_BaseTestEnricher, unittest.TestCase): + + MOCK_CONFIG = { + 'dnshost': '8.8.8.8', + 'dnsport': 53, + 'geoippath': DEFAULT_GEO_IP_DB_PATH, + 'asndatabasefilename': DEFAULT_ASN_DB_FILENAME, + 'citydatabasefilename': DEFAULT_CC_DB_FILENAME, + 'excluded_ips': [], + } + + def test__ip_to_asn__called_or_not(self): + data = super(TestEnricherWithFullConfig, self).test__ip_to_asn__called_or_not() + self._assert_geoip_method_called(self.enricher.ip_to_asn, data) + + def test__ip_to_cc__called_or_not(self): + data = super(TestEnricherWithFullConfig, self).test__ip_to_cc__called_or_not() + self._assert_geoip_method_called(self.enricher.ip_to_cc, data) def test__enrich__with_no_data(self): data = self.enricher.enrich(RecordDict({})) @@ -79,8 +283,7 @@ def test__enrich__with_irrelevant_data(self): 'enriched': ([], {})}))) def test__enrich__with_fqdn_given(self): - data = self.enricher.enrich(RecordDict({"fqdn": "cert.pl"})) - self.enricher._resolver.asert_called_once_with("cert.pl") + data = super(TestEnricherWithFullConfig, self).test__enrich__with_fqdn_given() self.assertEqualIncludingTypes(data, RecordDict({ "enriched": ([], {"127.0.0.1": ["asn", "cc", "ip"]}), "fqdn": "cert.pl", @@ -92,26 +295,15 @@ def test__enrich__with_fqdn_given__with_nodns_flag(self): data = self.enricher.enrich(RecordDict({ "fqdn": 
"cert.pl", "_do_not_resolve_fqdn_to_ip": True})) - self.enricher._resolver.asert_called_once_with("cert.pl") + self.assertFalse(self.enricher._resolver.query.called) self.assertEqualIncludingTypes(data, RecordDict({ "enriched": ([], {}), "fqdn": "cert.pl", "_do_not_resolve_fqdn_to_ip": True})) def test__enrich__with_fqdn_given__resolved_to_various_ips_with_duplicates(self): - self.enricher._resolver.query.return_value = [ - '2.2.2.2', - '127.0.0.1', - '13.1.2.3', - '1.1.1.1', - '127.0.0.1', # duplicate - '13.1.2.3', # duplicate - '12.11.10.9', - '13.1.2.3', # duplicate - '1.0.1.1', - ] - data = self.enricher.enrich(RecordDict({"fqdn": "cert.pl"})) - self.enricher._resolver.asert_called_once_with("cert.pl") + data = super(TestEnricherWithFullConfig, + self).test__enrich__with_fqdn_given__resolved_to_various_ips_with_duplicates() self.assertEqualIncludingTypes(data, RecordDict({ "enriched": ([], {"1.0.1.1": ["asn", "cc", "ip"], "1.1.1.1": ["asn", "cc", "ip"], @@ -140,8 +332,7 @@ def test__enrich__with_fqdn_given__resolved_to_various_ips_with_duplicates(self) "cc": 'PL'}]})) def test__enrich__with_url_given(self): - data = self.enricher.enrich(RecordDict({"url": "http://www.nask.pl/asd"})) - self.enricher._resolver.asert_called_once_with("www.nask.pl") + data = super(TestEnricherWithFullConfig, self).test__enrich__with_url_given() self.assertEqualIncludingTypes(data, RecordDict({ "enriched": (["fqdn"], {"127.0.0.1": ["asn", "cc", "ip"]}), "url": "http://www.nask.pl/asd", @@ -154,7 +345,7 @@ def test__enrich__with_url_given__with_nodns_flag(self): data = self.enricher.enrich(RecordDict({ "url": "http://www.nask.pl/asd", "_do_not_resolve_fqdn_to_ip": True})) - self.enricher._resolver.asert_called_once_with("www.nask.pl") + self.assertFalse(self.enricher._resolver.query.called) self.assertEqualIncludingTypes(data, RecordDict({ "enriched": (["fqdn"], {}), "url": "http://www.nask.pl/asd", @@ -184,7 +375,7 @@ def test__enrich__with_fqdn_from_url_not_resolved(self): "fqdn": "www.nask.pl"})) def test__enrich__with_ip_url_given(self): - data = self.enricher.enrich(RecordDict({"url": "http://192.168.0.1/asd"})) + data = super(TestEnricherWithFullConfig, self).test__enrich__with_ip_url_given() self.assertEqualIncludingTypes(data, RecordDict({ "enriched": ([], {"192.168.0.1": ["asn", "cc", "ip"]}), "url": "http://192.168.0.1/asd", @@ -193,9 +384,8 @@ def test__enrich__with_ip_url_given(self): "cc": 'PL'}]})) def test__enrich__with_ip_url_given__with_nodns_flag(self): - data = self.enricher.enrich(RecordDict({ - "url": "http://192.168.0.1/asd", - "_do_not_resolve_fqdn_to_ip": True})) + data = super(TestEnricherWithFullConfig, + self).test__enrich__with_ip_url_given__with_nodns_flag() self.assertEqualIncludingTypes(data, RecordDict({ # (here the '_do_not_resolve_fqdn_to_ip' flag did *not* change behaviour) "enriched": ([], {"192.168.0.1": ["asn", "cc", "ip"]}), @@ -206,9 +396,7 @@ def test__enrich__with_ip_url_given__with_nodns_flag(self): "_do_not_resolve_fqdn_to_ip": True})) def test__enrich__with_fqdn_and_url_given(self): - data = self.enricher.enrich(RecordDict({"fqdn": "cert.pl", - "url": "http://www.nask.pl/asd"})) - self.enricher._resolver.asert_called_once_with("cert.pl") + data = super(TestEnricherWithFullConfig, self).test__enrich__with_fqdn_and_url_given() self.assertEqualIncludingTypes(data, RecordDict({ "enriched": ([], {"127.0.0.1": ["asn", "cc", "ip"]}), "url": "http://www.nask.pl/asd", @@ -222,7 +410,7 @@ def test__enrich__with_fqdn_and_url_given__with_nodns_flag(self): "fqdn": "cert.pl", 
"url": "http://www.nask.pl/asd", "_do_not_resolve_fqdn_to_ip": True})) - self.enricher._resolver.asert_called_once_with("cert.pl") + self.assertFalse(self.enricher._resolver.query.called) self.assertEqualIncludingTypes(data, RecordDict({ "enriched": ([], {}), "url": "http://www.nask.pl/asd", @@ -230,10 +418,7 @@ def test__enrich__with_fqdn_and_url_given__with_nodns_flag(self): "_do_not_resolve_fqdn_to_ip": True})) def test__enrich__with_fqdn_and_ip_url_given(self): - data = self.enricher.enrich(RecordDict({ - "fqdn": "cert.pl", - "url": "http://192.168.0.1/asd"})) - self.enricher._resolver.asert_called_once_with("cert.pl") + data = super(TestEnricherWithFullConfig, self).test__enrich__with_fqdn_and_ip_url_given() self.assertEqualIncludingTypes(data, RecordDict({ "enriched": ([], {"127.0.0.1": ["asn", "cc", "ip"]}), "url": "http://192.168.0.1/asd", @@ -243,9 +428,7 @@ def test__enrich__with_fqdn_and_ip_url_given(self): "cc": 'PL'}]})) def test__enrich__with_address_and_fqdn_given(self): - data = self.enricher.enrich(RecordDict({ - "fqdn": "cert.pl", - "address": [{"ip": "10.20.30.40"}]})) + data = super(TestEnricherWithFullConfig, self).test__enrich__with_address_and_fqdn_given() self.assertEqualIncludingTypes(data, RecordDict({ "enriched": ([], {"10.20.30.40": ["asn", "cc"]}), "fqdn": "cert.pl", @@ -254,10 +437,8 @@ def test__enrich__with_address_and_fqdn_given(self): "cc": 'PL'}]})) def test__enrich__with_address_and_fqdn_given__with_nodns_flag(self): - data = self.enricher.enrich(RecordDict({ - "fqdn": "cert.pl", - "address": [{"ip": "10.20.30.40"}], - "_do_not_resolve_fqdn_to_ip": True})) + data = super(TestEnricherWithFullConfig, + self).test__enrich__with_address_and_fqdn_given__with_nodns_flag() self.assertEqualIncludingTypes(data, RecordDict({ # (here the '_do_not_resolve_fqdn_to_ip' flag did *not* change behaviour) "enriched": ([], {"10.20.30.40": ["asn", "cc"]}), @@ -268,9 +449,8 @@ def test__enrich__with_address_and_fqdn_given__with_nodns_flag(self): "_do_not_resolve_fqdn_to_ip": True})) def test__enrich__with_address_and_url_given(self): - data = self.enricher.enrich(RecordDict({ - "url": "http://www.nask.pl/asd", - "address": [{"ip": "10.20.30.40"}]})) + data = super(TestEnricherWithFullConfig, + self).test__enrich__with_address_and_url_given() self.assertEqualIncludingTypes(data, RecordDict({ "enriched": (["fqdn"], {"10.20.30.40": ["asn", "cc"]}), "url": "http://www.nask.pl/asd", @@ -280,9 +460,8 @@ def test__enrich__with_address_and_url_given(self): "cc": 'PL'}]})) def test__enrich__with_address_and_ip_url_given(self): - data = self.enricher.enrich(RecordDict({ - "url": "http://192.168.0.3/asd", - "address": [{"ip": "10.20.30.40"}]})) + data = super(TestEnricherWithFullConfig, + self).test__enrich__with_address_and_ip_url_given() self.assertEqualIncludingTypes(data, RecordDict({ "enriched": ([], {"10.20.30.40": ["asn", "cc"]}), "url": "http://192.168.0.3/asd", @@ -291,10 +470,8 @@ def test__enrich__with_address_and_ip_url_given(self): "cc": 'PL'}]})) def test__enrich__with_address_and_fqdn_and_url_given(self): - data = self.enricher.enrich(RecordDict({ - "fqdn": "cert.pl", - "url": "http://www.nask.pl/asd", - "address": [{"ip": "10.20.30.40"}]})) + data = super(TestEnricherWithFullConfig, + self).test__enrich__with_address_and_fqdn_and_url_given() self.assertEqualIncludingTypes(data, RecordDict({ "enriched": ([], {"10.20.30.40": ["asn", "cc"]}), "fqdn": "cert.pl", @@ -304,10 +481,8 @@ def test__enrich__with_address_and_fqdn_and_url_given(self): "cc": 'PL'}]})) def 
test__enrich__with_address_and_fqdn_and_ip_url_given(self): - data = self.enricher.enrich(RecordDict({ - "fqdn": "cert.pl", - "url": "http://192.168.0.1/asd", - "address": [{"ip": "10.20.30.40"}]})) + data = super(TestEnricherWithFullConfig, + self).test__enrich__with_address_and_fqdn_and_ip_url_given() self.assertEqualIncludingTypes(data, RecordDict({ "enriched": ([], {"10.20.30.40": ["asn", "cc"]}), "fqdn": "cert.pl", @@ -342,63 +517,55 @@ def test__url_to_fqdn_or_ip__called_for_ip_url(self): def test_adding_asn_cc_if_asn_not_valid_and_cc_is_valid(self): """Test if asn/cc are (maybe) added""" - - data_init = self.actual_data_for_adding_asn_cc_if_possible( - mock.MagicMock(side_effect=GeoIP2Error), - mock.MagicMock(return_value=mock.MagicMock(country=mock.Mock(iso_code="PL"))) - ) - data_expected = self.enricher_execution_helper(data_init) + data_init = self._get_actual_data_for_adding_asn_cc_if_possible() + self._set_asn_db_side_effect_if_enabled(GeoIP2Error) + self._set_cc_db_return_value_if_enabled('PL') + data_expected = self._enricher_execution_helper(data_init) self.assertEqual([{u'cc': u'PL', u'ip': u'127.0.0.1'}, {u'cc': u'PL', u'ip': u'192.187.0.1'}, {u'cc': u'PL', u'ip': u'10.15.1.255'}], data_expected["address"]) - self.assertEqual(([], {u'10.15.1.255': [u'cc'], u'127.0.0.1': [u'cc'], u'192.187.0.1': [u'cc']}), + self.assertEqual(([], {u'10.15.1.255': [u'cc'], + u'127.0.0.1': [u'cc'], + u'192.187.0.1': [u'cc']}), data_expected["enriched"]) def test_adding_asn_cc_if_asn_and_cc_are_valid(self): """Test if asn/cc are (maybe) added""" - - data_init = self.actual_data_for_adding_asn_cc_if_possible( - mock.MagicMock(return_value=mock.Mock(autonomous_system_number=1234)), - mock.MagicMock(return_value=mock.MagicMock(country=mock.Mock(iso_code="UK"))) - ) - data_expected = self.enricher_execution_helper(data_init) + data_init = self._get_actual_data_for_adding_asn_cc_if_possible() + self._set_asn_db_return_value_if_enabled(1234) + self._set_cc_db_return_value_if_enabled('UK') + data_expected = self._enricher_execution_helper(data_init) self.assertEqual([{u'asn': 1234, u'cc': u'UK', u'ip': u'127.0.0.1'}, {u'asn': 1234, u'cc': u'UK', u'ip': u'192.187.0.1'}, - {u'asn': 1234, u'cc': u'UK', u'ip': u'10.15.1.255'}], data_expected["address"]) + {u'asn': 1234, u'cc': u'UK', u'ip': u'10.15.1.255'}], + data_expected["address"]) self.assertEqual(([], {u'10.15.1.255': [u'asn', u'cc'], u'127.0.0.1': [u'asn', u'cc'], - u'192.187.0.1': [u'asn', u'cc']}), data_expected["enriched"]) + u'192.187.0.1': [u'asn', u'cc']}), + data_expected["enriched"]) def test_adding_asn_cc_if_asn_is_valid_and_cc_is_not(self): """Test if asn/cc are (maybe) added""" - - data_init = self.actual_data_for_adding_asn_cc_if_possible( - mock.MagicMock(return_value=mock.Mock(autonomous_system_number=123456)), - mock.MagicMock(side_effect=GeoIP2Error) - ) - data_expected = self.enricher_execution_helper(data_init) + data_init = self._get_actual_data_for_adding_asn_cc_if_possible() + self._set_asn_db_return_value_if_enabled(123456) + self._set_cc_db_side_effect_if_enabled(GeoIP2Error) + data_expected = self._enricher_execution_helper(data_init) self.assertEqual([{u'asn': 123456, u'ip': u'127.0.0.1'}, {u'asn': 123456, u'ip': u'192.187.0.1'}, - {u'asn': 123456, u'ip': u'10.15.1.255'}], data_expected["address"]) - self.assertEqual(([], {u'10.15.1.255': [u'asn'], u'127.0.0.1': [u'asn'], u'192.187.0.1': [u'asn']}), + {u'asn': 123456, u'ip': u'10.15.1.255'}], + data_expected["address"]) + self.assertEqual(([], {u'10.15.1.255': [u'asn'], + 
u'127.0.0.1': [u'asn'], + u'192.187.0.1': [u'asn']}), data_expected["enriched"]) - def actual_data_for_adding_asn_cc_if_possible(self, asn_mock, cc_mock): - self.enricher.gi_asn.asn = asn_mock - self.enricher.gi_cc.city = cc_mock - return RecordDict({ - "address": [{"ip": "127.0.0.1"}, - {"ip": "192.187.0.1"}, - {"ip": "10.15.1.255"}]}) - @mock.patch('n6.utils.enrich.LOGGER') def test_existing_asn_cc_always_dropped_and_new_ones_added_if_asn_and_are_not_valid(self, LOGGER_mock): """Test if already existing asn/cc are removed and new ones are (maybe) added""" - - data_init = self.actual_data_for_existing_asn_cc_always_dropped_and_new_ones_added_if_possible( - mock.MagicMock(side_effect=GeoIP2Error), - mock.MagicMock(side_effect=GeoIP2Error)) - data_expected = self.enricher_execution_helper(data_init, expected_num_of_warnings=4) + data_init = self._get_actual_data_for_existing_asn_cc_always_dropped_and_new_ones_added_if_possible() + self._set_asn_db_side_effect_if_enabled(GeoIP2Error) + self._set_cc_db_side_effect_if_enabled(GeoIP2Error) + data_expected = self._enricher_execution_helper(data_init, expected_num_of_warnings=4) self.assertEqual([{u'ip': u'127.0.0.1'}, {u'ip': u'192.187.0.1'}, {u'ip': u'10.15.1.255'}], data_expected["address"]) @@ -408,16 +575,17 @@ def test_existing_asn_cc_always_dropped_and_new_ones_added_if_asn_and_are_not_va @mock.patch('n6.utils.enrich.LOGGER') def test_existing_asn_cc_always_dropped_and_new_ones_added_if_asn_is_not_valid(self, LOGGER_mock): """Test if already existing asn/cc are removed and new ones are (maybe) added""" - - data_init = self.actual_data_for_existing_asn_cc_always_dropped_and_new_ones_added_if_possible( - mock.MagicMock(side_effect=GeoIP2Error), - mock.MagicMock(return_value=mock.MagicMock(country=mock.Mock(iso_code="PL"))) - ) - data_expected = self.enricher_execution_helper(data_init, expected_num_of_warnings=4) + self._set_asn_db_side_effect_if_enabled(GeoIP2Error) + self._set_cc_db_return_value_if_enabled('PL') + data_init = self._get_actual_data_for_existing_asn_cc_always_dropped_and_new_ones_added_if_possible() + data_expected = self._enricher_execution_helper(data_init, expected_num_of_warnings=4) self.assertEqual([{u'cc': u'PL', u'ip': u'127.0.0.1'}, {u'cc': u'PL', u'ip': u'192.187.0.1'}, - {u'cc': u'PL', u'ip': u'10.15.1.255'}], data_expected["address"]) - self.assertEqual(([], {u'10.15.1.255': [u'cc'], u'127.0.0.1': [u'cc'], u'192.187.0.1': [u'cc']}), + {u'cc': u'PL', u'ip': u'10.15.1.255'}], + data_expected["address"]) + self.assertEqual(([], {u'10.15.1.255': [u'cc'], + u'127.0.0.1': [u'cc'], + u'192.187.0.1': [u'cc']}), data_expected["enriched"]) self.assertEqual( len(LOGGER_mock.warning.mock_calls), @@ -426,15 +594,14 @@ def test_existing_asn_cc_always_dropped_and_new_ones_added_if_asn_is_not_valid(s @mock.patch('n6.utils.enrich.LOGGER') def test_existing_asn_cc_always_dropped_and_new_ones_added_if_asn_and_cc_are_valid(self, LOGGER_mock): """Test if already existing asn/cc are removed and new ones are (maybe) added""" - - data_init = self.actual_data_for_existing_asn_cc_always_dropped_and_new_ones_added_if_possible( - mock.MagicMock(return_value=mock.Mock(autonomous_system_number=12345)), - mock.MagicMock(return_value=mock.MagicMock(country=mock.Mock(iso_code="UK"))) - ) - data_expected = self.enricher_execution_helper(data_init, expected_num_of_warnings=4) + self._set_asn_db_return_value_if_enabled(12345) + self._set_cc_db_return_value_if_enabled('UK') + data_init = 
self._get_actual_data_for_existing_asn_cc_always_dropped_and_new_ones_added_if_possible() + data_expected = self._enricher_execution_helper(data_init, expected_num_of_warnings=4) self.assertEqual([{u'asn': 12345, u'cc': u'UK', u'ip': u'127.0.0.1'}, {u'asn': 12345, u'cc': u'UK', u'ip': u'192.187.0.1'}, - {u'asn': 12345, u'cc': u'UK', u'ip': u'10.15.1.255'}], data_expected["address"]) + {u'asn': 12345, u'cc': u'UK', u'ip': u'10.15.1.255'}], + data_expected["address"]) self.assertEqual(([], {u'10.15.1.255': [u'asn', u'cc'], u'127.0.0.1': [u'asn', u'cc'], @@ -443,47 +610,6 @@ def test_existing_asn_cc_always_dropped_and_new_ones_added_if_asn_and_cc_are_val len(LOGGER_mock.warning.mock_calls), self.expected_num_of_warnings) - def actual_data_for_existing_asn_cc_always_dropped_and_new_ones_added_if_possible(self, mock_asn, mock_cc): - self.enricher.gi_asn.asn = mock_asn - self.enricher.gi_cc.city = mock_cc - return RecordDict({ - "address": [{"ip": "127.0.0.1", "cc": "JP"}, - {"ip": "192.187.0.1", "cc": "US", "asn": 424242}, - {"ip": "10.15.1.255", "asn": 434343}]}) - - def enricher_execution_helper(self, data_init, expected_num_of_warnings=None): - data = data_init - data.update(self.COMMON_DATA) - self.enricher.enrich(data) - self.expected_num_of_warnings = expected_num_of_warnings - return data - - def test__ip_to_asn__called(self): - """Test if ip_to_asn was called for all ips""" - data = RecordDict({ - "address": [{"ip": "127.0.0.1"}, - {"ip": "192.187.0.1"}, - {"ip": "10.15.1.255"}]}) - data.update(self.COMMON_DATA) - self.enricher.ip_to_asn = mock.MagicMock(return_value="") - self.enricher.enrich(data) - for addr in data["address"]: - self.enricher.ip_to_asn.assert_any_call(addr["ip"]) - self.assertEqual(len(data["address"]), self.enricher.ip_to_asn.call_count) - - def test__ip_to_cc__called(self): - """Test if ip_to_cc was called for all ips""" - data = RecordDict({ - "address": [{"ip": "127.0.0.1"}, - {"ip": "192.187.0.1"}, - {"ip": "10.15.1.255"}]}) - data.update(self.COMMON_DATA) - self.enricher.ip_to_cc = mock.MagicMock(return_value="") - self.enricher.enrich(data) - for addr in data["address"]: - self.enricher.ip_to_cc.assert_any_call(addr["ip"]) - self.assertEqual(len(data["address"]), self.enricher.ip_to_cc.call_count) - def test__fqdn_to_ip__not_called(self): """Test if fqdn_to_ip not called if address already present""" data = RecordDict({ @@ -513,59 +639,43 @@ def test_routing_key_modified(self): self.assertEqual(kwargs["routing_key"], "event.enriched.test.test-source") def test__get_excluded_ips__with_excluded_ips_in_config(self): - # config file with excluded_ips - self.enricher._enrich_config = {'dnshost': '8.8.8.8', - 'dnsport': '53', - 'geoippath': '/usr/share/GeoIP', - 'excluded_ips': '1.1.1.1, 2.2.2.2,3.3.3.3'} + self._prepare_config_for_excluded_ips(['1.1.1.1', '2.2.2.2', '3.3.3.3']) expected = iptools.IpRangeList('1.1.1.1', '2.2.2.2', '3.3.3.3') result = self.enricher._get_excluded_ips() self.assertItemsEqual(expected, result) def test__get_excluded_ips__without_excluded_ips_in_config(self): - # config file without excluded_ips - self.enricher._enrich_config = {'dnshost': '8.8.8.8', - 'dnsport': '53', - 'geoippath': '/usr/share/GeoIP'} + self._prepare_config_for_excluded_ips([]) expected = None result = self.enricher._get_excluded_ips() self.assertEqual(expected, result) def test__enrich__with_excluded_ips_config__with_some_ip_to_exclude__1(self): - self.enricher._enrich_config = {'dnshost': '8.8.8.8', - 'dnsport': '53', - 'geoippath': '/usr/share/GeoIP', - 
-                                        'excluded_ips': '127.0.0.1, 2.2.2.2, 3.3.3.3'}
+        self._prepare_config_for_excluded_ips(['127.0.0.1', '2.2.2.2', '3.3.3.3'])
         self.enricher.excluded_ips = self.enricher._get_excluded_ips()
         data = self.enricher.enrich(RecordDict({"url": "http://www.nask.pl/asd",
                                                 "address": [{'ip': "127.0.0.1"}]}))
-        self.enricher._resolver.asert_called_once_with("www.nask.pl")
+        # the 'address' field is present, so the FQDN will not be
+        # resolved to IP addresses
+        self.assertFalse(self.enricher._resolver.query.called)
         self.assertEqualIncludingTypes(data, RecordDict({
             "enriched": (["fqdn"], {}),
             "url": "http://www.nask.pl/asd",
             "fqdn": "www.nask.pl"}))   # (note: emptied `address` removed)

     def test__enrich__with_excluded_ips_config__with_some_ip_to_exclude__2(self):
-        self.enricher._enrich_config = {'dnshost': '8.8.8.8',
-                                        'dnsport': '53',
-                                        'geoippath': '/usr/share/GeoIP',
-                                        'excluded_ips': '127.0.0.1, 2.2.2.2, 3.3.3.3'}
+        self._prepare_config_for_excluded_ips(['127.0.0.1', '2.2.2.2', '3.3.3.3'])
         self.enricher.excluded_ips = self.enricher._get_excluded_ips()
         data = self.enricher.enrich(RecordDict({"url": "http://www.nask.pl/asd"}))
-        self.enricher._resolver.asert_called_once_with("www.nask.pl")
+        self.enricher._resolver.query.assert_called_once_with("www.nask.pl", "A")
         self.assertEqualIncludingTypes(data, RecordDict({
             "enriched": (["fqdn"], {}),
             "url": "http://www.nask.pl/asd",
             "fqdn": "www.nask.pl"}))   # (note: emptied `address` removed)

     def test__enrich__with_excluded_ips_config__without_any_ip_to_exclude(self):
-        self.enricher._enrich_config = {'dnshost': '8.8.8.8',
-                                        'dnsport': '53',
-                                        'geoippath': '/usr/share/GeoIP',
-                                        'excluded_ips': '2.2.2.2, 3.3.3.3'}
-        self.enricher.excluded_ips = self.enricher._get_excluded_ips()
-        data = self.enricher.enrich(RecordDict({"url": "http://www.nask.pl/asd"}))
-        self.enricher._resolver.asert_called_once_with("www.nask.pl")
+        data = super(TestEnricherWithFullConfig,
+                     self).test__enrich__with_excluded_ips_config__without_any_ip_to_exclude()
         self.assertEqualIncludingTypes(data, RecordDict({
             "enriched": (["fqdn"], {"127.0.0.1": ["asn", "cc", "ip"]}),
             "url": "http://www.nask.pl/asd",
@@ -683,3 +793,587 @@ def test__filter_out_excluded_ips__with_range_of_ips(self):
         self.enricher._filter_out_excluded_ips(data, ip_to_enr_mock)
         self.assertEqualIncludingTypes(expected, data)
         self.assertItemsEqual(ip_to_enr_mock.mock_calls, ip_to_enr_expected_call_items)
+
+
+class TestEnricherNoASNDatabase(_BaseTestEnricher, unittest.TestCase):
+
+    MOCK_CONFIG = {
+        'dnshost': '8.8.8.8',
+        'dnsport': 53,
+        'geoippath': DEFAULT_GEO_IP_DB_PATH,
+        'asndatabasefilename': '',
+        'citydatabasefilename': DEFAULT_CC_DB_FILENAME,
+        'excluded_ips': [],
+    }
+
+    def test__ip_to_asn__called_or_not(self):
+        super(TestEnricherNoASNDatabase, self).test__ip_to_asn__called_or_not()
+        self._assert_geoip_method_not_called(self.enricher.ip_to_asn)
+
+    def test__ip_to_cc__called_or_not(self):
+        data = super(TestEnricherNoASNDatabase, self).test__ip_to_cc__called_or_not()
+        self._assert_geoip_method_called(self.enricher.ip_to_cc, data)
+
+    def test__enrich__with_fqdn_given(self):
+        data = self.enricher.enrich(RecordDict({"fqdn": "cert.pl"}))
+        self.enricher._resolver.query.assert_called_once_with("cert.pl", "A")
+        self.assertEqualIncludingTypes(data, RecordDict({
+            "enriched": ([], {"127.0.0.1": ["cc", "ip"]}),
+            "fqdn": "cert.pl",
+            "address": [{"ip": '127.0.0.1',
+                         "cc": 'PL'}]}))
+
+    def test__enrich__with_fqdn_given__resolved_to_various_ips_with_duplicates(self):
+        data = super(TestEnricherNoASNDatabase,
self).test__enrich__with_fqdn_given__resolved_to_various_ips_with_duplicates() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"1.0.1.1": ["cc", "ip"], + "1.1.1.1": ["cc", "ip"], + "12.11.10.9": ["cc", "ip"], + "127.0.0.1": ["cc", "ip"], + "13.1.2.3": ["cc", "ip"], + "2.2.2.2": ["cc", "ip"]}), + "fqdn": "cert.pl", + "address": [{"ip": '1.0.1.1', # note: *removed IP duplicates* and + "cc": 'PL'}, # *ordered* by IP (textually) + {"ip": '1.1.1.1', + "cc": 'PL'}, + {"ip": '12.11.10.9', + "cc": 'PL'}, + {"ip": '127.0.0.1', + "cc": 'PL'}, + {"ip": '13.1.2.3', + "cc": 'PL'}, + {"ip": '2.2.2.2', + "cc": 'PL'}]})) + + def test__enrich__with_url_given(self): + data = super(TestEnricherNoASNDatabase, self).test__enrich__with_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": (["fqdn"], {"127.0.0.1": ["cc", "ip"]}), + "url": "http://www.nask.pl/asd", + "fqdn": "www.nask.pl", + "address": [{"ip": '127.0.0.1', + "cc": 'PL'}]})) + + def test__enrich__with_ip_url_given(self): + data = super(TestEnricherNoASNDatabase, self).test__enrich__with_ip_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"192.168.0.1": ["cc", "ip"]}), + "url": "http://192.168.0.1/asd", + "address": [{"ip": '192.168.0.1', + "cc": 'PL'}]})) + + def test__enrich__with_ip_url_given__with_nodns_flag(self): + data = super(TestEnricherNoASNDatabase, + self).test__enrich__with_ip_url_given__with_nodns_flag() + self.assertEqualIncludingTypes(data, RecordDict({ + # (here the '_do_not_resolve_fqdn_to_ip' flag did *not* change behaviour) + "enriched": ([], {"192.168.0.1": ["cc", "ip"]}), + "url": "http://192.168.0.1/asd", + "address": [{"ip": '192.168.0.1', + "cc": 'PL'}], + "_do_not_resolve_fqdn_to_ip": True})) + + def test__enrich__with_fqdn_and_url_given(self): + data = super(TestEnricherNoASNDatabase, self).test__enrich__with_fqdn_and_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"127.0.0.1": ["cc", "ip"]}), + "url": "http://www.nask.pl/asd", + "fqdn": "cert.pl", + "address": [{"ip": '127.0.0.1', + "cc": 'PL'}]})) + + def test__enrich__with_fqdn_and_ip_url_given(self): + data = super(TestEnricherNoASNDatabase, self).test__enrich__with_fqdn_and_ip_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"127.0.0.1": ["cc", "ip"]}), + "url": "http://192.168.0.1/asd", + "fqdn": "cert.pl", + "address": [{"ip": '127.0.0.1', + "cc": 'PL'}]})) + + def test__enrich__with_address_and_fqdn_given(self): + data = super(TestEnricherNoASNDatabase, self).test__enrich__with_address_and_fqdn_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"10.20.30.40": ["cc"]}), + "fqdn": "cert.pl", + "address": [{"ip": '10.20.30.40', + "cc": 'PL'}]})) + + def test__enrich__with_address_and_fqdn_given__with_nodns_flag(self): + data = super(TestEnricherNoASNDatabase, + self).test__enrich__with_address_and_fqdn_given__with_nodns_flag() + self.assertEqualIncludingTypes(data, RecordDict({ + # (here the '_do_not_resolve_fqdn_to_ip' flag did *not* change behaviour) + "enriched": ([], {"10.20.30.40": ["cc"]}), + "fqdn": "cert.pl", + "address": [{"ip": '10.20.30.40', + "cc": 'PL'}], + "_do_not_resolve_fqdn_to_ip": True})) + + def test__enrich__with_address_and_url_given(self): + data = super(TestEnricherNoASNDatabase, self).test__enrich__with_address_and_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": (["fqdn"], {"10.20.30.40": ["cc"]}), + "url": "http://www.nask.pl/asd", 
+ "fqdn": "www.nask.pl", + "address": [{"ip": '10.20.30.40', + "cc": 'PL'}]})) + + def test__enrich__with_address_and_ip_url_given(self): + data = super(TestEnricherNoASNDatabase, + self).test__enrich__with_address_and_ip_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"10.20.30.40": ["cc"]}), + "url": "http://192.168.0.3/asd", + "address": [{"ip": '10.20.30.40', + "cc": 'PL'}]})) + + def test__enrich__with_address_and_fqdn_and_url_given(self): + data = super(TestEnricherNoASNDatabase, + self).test__enrich__with_address_and_fqdn_and_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"10.20.30.40": ["cc"]}), + "fqdn": "cert.pl", + "url": "http://www.nask.pl/asd", + "address": [{"ip": '10.20.30.40', + "cc": 'PL'}]})) + + def test__enrich__with_address_and_fqdn_and_ip_url_given(self): + data = super(TestEnricherNoASNDatabase, + self).test__enrich__with_address_and_fqdn_and_ip_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"10.20.30.40": ["cc"]}), + "fqdn": "cert.pl", + "url": "http://192.168.0.1/asd", + "address": [{"ip": '10.20.30.40', + "cc": 'PL'}]})) + + def test_adding_geoip_data_if_cc_is_valid(self): + data_init = self._get_actual_data_for_adding_asn_cc_if_possible() + self._set_cc_db_return_value_if_enabled('US') + data_expected = self._enricher_execution_helper(data_init) + self.assertEqual([{u'cc': u'US', u'ip': u'127.0.0.1'}, + {u'cc': u'US', u'ip': u'192.187.0.1'}, + {u'cc': u'US', u'ip': u'10.15.1.255'}], data_expected["address"]) + self.assertEqual(([], {u'10.15.1.255': [u'cc'], + u'127.0.0.1': [u'cc'], + u'192.187.0.1': [u'cc']}), + data_expected["enriched"]) + + def test_adding_geoip_data_if_cc_is_not_valid(self): + data_init = self._get_actual_data_for_adding_asn_cc_if_possible() + self._set_cc_db_side_effect_if_enabled(GeoIP2Error) + data_expected = self._enricher_execution_helper(data_init) + self.assertEqual([{u'ip': u'127.0.0.1'}, + {u'ip': u'192.187.0.1'}, + {u'ip': u'10.15.1.255'}], data_expected["address"]) + self.assertEqual(([], {}), data_expected["enriched"]) + + @mock.patch('n6.utils.enrich.LOGGER') + def test_existing_geoip_data__drop_and_add_cc__if_cc_is_valid(self, LOGGER_mock): + data_init = self._get_actual_data_for_existing_asn_cc_always_dropped_and_new_ones_added_if_possible() + self._set_cc_db_return_value_if_enabled('FR') + data_expected = self._enricher_execution_helper(data_init, expected_num_of_warnings=2) + self.assertEqual([{u'ip': u'127.0.0.1', u'cc': u'FR'}, + {u'ip': u'192.187.0.1', u'cc': u'FR', u'asn': 424242}, + {u'ip': u'10.15.1.255', u'cc': u'FR', u'asn': 434343}], + data_expected["address"]) + self.assertEqual(([], {u'127.0.0.1': [u'cc'], + u'192.187.0.1': [u'cc'], + u'10.15.1.255': [u'cc']}), + data_expected["enriched"]) + self.assertEqual(len(LOGGER_mock.warning.mock_calls), self.expected_num_of_warnings) + + @mock.patch('n6.utils.enrich.LOGGER') + def test_existing_geoip_data__drop_cc__if_cc_is_invalid(self, LOGGER_mock): + data_init = self._get_actual_data_for_existing_asn_cc_always_dropped_and_new_ones_added_if_possible() + self._set_cc_db_side_effect_if_enabled(GeoIP2Error) + data_expected = self._enricher_execution_helper(data_init, expected_num_of_warnings=2) + self.assertEqual([{u'ip': u'127.0.0.1'}, + {u'ip': u'192.187.0.1', u'asn': 424242}, + {u'ip': u'10.15.1.255', u'asn': 434343}], + data_expected["address"]) + self.assertEqual(([], {}), data_expected["enriched"]) + self.assertEqual(len(LOGGER_mock.warning.mock_calls), 
self.expected_num_of_warnings) + + def test__enrich__with_excluded_ips_config__without_any_ip_to_exclude(self): + data = super(TestEnricherNoASNDatabase, + self).test__enrich__with_excluded_ips_config__without_any_ip_to_exclude() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": (["fqdn"], {"127.0.0.1": ["cc", "ip"]}), + "url": "http://www.nask.pl/asd", + "fqdn": "www.nask.pl", + "address": [{"ip": '127.0.0.1', + "cc": 'PL'}]})) + + +class TestEnricherNoCCDatabase(_BaseTestEnricher, unittest.TestCase): + + MOCK_CONFIG = { + 'dnshost': '8.8.8.8', + 'dnsport': 53, + 'geoippath': DEFAULT_GEO_IP_DB_PATH, + 'asndatabasefilename': DEFAULT_ASN_DB_FILENAME, + 'citydatabasefilename': '', + 'excluded_ips': [], + } + + def test__ip_to_asn__called_or_not(self): + data = super(TestEnricherNoCCDatabase, self).test__ip_to_asn__called_or_not() + self._assert_geoip_method_called(self.enricher.ip_to_asn, data) + + def test__ip_to_cc__called_or_not(self): + super(TestEnricherNoCCDatabase, self).test__ip_to_cc__called_or_not() + self._assert_geoip_method_not_called(self.enricher.ip_to_cc) + + def test__enrich__with_fqdn_given(self): + data = self.enricher.enrich(RecordDict({"fqdn": "cert.pl"})) + self.enricher._resolver.query.assert_called_once_with("cert.pl", "A") + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"127.0.0.1": ["asn", "ip"]}), + "fqdn": "cert.pl", + "address": [{"ip": '127.0.0.1', + "asn": '1234'}]})) + + def test__enrich__with_fqdn_given__resolved_to_various_ips_with_duplicates(self): + data = super(TestEnricherNoCCDatabase, + self).test__enrich__with_fqdn_given__resolved_to_various_ips_with_duplicates() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"1.0.1.1": ["asn", "ip"], + "1.1.1.1": ["asn", "ip"], + "12.11.10.9": ["asn", "ip"], + "127.0.0.1": ["asn", "ip"], + "13.1.2.3": ["asn", "ip"], + "2.2.2.2": ["asn", "ip"]}), + "fqdn": "cert.pl", + "address": [{"ip": '1.0.1.1', # note: *removed IP duplicates* and + "asn": '1234'}, # *ordered* by IP (textually) + {"ip": '1.1.1.1', + "asn": '1234'}, + {"ip": '12.11.10.9', + "asn": '1234'}, + {"ip": '127.0.0.1', + "asn": '1234'}, + {"ip": '13.1.2.3', + "asn": '1234'}, + {"ip": '2.2.2.2', + "asn": '1234'}]})) + + def test__enrich__with_url_given(self): + data = super(TestEnricherNoCCDatabase, self).test__enrich__with_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": (["fqdn"], {"127.0.0.1": ["asn", "ip"]}), + "url": "http://www.nask.pl/asd", + "fqdn": "www.nask.pl", + "address": [{"ip": '127.0.0.1', + "asn": '1234'}]})) + + def test__enrich__with_ip_url_given(self): + data = super(TestEnricherNoCCDatabase, self).test__enrich__with_ip_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"192.168.0.1": ["asn", "ip"]}), + "url": "http://192.168.0.1/asd", + "address": [{"ip": '192.168.0.1', + "asn": '1234'}]})) + + def test__enrich__with_ip_url_given__with_nodns_flag(self): + data = super(TestEnricherNoCCDatabase, + self).test__enrich__with_ip_url_given__with_nodns_flag() + self.assertEqualIncludingTypes(data, RecordDict({ + # (here the '_do_not_resolve_fqdn_to_ip' flag did *not* change behaviour) + "enriched": ([], {"192.168.0.1": ["asn", "ip"]}), + "url": "http://192.168.0.1/asd", + "address": [{"ip": '192.168.0.1', + "asn": '1234'}], + "_do_not_resolve_fqdn_to_ip": True})) + + def test__enrich__with_fqdn_and_url_given(self): + data = super(TestEnricherNoCCDatabase, self).test__enrich__with_fqdn_and_url_given() + 
self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"127.0.0.1": ["asn", "ip"]}), + "url": "http://www.nask.pl/asd", + "fqdn": "cert.pl", + "address": [{"ip": '127.0.0.1', + "asn": '1234'}]})) + + def test__enrich__with_fqdn_and_ip_url_given(self): + data = super(TestEnricherNoCCDatabase, self).test__enrich__with_fqdn_and_ip_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"127.0.0.1": ["asn", "ip"]}), + "url": "http://192.168.0.1/asd", + "fqdn": "cert.pl", + "address": [{"ip": '127.0.0.1', + "asn": '1234'}]})) + + def test__enrich__with_address_and_fqdn_given(self): + data = super(TestEnricherNoCCDatabase, self).test__enrich__with_address_and_fqdn_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"10.20.30.40": ["asn"]}), + "fqdn": "cert.pl", + "address": [{"ip": '10.20.30.40', + "asn": '1234'}]})) + + def test__enrich__with_address_and_fqdn_given__with_nodns_flag(self): + data = super(TestEnricherNoCCDatabase, + self).test__enrich__with_address_and_fqdn_given__with_nodns_flag() + self.assertEqualIncludingTypes(data, RecordDict({ + # (here the '_do_not_resolve_fqdn_to_ip' flag did *not* change behaviour) + "enriched": ([], {"10.20.30.40": ["asn"]}), + "fqdn": "cert.pl", + "address": [{"ip": '10.20.30.40', + "asn": '1234'}], + "_do_not_resolve_fqdn_to_ip": True})) + + def test__enrich__with_address_and_url_given(self): + data = super(TestEnricherNoCCDatabase, self).test__enrich__with_address_and_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": (["fqdn"], {"10.20.30.40": ["asn"]}), + "url": "http://www.nask.pl/asd", + "fqdn": "www.nask.pl", + "address": [{"ip": '10.20.30.40', + "asn": '1234'}]})) + + def test__enrich__with_address_and_ip_url_given(self): + data = super(TestEnricherNoCCDatabase, self).test__enrich__with_address_and_ip_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"10.20.30.40": ["asn"]}), + "url": "http://192.168.0.3/asd", + "address": [{"ip": '10.20.30.40', + "asn": '1234'}]})) + + def test__enrich__with_address_and_fqdn_and_url_given(self): + data = super(TestEnricherNoCCDatabase, + self).test__enrich__with_address_and_fqdn_and_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"10.20.30.40": ["asn"]}), + "fqdn": "cert.pl", + "url": "http://www.nask.pl/asd", + "address": [{"ip": '10.20.30.40', + "asn": '1234'}]})) + + def test__enrich__with_address_and_fqdn_and_ip_url_given(self): + data = super(TestEnricherNoCCDatabase, + self).test__enrich__with_address_and_fqdn_and_ip_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"10.20.30.40": ["asn"]}), + "fqdn": "cert.pl", + "url": "http://192.168.0.1/asd", + "address": [{"ip": '10.20.30.40', + "asn": '1234'}]})) + + def test_adding_geoip_data_if_asn_is_valid(self): + data_init = self._get_actual_data_for_adding_asn_cc_if_possible() + self._set_asn_db_return_value_if_enabled(45678) + data_expected = self._enricher_execution_helper(data_init) + self.assertEqual([{u'asn': 45678, u'ip': u'127.0.0.1'}, + {u'asn': 45678, u'ip': u'192.187.0.1'}, + {u'asn': 45678, u'ip': u'10.15.1.255'}], data_expected["address"]) + self.assertEqual(([], {u'10.15.1.255': [u'asn'], + u'127.0.0.1': [u'asn'], + u'192.187.0.1': [u'asn']}), + data_expected["enriched"]) + + def test_adding_geoip_data_if_asn_is_not_valid(self): + data_init = self._get_actual_data_for_adding_asn_cc_if_possible() + 
self._set_asn_db_side_effect_if_enabled(GeoIP2Error) + data_expected = self._enricher_execution_helper(data_init) + self.assertEqual([{u'ip': u'127.0.0.1'}, + {u'ip': u'192.187.0.1'}, + {u'ip': u'10.15.1.255'}], data_expected["address"]) + self.assertEqual(([], {}), data_expected["enriched"]) + + @mock.patch('n6.utils.enrich.LOGGER') + def test_existing_geoip_data__drop_and_add_asn__if_asn_is_valid(self, LOGGER_mock): + data_init = self._get_actual_data_for_existing_asn_cc_always_dropped_and_new_ones_added_if_possible() + self._set_asn_db_return_value_if_enabled(456789) + data_expected = self._enricher_execution_helper(data_init, expected_num_of_warnings=2) + self.assertEqual([{u'ip': u'127.0.0.1', u'cc': u'JP', u'asn': 456789}, + {u'ip': u'192.187.0.1', u'cc': u'US', u'asn': 456789}, + {u'ip': u'10.15.1.255', u'asn': 456789}], + data_expected["address"]) + self.assertEqual(([], {u'127.0.0.1': [u'asn'], + u'192.187.0.1': [u'asn'], + u'10.15.1.255': [u'asn']}), + data_expected["enriched"]) + self.assertEqual(len(LOGGER_mock.warning.mock_calls), self.expected_num_of_warnings) + + @mock.patch('n6.utils.enrich.LOGGER') + def test_existing_geoip_data__drop_asn__if_asn_is_invalid(self, LOGGER_mock): + data_init = self._get_actual_data_for_existing_asn_cc_always_dropped_and_new_ones_added_if_possible() + self._set_asn_db_side_effect_if_enabled(GeoIP2Error) + data_expected = self._enricher_execution_helper(data_init, expected_num_of_warnings=2) + self.assertEqual([{u'ip': u'127.0.0.1', u'cc': 'JP'}, + {u'ip': u'192.187.0.1', u'cc': 'US'}, + {u'ip': u'10.15.1.255'}], + data_expected["address"]) + self.assertEqual(([], {}), data_expected["enriched"]) + self.assertEqual(len(LOGGER_mock.warning.mock_calls), self.expected_num_of_warnings) + + def test__enrich__with_excluded_ips_config__without_any_ip_to_exclude(self): + data = super(TestEnricherNoCCDatabase, + self).test__enrich__with_excluded_ips_config__without_any_ip_to_exclude() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": (["fqdn"], {"127.0.0.1": ["asn", "ip"]}), + "url": "http://www.nask.pl/asd", + "fqdn": "www.nask.pl", + "address": [{"ip": '127.0.0.1', + "asn": '1234'}]})) + + +class TestEnricherNoGeoIPDatabase(_BaseTestEnricher, unittest.TestCase): + + MOCK_CONFIG = { + 'dnshost': '8.8.8.8', + 'dnsport': 53, + 'geoippath': '', + 'asndatabasefilename': '', + 'citydatabasefilename': '', + 'excluded_ips': [], + } + + def test__ip_to_asn__called_or_not(self): + data = super(TestEnricherNoGeoIPDatabase, self).test__ip_to_asn__called_or_not() + self._assert_geoip_method_not_called(self.enricher.ip_to_asn) + + def test__ip_to_cc__called_or_not(self): + super(TestEnricherNoGeoIPDatabase, self).test__ip_to_cc__called_or_not() + self._assert_geoip_method_not_called(self.enricher.ip_to_cc) + + def test__enrich__with_fqdn_given(self): + data = self.enricher.enrich(RecordDict({"fqdn": "cert.pl"})) + self.enricher._resolver.query.assert_called_once_with("cert.pl", "A") + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"127.0.0.1": ["ip"]}), + "fqdn": "cert.pl", + "address": [{"ip": '127.0.0.1'}]})) + + def test__enrich__with_fqdn_given__resolved_to_various_ips_with_duplicates(self): + data = super(TestEnricherNoGeoIPDatabase, + self).test__enrich__with_fqdn_given__resolved_to_various_ips_with_duplicates() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"1.0.1.1": ["ip"], + "1.1.1.1": ["ip"], + "12.11.10.9": ["ip"], + "127.0.0.1": ["ip"], + "13.1.2.3": ["ip"], + "2.2.2.2": ["ip"]}), + "fqdn": 
"cert.pl", + "address": [{"ip": '1.0.1.1'}, # note: *removed IP duplicates* and + {"ip": '1.1.1.1'}, # *ordered* by IP (textually) + {"ip": '12.11.10.9'}, + {"ip": '127.0.0.1'}, + {"ip": '13.1.2.3'}, + {"ip": '2.2.2.2'}]})) + + def test__enrich__with_url_given(self): + data = super(TestEnricherNoGeoIPDatabase, self).test__enrich__with_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": (["fqdn"], {"127.0.0.1": ["ip"]}), + "url": "http://www.nask.pl/asd", + "fqdn": "www.nask.pl", + "address": [{"ip": '127.0.0.1'}]})) + + def test__enrich__with_ip_url_given(self): + data = super(TestEnricherNoGeoIPDatabase, self).test__enrich__with_ip_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"192.168.0.1": ["ip"]}), + "url": "http://192.168.0.1/asd", + "address": [{"ip": '192.168.0.1'}]})) + + def test__enrich__with_ip_url_given__with_nodns_flag(self): + data = super(TestEnricherNoGeoIPDatabase, + self).test__enrich__with_ip_url_given__with_nodns_flag() + self.assertEqualIncludingTypes(data, RecordDict({ + # (here the '_do_not_resolve_fqdn_to_ip' flag did *not* change behaviour) + "enriched": ([], {"192.168.0.1": ["ip"]}), + "url": "http://192.168.0.1/asd", + "address": [{"ip": '192.168.0.1'}], + "_do_not_resolve_fqdn_to_ip": True})) + + def test__enrich__with_fqdn_and_url_given(self): + data = super(TestEnricherNoGeoIPDatabase, self).test__enrich__with_fqdn_and_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"127.0.0.1": ["ip"]}), + "url": "http://www.nask.pl/asd", + "fqdn": "cert.pl", + "address": [{"ip": '127.0.0.1'}]})) + + def test__enrich__with_fqdn_and_ip_url_given(self): + data = super(TestEnricherNoGeoIPDatabase, self).test__enrich__with_fqdn_and_ip_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {"127.0.0.1": ["ip"]}), + "url": "http://192.168.0.1/asd", + "fqdn": "cert.pl", + "address": [{"ip": '127.0.0.1'}]})) + + def test__enrich__with_address_and_fqdn_given(self): + data = super(TestEnricherNoGeoIPDatabase, self).test__enrich__with_address_and_fqdn_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {}), + "fqdn": "cert.pl", + "address": [{"ip": '10.20.30.40'}]})) + + def test__enrich__with_address_and_fqdn_given__with_nodns_flag(self): + data = super(TestEnricherNoGeoIPDatabase, + self).test__enrich__with_address_and_fqdn_given__with_nodns_flag() + self.assertEqualIncludingTypes(data, RecordDict({ + # (here the '_do_not_resolve_fqdn_to_ip' flag did *not* change behaviour) + "enriched": ([], {}), + "fqdn": "cert.pl", + "address": [{"ip": '10.20.30.40'}], + "_do_not_resolve_fqdn_to_ip": True})) + + def test__enrich__with_address_and_url_given(self): + data = super(TestEnricherNoGeoIPDatabase, self).test__enrich__with_address_and_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": (["fqdn"], {}), + "url": "http://www.nask.pl/asd", + "fqdn": "www.nask.pl", + "address": [{"ip": '10.20.30.40'}]})) + + def test__enrich__with_address_and_ip_url_given(self): + data = super(TestEnricherNoGeoIPDatabase, self).test__enrich__with_address_and_ip_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {}), + "url": "http://192.168.0.3/asd", + "address": [{"ip": '10.20.30.40'}]})) + + def test__enrich__with_address_and_fqdn_and_url_given(self): + data = super(TestEnricherNoGeoIPDatabase, + self).test__enrich__with_address_and_fqdn_and_url_given() + self.assertEqualIncludingTypes(data, 
RecordDict({ + "enriched": ([], {}), + "fqdn": "cert.pl", + "url": "http://www.nask.pl/asd", + "address": [{"ip": '10.20.30.40'}]})) + + def test__enrich__with_address_and_fqdn_and_ip_url_given(self): + data = super(TestEnricherNoGeoIPDatabase, + self).test__enrich__with_address_and_fqdn_and_ip_url_given() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": ([], {}), + "fqdn": "cert.pl", + "url": "http://192.168.0.1/asd", + "address": [{"ip": '10.20.30.40'}]})) + + def test_existing_geoip_data__drop_and_add_asn__if_asn_is_valid(self): + # no additional GeoIP data should be added and existing ASN/CC + # values should not be dropped + data_init = self._get_actual_data_for_existing_asn_cc_always_dropped_and_new_ones_added_if_possible() + data_expected = self._enricher_execution_helper(data_init) + self.assertEqual([{u'ip': u'127.0.0.1', u'cc': u'JP'}, + {u'ip': u'192.187.0.1', u'cc': u'US', u'asn': 424242}, + {u'ip': u'10.15.1.255', u'asn': 434343}], + data_expected["address"]) + self.assertEqual(([], {}), data_expected["enriched"]) + + def test__enrich__with_excluded_ips_config__without_any_ip_to_exclude(self): + data = super(TestEnricherNoGeoIPDatabase, + self).test__enrich__with_excluded_ips_config__without_any_ip_to_exclude() + self.assertEqualIncludingTypes(data, RecordDict({ + "enriched": (["fqdn"], {"127.0.0.1": ["ip"]}), + "url": "http://www.nask.pl/asd", + "fqdn": "www.nask.pl", + "address": [{"ip": '127.0.0.1'}]})) diff --git a/N6Core/n6/tests/utils/test_filter.py b/N6Core/n6/tests/utils/test_filter.py index 207a05f..9944261 100644 --- a/N6Core/n6/tests/utils/test_filter.py +++ b/N6Core/n6/tests/utils/test_filter.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2013-2018 NASK. All rights reserved. +# Copyright (c) 2013-2020 NASK. All rights reserved. import json import unittest @@ -9,7 +9,7 @@ from n6.base.queue import QueuedBase from n6.utils.filter import Filter -from n6lib.auth_api import AuthAPI +from n6lib.auth_api import InsideCriteriaResolver from n6lib.record_dict import RecordDict, AdjusterError @@ -97,22 +97,22 @@ class TestFilter(unittest.TestCase): def setUp(self): self.filter = Filter.__new__(Filter) - self.auth_api_mock = self.filter.auth_api = self._make_auth_api_mock() + self.per_test_inside_criteria = None # to be set in methods that need it + self.filter.auth_api = self._make_auth_api_mock() self.fqdn_only_categories = frozenset(['leak']) def _make_auth_api_mock(self): m = MagicMock() - get_inside_criteria_resolver = ( - # get the original method func, unwrapping it (removing the decorator) - AuthAPI.get_inside_criteria_resolver.__func__.func) - m.get_inside_criteria_resolver = lambda: get_inside_criteria_resolver(m) + m.get_inside_criteria_resolver.side_effect = ( + lambda: InsideCriteriaResolver(self.per_test_inside_criteria)) return m def tearDown(self): assert all( - c == call._get_inside_criteria() + c == call.get_inside_criteria_resolver() for c in self.filter.auth_api.mock_calls), 'test must be updated?' 
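
A note on the mocking pattern above: rather than stubbing the removed private `_get_inside_criteria()` method, the mock's `get_inside_criteria_resolver()` now uses `side_effect` to build a fresh `InsideCriteriaResolver` from `self.per_test_inside_criteria` on each call, so each test only has to assign that attribute. A minimal self-contained sketch of the same pattern (the resolver stand-in and criteria values are illustrative, not the actual n6 classes):

    from mock import MagicMock   # Python 2; on Python 3 use unittest.mock

    class FakeResolver(object):
        # stand-in for InsideCriteriaResolver, only to keep the sketch runnable
        def __init__(self, inside_criteria):
            self.inside_criteria = inside_criteria

    state = {'criteria': None}   # plays the role of `self.per_test_inside_criteria`
    auth_api = MagicMock()
    auth_api.get_inside_criteria_resolver.side_effect = (
        lambda: FakeResolver(state['criteria']))

    state['criteria'] = [{'org_id': 'org1000', 'fqdn_seq': [u'xn--krlgr-1tac.pl']}]
    resolver = auth_api.get_inside_criteria_resolver()
    assert resolver.inside_criteria[0]['org_id'] == 'org1000'
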
+ def test_parameters_queue(self): """Test parameters queue.""" self.assertTrue(issubclass(Filter, QueuedBase)) @@ -122,15 +122,14 @@ def test_parameters_queue(self): 'topic') self.assertEqual(Filter.input_queue['queue_name'], 'filter') - self.assertEqual(Filter.input_queue['binding_keys'], - ['event.enriched.*.*', - 'bl-new.compared.*.*', - 'bl-update.compared.*.*', - 'bl-change.compared.*.*', - 'bl-delist.compared.*.*', - 'bl-expire.compared.*.*', - 'suppressed.enriched.*.*', - 'bl-update.enriched.*.*', ]) + self.assertEqual(Filter.input_queue['accepted_event_types'], + ['event', + 'bl-new', + 'bl-update', + 'bl-change', + 'bl-delist', + 'bl-expire', + 'suppressed']) self.assertEqual(Filter.output_queue['exchange'], 'event') self.assertEqual(Filter.output_queue['exchange_type'], @@ -165,7 +164,7 @@ def test__get_client_and_urls_matched__1(self): 'asn': ['43756', ['afbc']], 'fqdn': ['mycertbridgeonetalamakotawpmikmoknask.org', ['afbc']]} - self.auth_api_mock._get_inside_criteria.return_value = TEST_CRITERIA + self.per_test_inside_criteria = TEST_CRITERIA for i in input_data: body = self.reset_body(body) @@ -196,7 +195,7 @@ def test__get_client_and_urls_matched__2(self): 'asn': ['45975', ['fdc']], 'fqdn': ['onetbridgemikmokcert.eu', ['fdc']]} - self.auth_api_mock._get_inside_criteria.return_value = TEST_CRITERIA + self.per_test_inside_criteria = TEST_CRITERIA for i in input_data: body = self.reset_body(body) @@ -227,7 +226,7 @@ def test__get_client_and_urls_matched__3(self): 'asn': ['31110', ['befa']], 'fqdn': ['makabimikmokvirutonet.biz', ['befa']]} - self.auth_api_mock._get_inside_criteria.return_value = TEST_CRITERIA + self.per_test_inside_criteria = TEST_CRITERIA for i in input_data: body = self.reset_body(body) if i == 'fqdn': @@ -282,7 +281,7 @@ def test__get_client_and_urls_matched__empty_cc(self): 'asn': ['31110', ['befa']], 'fqdn': ['makabimikmokvirutonet.biz', ['befa']]} - self.auth_api_mock._get_inside_criteria.return_value = test_criteria_local + self.per_test_inside_criteria = test_criteria_local for i in input_data: body = self.reset_body(body) if i == 'fqdn': @@ -595,7 +594,7 @@ def test__get_client_and_urls_matched__empty_fileds_asn_ip_cc_fqdn_address(self) "sport": "2147", "dip": "10.28.71.43", "id": "023a00e7c2ef04ee5b0f767ba73ee397"} # test_all_fields - self.auth_api_mock._get_inside_criteria.return_value = test_criteria_local + self.per_test_inside_criteria = test_criteria_local body['fqdn'] = 'onet.pl' body['address'][0]['cc'] = 'GH' body['address'][0]['asn'] = '1234' @@ -919,7 +918,7 @@ def prepare_mock(self, test_criteria_local): "source": "hpfeeds.dionaea", "time": "2013-07-01 20:37:20", "dport": "445", "rid": "023a00e7c2ef04ee5b0f767ba73ee397", "sport": "2147", "dip": "10.28.71.43", "id": "023a00e7c2ef04ee5b0f767ba73ee397"} - self.auth_api_mock._get_inside_criteria.return_value = test_criteria_local + self.per_test_inside_criteria = test_criteria_local body = self.reset_body(body) return body @@ -1040,10 +1039,11 @@ def test__get_client_and_urls_matched__only_fqdn(self): ([], {})) def test__get_client_and_urls_matched__with_idna_fqdn(self): - self.auth_api_mock._get_inside_criteria.return_value = [ - {'org_id': 'org1000', - 'fqdn_seq': [u'xn--krlgr-1tac.pl'], # `królgór.pl`, IDNA-encoded + coerced to unicode - }] + test_criteria_local = [{ + 'org_id': 'org1000', + 'fqdn_seq': [u'xn--krlgr-1tac.pl'], # `królgór.pl`, IDNA-encoded + coerced to unicode + }] + self.per_test_inside_criteria = test_criteria_local body = {"category": "bots", "restriction": "public", 
"confidence": "medium", "name": "virut", "address": [{"cc": "XX", "ip": "1.1.1.1"}], "source": "hpfeeds.dionaea", "time": "2013-07-01 20:37:20", @@ -1055,10 +1055,11 @@ def test__get_client_and_urls_matched__with_idna_fqdn(self): (['org1000'], {})) def test__get_client_and_urls_matched__with_unicode_url_pattern(self): - self.auth_api_mock._get_inside_criteria.return_value = [ - {'org_id': 'org11', - 'url_seq': [u'władcażlebów.pl'], - }] + test_criteria_local = [{ + 'org_id': 'org11', + 'url_seq': [u'władcażlebów.pl'], + }] + self.per_test_inside_criteria = test_criteria_local body = {"category": "bots", "restriction": "public", "confidence": "medium", "name": "virut", "address": [{"cc": "XX", "ip": "1.1.1.1"}], "source": "hpfeeds.dionaea", "time": "2013-07-01 20:37:20", diff --git a/N6Core/n6/utils/aggregator.py b/N6Core/n6/utils/aggregator.py index 7f40ef1..a276eae 100644 --- a/N6Core/n6/utils/aggregator.py +++ b/N6Core/n6/utils/aggregator.py @@ -1,4 +1,4 @@ -# Copyright (c) 2013-2019 NASK. All rights reserved. +# Copyright (c) 2013-2021 NASK. All rights reserved. import collections import cPickle @@ -11,9 +11,12 @@ QueuedBase, n6QueueProcessingException, ) -from n6lib.config import Config +from n6lib.config import ConfigMixin from n6lib.datetime_helpers import parse_iso_datetime_to_utc -from n6lib.log_helpers import get_logger, logging_configured +from n6lib.log_helpers import ( + get_logger, + logging_configured, +) from n6lib.record_dict import RecordDict @@ -29,14 +32,11 @@ # in seconds, tick between checks of inactive sources TICK_TIMEOUT = 3600 -# in seconds -DEFAULT_TIME_TOLERANCE = 600 - class HiFreqEventData(object): def __init__(self, payload): - self.group = payload.get("_group") + self.group = payload.get('_group') self.until = parse_iso_datetime_to_utc(payload.get('time')) self.first = parse_iso_datetime_to_utc(payload.get('time')) self.count = 1 # XXX: see ticket #6243 @@ -105,7 +105,7 @@ def process_event(self, data): return True if (event_time > event.until + datetime.timedelta(hours=AGGREGATE_WAIT) or - event_time.date() > self.time.date()): + event_time.date() > self.time.date()): LOGGER.debug("A suppressed event is generated for the '%s' group of " "'%s' source due to passing of %s hours between events.", data['_group'], data['source'], AGGREGATE_WAIT) @@ -155,10 +155,10 @@ def generate_suppressed_events(self): del self.buffer[k] def generate_suppressed_events_after_inactive(self): - for k, v in self.buffer.iteritems(): + for _, v in self.buffer.iteritems(): # XXX: see ticket #6243 (check whether here is OK or also will need to be changed) yield 'suppressed', v.to_dict() if v.count > 1 else None - for k, v in self.groups.iteritems(): + for _, v in self.groups.iteritems(): # XXX: see ticket #6243 (check whether here is OK or also will need to be changed) yield 'suppressed', v.to_dict() if v.count > 1 else None self.groups.clear() @@ -174,7 +174,7 @@ class AggregatorData(object): def __init__(self): self.sources = {} - def get_or_create_sourcedata(self, event, time_tolerance=DEFAULT_TIME_TOLERANCE): + def get_or_create_sourcedata(self, event, time_tolerance): source = event['source'] sd = self.sources.get(source) if sd is None: @@ -182,50 +182,66 @@ def get_or_create_sourcedata(self, event, time_tolerance=DEFAULT_TIME_TOLERANCE) self.sources[source] = sd return sd + def get_sourcedata(self, event): + # event['source'] exists because it was created in + # `Aggregator.process_event()` where `process_new_message(data)` + # is run before 
`generate_suppresed_events_for_source(data)`.
+        return self.sources[event['source']]
+
     def __repr__(self):
         return repr(self.sources)
 
 
 class AggregatorDataWrapper(object):
 
-    def __init__(self, dbpath, time_tolerance):
+    def __init__(self, dbpath, time_tolerance, time_tolerance_per_source):
         self.aggr_data = None
         self.dbpath = dbpath
         self.time_tolerance = time_tolerance
+        self.time_tolerance_per_source = time_tolerance_per_source
         try:
             self.restore_state()
         except:
-            LOGGER.error("Error restoring state from: %r", self.dbpath)
+            LOGGER.error('Error restoring state from: %r', self.dbpath)
             self.aggr_data = AggregatorData()
 
     def store_state(self):
         try:
-            with open(self.dbpath, "w") as f:
+            with open(self.dbpath, 'w') as f:
                 cPickle.dump(self.aggr_data, f)
         except IOError:
-            LOGGER.error("Error saving state to: %r", self.dbpath)
+            LOGGER.error('Error saving state to: %r', self.dbpath)
 
     def restore_state(self):
-        with open(self.dbpath, "r") as f:
+        with open(self.dbpath, 'r') as f:
             self.aggr_data = cPickle.load(f)
 
     def process_new_message(self, data):
-        """Processes a message and validates agains db to detect suppressed event.
+        """
+        Processes a message and validates it against the db to detect
+        a suppressed event.
 
         Adds new entry to db if necessary (new) or updates entry.
 
         Returns:
-            True: when first event in the group received (i.e. should not be suppressed)
-            False: when next event in group received (i.e. should be suppressed and count updated)
+            True: when the first event in the group is received
+                (i.e. it should not be suppressed)
+            False: when a subsequent event in the group is received
+                (i.e. it should be suppressed and the count updated)
         """
-        source_data = self.aggr_data.get_or_create_sourcedata(data, self.time_tolerance)
+        source_data = self.aggr_data.get_or_create_sourcedata(
+            data,
+            self.time_tolerance_per_source.get(data['source']) or self.time_tolerance,
+        )
         result = source_data.process_event(data)
         return result
 
     def generate_suppresed_events_for_source(self, data):
-        """Called after each event in a given source was processed. Yields suppressed events
         """
-        source_data = self.aggr_data.get_or_create_sourcedata(data)
+        Called after each event in a given source was processed.
+        Yields suppressed events.
+ """ + source_data = self.aggr_data.get_sourcedata(data) for event in source_data.generate_suppressed_events(): yield event @@ -245,22 +261,31 @@ def generate_suppresed_events_after_timeout(self): yield type_, event -class Aggregator(QueuedBase): - - input_queue = {"exchange": "event", - "exchange_type": "topic", - "queue_name": "aggregator", - "binding_keys": ["hifreq.parsed.*.*"] - } - output_queue = {"exchange": "event", - "exchange_type": "topic" - } +class Aggregator(ConfigMixin, QueuedBase): + + input_queue = { + 'exchange': 'event', + 'exchange_type': 'topic', + 'queue_name': 'aggregator', + 'accepted_event_types': [ + 'hifreq', + ], + } + output_queue = { + 'exchange': 'event', + 'exchange_type': 'topic', + } + + config_spec = ''' + [aggregator] + dbpath + time_tolerance :: int + time_tolerance_per_source = {} :: json + ''' def __init__(self, **kwargs): - config = Config(required={"aggregator": ("dbpath", "time_tolerance")}) - self.aggregator_config = config["aggregator"] - self.aggregator_config["dbpath"] = os.path.expanduser(self.aggregator_config["dbpath"]) - dbpath_dirname = os.path.dirname(self.aggregator_config["dbpath"]) + self.aggregator_config = self.get_config_section() + dbpath_dirname = os.path.dirname(self.aggregator_config['dbpath']) try: os.makedirs(dbpath_dirname, 0700) except OSError: @@ -268,27 +293,29 @@ def __init__(self, **kwargs): super(Aggregator, self).__init__(**kwargs) # store dir doesn't exist, stop aggregator if not os.path.isdir(dbpath_dirname): - raise Exception('store dir does not exist, stop aggregator, path:', - self.aggregator_config["dbpath"]) + raise Exception('store dir does not exist, stop aggregator, path:', + self.aggregator_config['dbpath']) # store directory exists, but it has no rights to write if not os.access(dbpath_dirname, os.W_OK): raise Exception('stop aggregator, remember to set the rights' ' for user, which runs aggregator, path:', - self.aggregator_config["dbpath"]) - self.db = AggregatorDataWrapper(self.aggregator_config["dbpath"], int(self.aggregator_config["time_tolerance"])) - self.timeout_id = None # id of the 'tick' timeout that executes source cleanup - - def run(self): - super(Aggregator, self).run() + self.aggregator_config['dbpath']) + self.db = AggregatorDataWrapper(self.aggregator_config['dbpath'], + self.aggregator_config['time_tolerance'], + self.aggregator_config['time_tolerance_per_source']) + self.timeout_id = None # id of the 'tick' timeout that executes source cleanup def start_publishing(self): - """Called on startup. - Processes data from db and generates new timeouts for remaining entries + """ + Called on startup. + Processes data from db and generates new timeouts for remaining + entries. """ self.set_timeout() def on_timeout(self): - """Callback called periodically after given timeout. + """ + Callback called periodically after given timeout. """ LOGGER.debug('Tick passed') for type_, event in self.db.generate_suppresed_events_after_timeout(): @@ -297,8 +324,10 @@ def on_timeout(self): self.set_timeout() def process_event(self, data): - """Processes the event aggregation. - Each event also triggers additional suppressed events based on time of the given source. + """ + Processes the event aggregation. + Each event also triggers additional suppressed events based + on time of the given source. 
""" do_publish_new_message = self.db.process_new_message(data) if do_publish_new_message: @@ -308,23 +337,23 @@ def process_event(self, data): self.publish_event((type_, event)) # XXX: can be removed after resolving ticket #6324 - def _clean_count_related_stuff(self, cleaned_payload): - COUNT_MAX = RecordDict.data_spec.count.max_value + @staticmethod + def _clean_count_related_stuff(cleaned_payload): + count_max = RecordDict.data_spec.count.max_value count = cleaned_payload.get('count', 1) - if count > COUNT_MAX: + if count > count_max: cleaned_payload['count_actual'] = count - cleaned_payload['count'] = COUNT_MAX + cleaned_payload['count'] = count_max def _get_cleaned_payload(self, type_, payload): cleaned_payload = payload.copy() - cleaned_payload["type"] = type_ + cleaned_payload['type'] = type_ cleaned_payload.pop('_group', None) self._clean_count_related_stuff(cleaned_payload) return cleaned_payload def publish_event(self, data): - """Publishes event to the output queue - """ + """Publishes event to the output queue""" type_, payload = data if type_ is None: return @@ -342,7 +371,7 @@ def input_callback(self, routing_key, body, properties): record_dict = RecordDict.from_json(body) with self.setting_error_event_info(record_dict): data = dict(record_dict) ## FIXME?: maybe it could be just the record_dict? - if "_group" not in data: + if '_group' not in data: raise n6QueueProcessingException("Hi-frequency source missing '_group' field.") self.process_event(data) diff --git a/N6Core/n6/utils/anonymizer.py b/N6Core/n6/utils/anonymizer.py index 906016e..166f669 100644 --- a/N6Core/n6/utils/anonymizer.py +++ b/N6Core/n6/utils/anonymizer.py @@ -35,9 +35,6 @@ class Anonymizer(QueuedBase): 'exchange': 'event', 'exchange_type': 'topic', 'queue_name': 'anonymizer', - 'binding_keys': [ - '*.filtered.*.*', - ], } output_queue = { diff --git a/N6Core/n6/utils/comparator.py b/N6Core/n6/utils/comparator.py index 20e392a..17d2353 100644 --- a/N6Core/n6/utils/comparator.py +++ b/N6Core/n6/utils/comparator.py @@ -300,14 +300,18 @@ def get_timeout(self, series_id): class Comparator(QueuedBase): - input_queue = {"exchange": "event", - "exchange_type": "topic", - "queue_name": "comparator", - "binding_keys": ["bl.enriched.*.*"] - } - output_queue = {"exchange": "event", - "exchange_type": "topic" - } + input_queue = { + "exchange": "event", + "exchange_type": "topic", + "queue_name": "comparator", + "accepted_event_types": [ + "bl", + ], + } + output_queue = { + "exchange": "event", + "exchange_type": "topic", + } def __init__(self, **kwargs): config = Config(required={"comparator": ("dbpath", "series_timeout", "cleanup_time")}) diff --git a/N6Core/n6/utils/enrich.py b/N6Core/n6/utils/enrich.py index b88a5df..69af0df 100644 --- a/N6Core/n6/utils/enrich.py +++ b/N6Core/n6/utils/enrich.py @@ -1,10 +1,14 @@ -# Copyright (c) 2013-2019 NASK. All rights reserved. +# Copyright (c) 2013-2021 NASK. All rights reserved. import collections import os import urlparse import dns.resolver +# TODO: After migration to Pyton 3.x: remove the `iptools` dependency, +# adjusting our code to use std lib's `ipaddress` (maybe also +# adding IPv4/v6/both-dedicated config converters?), and/or our +# own existing IP-address-related helpers... 
import iptools import maxminddb.const from dns.exception import DNSException @@ -12,7 +16,7 @@ from n6.base.queue import QueuedBase from n6lib.common_helpers import replace_segment, is_ipv4 -from n6lib.config import Config +from n6lib.config import ConfigMixin from n6lib.log_helpers import get_logger, logging_configured from n6lib.record_dict import RecordDict @@ -20,18 +24,17 @@ LOGGER = get_logger(__name__) -class Enricher(QueuedBase): +class Enricher(ConfigMixin, QueuedBase): input_queue = { 'exchange': 'event', 'exchange_type': 'topic', 'queue_name': 'enrichement', - 'binding_keys': [ - 'event.parsed.*.*', - 'bl.parsed.*.*', - 'event.aggregated.*.*', - 'suppressed.aggregated.*.*', - 'bl-update.parsed.*.*', + 'accepted_event_types': [ + 'event', + 'bl', + 'bl-update', + 'suppressed', ], } output_queue = { @@ -39,27 +42,35 @@ class Enricher(QueuedBase): 'exchange_type': 'topic', } + config_spec = """ + [enrich] + dnshost + dnsport :: int + geoippath = "" + asndatabasefilename = "" + citydatabasefilename = "" + excluded_ips = "" :: list_of_str + """ + single_instance = False # # Initialization def __init__(self, **kwargs): + self.is_geodb_enabled = False self.gi_asn = None self.gi_cc = None self._resolver = None - config = Config(required={"enrich": ( - "dnshost", "dnsport", "geoippath", "asndatabasefilename", "citydatabasefilename")}) - self._enrich_config = config["enrich"] + self._enrich_config = self.get_config_section() self.excluded_ips = self._get_excluded_ips() self._setup_geodb() - self._setup_dnsresolver(self._enrich_config["dnshost"], int(self._enrich_config["dnsport"])) + self._setup_dnsresolver(self._enrich_config["dnshost"], self._enrich_config["dnsport"]) super(Enricher, self).__init__(**kwargs) def _get_excluded_ips(self): - if self._enrich_config.get('excluded_ips'): - excluded_ips = [_ip.strip() for _ip in self._enrich_config['excluded_ips'].split(',')] - return iptools.IpRangeList(*excluded_ips) + if self._enrich_config['excluded_ips']: + return iptools.IpRangeList(*self._enrich_config['excluded_ips']) return None def _setup_dnsresolver(self, dnshost, dnsport): @@ -69,12 +80,17 @@ def _setup_dnsresolver(self, dnshost, dnsport): def _setup_geodb(self): geoipdb_path = self._enrich_config["geoippath"] - geoipdb_asn_file = self._enrich_config["asndatabasefilename"] - geoipdb_city_file = self._enrich_config["citydatabasefilename"] - self.gi_asn = database.Reader(fileish=os.path.join(geoipdb_path, geoipdb_asn_file), - mode=maxminddb.const.MODE_MEMORY) - self.gi_cc = database.Reader(fileish=os.path.join(geoipdb_path, geoipdb_city_file), - mode=maxminddb.const.MODE_MEMORY) + if geoipdb_path: + geoipdb_asn_file = self._enrich_config["asndatabasefilename"] + geoipdb_city_file = self._enrich_config["citydatabasefilename"] + if geoipdb_asn_file: + self.gi_asn = database.Reader(fileish=os.path.join(geoipdb_path, geoipdb_asn_file), + mode=maxminddb.const.MODE_MEMORY) + self.is_geodb_enabled = True + if geoipdb_city_file: + self.gi_cc = database.Reader(fileish=os.path.join(geoipdb_path, geoipdb_city_file), + mode=maxminddb.const.MODE_MEMORY) + self.is_geodb_enabled = True # # Main activity @@ -160,39 +176,49 @@ def _filter_out_excluded_ips(self, data, ip_to_enriched_address_keys): data['address'] = _address def _maybe_set_other_address_data(self, data, ip_to_enriched_address_keys): - assert 'address' in data - for addr in data['address']: - # ASN + if self.is_geodb_enabled: + assert 'address' in data + for addr in data['address']: + # ASN + self._maybe_set_asn(addr, data, 
ip_to_enriched_address_keys) + # CC + self._maybe_set_cc(addr, data, ip_to_enriched_address_keys) + + def _maybe_set_asn(self, addr, data, ip_to_enriched_address_keys): + if self.gi_asn is not None: ip = addr['ip'] existing_asn = addr.pop('asn', None) if existing_asn is not None: LOGGER.warning( - 'it should not happen: event\'s `address` ' - 'contained an `asn` (%r) *before* enrichment ' - '-- so the `asn` has been dropped! ' - '[ip: %s; source: %r; event id: %r; rid: %r]', - existing_asn, - ip, - data['source'], - data['id'], - data['rid']) + 'it should not happen: event\'s `address` ' + 'contained an `asn` (%r) *before* enrichment ' + '-- so the `asn` has been dropped! ' + '[ip: %s; source: %r; event id: %r; rid: %r]', + existing_asn, + ip, + data['source'], + data['id'], + data['rid']) asn = self.ip_to_asn(ip) if asn: addr['asn'] = asn ip_to_enriched_address_keys[ip].append('asn') - # CC + + def _maybe_set_cc(self, addr, data, ip_to_enriched_address_keys): + if self.gi_cc is not None: + ip = addr['ip'] existing_cc = addr.pop('cc', None) if existing_cc is not None: LOGGER.warning( - 'it should not happen: event\'s `address` ' - 'contained a `cc` (%r) *before* enrichment ' - '-- so the `cc` has been dropped! ' - '[ip: %s; source: %r; event id: %r; rid: %r]', - existing_cc, - ip, - data['source'], - data['id'], - data['rid']) + 'it should not happen: event\'s `address` ' + 'contained a `cc` (%r) *before* enrichment ' + '-- so the `cc` has been dropped! ' + '[ip: %s; source: %r; event id: %r; rid: %r]', + existing_cc, + ip, + data['source'], + data['id'], + data['rid']) cc = self.ip_to_cc(ip) if cc: addr['cc'] = cc @@ -241,6 +267,7 @@ def fqdn_to_ip(self, fqdn): return sorted(ip_set) def ip_to_asn(self, ip): + assert self.gi_asn is not None try: geoip_asn = self.gi_asn.asn(ip) except errors.GeoIP2Error: @@ -249,6 +276,7 @@ def ip_to_asn(self, ip): return geoip_asn.autonomous_system_number def ip_to_cc(self, ip): + assert self.gi_cc is not None try: geoip_city = self.gi_cc.city(ip) except errors.GeoIP2Error: diff --git a/N6Core/n6/utils/filter.py b/N6Core/n6/utils/filter.py index 0367ab8..e90f9b5 100644 --- a/N6Core/n6/utils/filter.py +++ b/N6Core/n6/utils/filter.py @@ -25,15 +25,14 @@ class Filter(ConfigMixin, QueuedBase): 'exchange': 'event', 'exchange_type': 'topic', 'queue_name': 'filter', - 'binding_keys': [ - 'event.enriched.*.*', - 'bl-new.compared.*.*', - 'bl-update.compared.*.*', - 'bl-change.compared.*.*', - 'bl-delist.compared.*.*', - 'bl-expire.compared.*.*', - 'suppressed.enriched.*.*', - 'bl-update.enriched.*.*', + 'accepted_event_types': [ + 'event', + 'bl-new', + 'bl-update', + 'bl-change', + 'bl-delist', + 'bl-expire', + 'suppressed', ], } diff --git a/N6Core/n6/utils/management/n6manage.py b/N6Core/n6/utils/management/n6manage.py index 868b3ea..a032464 100755 --- a/N6Core/n6/utils/management/n6manage.py +++ b/N6Core/n6/utils/management/n6manage.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2013-2019 NASK. All rights reserved. +# Copyright (c) 2013-2021 NASK. All rights reserved. 
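
A recurring change in this patch: explicit `binding_keys` lists are replaced by `accepted_event_types`, with the translation into AMQP binding keys delegated to the new pipeline configuration machinery (cf. the added `pipeline.conf`), which is not shown in this excerpt. Conceptually, the mapping has roughly this shape (an illustrative sketch only, not the actual n6 API; routing keys follow the `<type>.<state>.<source-label>.<source-channel>` pattern visible in the old lists):

    def make_binding_keys(accepted_event_types, routing_states):
        # e.g. (['event', 'suppressed'], ['enriched'])
        #      -> ['event.enriched.*.*', 'suppressed.enriched.*.*']
        return ['{0}.{1}.*.*'.format(etype, state)
                for etype in accepted_event_types
                for state in routing_states]

    assert make_binding_keys(['bl'], ['enriched']) == ['bl.enriched.*.*']
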
import argparse
 import datetime
@@ -14,7 +14,7 @@
 
 from n6lib.common_helpers import ascii_str
 from n6lib.datetime_helpers import parse_iso_datetime_to_utc
-from n6lib.manage_api import ManageAPI
+from n6corelib.manage_api import ManageAPI
 
 
 #
diff --git a/N6Core/n6/utils/recorder_conf_generator.py b/N6Core/n6/utils/recorder_conf_generator.py
new file mode 100644
index 0000000..b7a05f4
--- /dev/null
+++ b/N6Core/n6/utils/recorder_conf_generator.py
@@ -0,0 +1,384 @@
+# Copyright (c) 2020 NASK. All rights reserved.
+
+import argparse
+import sys
+
+import os.path as osp
+
+from n6lib.data_spec.fields import (
+    FieldValueError,
+    SourceField,
+)
+
+CONF_PATTERN = """
+[program:{prog}]
+command={command} ; the program (relative uses PATH, can take args)
+process_name=%(program_name)s ; process_name expr (default %(program_name)s)
+numprocs=1 ; number of processes copies to start (def 1)
+
+autorestart=unexpected ; whether/when to restart (default: unexpected)
+startsecs=1 ; number of secs prog must stay running (def. 1)
+startretries=3 ; max # of serial start failures (default 3)
+exitcodes=0 ; 'expected' exit codes for process (default 0)
+stopsignal=INT ; signal used to kill process (default TERM)
+stopwaitsecs=10 ; max num secs to wait b4 SIGKILL (default 10)
+stopasgroup=false ; send stop signal to the UNIX process group (default false)
+killasgroup=false ; SIGKILL the UNIX process group (def false)
+
+environment=HOME="/home/dataman"
+"""
+
+
+def _print(msg, file=None):
+    if file is None:
+        file = sys.stdout
+    file.write(msg+"\n")
+
+
+def print_err(msg, *args, **kwargs):
+    file = kwargs.pop('file', sys.stderr)
+    formatted = "[{}] ERROR: {}".format(
+        sys.argv[0], msg.format(*args, **kwargs))
+    _print(formatted, file)
+
+
+def print_msg(msg, *args, **kwargs):
+    _print("[{}] {}".format(
+        sys.argv[0], msg.format(*args, **kwargs)))
+
+
+class RecorderConfigGenerationError(Exception):
+    """
+    General-purpose exception signalling an error
+    during recorder supervisor config generation.
+
+    When `RecorderConfigGenerator` is called from other code,
+    raising this exception (instead of just quitting on error)
+    allows the caller to recover.
+    """
+    def __init__(self, msg):
+        super(RecorderConfigGenerationError, self).__init__()
+        self.exit_msg = msg
+
+
+class RecorderConfigGenerator(object):
+    """
+    Generates supervisor configuration files
+    for the recorders run with flag `--n6recorder-blacklist`
+    or `--n6recorder-non-blacklist`.
+
+    Sources to generate the configuration for are read
+    from the file at `source_file_path`.
+    The file should contain one source per line,
+    in the format `source_label.source_channel`.
+    The sources file can also contain blank lines and/or comments
+    starting with a '#' character.
+    Example:
+
+    # important sources
+    source_l1.channel1
+    important.important_channel
+
+    # rest of the sources
+    some_s.rest_channel
+
+    Most of the implementation is transactional: if an error
+    occurs, no change is made to the outside environment.
+    The exception is writing the configuration to files;
+    if an error occurs during that phase, the files already
+    written are not reverted to their previous state.
+
+    To generate the configuration files, call the method `gen_and_write_source_conf()`.
+    To generate the non-blacklist configuration file, call the method `gen_and_write_non_bl_conf()`.
+    """
+
+    N6RECORDER_BL_CONF_NAME_PATT = "n6recorder_bl_{}"
+    N6RECORDER_NON_BL_CONF_BAME = "n6recorder_non_blacklist"
+
+    def __init__(self, source_file_path, dest_path, overwrite=False, skip_errors=True):
+        """
+        Initializes `RecorderConfigGenerator` instance.
+
+        Args/kwargs:
+            `source_file_path`:
+                *Path to the file* containing sources to generate
+                the configurations for.
+            `dest_path`:
+                *Path to the directory* where the
+                configuration files will be generated to.
+            `overwrite`:
+                Whether the content of the configuration files
+                should be overwritten if the files already exist.
+                If this flag is not set and any of the files exist,
+                an exception is raised without any changes
+                being made to the outside environment.
+                Defaults to `False`.
+            `skip_errors`:
+                By default `RecorderConfigGenerator` halts on every
+                error. This flag changes that behavior so that
+                not every error results in an exception being raised.
+                Instead, execution proceeds as if the error never
+                occurred, and its cause is ignored.
+                If this flag is set, the following
+                errors will be skipped:
+                - wrong source format in the sources file,
+                - configuration file already exists and overwriting flag is not set.
+
+        Raises:
+            `RecorderConfigGenerationError`:
+                If `source_file_path` is not an existing file or
+                `dest_path` is not an existing directory.
+        """
+        super(RecorderConfigGenerator, self).__init__()
+        self.source_file_path = source_file_path
+        self.dest_path = dest_path
+        self.overwrite = overwrite
+        self.skip_errors = skip_errors
+        self._check_source_path()
+        self._check_dest_path()
+
+    def _check_source_path(self):
+        if not osp.isfile(self.source_file_path):
+            raise RecorderConfigGenerationError(
+                "source file '{}' does not exist or is not a file".format(
+                    self.source_file_path))
+
+    def _check_dest_path(self):
+        if not osp.isdir(self.dest_path):
+            raise RecorderConfigGenerationError(
+                "destination path '{}' does not exist or is not a directory".format(
+                    self.dest_path))
+
+    # static helper functions
+
+    @staticmethod
+    def generate_bl_recorder_conf(source):
+        """
+        Creates a configuration for the blacklist recorder
+        for the passed source.
+
+        Returns:
+            Created configuration as `str`.
+        """
+        prog_fmt = RecorderConfigGenerator.N6RECORDER_BL_CONF_NAME_PATT
+        return CONF_PATTERN.format(
+            prog=prog_fmt.format(source.replace(".", "_")),
+            command="n6recorder --n6recorder-blacklist {}".format(source))
+
+    @staticmethod
+    def generate_non_bl_recorder_conf():
+        """
+        Creates a configuration for the non-blacklist
+        recorder.
+
+        Returns:
+            Created configuration as `str`.
+        """
+        return CONF_PATTERN.format(
+            prog=RecorderConfigGenerator.N6RECORDER_NON_BL_CONF_BAME,
+            command="n6recorder --n6recorder-non-blacklist")
+
+    @staticmethod
+    def file_name_from_source(source):
+        """
+        Creates a filename for the configuration file
+        from the given source.
+
+        Returns:
+            Created filename as `str`.
+        """
+        name_patt = RecorderConfigGenerator.N6RECORDER_BL_CONF_NAME_PATT
+        name_patt += ".conf"
+        return name_patt.format(source.replace(".", "_"))
+
+    # logic implementation
+
+    def get_source_configurations(self):
+        """
+        Create a dictionary mapping each source from the
+        sources file to the configuration generated for it.
+
+        Returns:
+            Created dictionary.
+
+        Raises:
+            `RecorderConfigGenerationError`:
+                If the `skip_errors` flag is set to `False`
+                and some source in the file has a wrong format.
+        """
+        configs = {}
+        errors = []
+        with open(self.source_file_path) as src:
+            for line, source in enumerate(src.readlines(), start=1):
+                source = source.rstrip()
+                if not source or self._is_comment(source):
+                    continue
+                try:
+                    source = SourceField().clean_result_value(source)
+                except FieldValueError as e:
+                    err_msg = "({}:{}) {}".format(self.source_file_path, line, e)
+                    if self.skip_errors:
+                        print_msg("skipping error: {}".format(err_msg))
+                        continue
+                    errors.append(err_msg)
+                    continue
+                configs[source] = self.generate_bl_recorder_conf(source)
+        if errors:
+            for err in errors:
+                print_err(err)
+            raise RecorderConfigGenerationError(
+                "there were errors during config generation")
+        return configs
+
+    def _is_comment(self, source):
+        return source.startswith('#')
+
+    def _check_config_files(self, configs):
+        """
+        This method checks whether the paths from the `configs`
+        dictionary already exist. For each existing path, one of
+        three things is done:
+        - if the `skip_errors` and `overwrite` flags are `False`,
+          an exception will be raised.
+        - if `skip_errors` is `True` and `overwrite` is `False`,
+          the path will be removed from the dictionary,
+          so that no write to it happens later.
+        - if `overwrite` is `True`, nothing is done,
+          and later the content of the file will be overwritten
+          with the newly generated configuration.
+
+        Args:
+            `configs`:
+                A dictionary mapping paths to the sources'
+                configuration files with configurations generated
+                for the sources.
+
+        Raises:
+            `RecorderConfigGenerationError`:
+                If the `skip_errors` and `overwrite` flags are set to `False`
+                and one of the paths already exists.
+        """
+        confs_to_del = []
+        errors = []
+        for conf_name in configs:
+            conf_path = osp.join(self.dest_path, conf_name)
+            if osp.exists(conf_path):
+                if not self.overwrite:
+                    err_msg = (
+                        "config file '{}' already exists "
+                        "and overwriting was not allowed "
+                        "(use --overwrite if you want it "
+                        "to be overwritten)").format(conf_path)
+                    if self.skip_errors:
+                        print_msg("skipping error: {}".format(err_msg))
+                        confs_to_del.append(conf_name)
+                        continue
+                    errors.append(err_msg)
+                else:
+                    print_msg("config file '{}' will be overwritten", conf_path)
+        if errors:
+            for err in errors:
+                print_err(err)
+            raise RecorderConfigGenerationError(
+                "there were errors during files checking")
+        for conf in confs_to_del:
+            del configs[conf]
+
+    def _write_configurations(self, configs):
+        """
+        Writes configurations to their designated files,
+        overwriting whatever content was there before.
+
+        Args:
+            `configs`:
+                Dictionary mapping file paths to their
+                new content.
+        """
+        for conf, content in configs.items():
+            wrt_path = osp.join(self.dest_path, conf)
+            with open(wrt_path, 'w') as f:
+                f.write(content)
+
+    def gen_and_write_non_bl_conf(self):
+        """
+        Works like the `gen_and_write_source_conf()` method, but
+        instead of creating configurations for the sources listed
+        in the file, creates a single configuration file for the
+        non-blacklist recorder.
+
+        Raises:
+            `RecorderConfigGenerationError`:
+                If the configuration file already exists and
+                the flags `skip_errors` and `overwrite` are
+                set to `False`.
+        """
+        path = osp.join(
+            self.dest_path,
+            self.N6RECORDER_NON_BL_CONF_BAME + ".conf")
+        if osp.exists(path):
+            if not self.overwrite:
+                err_msg = (
+                    "config file '{}' already exists "
+                    "and overwriting wasn't allowed "
+                    "(use --overwrite if you want it "
+                    "to be overwritten)").format(path)
+                if self.skip_errors:
+                    print_msg("skipping error: {}".format(err_msg))
+                    return
+                raise RecorderConfigGenerationError(err_msg)
+            print_msg("config file '{}' will be overwritten", path)
+        with open(path, 'w') as f:
+            f.write(self.generate_non_bl_recorder_conf())
+
+    def gen_and_write_source_conf(self):
+        """
+        Creates and writes the configuration files for the
+        sources in the source file to the destination path.
+
+        Raises:
+            `RecorderConfigGenerationError`:
+                If there were errors in the called methods.
+                See the other methods' documentation for more details.
+        """
+        configs = self.get_source_configurations()
+        configs = {self.file_name_from_source(k): v for k, v in configs.items()}
+        self._check_config_files(configs)
+        self._write_configurations(configs)
+
+
+def get_argparser():
+    parser = argparse.ArgumentParser(
+        description="Generate supervisor configuration in the given destination"
+                    " directory for the sources given in the source file.")
+    parser.add_argument("source",
+                        help="Path to the source file containing one source per line."
+                             " Source is a string in the format 'source_label.source_channel'.")
+    parser.add_argument("dest",
+                        help="Path to a directory to generate the config files to.")
+    parser.add_argument("-n", "--non-blacklist", action='store_true',
+                        help="In addition to the blacklist recorders, creates "
+                             "the configuration for the non-blacklist recorder.")
+    parser.add_argument("-o", "--overwrite", action='store_true',
+                        help="Overwrite the configuration files if they are already present.")
+    parser.add_argument("-s", "--skip-errors", action='store_true',
+                        help="If set, the script will, where possible, skip errors instead"
+                             " of stopping execution"
+                             " (for example: an illegal value in the source file).")
+    return parser
+
+
+def main():
+    args = get_argparser().parse_args()
+    try:
+        conf_generator = RecorderConfigGenerator(
+            args.source, args.dest, args.overwrite, args.skip_errors)
+        conf_generator.gen_and_write_source_conf()
+        if args.non_blacklist:
+            conf_generator.gen_and_write_non_bl_conf()
+    except RecorderConfigGenerationError as e:
+        print_err(e.exit_msg)
+        sys.exit(1)
+
+if __name__ == "__main__":
+    main()
diff --git a/N6Core/setup.py b/N6Core/setup.py
index d6bee6a..7fd8f65 100644
--- a/N6Core/setup.py
+++ b/N6Core/setup.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2013-2018 NASK. All rights reserved.
+# Copyright (c) 2013-2021 NASK. All rights reserved.
 
 import glob
 import os.path as osp
@@ -8,26 +8,53 @@
 import pkgutil
 
 
-setup_dir = osp.dirname(osp.abspath(__file__))
-
-with open(osp.join(setup_dir, '.n6-version')) as f:
-    n6_version = f.read().strip()
-
-
 if "--collectors-only" in sys.argv:
     collectors_only = True
     sys.argv.remove("--collectors-only")
 else:
     collectors_only = False
 
+setup_dir, setup_filename = osp.split(osp.abspath(__file__))
+setup_human_readable_ref = osp.join(osp.basename(setup_dir), setup_filename)
+
+def get_n6_version(filename_base):
+    path_base = osp.join(setup_dir, filename_base)
+    path_glob_pattern = path_base + '*'
+    # The non-suffixed path variant should be
+    # tried only if another one does not exist.
+ matching_paths = sorted(glob.iglob(path_glob_pattern), + reverse=True) + try: + path = matching_paths[0] + except IndexError: + sys.exit('[{}] Cannot determine the n6 version ' + '(no files match the pattern {!r}).' + .format(setup_human_readable_ref, + path_glob_pattern)) + try: + with open(path) as f: #3: add: `, encoding='ascii'` + return f.read().strip() + except (OSError, UnicodeError) as exc: + sys.exit('[{}] Cannot determine the n6 version ' + '(an error occurred when trying to ' + 'read it from the file {!r} - {}).' + .format(setup_human_readable_ref, + path, + exc)) def setup_data_line_generator(filename_base): path_base = osp.join(setup_dir, filename_base) - path_glob = path_base + '*' - for path in glob.glob(path_glob): - with open(path) as f: - for raw_line in f: - yield raw_line.strip() + path_glob_pattern = path_base + '*' + # Here we sort the paths just to make the order of operations deterministic. + matching_paths = sorted(glob.iglob(path_glob_pattern)) + for path in matching_paths: + try: + with open(path) as f: #3: add: `, encoding='ascii'` + for raw_line in f: + yield raw_line.strip() + except (OSError, UnicodeError) as exc: + sys.exit('[{}] Could not read from the file {!r} ({})' + .format(setup_human_readable_ref, path, exc)) def find_scripts(): console_scripts_list.extend(setup_data_line_generator('console_scripts')) @@ -48,7 +75,6 @@ def all_subclasses(cls): for indirect in all_subclasses(direct)) def find_parsers(): - global console_scripts_list from n6.parsers.generic import BaseParser dirname = "n6/parsers" for importer, package_name, _ in pkgutil.iter_modules([dirname]): @@ -67,7 +93,6 @@ def find_parsers(): console_scripts_list.append(console_line) def find_collectors(): - global console_scripts_list from n6.collectors.generic import AbstractBaseCollector dirname = "n6/collectors" for importer, package_name, _ in pkgutil.iter_modules([dirname]): @@ -86,7 +111,9 @@ def find_collectors(): console_scripts_list.append(console_line) -requirements = ['n6lib==' + n6_version] +n6_version = get_n6_version('.n6-version') + +requirements = ['n6sdk==' + n6_version, 'n6lib==' + n6_version, 'n6corelib==' + n6_version] console_scripts_list = ['n6config = n6.base.config:install_default_config'] if not collectors_only: @@ -101,8 +128,9 @@ def find_collectors(): packages=find_packages(), include_package_data=True, + python_requires='==2.7.*', zip_safe=False, - tests_require=['mock==1.0.1', 'unittest_expander'], + tests_require=['mock==3.0.5', 'unittest_expander==0.3.1'], test_suite='n6.tests', install_requires=requirements, entry_points={ diff --git a/N6CoreLib/.n6-version b/N6CoreLib/.n6-version new file mode 120000 index 0000000..33f888a --- /dev/null +++ b/N6CoreLib/.n6-version @@ -0,0 +1 @@ +../.n6-version \ No newline at end of file diff --git a/N6CoreLib/MANIFEST.in b/N6CoreLib/MANIFEST.in new file mode 100644 index 0000000..2730dd0 --- /dev/null +++ b/N6CoreLib/MANIFEST.in @@ -0,0 +1,3 @@ +include *.txt *.rst requirements requirements-* +recursive-include n6corelib *--data *.conf *.ini +recursive-include n6corelib/data * diff --git a/N6CoreLib/n6corelib/__init__.py b/N6CoreLib/n6corelib/__init__.py new file mode 100644 index 0000000..b1fe03f --- /dev/null +++ b/N6CoreLib/n6corelib/__init__.py @@ -0,0 +1,14 @@ +# Copyright (c) 2020-2021 NASK. All rights reserved. + +# Let's ensure that `TimeoutCallbackManager`, if needed, is prepared +# as early as possible (especially because it needs to have full +# control over any `signal.alarm()` calls...). 
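+#
+# (A hedged usage sketch: as the code below shows, the preparations are
+# gated by the `N6_TIMEOUT_CALLBACK_MANAGER` environment variable being
+# set to any non-empty value -- e.g., hypothetically:
+#     N6_TIMEOUT_CALLBACK_MANAGER=y n6enrich
+# where `n6enrich` stands for just some N6Core component script.)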
+import os +if os.environ.get('N6_TIMEOUT_CALLBACK_MANAGER'): + # *Only* for `N6Core` (especially, *not* for web/WSGI components -- + # such as REST API, Portal or AdminPanel [at least for now; reason: + # Apache's `mod_wsgi` has its own signal handling machinery that may + # conflict with TimeoutCallbackManager; also, in these components + # TimeoutCallbackManager is hardly useful...]). + from n6corelib.timeout_callback_manager import TimeoutCallbackManager + TimeoutCallbackManager.ensure_preparations_and_monkey_patching_done() diff --git a/N6Lib/n6lib/concurrency_helpers.py b/N6CoreLib/n6corelib/concurrency_helpers.py similarity index 93% rename from N6Lib/n6lib/concurrency_helpers.py rename to N6CoreLib/n6corelib/concurrency_helpers.py index 4ed5c5b..ec921cf 100644 --- a/N6Lib/n6lib/concurrency_helpers.py +++ b/N6CoreLib/n6corelib/concurrency_helpers.py @@ -11,70 +11,15 @@ import time # ATTENTION: -# Because this module is used by the `n6lib.timeout_callback_manager` +# Because this module is used by the `n6corelib.timeout_callback_manager` # module which needs some early monkey patching, this module (that is, -# `n6lib.concurrency_helpers`) should import only standard library +# `n6corelib.concurrency_helpers`) should import only standard library # modules -- that is, it should *not* depend on any external libraries # or any n6-specific libraries. # (But, of course, other n6 modules *can* import this module.) -class NonBlockingLockWrapper(object): - - """ - A lock wrapper to acquire a lock in non-blocking manner. - - Constructor args/kwargs: - `lock`: - The threading.Lock or threading.RLock instance to be wrapped. - `lock_description` (optional): - The lock description (for debug purposes). - - Instance interface includes: - * the context manager (`with` statement) interface, - * explicit `acquire()` (argumentless, always non-blocking), - * explicit `release()`. - - If `lock` cannot be acquired, `RuntimeError` is raised (with - `lock_description`, if provided, used in the error message). - - Example use: - my_lock = threading.Lock() # or threading.RLock() - ... - with NonBlockingLockWrapper(my_lock, 'my very important lock') - ... - """ - - def __init__(self, lock, lock_description=None): - self.lock = lock - self._lock_ascii_description = self._make_lock_ascii_description(lock_description) - - def _make_lock_ascii_description(self, lock_description): - if lock_description is None: - return repr(self.lock) - else: - if isinstance(lock_description, str): - lock_description = lock_description.decode('utf-8') - return lock_description.encode('ascii', 'backslashreplace') - - def __enter__(self): - self.acquire() - return self.lock - - def __exit__(self, exc_type, exc_val, exc_tb): - self.release() - - def acquire(self): - if self.lock.acquire(False): - return True - raise RuntimeError('could not acquire {}'.format(self._lock_ascii_description)) - - def release(self): - self.lock.release() - - - def protected_from_concurrency(func=None, mutex_key=None, propagated_exc=Exception, diff --git a/N6Lib/n6lib/email_message.py b/N6CoreLib/n6corelib/email_message.py similarity index 94% rename from N6Lib/n6lib/email_message.py rename to N6CoreLib/n6corelib/email_message.py index b81c011..98cba2c 100644 --- a/N6Lib/n6lib/email_message.py +++ b/N6CoreLib/n6corelib/email_message.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2013-2019 NASK. All rights reserved. +# Copyright (c) 2013-2021 NASK. All rights reserved. 
# # For some code in this module: # Copyright (c) 2001-2013 Python Software Foundation. All rights reserved. @@ -20,7 +20,7 @@ from cStringIO import StringIO from n6lib.log_helpers import get_logger -from n6lib.unpacking_helpers import iter_unzip_from_string, gunzip_from_string +from n6lib.unpacking_helpers import iter_unzip_from_bytes, gzip_decompress LOGGER = get_logger(__name__) @@ -28,12 +28,15 @@ class NoMatchingFileError(ValueError): """ - Raised by EmailMessage.get_matching_file_content if no file matches. + Raised by `ReceivedEmailMessage.get_matching_file_content()` + if no file matches. """ # TODO: doc -class EmailMessage(email.message.Message): +# TODO later: upgrade to have `email.message.EmailMessage` as its base +# with all goodies from modern `email.*` stuff... +class ReceivedEmailMessage(email.message.Message): # # Some constants @@ -160,7 +163,7 @@ def iter_filenames_and_contents(self, multifile_unpacking=False): if (ext in self.GZIP_FILENAME_EXTENSIONS or content_type in self.GZIP_CONTENT_TYPES): try: - payload = gunzip_from_string(payload) + payload = gzip_decompress(payload) except (IOError, EOFError) as exc: LOGGER.warning('Could not decompress file %r using GZip ' 'decoder (%s)', filename, exc) @@ -168,7 +171,7 @@ def iter_filenames_and_contents(self, multifile_unpacking=False): elif (ext in self.ZIP_FILENAME_EXTENSIONS or content_type in self.ZIP_CONTENT_TYPES): try: - names_and_contents = list(iter_unzip_from_string(payload)) + names_and_contents = list(iter_unzip_from_bytes(payload)) except (zipfile.BadZipfile, RuntimeError) as exc: LOGGER.warning('Could not unpack file %r using ZIP ' 'decoder (%s)', filename, exc) diff --git a/N6Lib/n6lib/manage_api/__init__.py b/N6CoreLib/n6corelib/manage_api/__init__.py similarity index 50% rename from N6Lib/n6lib/manage_api/__init__.py rename to N6CoreLib/n6corelib/manage_api/__init__.py index 99390cd..695f7f9 100644 --- a/N6Lib/n6lib/manage_api/__init__.py +++ b/N6CoreLib/n6corelib/manage_api/__init__.py @@ -1,9 +1,6 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- +# Copyright (c) 2013-2021 NASK. All rights reserved. -# Copyright (c) 2013-2019 NASK. All rights reserved. - -from n6lib.manage_api._manage_api import ( +from n6corelib.manage_api._manage_api import ( AccessForbiddenError, ManageAPIError, ManageAPI, diff --git a/N6Lib/n6lib/manage_api/_ca_env.py b/N6CoreLib/n6corelib/manage_api/_ca_env.py similarity index 65% rename from N6Lib/n6lib/manage_api/_ca_env.py rename to N6CoreLib/n6corelib/manage_api/_ca_env.py index d5af591..76e5680 100644 --- a/N6Lib/n6lib/manage_api/_ca_env.py +++ b/N6CoreLib/n6corelib/manage_api/_ca_env.py @@ -1,12 +1,13 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2013-2019 NASK. All rights reserved. +# Copyright (c) 2013-2021 NASK. All rights reserved. """ Low-level details of certificate generation (OpenSSL-based). 
""" import datetime +import logging import os import os.path as osp import shutil @@ -15,14 +16,20 @@ from n6lib.auth_db.models import SERVICE_CA_PROFILE_NAME from n6lib.auth_db.validators import is_cert_serial_number_valid -from n6lib.common_helpers import read_file +from n6lib.common_helpers import ( + ascii_str, + read_file, +) from n6lib.config import ConfigString from n6lib.const import CERTIFICATE_SERIAL_NUMBER_HEXDIGIT_NUM from n6lib.datetime_helpers import datetime_utc_normalize -from n6lib.x509_helpers import normalize_hex_serial_number +from n6corelib.x509_helpers import normalize_hex_serial_number -SERVER_COMPONENT_ADDITIONAL_OPENSSL_COMMAND_ARGS = ('-policy', 'server_component_serviceCA_policy') +SERVER_COMPONENT_POLICY_SECT_NAME = 'server_component_serviceCA_policy' +SERVER_COMPONENT_ADDITIONAL_OPENSSL_COMMAND_ARGS = ('-policy', SERVER_COMPONENT_POLICY_SECT_NAME) + +X509_EXTENSIONS_OPT_NAME = 'x509_extensions' DEFAULT_INDEX_ATTR_CONTENT = 'unique_subject = no' @@ -37,23 +44,20 @@ [pkcs11_section] engine_id = pkcs11 -dynamic_path = {pkcs11_dynamic_path} MODULE_PATH = {pkcs11_module_path} init = 0 ''' # ^ -# example `pkcs11_dynamic_path` value: "/usr/lib/engines/engine_pkcs11.so" -# example `pkcs11_module_path` value: "/usr/lib/x86_64-linux-gnu/opensc-pkcs11.so" -# -# The values of these two formattable fields will be taken from the +# An example value of the `pkcs11_module_path` formattable field: +# "opensc-pkcs11.so". The value of that field will taken from the # appropriate `ca_key_...` option (e.g., `ca_key_client_2 = -# pkcs11:/usr/lib/engines/engine_pkcs11.so:/usr/lib/x86_64-linux-gnu/opensc-pkcs11.so:-keyfile foo:bar -keyform spam`) -# in the appropriate section of the n6 config (by default, the section -# is `[manage_api]`). +# pkcs11:opensc-pkcs11.so:-keyfile foo:bar -keyform spam`) +# in the appropriate section of the n6 config (by default, the +# section is `[manage_api]`). 
# -# Functions used by the n6lib.manage_api._manage_api stuff +# Functions used by the ._manage_api stuff # def get_ca_env_configuration(ca, ca_key_path): @@ -67,11 +71,9 @@ def get_ca_env_configuration(ca, ca_key_path): ) if ca_key_path.startswith('pkcs11:'): (_, - pkcs11_dynamic_path, pkcs11_module_path, - pkcs11_additional_openssl_cmd_args) = ca_key_path.split(':', 3) + pkcs11_additional_openssl_cmd_args) = ca_key_path.split(':', 2) ca_env_configuration['tmp_env_init_kwargs_base']['pkcs11_opts_dict'] = { - 'pkcs11_dynamic_path': pkcs11_dynamic_path, 'pkcs11_module_path': pkcs11_module_path, 'pkcs11_additional_openssl_cmd_arg_list': pkcs11_additional_openssl_cmd_args.split(), } @@ -115,10 +117,16 @@ def generate_certificate_pem(ca_env_configuration, csr_pem, serial_number, ) if server_component_n6login is None: additional_openssl_command_args = () + is_server_component = False else: assert ca_env_configuration['ca'].profile == SERVICE_CA_PROFILE_NAME additional_openssl_command_args = SERVER_COMPONENT_ADDITIONAL_OPENSSL_COMMAND_ARGS + is_server_component = True with TmpEnv(**tmp_env_init_kwargs) as tmp_env: + if is_server_component: + tmp_env.ssl_conf.value.validate_for_public_server_cert() + else: + tmp_env.ssl_conf.value.validate_for_nonpublic_cert() cert_pem = tmp_env.execute_cert_generation(additional_openssl_command_args) return cert_pem @@ -143,6 +151,7 @@ def generate_crl_pem(ca_env_configuration): serial='', ) with TmpEnv(**tmp_env_init_kwargs) as tmp_env: + tmp_env.ssl_conf.value.validate_for_crl() crl_pem = tmp_env.execute_crl_generation() return crl_pem @@ -158,7 +167,7 @@ def revoke_certificate_and_generate_crl_pem(ca_env_configuration, cert_data): others, it specifies also the concerned CA). `cert_data`: The certificate that is being revoked as an instance of - a subclass of n6lib.manage_api._manage_api._CertificateBase. + a subclass of ._manage_api._CertificateBase. Returns: The generated CRL in the PEM format (as a string). 
@@ -173,6 +182,7 @@ def revoke_certificate_and_generate_crl_pem(ca_env_configuration, cert_data): revoke_cert=cert_data.certificate, ) with TmpEnv(**tmp_env_init_kwargs) as tmp_env: + tmp_env.ssl_conf.value.validate_for_crl() tmp_env.execute_cert_revocation() crl_pem = tmp_env.execute_crl_generation() return crl_pem @@ -187,11 +197,161 @@ def revoke_certificate_and_generate_crl_pem(ca_env_configuration, cert_data): class InvalidSSLConfigError(Exception): - def __init__(self, msg, original_exc): - self.original_exc = original_exc + def __init__(self, general_msg, actual_reason): + self.actual_reason = actual_reason + msg = '{}: {}'.format(general_msg, actual_reason) super(InvalidSSLConfigError, self).__init__(msg) +class IncompleteSSLConfigError(InvalidSSLConfigError): + + general_msg = "Incomplete SSL config error" + + def __init__(self, reason): + super(IncompleteSSLConfigError, self).__init__(self.general_msg, reason) + + +class SSLConfigMissingOptError(IncompleteSSLConfigError): + + def __init__(self, missing_opt_name, sect_name=None): + reason = "the option {!r} is missing".format(ascii_str(missing_opt_name)) + if sect_name is not None: + reason = "{} (section: {!r})".format(reason, ascii_str(sect_name)) + super(SSLConfigMissingOptError, self).__init__(reason) + + +class SSLConfigMissingSectError(IncompleteSSLConfigError): + + def __init__(self, missing_sect_name): + reason = "the section {!r} is missing".format(ascii_str(missing_sect_name)) + super(SSLConfigMissingSectError, self).__init__(reason) + + +class SSLConfigEmptySectError(IncompleteSSLConfigError): + + def __init__(self, empty_sect_name): + reason = "the section {!r} exists, but it is empty".format(ascii_str(empty_sect_name)) + super(SSLConfigEmptySectError, self).__init__(reason) + + +class OpenSSLConfig(object): + + _REQUIRED_CA_OPTS_MAPPING = { + 'for_cert': [ + 'default_md', + 'default_days', + ], + 'for_crl': [ + 'default_md', + 'default_crl_days', + ], + } + + def __init__(self, + config_string, + current_dir_path, + paths_to_substitute=None, + pkcs11_opts_dict=None): + try: + self.parsed_config = ConfigString(config_string) + except ValueError as exc: + raise InvalidSSLConfigError("SSL config is not valid", exc) + self._check_nonempty_sect_provided('ca') + self.ca_opt_pattern = self.ca_sect_name + '.{}' + self._substitute_config_opts(paths_to_substitute, current_dir_path) + self._apply_pkcs11_opts(pkcs11_opts_dict) + + def validate_for_public_server_cert(self): + self._validate_required_opt_names('for_cert') + self._check_nonempty_sect_provided(SERVER_COMPONENT_POLICY_SECT_NAME) + self._check_x509_extensions() + + def validate_for_nonpublic_cert(self): + self._validate_required_opt_names('for_cert') + self._check_ca_opt_provided('policy') + policy_sect_name = self.parsed_config.get_opt_value(self.ca_opt_pattern.format('policy')) + self._check_nonempty_sect_provided(policy_sect_name) + self._check_x509_extensions() + + def validate_for_crl(self): + self._validate_required_opt_names('for_crl') + + def __str__(self): + return str(self.parsed_config) + + @property + def ca_sect_name(self): + try: + return self.parsed_config.get_opt_value('ca.default_ca') + except KeyError as exc: + raise SSLConfigMissingOptError(exc.args[0], sect_name='ca') + + def _substitute_config_opts(self, paths_mapping, current_dir_path): + self.parsed_config = self._substitute_ca_opt('dir', current_dir_path) + # unify temporary environment paths and config paths, + # so they do not differ when used by OpenSSL + if paths_mapping: + for opt_name, 
opt_value in paths_mapping.iteritems(): + self.parsed_config = self._substitute_ca_opt(opt_name, opt_value) + + def _apply_pkcs11_opts(self, pkcs11_opts_dict): + if pkcs11_opts_dict: + pkcs11_opts = PKCS11_OPTS_PATTERN.format(**pkcs11_opts_dict) + self.parsed_config = self.parsed_config.insert_above('ca', pkcs11_opts) + # there is no need to catch a KeyError here, absence of + # this config option is noticed earlier in current + # implementation + self.parsed_config = self.parsed_config.remove( + self.ca_opt_pattern.format('private_key')) + + def _substitute_ca_opt(self, opt_name, opt_value): + config_opt = self.ca_opt_pattern.format(opt_name) + try: + return self.parsed_config.substitute(config_opt, '{} = {}'.format(opt_name, opt_value)) + except KeyError as exc: + raise SSLConfigMissingOptError(exc.args[0], sect_name=self.ca_sect_name) + + def _validate_required_opt_names(self, kind_of_operation): + try: + required_opt_names = self._REQUIRED_CA_OPTS_MAPPING[kind_of_operation] + except KeyError: + raise RuntimeError("Unknown 'kind of operation' = {!r}".format(kind_of_operation)) + for opt_name in required_opt_names: + self._check_ca_opt_provided(opt_name) + + def _check_x509_extensions(self): + opt_location = self.ca_opt_pattern.format(X509_EXTENSIONS_OPT_NAME) + if not self.parsed_config.contains(opt_location): + logging.warning("The option {!r} in OpenSSL config (section {!r}) is missing; " + "the section, which is referred to by the option, is not " + "required, but most likely " + "should be configured".format(X509_EXTENSIONS_OPT_NAME, + ascii_str(self.ca_sect_name))) + else: + sect_name = self.parsed_config.get_opt_value(opt_location) + try: + self._check_nonempty_sect_provided(sect_name) + except SSLConfigEmptySectError: + logging.warning("The section {!r} in OpenSSL config, referred to by the option " + "{!r}, is empty; although it is not required, it most likely " + "should be configured".format(ascii_str(sect_name), + X509_EXTENSIONS_OPT_NAME, + ascii_str(self.ca_sect_name))) + + def _check_ca_opt_provided(self, opt_name): + opt_location = self.ca_opt_pattern.format(opt_name) + if not self.parsed_config.contains(opt_location): + raise SSLConfigMissingOptError(opt_name, sect_name=self.ca_sect_name) + + def _check_nonempty_sect_provided(self, sect_name): + try: + opts = self.parsed_config.get_opt_names(sect_name) + except KeyError: + raise SSLConfigMissingSectError(sect_name) + if not opts: + raise SSLConfigEmptySectError(sect_name) + + class DirectoryStructure(object): """The directory structure for a TmpEnv's component.""" @@ -204,9 +364,10 @@ def __init__(self, name, rel_pth, path, opts=None): self.relative_pth = rel_pth self._value = None self._path = path.rstrip('/') + '/' - self.opts = (opts if opts is not None else None) + self.opts = opts self._makedir_if_nonexistent() + def _makedir_if_nonexistent(self): dir_path = osp.dirname(self.path) if not osp.exists(dir_path): @@ -222,6 +383,7 @@ def path(self): def value(self): return self._value + @value.setter def value(self, value): if self.name == 'openssl.cnf': @@ -231,45 +393,22 @@ def value(self, value): def _get_adjusted_openssl_config_str(self, value): + paths_mapping = self.opts.get('paths_to_substitute') if self.opts is not None else None + pkcs11_opts_dict = self.opts.get('pkcs11_opts_dict') if self.opts is not None else None + try: - value = ConfigString(value) + return OpenSSLConfig(value, + osp.dirname(self.path), + paths_to_substitute=paths_mapping, + pkcs11_opts_dict=pkcs11_opts_dict) except ValueError as exc: - 
raise InvalidSSLConfigError("SSL config is not valid: {}.".format(exc), exc) - ca_opt_pattern = value.get_opt_value('ca.default_ca') + '.{}' - - value = self._get_openssl_config_with_substituted_paths(ca_opt_pattern, value) - - pkcs11_opts_dict = self.opts.get('pkcs11_opts_dict') - if pkcs11_opts_dict: - pkcs11_opts = PKCS11_OPTS_PATTERN.format(**pkcs11_opts_dict) - value = value.insert_above('ca', pkcs11_opts) - value = value.remove(ca_opt_pattern.format('private_key')) - - return value - - - def _get_openssl_config_with_substituted_paths(self, ca_opt_pattern, value): - value = self._substitute_path(ca_opt_pattern, value, 'dir', osp.dirname(self.path)) - - # unify temporary environment paths and config paths, - # so they do not differ when used by OpenSSL - paths_mapping = self.opts.get('paths_to_substitute') - if paths_mapping: - for opt_name, tmp_path in paths_mapping.iteritems(): - value = self._substitute_path(ca_opt_pattern, value, opt_name, tmp_path) - - return value - - - def _substitute_path(self, ca_opt_pattern, value, opt_name, tmp_path): - config_opt = ca_opt_pattern.format(opt_name) - return value.substitute(config_opt, '{} = {}'.format(opt_name, tmp_path)) + raise InvalidSSLConfigError("SSL config is not valid", exc) def _create_file(self): if not osp.isdir(self.path): with open(self.path, 'w') as f: - f.write(self.value) + f.write(str(self.value)) @@ -370,7 +509,7 @@ def _execute_command(cmd_args): def _get_pkcs11_openssl_command_args(self): openssl_command_args = [] if self.pkcs11_opts_dict is not None: - ca_sect_name = self.ssl_conf.value.get_opt_value('ca.default_ca') + ca_sect_name = self.ssl_conf.value.ca_sect_name openssl_command_args.extend([ '-engine', 'pkcs11', ] + self.pkcs11_opts_dict['pkcs11_additional_openssl_cmd_arg_list'] + [ diff --git a/N6Lib/n6lib/manage_api/_manage_api.py b/N6CoreLib/n6corelib/manage_api/_manage_api.py similarity index 97% rename from N6Lib/n6lib/manage_api/_manage_api.py rename to N6CoreLib/n6corelib/manage_api/_manage_api.py index ded92b9..b483023 100644 --- a/N6Lib/n6lib/manage_api/_manage_api.py +++ b/N6CoreLib/n6corelib/manage_api/_manage_api.py @@ -1,7 +1,6 @@ -#!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2013-2020 NASK. All rights reserved. +# Copyright (c) 2013-2021 NASK. All rights reserved. import contextlib import datetime @@ -35,14 +34,14 @@ CERTIFICATE_SERIAL_NUMBER_HEXDIGIT_NUM, ADMINS_SYSTEM_GROUP_NAME, ) -from n6lib.manage_api._ca_env import ( +from n6corelib.manage_api._ca_env import ( InvalidSSLConfigError, generate_certificate_pem, generate_crl_pem, get_ca_env_configuration, revoke_certificate_and_generate_crl_pem, ) -from n6lib.x509_helpers import ( +from n6corelib.x509_helpers import ( UnexpectedCertificateDataError, FORMAT_PEM, get_cert_authority_key_identifier, @@ -187,38 +186,17 @@ class ManageAPIAuthDBConnector(SQLAuthDBConnector): to pass the "connection context" to interface methods he uses. """ - config_spec_pattern = ''' - [{config_section}] - - # connection URL, e.g.: mysql+mysqldb://n6:somepassword@localhost/n6 - # it must start with `mysql+mysqldb:` (or just `mysql:`) because other - # dialects/drivers are not supported - url - - ssl_cacert - ssl_cert - ssl_key - - - [{config_section_session_variables}] - - # all MySQL variables specified within this section will be set by - # executing "SET SESSION = , SESSION = , ..." - # (without any escaping!) - - # should be significantly greater than `pool_recycle` defined below - wait_timeout = 7200 - ... 
+ assert ('ssl_cacert = none' in SQLAuthDBConnector.config_spec_pattern and + 'ssl_cert = none' in SQLAuthDBConnector.config_spec_pattern and + 'ssl_key = none' in SQLAuthDBConnector.config_spec_pattern) + config_spec_pattern = ( + SQLAuthDBConnector.config_spec_pattern + # (here these three config options are *required*, not optional) + .replace('ssl_cacert = none', 'ssl_cacert') + .replace('ssl_cert = none', 'ssl_cert') + .replace('ssl_key = none', 'ssl_key')) - [{config_section_connection_pool}] - - # (see: SQLAlchemy docs) - pool_recycle = 3600 :: int - pool_timeout = 20 :: int - pool_size = 15 :: int - max_overflow = 12 :: int - ''' class ConnectionContext(object): @@ -251,6 +229,7 @@ def __init__(self, db_session, ca_path, cert_path, key_path): self.key_path = key_path self.database_login = self.db_session.bind.url.username + def __init__(self, *args, **kwargs): self._ssl_opts = None super(ManageAPIAuthDBConnector, self).__init__(*args, **kwargs) @@ -550,8 +529,13 @@ def revoke_cert(self, ca_label, serial_number, revocation_comment): raise ManageAPIError( 'The certificate with serial number: {!r} has already been revoked.'.format( serial_number)) - crl_pem = revoke_certificate_and_generate_crl_pem(ca_cert.get_env_configuration(), - cert) + try: + crl_pem = revoke_certificate_and_generate_crl_pem(ca_cert.get_env_configuration(), + cert) + except InvalidSSLConfigError as exc: + raise ManageAPIError( + "SSL config assigned to particular CA certificate (label {!r}) " + "is not valid: {}".format(ca_label, exc.actual_reason)) cert.set_revocation_fields(revoked_on=datetime.datetime.utcnow(), revoked_by_user=managing_entity.user_db_obj, revoked_by_component=managing_entity.component_db_obj, @@ -607,7 +591,12 @@ def get_crl_pem(self, ca_label): self._manage_api_config_section, self._settings, must_have_profile=True) - crl_pem = ca_cert.generate_crl_pem() + try: + crl_pem = ca_cert.generate_crl_pem() + except InvalidSSLConfigError as exc: + raise ManageAPIError( + "SSL config assigned to particular CA certificate (label {!r}) " + "is not valid: {}".format(ca_label, exc.actual_reason)) return crl_pem @@ -651,7 +640,7 @@ def get_subject_dict(self): def _load_pem_string(self, cert_str): """ - Do what n6lib.x509_helpers.load_cert_string() does but raise + Do what n6corelib.x509_helpers.load_cert_string() does but raise ManageAPIError on failure. """ try: @@ -1376,7 +1365,7 @@ def _make_new_pem(self): self._server_component_login) except InvalidSSLConfigError as exc: raise ManageAPIError("SSL config assigned to particular CA certificate (label {!r}) " - "is not valid: {}.".format(self.ca_cert_label, exc.original_exc)) + "is not valid: {}".format(self.ca_cert_label, exc.actual_reason)) @staticmethod def _make_serial_number(arbitrary_input_str): diff --git a/N6Lib/n6lib/pki_related_test_helpers.py b/N6CoreLib/n6corelib/pki_related_test_helpers.py similarity index 84% rename from N6Lib/n6lib/pki_related_test_helpers.py rename to N6CoreLib/n6corelib/pki_related_test_helpers.py index 5e678d0..6e0aea4 100644 --- a/N6Lib/n6lib/pki_related_test_helpers.py +++ b/N6CoreLib/n6corelib/pki_related_test_helpers.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2013-2019 NASK. All rights reserved. +# Copyright (c) 2013-2021 NASK. All rights reserved. 
import glob import os.path as osp @@ -130,6 +130,19 @@ def get_params(cls, cert_label): +class _LoadByCALabelAndSSLConfigLabelMixin(object): + + file_pattern = '{ssl_config_label}-{what}---{ca_label}.{ext}' + + @classmethod + def get_params(cls, ca_label, ssl_config_label): + # noinspection PyUnresolvedReferences + params = super(_LoadByCALabelAndSSLConfigLabelMixin, cls).get_params(ca_label) + params['ssl_config_label'] = ssl_config_label + return params + + + class _LoadCertMetadataMixin(object): ext = 'pem' @@ -241,6 +254,11 @@ class _load_ca_ssl_config_cnf(_LoadCAMixin, _AbstractPKITestDataFileLoader): ext = 'cnf' what = 'config' +class _load_ca_ssl_config_cnf_by_label_and_ca_label(_LoadByCALabelAndSSLConfigLabelMixin, + _AbstractPKITestDataFileLoader): + ext = 'cnf' + what = 'config' + class _load_csr_pem(_LoadByCALabelAndCertSerialMixin, _AbstractPKITestDataFileLoader): @@ -257,9 +275,6 @@ class _load_cert_pem(_LoadByCALabelAndCertSerialMixin, _AbstractPKITestDataFileL ext = 'pem' what = 'cert' -class _load_cert_pem_by_label(_LoadByCertLabelMixin, _AbstractPKITestDataFileLoader): - ext = 'pem' - what = 'cert' class _load_cert_der(_LoadByCALabelAndCertSerialMixin, _AbstractPKITestDataFileLoader): ext = 'der' @@ -276,18 +291,15 @@ class _load_cert_metadata(_LoadByCALabelAndCertSerialMixin, _AbstractPKITestDataFileLoader): """ >>> from datetime import datetime as dt - >>> _load_cert_metadata('service-1', '1234') == { - ... 'cert_label': 'svrc-one', - ... 'ca_label': 'service-1', - ... 'serial_number': '00000000000000001234', - ... 'valid_from': dt(2015, 11, 26, 20, 29, 36), - ... 'expires_on': dt(2030, 7, 22, 0, 0), + >>> _load_cert_metadata('n6-service-ca', 'f9962d93676e439cdcb5') == { + ... 'ca_label': 'n6-service-ca', + ... 'cert_label': 'component','expires_on': dt(2027, 2, 27, 17, 13, 19), + ... 'serial_number': 'f9962d93676e439cdcb5', + ... 'subject_dict': {'cn': 'component-three', + ... 'o': 'test.org', + ... 'ou': 'n6components'}, ... 'usage': 3, - ... 'subject_dict': { - ... 'o': 'Naukowa i Akademicka Siec Komputerowa', - ... 'ou': 'CERT Polska', - ... 'cn': 'example.com', - ... }, + ... 'valid_from': dt(2018, 12, 11, 17, 13, 19) ... } True """ @@ -298,17 +310,16 @@ class _load_cert_metadata_by_label(_LoadByCertLabelMixin, _AbstractPKITestDataFileLoader): """ >>> from datetime import datetime as dt - >>> _load_cert_metadata_by_label('user-one') == { - ... 'cert_label': 'user-one', - ... 'ca_label': 'client-2', - ... 'serial_number': '00000000000000009abc', - ... 'valid_from': dt(2015, 11, 26, 23, 59, 52), - ... 'expires_on': dt(2030, 7, 22, 0, 0), - ... 'usage': 1, - ... 'subject_dict': { - ... 'o': 'x.example.jp', - ... 'cn': 'somebody@example.eu', - ... }, + >>> _load_cert_metadata_by_label('component') == { + ... 'ca_label': 'n6-service-ca', + ... 'cert_label': 'component', + ... 'expires_on': dt(2027, 2, 27, 17, 13, 19), + ... 'serial_number': 'f9962d93676e439cdcb5', + ... 'subject_dict': {'cn': 'component-three', + ... 'o': 'test.org', + ... 'ou': 'n6components'}, + ... 'usage': 3, + ... 'valid_from': dt(2018, 12, 11, 17, 13, 19) ... 
} True """ diff --git a/N6CoreLib/n6corelib/tests/__init__.py b/N6CoreLib/n6corelib/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/N6Lib/n6lib/tests/test_manage_api.py b/N6CoreLib/n6corelib/tests/test_manage_api.py similarity index 87% rename from N6Lib/n6lib/tests/test_manage_api.py rename to N6CoreLib/n6corelib/tests/test_manage_api.py index f0e83f5..12f9694 100644 --- a/N6Lib/n6lib/tests/test_manage_api.py +++ b/N6CoreLib/n6corelib/tests/test_manage_api.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2013-2019 NASK. All rights reserved. +# Copyright (c) 2013-2021 NASK. All rights reserved. import copy import datetime @@ -16,7 +16,7 @@ ) from n6lib.common_helpers import read_file -from n6lib.manage_api._manage_api import ( +from n6corelib.manage_api._manage_api import ( _AuthDBInterfaceMixin, AccessForbiddenError, CACertFile, @@ -31,19 +31,16 @@ ManageAPIError, ManagingEntity, ) -from n6lib.pki_related_test_helpers import ( +from n6corelib.pki_related_test_helpers import ( _load_ca_cert_pem, _load_ca_key_pem, _load_ca_ssl_config_cnf, + _load_ca_ssl_config_cnf_by_label_and_ca_label, _load_cert_pem, _load_csr_pem, _parse_crl_pem, ) -from n6lib.unit_test_helpers import ( - DBConnectionPatchMixin, - DBSessionMock, - QueryMock, -) +from n6lib.unit_test_helpers import DBConnectionPatchMixin from n6sdk.exceptions import FieldValueError @@ -80,7 +77,7 @@ def make_patches(self, collection=None, session_state=None): session_state if session_state is not None else dict()) self.patch_db_connector(self.session_mock) self.get_config_mock = self.patch( - 'n6lib.manage_api._manage_api.ManageAPI.get_config_section', + 'n6corelib.manage_api._manage_api.ManageAPI.get_config_section', return_value=self.config_patched) self.patch_cert_classes() @@ -100,7 +97,7 @@ def get_context_mock(session_mock): def patch_db_connector(self, session_mock): self.context_mock = self.get_context_mock(self.session_mock) self.connector_mock = self.patch( - 'n6lib.manage_api._manage_api.ManageAPI.auth_db_connector') + 'n6corelib.manage_api._manage_api.ManageAPI.auth_db_connector') self.connector_mock.return_value.__enter__.return_value = self.context_mock @staticmethod @@ -121,7 +118,7 @@ def patch_session_bind_attr(db_session, database_login): def patch_managing_entity(self, new_managing_entity=None, user_db_obj=None): self.managing_entity_mock = self.patch( - 'n6lib.manage_api._manage_api.ManageAPI._verify_and_get_managing_entity') + 'n6corelib.manage_api._manage_api.ManageAPI._verify_and_get_managing_entity') if new_managing_entity is None: self.managing_entity_mock.return_value = mock.MagicMock(hostname=CREATOR_HOSTNAME, cert_cn=CREATOR_CN, @@ -136,7 +133,7 @@ def patch_managing_entity(self, new_managing_entity=None, user_db_obj=None): def patch_cert_classes(self): self.config_mapping = mock.MagicMock() self.ca_get_config_mock = self.patch( - 'n6lib.manage_api._manage_api.CACertificate.get_config_section', + 'n6corelib.manage_api._manage_api.CACertificate.get_config_section', return_value=self.config_mapping) self.config_mapping.__getitem__.return_value = self.CA_KEY_PATH_SENTINEL_STR # read CA key file from `ca_key` instance attribute, if file @@ -144,12 +141,12 @@ def patch_cert_classes(self): # it will be assured by the `config_mapping` __getitem__() # magic method's patch self.read_file_mock = self.patch( - 'n6lib.manage_api._ca_env.read_file', side_effect=lambda name, *args: - (self.ca_key if name == self.CA_KEY_PATH_SENTINEL_STR - else read_file(name, *args))) + 
'n6corelib.manage_api._ca_env.read_file', side_effect=lambda file, *a, **kw: + (self.ca_key if file == self.CA_KEY_PATH_SENTINEL_STR + else read_file(file, *a, **kw))) self.ca_key = '' self.make_serial_nr_mock = self.patch( - 'n6lib.manage_api._manage_api.CertificateCreated._make_serial_number', + 'n6corelib.manage_api._manage_api.CertificateCreated._make_serial_number', return_value=SERIAL_NUMBER) def _get_cert_class_mock_helpers(self, cert_class): @@ -190,12 +187,12 @@ def get_certificate_representation_class_inst_ref(self, manage_api_cert_class): def _get_certificate_created_class_inst_ref(self): cert_ref, new_init = self._get_cert_class_mock_helpers(CertificateCreated) - self.patch('n6lib.manage_api._manage_api.CertificateCreated.__init__', new=new_init) + self.patch('n6corelib.manage_api._manage_api.CertificateCreated.__init__', new=new_init) return cert_ref def _get_certificate_from_database_class_inst_ref(self): cert_ref, new_init = self._get_cert_class_mock_helpers(CertificateFromDatabase) - self.patch('n6lib.manage_api._manage_api.CertificateFromDatabase.__init__', new=new_init) + self.patch('n6corelib.manage_api._manage_api.CertificateFromDatabase.__init__', new=new_init) return cert_ref def load_ca_key(self, ca_label): @@ -262,9 +259,9 @@ class _BaseAPIActionTest(_WithMocksMixin, _DBInterfaceMixin): expected_values = None def setUp(self): - self.basic_api_action_test_setup() + self.basic_api_action_specific_setup() - def basic_api_action_test_setup(self, manage_api_cert_class=CertificateCreated): + def basic_api_action_specific_setup(self, manage_api_cert_class=CertificateCreated): self.make_patches() self._cert_inst_ref = self.get_certificate_representation_class_inst_ref( manage_api_cert_class) @@ -571,7 +568,7 @@ def setUp(self): # set mocks for certificate file read, return value # of the `_file_read_mock` should be set later through # the `_set_opened_cert()` method - self._open_mock = self.patch('n6lib.manage_api._manage_api.open', create=True) + self._open_mock = self.patch('n6corelib.manage_api._manage_api.open', create=True) self._file_read_mock = mock.Mock() self._open_mock.return_value.__enter__.return_value.read = self._file_read_mock # default values for `ManagingEntity` init args @@ -1165,7 +1162,7 @@ class _RevokeCertTestBase(_BaseAPIActionTest): dummy_openssl_config = 'dummy string' def setUp(self): - self.basic_api_action_test_setup(manage_api_cert_class=CertificateFromDatabase) + self.basic_api_action_specific_setup(manage_api_cert_class=CertificateFromDatabase) self._add_certs_to_mock_db() def _add_certs_to_mock_db(self): @@ -1335,5 +1332,175 @@ def test_revocation_fields_not_empty(self, key, val): self._test_revocation_fields_not_empty('765496b0d44901863497', key, val) +class _SSLConfigTestBase(_BaseAPIActionTest): + + ca_label = NotImplemented + ca_profile = NotImplemented + + default_serial_hex = NotImplemented + + missing_opt_label = 'missing-{opt_name}-opt' + missing_sect_label = 'missing-{sect_name}-sect' + empty_sect_label = 'empty-{sect_name}-sect' + + general_exc_msg_pattern = (r"SSL config assigned to particular CA certificate .*?" + r"(?P