mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 01:38:56 -04:00
[Search] Removal of legacy web crawler and legacy create search index (#208279)
## Summary This PR removes Web Crawler connector creation and management from enterprise_search plugin. Because a large part of that was included in the legacy create index flow this PR also removes the legacy search create index page in favor of the search_indices create index page. ### Checklist - [ ] [Documentation](https://www.elastic.co/guide/en/kibana/master/development-documentation.html) was added for features that require explanation or tutorials - [x] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios - [ ] [Flaky Test Runner](https://ci-stats.kibana.dev/trigger_flaky_test_runner/1) was used on any tests changed ### Identify risks I want to do some more regression testing on this one because I had to delete whole folders of code. Types and unit tests are still passing but I'm still afraid I might have missed something. --------- Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
This commit is contained in:
parent
ada0e10541
commit
d05494781d
205 changed files with 106 additions and 21019 deletions
|
@@ -16049,9 +16049,6 @@
|
|||
"xpack.enterpriseSearch.apiKeyConfig.newApiKeyCreatedCalloutLabel": "Nouvelle clé API créée avec succès",
|
||||
"xpack.enterpriseSearch.applications.navTitle": "Développer",
|
||||
"xpack.enterpriseSearch.applications.productName": "Applications",
|
||||
"xpack.enterpriseSearch.appSearch.crawler.addDomainForm.contentVerificationFailureMessage": "Impossible de vérifier le contenu, car le contrôle des \"Restrictions d'indexation\" a échoué.",
|
||||
"xpack.enterpriseSearch.appSearch.crawler.addDomainForm.indexingRestrictionsFailureMessage": "Impossible de déterminer les restrictions d'indexation, car le contrôle de la \"Connectivité réseau\" a échoué.",
|
||||
"xpack.enterpriseSearch.appSearch.crawler.addDomainForm.networkConnectivityFailureMessage": "Impossible d'établir une connexion réseau, car le contrôle de la \"Validation initiale\" a échoué.",
|
||||
"xpack.enterpriseSearch.appSearch.crawler.simplifiedSelectable.deselectAllButtonLabel": "Tout désélectionner",
|
||||
"xpack.enterpriseSearch.appSearch.crawler.simplifiedSelectable.selectAllButtonLabel": "Tout sélectionner",
|
||||
"xpack.enterpriseSearch.attachIndexBox.createSameIndexButtonLabel": "Créer et attacher un index nommé {indexName}",
|
||||
|
@@ -16067,7 +16064,6 @@
|
|||
"xpack.enterpriseSearch.attachIndexBox.orPanelLabel": "OR",
|
||||
"xpack.enterpriseSearch.attachIndexBox.saveConfigurationButtonLabel": "Enregistrer la configuration",
|
||||
"xpack.enterpriseSearch.attachIndexBox.thisIndexWillHoldTextLabel": "Cet index contiendra le contenu de la source de données et il est optimisé avec les mappings de champ par défaut pour les expériences de recherche correspondantes. Donnez un nom unique à votre index et configurez pour lui un analyseur linguistique (facultatif).",
|
||||
"xpack.enterpriseSearch.automaticCrawlSchedule.title": "Fréquence d'indexation",
|
||||
"xpack.enterpriseSearch.behavioralAnalytics.description": "Tableaux de bord et outils permettant de visualiser le comportement des utilisateurs finaux et de mesurer les performances de vos applications de recherche",
|
||||
"xpack.enterpriseSearch.behavioralAnalytics.productCardCTA": "Explorer Behavioral Analytics",
|
||||
"xpack.enterpriseSearch.betaCalloutTitle": "Fonctionnalité bêta",
|
||||
|
@@ -16079,8 +16075,6 @@
|
|||
"xpack.enterpriseSearch.connector.connectorTypePanel.title": "Type de connecteur",
|
||||
"xpack.enterpriseSearch.connector.connectorTypePanel.unknown.label": "Inconnu",
|
||||
"xpack.enterpriseSearch.connector.ingestionStatus.title": "Statut de l'ingestion",
|
||||
"xpack.enterpriseSearch.connectorCheckable.setupAConnectorClientContextMenuItemLabel": "Configurer un connecteur autogéré",
|
||||
"xpack.enterpriseSearch.connectorCheckable.setupANativeConnectorContextMenuItemLabel": "Configurer un connecteur géré par Elastic",
|
||||
"xpack.enterpriseSearch.connectorClientLabel": "Autogéré",
|
||||
"xpack.enterpriseSearch.connectorConfiguration.configymlCodeBlockLabel": "config.yml",
|
||||
"xpack.enterpriseSearch.connectorConfiguration.dockerTextLabel": "Exécuter avec Docker",
|
||||
|
@@ -16094,11 +16088,6 @@
|
|||
"xpack.enterpriseSearch.connectorDeployment.orLabel": "ou",
|
||||
"xpack.enterpriseSearch.connectorDeployment.p.addTheFollowingConfigurationLabel": "Cloner ou télécharger le référentiel sur votre machine locale",
|
||||
"xpack.enterpriseSearch.connectorDeployment.p.runTheFollowingCommandLabel": "Exécutez la commande suivante dans votre terminal. Assurez-vous que Docker est installé sur votre machine",
|
||||
"xpack.enterpriseSearch.connectorDescriptionBadge.client.chooseADataSourceLabel": "Choisissez une source de données à synchroniser",
|
||||
"xpack.enterpriseSearch.connectorDescriptionBadge.client.configureConnectorLabel": "Déployez le code du connecteur sur votre propre infrastructure en l'exécutant depuis la source ou à l'aide de Docker",
|
||||
"xpack.enterpriseSearch.connectorDescriptionBadge.client.enterDetailsLabel": "Saisissez les informations d'accès et de connexion à votre source de données",
|
||||
"xpack.enterpriseSearch.connectorDescriptionBadge.native.chooseADataSourceLabel": "Choisissez une source de données à synchroniser",
|
||||
"xpack.enterpriseSearch.connectorDescriptionBadge.native.configureConnectorLabel": "Configurer votre connecteur avec notre IU Kibana",
|
||||
"xpack.enterpriseSearch.connectorDescriptionPopover.connectorDescriptionBadge.client.chooseADataSourceLabel": "Choisissez une source de données que vous souhaitez synchroniser",
|
||||
"xpack.enterpriseSearch.connectorDescriptionPopover.connectorDescriptionBadge.client.configureConnectorLabel": "Déployez le code du connecteur sur votre propre infrastructure en l'exécutant depuis la source ou à l'aide de Docker",
|
||||
"xpack.enterpriseSearch.connectorDescriptionPopover.connectorDescriptionBadge.client.enterDetailsLabel": "Saisissez les informations d'accès et de connexion à votre source de données",
|
||||
|
@@ -16242,40 +16231,6 @@
|
|||
"xpack.enterpriseSearch.content.connectors.overview.nativeCloudCallout.connectorClient": "connecteur autogéré",
|
||||
"xpack.enterpriseSearch.content.connectors.overview.nativeCloudCallout.content": "Convertissez-le en {link} afin qu'il soit autohébergé sur votre propre infrastructure. Les connecteurs gérés par Elastic sont disponibles uniquement dans votre déploiement Elastic Cloud.",
|
||||
"xpack.enterpriseSearch.content.connectors.overview.nativeCloudCallout.title": "Les connecteurs gérés par Elastic (anciennement connecteurs natifs) ne sont plus pris en charge en dehors d'Elastic Cloud",
|
||||
"xpack.enterpriseSearch.content.crawler.authentication": "Authentification",
|
||||
"xpack.enterpriseSearch.content.crawler.crawlerConfiguration.extractHTML.addExtraFieldDescription": "Ajoutez un champ supplémentaire dans tous les documents contenant la valeur du HTML complet de la page en cours d'extraction.",
|
||||
"xpack.enterpriseSearch.content.crawler.crawlerConfiguration.extractHTML.extractionSwitchLabel": "Stocker le HTML complet",
|
||||
"xpack.enterpriseSearch.content.crawler.crawlerConfiguration.extractHTML.increasedSizeWarning": "Cette opération peut augmenter de façon significative la taille de l'index si le site en cours d'extraction est volumineux.",
|
||||
"xpack.enterpriseSearch.content.crawler.crawlerConfiguration.extractHTML.learnMoreLink": "Découvrez plus d'informations sur le stockage du HTML complet.",
|
||||
"xpack.enterpriseSearch.content.crawler.crawlerConfiguration.extractHTML.title": "Stocker le HTML complet",
|
||||
"xpack.enterpriseSearch.content.crawler.crawlRules": "Règles d'indexation",
|
||||
"xpack.enterpriseSearch.content.crawler.deduplication": "Traitement des documents en double",
|
||||
"xpack.enterpriseSearch.content.crawler.domainDetail.title": "Gérer {domain}",
|
||||
"xpack.enterpriseSearch.content.crawler.entryPoints": "Points d'entrée",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules": "Règles d'extraction",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.deleteRule.caption": "Supprimer la règle d'extraction",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.deleteRule.title": "Supprimer cette règle d'extraction",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.editRule.caption": "Modifier cette règle d'extraction",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.editRule.title": "Modifier cette règle d'extraction",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.expandRule.caption": "Développer la règle",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.expandRule.title": "Développer cette règle d'extraction",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.label": "Actions",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.deleteFieldModal.confirmLabel": "Supprimer la règle",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.deleteFieldModal.description": "Cette action ne peut pas être annulée.",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.deleteFieldModal.title": "Voulez-vous vraiment supprimer cette règle de champ ?",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.deleteModal.confirmLabel": "Supprimer la règle",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.deleteModal.description": "Le retrait de cette règle supprimera également {fields, plural, one {une règle de champ} other {# règles de champ}}. Cette action ne peut pas être annulée.",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.deleteModal.title": "Voulez-vous vraiment supprimer cette règle d'extraction ?",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.description": "Créez une règle d'extraction de contenu pour modifier l'emplacement à partir duquel les documents obtiennent leurs données lors d'une synchronisation.",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.fieldRulesTable.editRule.caption": "Modifier cette règle de champ de contenu",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.fieldRulesTable.editRule.title": "Modifier cette règle de champ de contenu",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.learnMoreLink": "Découvrez plus d'informations sur les règles d'extraction de contenu.",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.title": "Règles d'extraction",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRulesTable.addRuleLabel": "Ajouter une règle d'extraction",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRulesTable.emptyMessageAddRuleLabel": "Ajouter une règle d'extraction de contenu",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRulesTable.emptyMessageDescription": "Créez une règle d'extraction de contenu pour modifier l'emplacement à partir duquel les champs du document obtiennent leurs données lors d'une synchronisation.",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRulesTable.emptyMessageTitle": "Il n'existe aucune règle d'extraction de contenu",
|
||||
"xpack.enterpriseSearch.content.crawler.siteMaps": "Plans de site",
|
||||
"xpack.enterpriseSearch.content.crawlers.breadcrumb": "Robots d'indexation",
|
||||
"xpack.enterpriseSearch.content.crawlers.deleteModal.title": "Supprimer {connectorCount} robot d'indexation ?",
|
||||
"xpack.enterpriseSearch.content.description": "Enterprise Search offre un certain nombre de moyens de rendre vos données facilement interrogeables. Vous pouvez choisir entre le robot d'indexation, les indices Elasticsearch, l'API, les téléchargements directs ou les connecteurs tiers.",
|
||||
|
@@ -16381,62 +16336,6 @@
|
|||
"xpack.enterpriseSearch.content.indices.connectorScheduling.unsaved.title": "Vous n'avez pas enregistré vos modifications, êtes-vous sûr de vouloir quitter ?",
|
||||
"xpack.enterpriseSearch.content.indices.defaultPipelines.successToast.title": "Pipeline par défaut mis à jour",
|
||||
"xpack.enterpriseSearch.content.indices.deleteIndex.successToast.title": "Votre index {indexName} et toute configuration d'ingestion associée ont été supprimés",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.addContentField.title": "Ajouter la règle de champ de contenu",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.addRule.title": "Créer une règle d'extraction de contenu",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.edilidtContentField.documentField.requiredError": "Un nom de champ est requis.",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.cancelButton.label": "Annuler",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.description": "Remplissez le champ avec le contenu.",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.extractAs.arrayLabel": "Un tableau",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.extractAs.stringLabel": "Une chaîne",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.extractedLabel": "Valeur extraite",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.fixedLabel": "Une valeur fixe",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.htmlLabel": "Sélecteur CSS ou expression XPath",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.label": "Utiliser le contenu de",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.requiredError": "Une valeur est requise pour ce champ de contenu",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.title": "Contenu",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.urlLabel": "Modèle d'URL",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.documentField.description": "Sélectionnez un champ de document pour servir de base à la construction de la règle.",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.documentField.label": "Nom du champ",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.documentField.title": "Champs de document",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.extractAs.label": "Stocker le contenu extrait en tant que",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.fixedValue.helpText": "Utilisez une valeur fixe pour ce champ de document.",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.fixedValue.label": "Valeur fixe",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.fixedValue.placeHolder": "par ex., \"Une certaine valeur",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.saveButton.label": "Enregistrer",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.selector.cssPlaceholder": "e.g. \".main_content\"",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.selector.urlLabel": "e.g. /my-url/(.*/",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.source.description": "Emplacement à partir duquel le contenu doit être extrait pour ce champ.",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.source.htmlLabel": "Élément HTML",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.source.label": "Extraire le contenu de",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.source.requiredError": "Une source est requise pour le contenu.",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.source.title": "Source",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.source.urlLabel": "URL",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.title": "Modifier la règle de champ de contenu",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.cancelButtonLabel": "Annuler",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.contentField.cssSelectorsLink": "En savoir plus sur les sélecteurs CSS et les expressions XPath",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.contentField.differentContentLink": "En savoir plus sur le stockage de différents types de contenu",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.contentField.urlPatternsLinks": "En savoir plus sur les modèles d'URL",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.descriptionError": "Une description est requise pour une règle d'extraction de contenu",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.descriptionLabel": "Description de la règle",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.fieldRules.addContentFieldRuleLabel": "Ajouter la règle de champ de contenu",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.fieldRules.contentFieldDescription": "Créez un champ de contenu pour localiser quelles parties d'une page web seront utilisées pour extraire les données.",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.fieldRules.emptyMessageAddRuleLabel": "Ajouter les champs de contenu",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.fieldRules.emptyMessageDescription": "Créez un champ de contenu pour localiser quelles parties d'une page web seront utilisées pour extraire les données.",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.fieldRules.emptyMessageTitle": "Cette règle d'extraction ne possède aucun champ de contenu",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.helpText": "Aider les autres à comprendre quelles données cette règle extraira",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.placeholderLabel": "par ex. \"Titres de documentation\"",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.saveButtonLabel": "Enregistrer la règle",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.title": "Modifier la règle d'extraction de contenu",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.applyAllLabel": "Appliquer à toutes les URL",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.specificLabel": "Appliquer à des URL spécifiques",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFilter.": "Modèle d'URL",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFilters.addFilter": "Ajouter un filtre d'URL",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFilters.filterHelpText": "À quelles URL cela doit-il s'appliquer ?",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFilters.filterLabel": "Filtre d'URL",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFilters.patternPlaceholder": "par ex. \"/blog/*\"",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFilters.removeFilter": "Supprimer ce filtre",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFiltersLink": "En savoir plus sur les filtres d'URL",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.urlLabel": "URL",
|
||||
"xpack.enterpriseSearch.content.indices.pipelines.addInferencePipelineModal.createErrors": "Erreur lors de la création d'un pipeline",
|
||||
"xpack.enterpriseSearch.content.indices.pipelines.addInferencePipelineModal.steps.configure.description": "Créez ou réutilisez un pipeline enfant qui servira de processeur dans votre pipeline principal.",
|
||||
"xpack.enterpriseSearch.content.indices.pipelines.addInferencePipelineModal.steps.configure.emptyValueError": "Champ obligatoire.",
|
||||
|
@@ -16574,21 +16473,6 @@
|
|||
"xpack.enterpriseSearch.content.indices.pipelines.textExpansionFetchError.title": "Erreur lors de la récupération du modèle ELSER",
|
||||
"xpack.enterpriseSearch.content.indices.pipelines.textExpansionStartError.title": "Erreur lors du démarrage du déploiement de ELSER",
|
||||
"xpack.enterpriseSearch.content.indices.searchIndex.convertConnector.buttonLabel": "Convertir un connecteur",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.allConnectorsLabel": "Tous les connecteurs",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.breadcrumb": "Sélectionner un connecteur",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.cloudCallout.description": "Les connecteurs gérés par Elastic sont hébergés sur Elastic Cloud. Faites vos premiers pas avec un essai gratuit de 14 jours.",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.cloudCallout.title": "Elastic Cloud",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.cloudCallout.trialLink": "Essai Elastic Cloud",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.connectorCheckable.documentationLinkLabel": "Documentation",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.connectorCheckable.techPreviewLabel": "Préversion technique",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.connectorClients": "Autogéré",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.description": "Choisissez quelle source de données tierce vous souhaitez synchroniser avec Elastic. Toutes les sources de données sont prises en charge par des connecteurs autogérés. Vérifiez la disponibilité des connecteurs gérés par Elastic en utilisant les filtres.",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.nativeLabel": "Géré par Elastic",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.search.ariaLabel": "Rechercher parmi les connecteurs",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.searchPlaceholder": "Recherche",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.showBetaLabel": "Afficher la version bêta des connecteurs",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.showTechPreviewLabel": "Afficher les connecteurs de préversion technique",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.title": "Sélectionner un connecteur",
|
||||
"xpack.enterpriseSearch.content.indices.transforms.addInferencePipelineModal.footer.attach": "Attacher",
|
||||
"xpack.enterpriseSearch.content.indices.transforms.addInferencePipelineModal.footer.create": "Créer un pipeline",
|
||||
"xpack.enterpriseSearch.content.indices.transforms.addInferencePipelineModal.steps.configure.title": "Configurer",
|
||||
|
@@ -16619,78 +16503,20 @@
|
|||
"xpack.enterpriseSearch.content.ml_inference.zero_shot_classification": "Classification de texte Zero-Shot",
|
||||
"xpack.enterpriseSearch.content.nameAndDescription.name.error.empty": "Le nom du connecteur ne peut pas être vide",
|
||||
"xpack.enterpriseSearch.content.navTitle": "Contenu",
|
||||
"xpack.enterpriseSearch.content.new_connector_with_service_type.breadcrumbs": "Nouveau connecteur {name}",
|
||||
"xpack.enterpriseSearch.content.new_connector.breadcrumbs": "Nouveau connecteur",
|
||||
"xpack.enterpriseSearch.content.new_index.apiDescription": "Utilisez l’API pour ajouter des documents par programme à un index Elasticsearch. Commencez par créer votre index.",
|
||||
"xpack.enterpriseSearch.content.new_index.apiTitle": "Nouvel index de recherche",
|
||||
"xpack.enterpriseSearch.content.new_index.breadcrumbs": "Nouvel index de recherche",
|
||||
"xpack.enterpriseSearch.content.new_index.connectorDescriptionWithServiceType": "Utilisez un connecteur pour synchroniser, extraire, transformer et indexer les données de votre source de données. Les connecteurs sont des intégrations Elastic qui écrivent directement aux index Elasticsearch.",
|
||||
"xpack.enterpriseSearch.content.new_index.connectorTitle": "Nouvel index de recherche du connecteur",
|
||||
"xpack.enterpriseSearch.content.new_index.connectorTitleWithServiceType": "Nouveau connecteur {name}",
|
||||
"xpack.enterpriseSearch.content.new_index.crawlerDescription": "Utilisez le robot d’indexation pour découvrir, extraire et indexer par programme le contenu interrogeable des sites web et des bases de connaissances.",
|
||||
"xpack.enterpriseSearch.content.new_index.crawlerTitle": "Index de recherche du robot d'indexation",
|
||||
"xpack.enterpriseSearch.content.new_index.defaultDescription": "Un index de recherche stocke vos données.",
|
||||
"xpack.enterpriseSearch.content.new_index.genericTitle": "Nouvel index de recherche",
|
||||
"xpack.enterpriseSearch.content.new_index.successToast.title": "L’index a bien été créé",
|
||||
"xpack.enterpriseSearch.content.new_web_crawler.breadcrumbs": "Nouveau robot d'indexation",
|
||||
"xpack.enterpriseSearch.content.newConnector.newConnectorTemplate.createIndex.buttonText": "Créer un connecteur",
|
||||
"xpack.enterpriseSearch.content.newConnector.newConnectorTemplate.formTitle": "Créer un connecteur",
|
||||
"xpack.enterpriseSearch.content.newConnector.newConnectorTemplate.learnMoreConnectors.linkText": "En savoir plus sur les connecteurs",
|
||||
"xpack.enterpriseSearch.content.newConnector.newConnectorTemplate.nameInputHelpText.lineTwo": "Les noms doivent être en minuscules et ne peuvent pas contenir d'espaces ni de caractères spéciaux.",
|
||||
"xpack.enterpriseSearch.content.newConnector.newConnectorTemplate.nameInputLabel": "Nom du connecteur",
|
||||
"xpack.enterpriseSearch.content.newConnector.newConnectorTemplate.nameInputPlaceholder": "Choisir un nom pour votre connecteur",
|
||||
"xpack.enterpriseSearch.content.newIndex.breadcrumb": "Nouvelle méthode d'ingestion",
|
||||
"xpack.enterpriseSearch.content.newIndex.emptyState.description": "Les données que vous ajoutez dans Search sont appelées \"index de recherche\", et vous pouvez effectuer des recherches à l'intérieur à la fois dans App Search et dans Workplace Search. Maintenant, vous pouvez utiliser vos connecteurs dans App Search et vos robots d'indexation dans Workplace Search.",
|
||||
"xpack.enterpriseSearch.content.newIndex.emptyState.footer.link": "Lisez les documents",
|
||||
"xpack.enterpriseSearch.content.newIndex.emptyState.footer.title": "Vous souhaitez en savoir plus sur les index de recherche ?",
|
||||
"xpack.enterpriseSearch.content.newIndex.emptyState.title": "Sélectionner une méthode d'ingestion",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.api.description": "Utilisez l'API pour vous connecter directement à votre point de terminaison d'index Elasticsearch.",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.api.label": "Créer un index d'API",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.api.title": "API",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.connector.description": "Extraire, transformer, indexer et synchroniser des données issues d'une source de données tierce",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.connector.label": "Choisir un connecteur source",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.connector.title": "Connecteurs",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.crawler.description": "Découvrir, extraire et indexer du contenu interrogeable provenant de sites web et de bases de connaissances",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.crawler.label": "Indexer l'URL",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.crawler.nocodeLabel": "Pas de code",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.crawler.title": "Robot d'indexation",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.alreadyExists.error": "Un index portant le nom {indexName} existe déjà.",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.createIndex.buttonText": "Créer un index",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.formDescription": "Cet index contiendra le contenu de la source de données et il est optimisé avec les mappings de champ par défaut pour les expériences de recherche correspondantes. Donnez un nom unique à votre index et définissez éventuellement un {language_analyzer} par défaut pour l'index.",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.formDescription.linkText": "analyseur linguistique",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.formTitle": "Créer un index Elasticsearch",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.isInvalid.error": "{indexName} n'est pas un nom d'index valide",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.languageInputHelpText": "La langue peut être modifiée ultérieurement, mais ce changement peut nécessiter une réindexation.",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.languageInputLabel": "Analyseur linguistique",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.learnMoreApis.linkText": "En savoir plus sur les API d'ingestion",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.learnMoreConnectors.linkText": "En savoir plus sur les connecteurs",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.learnMoreCrawler.linkText": "En savoir plus sur le robot d'indexation Elastic",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.learnMoreIndices.linkText": "En savoir plus sur les index",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.nameInputHelpText.lineOne": "Votre index sera nommé : {indexName}",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.nameInputHelpText.lineTwo": "Les noms doivent être en minuscules et ne peuvent pas contenir d'espaces ni de caractères spéciaux.",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.nameInputLabel": "Nom de l'index",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.nameInputPlaceholder": "Définir un nom pour votre index",
|
||||
"xpack.enterpriseSearch.content.newIndex.pageDescription": "Créez un index de recherche optimisée Elasticsearch pour stocker votre contenu. Commencez par sélectionner une méthode d’ingestion.",
|
||||
"xpack.enterpriseSearch.content.newIndex.pageTitle": "Sélectionner une méthode d'ingestion",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.cloudTrialButton": "Essai Elastic Cloud",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.manageLicenseButtonLabel": "Gérer la licence",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.openCreateConnectorPopover": "Ouvrir le menu pour créer un connecteur de type {connectorType}",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.openNativePopoverLabel": "Ouvrir une fenêtre contextuelle contenant des informations sur les connecteurs gérés par Elastic",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.openPopoverLabel": "Ouvrir la fenêtre contextuelle de licence",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.subscriptionButtonLabel": "Plans d'abonnement",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.upgradeContent": "Les connecteurs autogérés qui envoient des données à une instance Elasticsearch autogérée nécessitent au moins une licence Platinum.",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.upgradeTitle": "Mettre à niveau vers Elastic Platinum",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnectore.nativePopover.description": "Les connecteurs gérés par Elastic sont hébergés sur Elastic Cloud. Faites vos premiers pas avec un essai gratuit de 14 jours.",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnectore.nativePopover.title": "Elastic Cloud",
|
||||
"xpack.enterpriseSearch.content.newIndex.steps.buildConnector.error.connectorAlreadyExists": "Un connecteur existe déjà pour cet index",
|
||||
"xpack.enterpriseSearch.content.newIndex.steps.buildConnector.error.genericError": "Nous n'avons pas pu créer votre index",
|
||||
"xpack.enterpriseSearch.content.newIndex.steps.buildConnector.error.indexAlreadyExists": "L'index existe déjà.",
|
||||
"xpack.enterpriseSearch.content.newIndex.steps.buildConnector.error.unauthorizedError": "Vous n'êtes pas autorisé à créer ce connecteur",
|
||||
"xpack.enterpriseSearch.content.newIndex.types.api": "Point de terminaison d'API",
|
||||
"xpack.enterpriseSearch.content.newIndex.types.connector": "Connecteur",
|
||||
"xpack.enterpriseSearch.content.newIndex.types.crawler": "Robot d'indexation",
|
||||
"xpack.enterpriseSearch.content.newIndex.types.elasticsearch": "Index Elasticsearch",
|
||||
"xpack.enterpriseSearch.content.newIndex.types.json": "JSON",
|
||||
"xpack.enterpriseSearch.content.overview.documementExample.generateApiKeyButton.createNew": "Nouveauté",
|
||||
"xpack.enterpriseSearch.content.overview.documementExample.generateApiKeyButton.viewAll": "Gérer",
|
||||
"xpack.enterpriseSearch.content.overview.documentExample.clientLibraries.dotnet": ".NET",
|
||||
|
@ -16721,7 +16547,6 @@
|
|||
"xpack.enterpriseSearch.content.searchIndex.cancelSyncs.successMessage": "Annulation réussie des synchronisations",
|
||||
"xpack.enterpriseSearch.content.searchIndex.configurationTabLabel": "Configuration",
|
||||
"xpack.enterpriseSearch.content.searchIndex.connectorErrorCallOut.title": "Votre connecteur a rapporté une erreur",
|
||||
"xpack.enterpriseSearch.content.searchIndex.crawlerConfigurationTabLabel": "Configuration",
|
||||
"xpack.enterpriseSearch.content.searchIndex.documents.noIndex": "Aucun index de contrôle d'accès ne sera créé tant que vous n'activez pas la sécurité au niveau du document et que vous n'effectuez pas la première synchronisation de contrôle d'accès.",
|
||||
"xpack.enterpriseSearch.content.searchIndex.documents.noIndex.title": "Index de contrôle d'accès introuvable",
|
||||
"xpack.enterpriseSearch.content.searchIndex.documents.noMappings": "Aucun document trouvé pour l'index",
|
||||
|
@ -16734,7 +16559,6 @@
|
|||
"xpack.enterpriseSearch.content.searchIndex.documents.selector.contentIndexSync.title": "Synchronisations de contenu",
|
||||
"xpack.enterpriseSearch.content.searchIndex.documents.selectorSync.accessControl.title": "Synchronisations de contrôle d'accès",
|
||||
"xpack.enterpriseSearch.content.searchIndex.documentsTabLabel": "Documents",
|
||||
"xpack.enterpriseSearch.content.searchIndex.domainManagementTabLabel": "Gérer les domaines",
|
||||
"xpack.enterpriseSearch.content.searchIndex.index.accessControlSyncSuccess.message": "Une synchronisation de contrôle d’accès a bien été programmée, en attente de son activation par un connecteur",
|
||||
"xpack.enterpriseSearch.content.searchIndex.index.incSyncSuccess.message": "Une synchronisation incrémentielle a bien été programmée, en attente de son activation par un connecteur",
|
||||
"xpack.enterpriseSearch.content.searchIndex.index.recheckSuccess.message": "Votre connecteur a été à nouveau vérifié.",
|
||||
|
@ -16749,19 +16573,13 @@
|
|||
"xpack.enterpriseSearch.content.searchIndex.nativeCloudCallout.connectorClient": "connecteur autogéré",
|
||||
"xpack.enterpriseSearch.content.searchIndex.nativeCloudCallout.content": "Convertissez-le en {link} afin qu'il soit autogéré sur votre propre infrastructure. Les connecteurs natifs sont disponibles uniquement dans votre déploiement Elastic Cloud.",
|
||||
"xpack.enterpriseSearch.content.searchIndex.nativeCloudCallout.title": "Les connecteurs natifs ne sont plus compatibles en dehors d’Elastic Cloud",
|
||||
"xpack.enterpriseSearch.content.searchIndex.noCrawlerConnectorFound.deleteIndex": "Supprimer l'index",
|
||||
"xpack.enterpriseSearch.content.searchIndex.noCrawlerConnectorFound.description": "Nous n'avons pas trouvé de configuration de connecteur pour cet index de robot d'indexation. L'enregistrement doit être créé à nouveau, ou l'index doit être supprimé.",
|
||||
"xpack.enterpriseSearch.content.searchIndex.noCrawlerConnectorFound.recreateConnectorRecord": "Créer à nouveau l'enregistrement de connecteur",
|
||||
"xpack.enterpriseSearch.content.searchIndex.noCrawlerConnectorFound.title": "La configuration de connecteur de cet index a été supprimée.",
|
||||
"xpack.enterpriseSearch.content.searchIndex.overviewTabLabel": "Aperçu",
|
||||
"xpack.enterpriseSearch.content.searchIndex.pipelinesTabLabel": "Pipelines",
|
||||
"xpack.enterpriseSearch.content.searchIndex.schedulingTabLabel": "Planification",
|
||||
"xpack.enterpriseSearch.content.searchIndex.syncRulesTabLabel": "Règles de synchronisation",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.apiIngestionMethodLabel": "API",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.connectorIngestionMethodLabel": "Connecteur",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.crawlerIngestionMethodLabel": "Robot d'indexation",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.documentCountCardLabel": "Nombre de documents",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.domainCountCardLabel": "Nombre de domaines",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.ingestionTypeCardLabel": "Type d’ingestion",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.languageLabel": "Analyseur linguistique",
|
||||
"xpack.enterpriseSearch.content.searchIndex.transform.description": "Vous souhaitez ajouter des champs personnalisés ou utiliser des modèles de ML entraînés pour analyser et enrichir vos documents indexés ? Utilisez des pipelines d'ingestion spécifiques de l'index pour personnaliser les documents selon vos besoins.",
|
||||
|
@ -16799,7 +16617,6 @@
|
|||
"xpack.enterpriseSearch.content.searchIndices.ingestionStatus.configured.label": "Configuré",
|
||||
"xpack.enterpriseSearch.content.searchIndices.ingestionStatus.connected.label": "Connecté",
|
||||
"xpack.enterpriseSearch.content.searchIndices.ingestionStatus.connectorError.label": "Échec du connecteur",
|
||||
"xpack.enterpriseSearch.content.searchIndices.ingestionStatus.idle.label": "Inactif",
|
||||
"xpack.enterpriseSearch.content.searchIndices.ingestionStatus.incomplete.label": "Incomplet",
|
||||
"xpack.enterpriseSearch.content.searchIndices.ingestionStatus.syncError.label": "Échec de la synchronisation",
|
||||
"xpack.enterpriseSearch.content.searchIndices.name.columnTitle": "Nom de l'index",
|
||||
|
@ -16840,208 +16657,6 @@
|
|||
"xpack.enterpriseSearch.content.supportedLanguages.spanishLabel": "Espagnol",
|
||||
"xpack.enterpriseSearch.content.supportedLanguages.thaiLabel": "Thaï",
|
||||
"xpack.enterpriseSearch.content.supportedLanguages.universalLabel": "Universel",
|
||||
"xpack.enterpriseSearch.crawler.action.deleteDomain.confirmationPopupMessage": "Voulez-vous vraiment supprimer le domaine \"{domainUrl}\" et tous ses paramètres ?",
|
||||
"xpack.enterpriseSearch.crawler.addDomainFlyout.description": "Vous pouvez ajouter plusieurs domaines au robot d'indexation de cet index. Ajoutez un autre domaine ici et modifiez les points d'entrée et les règles d'indexation à partir de la page \"Gérer\".",
|
||||
"xpack.enterpriseSearch.crawler.addDomainFlyout.openButtonLabel": "Ajouter un domaine",
|
||||
"xpack.enterpriseSearch.crawler.addDomainFlyout.title": "Ajouter un nouveau domaine",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.contentVerificationLabel": "Vérification de contenu",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.entryPointLabel": "Le point d'entrée du robot d'indexation a été défini sur {entryPointValue}",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.errorsTitle": "Un problème est survenu. Veuillez corriger les erreurs et réessayer.",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.ignoreValidationDescription": "Le robot d'indexation ne pourra pas indexer le contenu de ce domaine tant que les erreurs ci-dessus n'auront pas été corrigées.",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.ignoreValidationTitle": "Ignorer les échecs de validation et continuer",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.indexingRestrictionsLabel": "Restrictions d'indexation",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.initialVaidationLabel": "Validation initiale",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.networkConnectivityLabel": "Connectivité réseau",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.submitButtonLabel": "Ajouter un domaine",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.testUrlButtonLabel": "Tester l'URL dans le navigateur",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.unexpectedValidationErrorMessage": "Erreur inattendue",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.urlHelpText": "Les URL de domaine requièrent un protocole et ne peuvent pas contenir de chemins.",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.urlLabel": "URL de domaine",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.validateButtonLabel": "Valider le domaine",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.basicAuthenticationLabel": "Authentification de base",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.configurationSavePanel.description": "Les paramètres d'authentification pour l'indexation du contenu protégé ont été enregistrés. Pour mettre à jour un mécanisme d'authentification, supprimez les paramètres et redémarrez.",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.configurationSavePanel.title": "Paramètres de configuration enregistrés",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.deleteConfirmationModal.deleteButtonLabel": "Supprimer",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.deleteConfirmationModal.description": "La suppression de ces paramètres empêchera peut-être le robot d'indexation d'indexer les zones protégées du domaine. Cette action ne peut pas être annulée.",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.deleteConfirmationModal.title": "Voulez-vous vraiment supprimer ces paramètres ?",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.description": "Configurez l'authentification pour activer l'indexation du contenu protégé pour ce domaine.",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.editForm.headerValueLabel": "Valeur d'en-tête",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.emptyPrompt.addAuthenticationButtonLabel": "Ajouter l'authentification",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.emptyPrompt.description": "Cliquer sur {addAuthenticationButtonLabel} afin de fournir les informations d'identification nécessaires pour indexer le contenu protégé",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.emptyPrompt.title": "Aucune authentification configurée",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.rawAuthenticationLabel": "En-tête d'authentification",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.resetToDefaultsButtonLabel": "Ajouter les informations d'identification",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.title": "Authentification",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.crawlAutomaticallySwitchLabel": "Activer les indexations récurrentes selon le calendrier suivant",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.cronSchedulingDescription": "Définissez la fréquence et la durée des indexations programmées. Le robot d'indexation utilise le fuseau horaire UTC.",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.cronSchedulingTitle": "Planification d'une durée spécifique",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.intervalSchedulingDescription": "Définir la fréquence des indexations programmées",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.intervalSchedulingTitle": "Planification d'intervalle",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.readMoreLink": "En savoir plus sur la planification",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleDescription": "Le calendrier d’indexation effectuera une indexation complète de chaque domaine de cet index.",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleFrequencyLabel": "Planifier la fréquence",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleUnitsLabel": "Planifier des unités de temps",
|
||||
"xpack.enterpriseSearch.crawler.components.crawlDetailsSummary.crawlCountOnDomains": "{crawlType} indexation sur {domainCount, plural, one {# domaine} other {# domaines}}",
|
||||
"xpack.enterpriseSearch.crawler.components.crawlDetailsSummary.crawlDepthLabel": "Profondeur maximale de l'indexation",
|
||||
"xpack.enterpriseSearch.crawler.components.crawlDetailsSummary.crawlTypeLabel": "Type d'indexation",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.crawlTypeGroupLabel": "Type d'indexation",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.cronSchedulingDescription": "Définissez la fréquence et la durée des indexations programmées. Le robot d'indexation utilise le fuseau horaire UTC.",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.cronSchedulingTitle": "Planification d'une durée spécifique",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.customEntryPointUrlsTextboxLabel": "URL de points d'entrée personnalisés",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.customSitemapUrlsTextboxLabel": "URL des plans de site personnalisés",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.domainsAccordionButtonLabel": "Ajouter des domaines à votre indexation",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.emptyDomainsMessage": "Veuillez sélectionner un domaine.",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.entryPointsTabLabel": "Points d'entrée",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.flyoutHeaderDescription": "Configurez une indexation ponctuelle ou des paramètres personnalisés d'indexation multiple.",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.flyoutHeadTitle": "Configuration personnalisée de l'indexation",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.includeSitemapsCheckboxLabel": "Inclure les plans de site découverts dans {robotsDotTxt}",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.maxCrawlDepthFieldDescription": "Définir une profondeur d'exploration maximale pour indiquer le nombre de pages que le robot d'exploration doit parcourir. Définir la valeur sur un (1) pour limiter l'indexation aux seuls points d'entrée.",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.maxCrawlDepthFieldLabel": "Profondeur maximale de l'indexation",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.multiCrawlSchedulingEnabled": "Activer les indexations récurrentes selon le calendrier suivant",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.multiCrawlSchedulingFrequency": "Fréquence d'indexation",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.multipleCrawlsRadioLabel": "Indexations multiples",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.multipleCrawlTabPrefix": "Indexer",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.oneTimeCrawlRadioLabel": "Indexation ponctuelle",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.postCrawlerCustomSchedulingSuccess.message": "La planification personnalisée de l'indexation a bien été enregistrée.",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.readMoreLink": "En savoir plus sur la planification",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.saveMultipleCrawlersConfiguration": "Enregistrer la configuration",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.scheduleDescription": "Le calendrier d’indexation effectuera une indexation complète de chaque domaine de cet index.",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.seedUrlsAccordionButtonLabel": "URL de base",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.selectedDescriptor": "sélectionné",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.sitemapsTabLabel": "Plans de site",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.startCrawlButtonLabel": "Appliquer et indexer maintenant",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsFlyout.previewTabLabel": "Aperçu",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsFlyout.rawJSONTabLabel": "Raw JSON",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsFlyout.title": "Détails de la requête d'indexation",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsPreview.domainsTitle": "Domaines",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsPreview.seedUrlsTitle": "URL de base",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsPreview.sitemapUrlsTitle": "URL des plans de site",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.avgResponseTimeLabel": "Réponse moy.",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.clientErrorsLabel": "Erreurs 4xx",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.configLink": "Activer les logs du robot d'indexation",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.durationTooltipTitle": "Durée",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.logsDisabledMessage": "{configLink} dans votre fichier enterprise-search.yml ou dans les paramètres utilisateur pour obtenir des statistiques d'indexation plus détaillées.",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesTooltip": "URL visitées et extraites pendant l'indexation.",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesTooltipTitle": "Pages visitées",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesVisitedTooltipTitle": "Pages",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.serverErrorsLabel": "Erreurs 5xx",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.urlsTooltip": "URL trouvées par le robot pendant l'indexation, y compris celles qui ne sont pas suivies en raison de la configuration de l'indexation.",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.urlsTooltipTitle": "URL vues",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusBanner.changesCalloutTitle": "Les modifications que vous effectuez maintenant ne prendront effet qu'au début de votre prochaine indexation.",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.cancelCrawlMenuItemLabel": "Annuler les indexations",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.crawlingButtonLabel": "Indexation en cours…",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.pendingButtonLabel": "En attente…",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.retryCrawlButtonLabel": "Indexer",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.showSelectedFieldsButtonLabel": "Afficher uniquement les champs sélectionnés",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.startACrawlButtonLabel": "Indexer",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.startingButtonLabel": "Démarrage en cours…",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.stoppingButtonLabel": "Arrêt en cours…",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.canceled": "Annulé",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.canceling": "Annulation",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.failed": "Échoué",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.pending": "En attente",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.running": "En cours d'exécution",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.skipped": "Ignoré",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.starting": "Démarrage",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.success": "Succès",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.suspended": "Suspendu",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.suspending": "Suspension",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsPanel.description": "Les recherches d'indexations récentes sont consignées ici. Vous pouvez suivre la progression et examiner les événements d'indexation dans les interfaces utilisateur Logs ou Discover de Kibana.",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsPanel.discoverCrawlerLogsTitle": "Tous les logs du robot d'indexation",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsPanel.linkToDiscover": "Afficher dans Discover",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsPanel.title": "Demandes d'indexation",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsPanel.userAgentDescription": "Les requêtes provenant du robot d'indexation peuvent être identifiées par l'agent utilisateur suivant. La configuration s'effectue dans le fichier enterprise-search.yml.",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.column.crawlType": "Type d'indexation",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.column.created": "Créé",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.column.domains": "Domaines",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.column.domainURL": "ID de requête",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.column.status": "Statut",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.emptyPrompt.body": "Vous n'avez encore démarré aucune indexation.",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.emptyPrompt.title": "Aucune demande d'indexation récente",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.beginsWithLabel": "Commence par",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.containsLabel": "Contient",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.endsWithLabel": "Se termine par",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.regexLabel": "Regex",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesPolicies.allowLabel": "Autoriser",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesPolicies.disallowLabel": "Interdire",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.addButtonLabel": "Ajouter une règle d'indexation",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.deleteSuccessToastMessage": "La règle d'indexation a été supprimée.",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.description": "Créez une règle d'indexation pour inclure ou exclure les pages dont l'URL correspond à la règle. Les règles sont exécutées dans l'ordre séquentiel et chaque URL est évaluée en fonction de la première correspondance.",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.descriptionLinkText": "En savoir plus sur les règles d'indexation",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.pathPatternTableHead": "Modèle de chemin",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.pathPatternTooltip": "Le modèle de chemin est une chaîne littérale, à l'exception du caractère astérisque (*), qui est un métacaractère pouvant correspondre à n'importe quel élément.",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.policyTableHead": "Politique",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.regexPathPatternTooltip": "Le modèle de chemin est une expression régulière compatible avec le moteur d'expression régulière du langage Ruby.",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.ruleTableHead": "Règle",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.title": "Règles d'indexation",
|
||||
"xpack.enterpriseSearch.crawler.crawlTypeOptions.full": "Pleine",
|
||||
"xpack.enterpriseSearch.crawler.crawlTypeOptions.partial": "Partielle",
|
||||
"xpack.enterpriseSearch.crawler.crawlTypeOptions.reAppliedCrawlRules": "Règles d'indexation réappliquées",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.allFieldsLabel": "Tous les champs",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.description": "Le robot d'indexation n'indexe que les pages uniques. Choisissez les champs que le robot d'indexation doit utiliser lorsqu'il recherche les pages en double. Désélectionnez tous les champs de schéma pour autoriser les documents en double dans ce domaine.",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.learnMoreMessage": "En savoir plus sur le hachage de contenu",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.preventDuplicateLabel": "Empêcher les documents en double",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.resetToDefaultsButtonLabel": "Réinitialiser aux valeurs par défaut",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.selectedFieldsLabel": "Champs sélectionnés",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.showAllFieldsButtonLabel": "Afficher tous les champs",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.title": "Traitement des documents en double",
|
||||
"xpack.enterpriseSearch.crawler.deleteDomainModal.deleteDomainButtonLabel": "Supprimer le domaine",
|
||||
"xpack.enterpriseSearch.crawler.deleteDomainModal.description": "Supprimer le domaine {domainUrl} de votre robot d'indexation. Cela supprimera également tous les points d'entrée et toutes les règles d'indexation que vous avez configurés. Tous les documents associés à ce domaine seront supprimés lors de la prochaine indexation. {thisCannotBeUndoneMessage}",
|
||||
"xpack.enterpriseSearch.crawler.deleteDomainModal.thisCannotBeUndoneMessage": "Cette action ne peut pas être annulée.",
|
||||
"xpack.enterpriseSearch.crawler.deleteDomainModal.title": "Supprimer le domaine",
|
||||
"xpack.enterpriseSearch.crawler.domainDetail.allDomainsButtonLabel": "Tous les domaines",
|
||||
"xpack.enterpriseSearch.crawler.domainDetail.deleteDomainButtonLabel": "Supprimer le domaine",
|
||||
"xpack.enterpriseSearch.crawler.domainManagement.emptyState": "Vous n'avez aucun domaine dans cet index. Ajoutez votre premier domaine pour commencer à explorer et indexer les documents.",
|
||||
"xpack.enterpriseSearch.crawler.domainManagement.emptyState.addDomainButtonLabel": "Ajouter votre premier domaine",
|
||||
"xpack.enterpriseSearch.crawler.domainManagement.emptyState.description": "Configurez les domaines que vous souhaitez indexer et, lorsque vous êtes prêt, déclenchez votre première indexation.",
|
||||
"xpack.enterpriseSearch.crawler.domainManagement.emptyState.title": "Ajouter un domaine à votre index",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.action.add.successMessage": "Le domaine \"{domainUrl}\" a bien été ajouté",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.action.delete.buttonLabel": "Supprimer ce domaine",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.action.delete.successMessage": "Le domaine \"{domainUrl}\" a bien été supprimé",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.action.manage.buttonLabel": "Gérer ce domaine",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.column.actions": "Actions",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.column.documents": "Documents",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.column.domainURL": "Domaine",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.column.lastActivity": "Dernière activité",
|
||||
"xpack.enterpriseSearch.crawler.domainsTitle": "Domaines",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.addButtonLabel": "Ajouter un point d'entrée",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.description": "Inclure ici les URL les plus importantes pour votre site web. Les URL de point d'entrée seront les premières pages à être indexées et traitées pour servir de liens vers les autres pages.",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageDescription": "{link} pour spécifier un point d'entrée pour le robot d'indexation",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageLinkText": "Ajouter un point d'entrée",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageTitle": "Il n'existe aucun point d'entrée.",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.lastItemMessage": "Le robot d'indexation nécessite au moins un point d'entrée.",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.learnMoreLinkText": "Découvrez plus d'informations sur les points d'entrée.",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.title": "Points d'entrée",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.urlTableHead": "URL",
|
||||
"xpack.enterpriseSearch.crawler.extractionRules.fieldRulesTable.fieldNameLabel": "Nom du champ",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesExtractionFilter.beginsWithLabel": "Commence par",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesExtractionFilter.containsLabel": "Contient",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesExtractionFilter.endsWithLabel": "Se termine par",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesExtractionFilter.regexLabel": "Regex",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.descriptionTableLabel": "Description",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.editedByLabel": "Modifié par",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.lastUpdatedLabel": "Dernière mise à jour",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.rulesLabel": "Règles de champ",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.sourceLabel": "Source",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.title": "Règles d'indexation",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.urlsLabel": "URL",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.arrayLabel": "tableau",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.contentLabel": "Contenu",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.extractedLabel": "Extrait comme :",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.fixedLabel": "Valeur fixe :",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.HTMLLabel": "HTML :",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.stringLabel": "chaîne",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.UrlLabel": "URL :",
|
||||
"xpack.enterpriseSearch.crawler.manageCrawlsPopover.reApplyCrawlRules.successMessage": "Les règles d'indexation sont en train d'être réappliquées dans l'arrière-plan",
|
||||
"xpack.enterpriseSearch.crawler.sitemapsTable.addButtonLabel": "Ajouter un plan du site",
|
||||
"xpack.enterpriseSearch.crawler.sitemapsTable.deleteSuccessToastMessage": "Le plan du site a été supprimé.",
|
||||
"xpack.enterpriseSearch.crawler.sitemapsTable.description": "Ajoutez des URL de plan de site personnalisées pour ce domaine. Le robot d'indexation détecte automatiquement les plans de site existants.",
|
||||
"xpack.enterpriseSearch.crawler.sitemapsTable.emptyMessageTitle": "Il n'existe aucun plan de site.",
|
||||
"xpack.enterpriseSearch.crawler.sitemapsTable.title": "Plans de site",
|
||||
"xpack.enterpriseSearch.crawler.sitemapsTable.urlTableHead": "URL",
|
||||
"xpack.enterpriseSearch.crawler.startCrawlContextMenu.crawlAllDomainsMenuLabel": "Indexation de tous les domaines sur cet index",
|
||||
"xpack.enterpriseSearch.crawler.startCrawlContextMenu.crawlCustomSettingsMenuLabel": "Indexation avec des paramètres personnalisés",
|
||||
"xpack.enterpriseSearch.crawler.startCrawlContextMenu.reapplyCrawlRulesMenuLabel": "Réappliquer les règles d'indexation",
|
||||
"xpack.enterpriseSearch.crawler.urlComboBox.invalidUrlErrorMessage": "Veuillez entrer une URL valide",
|
||||
"xpack.enterpriseSearch.crawlers.title": "Robot d'indexation Elasticsearch",
|
||||
"xpack.enterpriseSearch.createConnector..breadcrumb": "Nouveau connecteur",
|
||||
|
@ -17121,8 +16736,6 @@
|
|||
"xpack.enterpriseSearch.enabled": "Activé",
|
||||
"xpack.enterpriseSearch.exampleConnectorLabel": "Exemple",
|
||||
"xpack.enterpriseSearch.finishUpStep.euiButton.viewInDiscoverLabel": "Afficher dans Discover",
|
||||
"xpack.enterpriseSearch.getConnectorTypeBadge.connectorClientBadgeLabel": "Autogéré",
|
||||
"xpack.enterpriseSearch.getConnectorTypeBadge.nativeBadgeLabel": "Connecteur géré par Elastic",
|
||||
"xpack.enterpriseSearch.gettingStarted.description.ingestPipelinesLink.link": "pipelines d'ingestion",
|
||||
"xpack.enterpriseSearch.gettingStarted.pageTitle": "Prise en main de l'API d'Elastic",
|
||||
"xpack.enterpriseSearch.gettingStarted.pipeline.description": "Utilisez {ingestPipelinesLink} pour préparer vos données avant leur indexation dans Elasticsearch, ce qui est souvent plus facile que le post-traitement. Utilisez n'importe quelle combinaison de processeurs d'ingestion pour ajouter, supprimer ou transformer les champs dans vos documents.",
|
||||
|
@ -17527,13 +17140,6 @@
|
|||
"xpack.enterpriseSearch.searchNav.otherTools": "Autres outils",
|
||||
"xpack.enterpriseSearch.searchNav.relevance": "Pertinence",
|
||||
"xpack.enterpriseSearch.searchProvider.aiSearch.name": "Intelligence artificielle de recherche",
|
||||
"xpack.enterpriseSearch.selectConnector.badgeOnClick.ariaLabel": "Cliquer pour ouvrir la fenêtre contextuelle d'explication du connecteur",
|
||||
"xpack.enterpriseSearch.selectConnector.connectorClientBadgeLabel": "Autogéré",
|
||||
"xpack.enterpriseSearch.selectConnector.h4.connectorClientsLabel": "Connecteurs autogérés",
|
||||
"xpack.enterpriseSearch.selectConnector.nativeBadgeLabel": "Géré par Elastic",
|
||||
"xpack.enterpriseSearch.selectConnector.nativeConnectorsTitleLabel": "Connecteurs gérés par Elastic",
|
||||
"xpack.enterpriseSearch.selectConnector.p.areAvailableDirectlyWithinLabel": "Disponibles directement dans les déploiements Elastic Cloud. Aucune infrastructure supplémentaire n’est requise. Vous pouvez également convertir des connecteurs gérés par Elastic en connecteurs autogérés.",
|
||||
"xpack.enterpriseSearch.selectConnector.p.deployConnectorsOnYourLabel": "Déployez des connecteurs sur votre propre infrastructure. Vous pouvez également personnaliser les connecteurs autogérés existants ou créer les vôtres à l'aide de notre infrastructure de connecteurs.",
|
||||
"xpack.enterpriseSearch.SemanticSearch.description": "Ajoutez facilement une recherche sémantique à Elasticsearch avec des points de terminaison d'inférence et le type de champ semantic_text, cela permettra d'améliorer la pertinence de la recherche.",
|
||||
"xpack.enterpriseSearch.semanticSearch.guide.createIndex.description": "Vous devez maintenant créer un index avec un ou plusieurs champs {semanticText}.",
|
||||
"xpack.enterpriseSearch.semanticSearch.guide.createIndex.title": "Créez un index",
|
||||
|
|
|
@ -15912,9 +15912,6 @@
|
|||
"xpack.enterpriseSearch.apiKeyConfig.newApiKeyCreatedCalloutLabel": "新しいAPIキーが正常に作成されました",
|
||||
"xpack.enterpriseSearch.applications.navTitle": "ビルド",
|
||||
"xpack.enterpriseSearch.applications.productName": "アプリケーション",
|
||||
"xpack.enterpriseSearch.appSearch.crawler.addDomainForm.contentVerificationFailureMessage": "[インデックス制限]チェックが失敗したため、コンテンツを検証できません。",
|
||||
"xpack.enterpriseSearch.appSearch.crawler.addDomainForm.indexingRestrictionsFailureMessage": "[ネットワーク接続]チェックが失敗したため、インデックス制限を判定できません。",
|
||||
"xpack.enterpriseSearch.appSearch.crawler.addDomainForm.networkConnectivityFailureMessage": "[初期検証]チェックが失敗したため、ネットワーク接続を確立できません。",
|
||||
"xpack.enterpriseSearch.appSearch.crawler.simplifiedSelectable.deselectAllButtonLabel": "すべて選択解除",
|
||||
"xpack.enterpriseSearch.appSearch.crawler.simplifiedSelectable.selectAllButtonLabel": "すべて選択",
|
||||
"xpack.enterpriseSearch.attachIndexBox.createSameIndexButtonLabel": "インデックス{indexName}を作成して付ける",
|
||||
|
@ -15929,7 +15926,6 @@
|
|||
"xpack.enterpriseSearch.attachIndexBox.optionsGroup.selectExistingIndex": "既存のインデックスを選択",
|
||||
"xpack.enterpriseSearch.attachIndexBox.orPanelLabel": "OR",
|
||||
"xpack.enterpriseSearch.attachIndexBox.saveConfigurationButtonLabel": "構成を保存",
|
||||
"xpack.enterpriseSearch.automaticCrawlSchedule.title": "クロール頻度",
|
||||
"xpack.enterpriseSearch.behavioralAnalytics.description": "エンドユーザーの行動を可視化し、検索アプリケーションのパフォーマンスを測定するためのダッシュボードとツール",
|
||||
"xpack.enterpriseSearch.behavioralAnalytics.productCardCTA": "Behavioral Analyticsの詳細",
|
||||
"xpack.enterpriseSearch.betaCalloutTitle": "ベータ機能",
|
||||
|
@ -15941,8 +15937,6 @@
|
|||
"xpack.enterpriseSearch.connector.connectorTypePanel.title": "コネクタータイプ",
|
||||
"xpack.enterpriseSearch.connector.connectorTypePanel.unknown.label": "不明",
|
||||
"xpack.enterpriseSearch.connector.ingestionStatus.title": "インジェスチョンステータス",
|
||||
"xpack.enterpriseSearch.connectorCheckable.setupAConnectorClientContextMenuItemLabel": "セルフマネージドコネクターを設定",
|
||||
"xpack.enterpriseSearch.connectorCheckable.setupANativeConnectorContextMenuItemLabel": "Elasticマネージドコネクターを設定",
|
||||
"xpack.enterpriseSearch.connectorClientLabel": "セルフマネージド",
|
||||
"xpack.enterpriseSearch.connectorConfiguration.configymlCodeBlockLabel": "config.yml",
|
||||
"xpack.enterpriseSearch.connectorConfiguration.dockerTextLabel": "Dockerで実行",
|
||||
|
@ -15956,11 +15950,6 @@
|
|||
"xpack.enterpriseSearch.connectorDeployment.orLabel": "または",
|
||||
"xpack.enterpriseSearch.connectorDeployment.p.addTheFollowingConfigurationLabel": "リポジトリを複製するか、ローカルコンピューターにダウンロード",
|
||||
"xpack.enterpriseSearch.connectorDeployment.p.runTheFollowingCommandLabel": "ターミナルで次のコマンドを実行します。コンピューターにDockerがインストールされていることを確認します",
|
||||
"xpack.enterpriseSearch.connectorDescriptionBadge.client.chooseADataSourceLabel": "同期したいデータソースを選択します。",
|
||||
"xpack.enterpriseSearch.connectorDescriptionBadge.client.configureConnectorLabel": "ソースから実行するか、Dockerを使用して、独自のインフラにコネクターコードをデプロイします。",
|
||||
"xpack.enterpriseSearch.connectorDescriptionBadge.client.enterDetailsLabel": "データソースのアクセスと接続の詳細情報を入力",
|
||||
"xpack.enterpriseSearch.connectorDescriptionBadge.native.chooseADataSourceLabel": "同期したいデータソースを選択します。",
|
||||
"xpack.enterpriseSearch.connectorDescriptionBadge.native.configureConnectorLabel": "Kibana UIを使用してコネクターを構成",
|
||||
"xpack.enterpriseSearch.connectorDescriptionPopover.connectorDescriptionBadge.client.chooseADataSourceLabel": "同期するデータソースを選択",
|
||||
"xpack.enterpriseSearch.connectorDescriptionPopover.connectorDescriptionBadge.client.configureConnectorLabel": "ソースから実行するか、Dockerを使用して、独自のインフラにコネクターコードをデプロイ",
|
||||
"xpack.enterpriseSearch.connectorDescriptionPopover.connectorDescriptionBadge.client.enterDetailsLabel": "データソースのアクセスと接続の詳細情報を入力",
|
||||
|
@ -16104,40 +16093,6 @@
|
|||
"xpack.enterpriseSearch.content.connectors.overview.nativeCloudCallout.connectorClient": "セルフマネージドコネクター",
|
||||
"xpack.enterpriseSearch.content.connectors.overview.nativeCloudCallout.content": "独自のインフラでセルフホスティングされる{link}に変換します。ElasticマネージドコネクターはElastic Cloudデプロイでのみ使用できます。",
|
||||
"xpack.enterpriseSearch.content.connectors.overview.nativeCloudCallout.title": "Elasticマネージドコネクター(旧ネイティブコネクター)はElastic Cloud以外ではサポートされなくなりました",
|
||||
"xpack.enterpriseSearch.content.crawler.authentication": "認証",
|
||||
"xpack.enterpriseSearch.content.crawler.crawlerConfiguration.extractHTML.addExtraFieldDescription": "すべてのドキュメントに、クロールされるページの完全なHTMLの値を持つ追加フィールドを追加",
|
||||
"xpack.enterpriseSearch.content.crawler.crawlerConfiguration.extractHTML.extractionSwitchLabel": "完全なHTMLの保存",
|
||||
"xpack.enterpriseSearch.content.crawler.crawlerConfiguration.extractHTML.increasedSizeWarning": "このため、クロール対象のサイトが大規模な場合、インデックスサイズが劇的に大きくなることがあります。",
|
||||
"xpack.enterpriseSearch.content.crawler.crawlerConfiguration.extractHTML.learnMoreLink": "完全なHTMLの保存の詳細をご覧ください。",
|
||||
"xpack.enterpriseSearch.content.crawler.crawlerConfiguration.extractHTML.title": "完全なHTMLの保存",
|
||||
"xpack.enterpriseSearch.content.crawler.crawlRules": "クロールルール",
|
||||
"xpack.enterpriseSearch.content.crawler.deduplication": "ドキュメント処理を複製",
|
||||
"xpack.enterpriseSearch.content.crawler.domainDetail.title": "{domain}を管理",
|
||||
"xpack.enterpriseSearch.content.crawler.entryPoints": "エントリポイント",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules": "抽出ルール",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.deleteRule.caption": "抽出ルールを削除",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.deleteRule.title": "この抽出ルールを削除",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.editRule.caption": "この抽出ルールを編集",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.editRule.title": "この抽出ルールを編集",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.expandRule.caption": "ルールを展開",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.expandRule.title": "この抽出ルールを展開",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.label": "アクション",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.deleteFieldModal.confirmLabel": "ルールの削除",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.deleteFieldModal.description": "この操作は元に戻すことができません。",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.deleteFieldModal.title": "このフィールドルールを削除してよろしいですか?",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.deleteModal.confirmLabel": "ルールの削除",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.deleteModal.description": "このルールを削除すると、{fields, plural, one {1個のフィールドルール} other {# フィールドルール}}も削除されます。この操作は元に戻すことができません。",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.deleteModal.title": "この抽出ルールを削除してよろしいですか?",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.description": "コンテンツ抽出ルールを作成し、同期中にドキュメントのデータを取得する場所を変更します。",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.fieldRulesTable.editRule.caption": "このコンテンツフィールドルールを編集",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.fieldRulesTable.editRule.title": "このコンテンツフィールドルールを編集",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.learnMoreLink": "コンテンツ抽出ルールの詳細をご覧ください。",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.title": "抽出ルール",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRulesTable.addRuleLabel": "抽出ルールを追加",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRulesTable.emptyMessageAddRuleLabel": "コンテンツ抽出ルールを追加",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRulesTable.emptyMessageDescription": "コンテンツ抽出ルールを作成し、同期中にドキュメントフィールドのデータを取得する場所を変更します。",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRulesTable.emptyMessageTitle": "コンテンツ抽出ルールがありません",
|
||||
"xpack.enterpriseSearch.content.crawler.siteMaps": "サイトマップ",
|
||||
"xpack.enterpriseSearch.content.crawlers.breadcrumb": "Webクローラー",
|
||||
"xpack.enterpriseSearch.content.crawlers.deleteModal.title": "\"{connectorCount}\"クローラーを削除しますか?",
|
||||
"xpack.enterpriseSearch.content.description": "エンタープライズ サーチでは、さまざまな方法で簡単にデータを検索可能にできます。Webクローラー、Elasticsearchインデックス、API、直接アップロード、サードパーティコネクターから選択します。",
|
||||
|
@ -16243,62 +16198,6 @@
|
|||
"xpack.enterpriseSearch.content.indices.connectorScheduling.unsaved.title": "変更を保存していません。移動しますか?",
|
||||
"xpack.enterpriseSearch.content.indices.defaultPipelines.successToast.title": "デフォルトパイプラインが正常に更新されました",
|
||||
"xpack.enterpriseSearch.content.indices.deleteIndex.successToast.title": "インデックス{indexName}と関連付けられたすべての統合構成が正常に削除されました",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.addContentField.title": "コンテンツフィールドルールを追加",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.addRule.title": "コンテンツ抽出ルールを作成",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.edilidtContentField.documentField.requiredError": "フィード名が必要です。",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.cancelButton.label": "キャンセル",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.description": "フィールドにコンテンツを入力します。",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.extractAs.arrayLabel": "配列",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.extractAs.stringLabel": "文字列",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.extractedLabel": "抽出された値",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.fixedLabel": "固定値",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.htmlLabel": "CSSセレクターまたはXPath式",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.label": "コンテンツを使用",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.requiredError": "このコンテンツフィールドの値は必須です",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.title": "コンテンツ",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.urlLabel": "URLパターン",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.documentField.description": "ルールを構築するドキュメントフィールドを選択します。",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.documentField.label": "フィールド名",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.documentField.title": "ドキュメントフィールド",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.extractAs.label": "抽出されたコンテンツを保存",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.fixedValue.helpText": "このドキュメントフィールドで固定値を使用します。",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.fixedValue.label": "固定値",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.fixedValue.placeHolder": "例:「何らかの値",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.saveButton.label": "保存",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.selector.cssPlaceholder": "例:「.main_content」",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.selector.urlLabel": "例:/my-url/(.*/",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.source.description": "このフィールドのコンテンツを抽出する場所。",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.source.htmlLabel": "HTMLエレメント",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.source.label": "コンテンツを抽出する場所",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.source.requiredError": "コンテンツのソースは必須です。",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.source.title": "送信元",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.source.urlLabel": "URL",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.title": "コンテンツフィールドルールを編集",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.cancelButtonLabel": "キャンセル",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.contentField.cssSelectorsLink": "CSSセレクターとXPath式の詳細",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.contentField.differentContentLink": "さまざまな種類のコンテンツの保存に関する詳細をご覧ください",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.contentField.urlPatternsLinks": "URLパターンの詳細をご覧ください",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.descriptionError": "コンテンツ抽出ルールの説明は必須です",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.descriptionLabel": "ルールの説明",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.fieldRules.addContentFieldRuleLabel": "コンテンツフィールドルールを追加",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.fieldRules.contentFieldDescription": "データを取得するWebページの部分を特定するコンテンツフィールドを作成します。",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.fieldRules.emptyMessageAddRuleLabel": "コンテンツフィールドを追加",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.fieldRules.emptyMessageDescription": "データを取得するWebページの部分を特定するコンテンツフィールドを作成します。",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.fieldRules.emptyMessageTitle": "この抽出ルールにはコンテンツフィールドがありません",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.helpText": "このルールが抽出するデータを他のユーザーが理解できるようにします",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.placeholderLabel": "例:「ドキュメントタイトル」",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.saveButtonLabel": "ルールを保存",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.title": "コンテンツ抽出ルールを編集",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.applyAllLabel": "すべてのURLに適用",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.specificLabel": "特定のURLに適用",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFilter.": "URLパターン",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFilters.addFilter": "URLフィルターを追加",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFilters.filterHelpText": "これが適用されるURL",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFilters.filterLabel": "URLフィルター",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFilters.patternPlaceholder": "例:「/blog/*」",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFilters.removeFilter": "このフィルターを削除",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFiltersLink": "URLフィルターの詳細をご覧ください",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.urlLabel": "URL",
|
||||
"xpack.enterpriseSearch.content.indices.pipelines.addInferencePipelineModal.createErrors": "パイプラインの作成エラー",
|
||||
"xpack.enterpriseSearch.content.indices.pipelines.addInferencePipelineModal.steps.configure.description": "メインパイプラインでプロセッサーとして使用される子パイプラインを作成または再利用します。",
|
||||
"xpack.enterpriseSearch.content.indices.pipelines.addInferencePipelineModal.steps.configure.emptyValueError": "フィールドが必要です。",
|
||||
|
@ -16436,21 +16335,6 @@
|
|||
"xpack.enterpriseSearch.content.indices.pipelines.textExpansionFetchError.title": "ELSERモデルの取得エラー",
|
||||
"xpack.enterpriseSearch.content.indices.pipelines.textExpansionStartError.title": "ELSERデプロイの起動エラー",
|
||||
"xpack.enterpriseSearch.content.indices.searchIndex.convertConnector.buttonLabel": "コネクターを変換",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.allConnectorsLabel": "すべてのコネクター",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.breadcrumb": "コネクターを選択",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.cloudCallout.description": "ElasticマネージドコネクターはElastic Cloudでホスティングされます。14日間の無料トライアルを開始しましょう。",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.cloudCallout.title": "Elastic Cloud",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.cloudCallout.trialLink": "Elastic Cloudのトライアル",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.connectorCheckable.documentationLinkLabel": "ドキュメント",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.connectorCheckable.techPreviewLabel": "テクニカルプレビュー",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.connectorClients": "セルフマネージド",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.description": "Elasticと同期するサードパーティのデータソースを選択します。すべてのデータソースは、セルフマネージドコネクターでサポートされています。フィルターを使用して、Elasticが管理するコネクターの可用性を確認してください。",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.nativeLabel": "Elasticマネージド",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.search.ariaLabel": "コネクターを使用して検索",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.searchPlaceholder": "検索",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.showBetaLabel": "ベータ版コネクターを表示",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.showTechPreviewLabel": "テクニカルプレビュー版コネクターを表示",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.title": "コネクターを選択",
|
||||
"xpack.enterpriseSearch.content.indices.transforms.addInferencePipelineModal.footer.attach": "接続",
|
||||
"xpack.enterpriseSearch.content.indices.transforms.addInferencePipelineModal.footer.create": "パイプラインの作成",
|
||||
"xpack.enterpriseSearch.content.indices.transforms.addInferencePipelineModal.steps.configure.title": "構成",
|
||||
|
@ -16481,77 +16365,20 @@
|
|||
"xpack.enterpriseSearch.content.ml_inference.zero_shot_classification": "ゼロショットテキスト分類",
|
||||
"xpack.enterpriseSearch.content.nameAndDescription.name.error.empty": "コネクター名を空にすることはできません",
|
||||
"xpack.enterpriseSearch.content.navTitle": "コンテンツ",
|
||||
"xpack.enterpriseSearch.content.new_connector_with_service_type.breadcrumbs": "新しい{name}コネクター",
|
||||
"xpack.enterpriseSearch.content.new_connector.breadcrumbs": "新しいコネクター",
|
||||
"xpack.enterpriseSearch.content.new_index.apiDescription": "プログラム的にElasticsearchインデックスにドキュメントを追加するにはAPIを使用します。パイプラインを作成して開始",
|
||||
"xpack.enterpriseSearch.content.new_index.apiTitle": "新しい検索インデックス",
|
||||
"xpack.enterpriseSearch.content.new_index.breadcrumbs": "新しい検索インデックス",
|
||||
"xpack.enterpriseSearch.content.new_index.connectorDescriptionWithServiceType": "コネクターを使用して、データソースからデータを同期、抽出、変換、インデックス化します。コネクターはElasticsearchのインデックスに直接書き込むElastic統合です。",
|
||||
"xpack.enterpriseSearch.content.new_index.connectorTitle": "新しいコネクターの検索インデックス",
|
||||
"xpack.enterpriseSearch.content.new_index.connectorTitleWithServiceType": "新しい{name}コネクター",
|
||||
"xpack.enterpriseSearch.content.new_index.crawlerDescription": "Webクローラーを使用して、Webサイトやナレッジベースから検索可能なコンテンツをプログラムで検出、抽出、インデックス化します。",
|
||||
"xpack.enterpriseSearch.content.new_index.crawlerTitle": "Webクローラー検索インデックス",
|
||||
"xpack.enterpriseSearch.content.new_index.defaultDescription": "検索インデックスにはデータが格納されます。",
|
||||
"xpack.enterpriseSearch.content.new_index.genericTitle": "新しい検索インデックス",
|
||||
"xpack.enterpriseSearch.content.new_index.successToast.title": "インデックスが正常に作成されました",
|
||||
"xpack.enterpriseSearch.content.new_web_crawler.breadcrumbs": "新しいWebクローラー",
|
||||
"xpack.enterpriseSearch.content.newConnector.newConnectorTemplate.createIndex.buttonText": "コネクターを作成",
|
||||
"xpack.enterpriseSearch.content.newConnector.newConnectorTemplate.formTitle": "コネクターを作成する",
|
||||
"xpack.enterpriseSearch.content.newConnector.newConnectorTemplate.learnMoreConnectors.linkText": "コネクターの詳細",
|
||||
"xpack.enterpriseSearch.content.newConnector.newConnectorTemplate.nameInputHelpText.lineTwo": "名前は小文字で入力してください。スペースや特殊文字は使用できません。",
|
||||
"xpack.enterpriseSearch.content.newConnector.newConnectorTemplate.nameInputLabel": "コネクター名",
|
||||
"xpack.enterpriseSearch.content.newConnector.newConnectorTemplate.nameInputPlaceholder": "コネクターの名前を設定",
|
||||
"xpack.enterpriseSearch.content.newIndex.breadcrumb": "新しいインジェスチョン方法",
|
||||
"xpack.enterpriseSearch.content.newIndex.emptyState.description": "Searchで追加したデータは検索インデックスと呼ばれ、App SearchとWorkplace Searchの両方で検索可能です。App SearchのコネクターとWorkplace SearchのWebクローラーを使用できます。",
|
||||
"xpack.enterpriseSearch.content.newIndex.emptyState.footer.title": "検索インデックスの詳細",
|
||||
"xpack.enterpriseSearch.content.newIndex.emptyState.title": "インジェスチョン方法を選択",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.api.description": "APIを使用して、直接Elasticsearchインデックスエンドポイントに接続します。",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.api.label": "APIインデックスを作成",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.api.title": "API",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.connector.description": "サードパーティのデータソースからデータを抽出、変換、インデックス化、同期します",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.connector.label": "ソースデータコネクターを選択",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.connector.title": "コネクター",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.crawler.description": "Webサイトやナレッジベースから検索可能なコンテンツを検出、抽出、インデックス化します。",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.crawler.label": "クロールURL",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.crawler.nocodeLabel": "コードなし",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.crawler.title": "Webクローラー",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.alreadyExists.error": "名前{indexName}のインデックスはすでに存在します",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.createIndex.buttonText": "インデックスの作成",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.formDescription": "このインデックスには、データソースコンテンツが格納されます。また、デフォルトフィールドマッピングで最適化され、関連する検索エクスペリエンスを実現します。一意のインデックス名を指定し、任意でインデックスのデフォルト{language_analyzer}を設定します。",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.formDescription.linkText": "言語アナライザー",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.formTitle": "Elasticsearchインデックスを作成",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.isInvalid.error": "{indexName}は無効なインデックス名です",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.languageInputHelpText": "言語は後から変更できますが、再インデックスが必要になる場合があります",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.languageInputLabel": "言語アナライザー",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.learnMoreApis.linkText": "インジェストAPIの詳細",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.learnMoreConnectors.linkText": "コネクターの詳細",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.learnMoreCrawler.linkText": "Elastic Webクローラーの詳細",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.learnMoreIndices.linkText": "インデックスの詳細",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.nameInputHelpText.lineOne": "インデックスは次の名前になります:{indexName}",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.nameInputHelpText.lineTwo": "名前は小文字で入力してください。スペースや特殊文字は使用できません。",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.nameInputLabel": "インデックス名",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.nameInputPlaceholder": "インデックスの名前を設定",
|
||||
"xpack.enterpriseSearch.content.newIndex.pageDescription": "検索に最適化されたElasticsearchインデックスを作成し、コンテンツを保存します。まず、インジェスト方法を選択します。",
|
||||
"xpack.enterpriseSearch.content.newIndex.pageTitle": "インジェスチョン方法を選択",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.cloudTrialButton": "Elastic Cloudのトライアル",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.manageLicenseButtonLabel": "ライセンスの管理",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.openCreateConnectorPopover": "メニューを開いて、{connectorType}タイプのコネクターを作成",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.openNativePopoverLabel": "Elasticマネージドコネクターに関する情報を表示したポップオーバーを開く",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.openPopoverLabel": "ライセンスポップオーバーを開く",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.subscriptionButtonLabel": "サブスクリプションオプション",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.upgradeContent": "データをセルフマネージドElasticsearchインスタンスに送信するセルフマネージドコネクターには、少なくともPlatinumライセンスが必要です。",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.upgradeTitle": "Elastic Platinum へのアップグレード",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnectore.nativePopover.description": "ElasticマネージドコネクターはElastic Cloudでホスティングされます。14日間の無料トライアルを開始しましょう。",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnectore.nativePopover.title": "Elastic Cloud",
|
||||
"xpack.enterpriseSearch.content.newIndex.steps.buildConnector.error.connectorAlreadyExists": "このインデックスのコネクターはすでに存在します",
|
||||
"xpack.enterpriseSearch.content.newIndex.steps.buildConnector.error.genericError": "インデックスを作成できませんでした",
|
||||
"xpack.enterpriseSearch.content.newIndex.steps.buildConnector.error.indexAlreadyExists": "このインデックスはすでに存在します",
|
||||
"xpack.enterpriseSearch.content.newIndex.steps.buildConnector.error.unauthorizedError": "このコネクターを作成する権限がありません",
|
||||
"xpack.enterpriseSearch.content.newIndex.types.api": "APIエンドポイント",
|
||||
"xpack.enterpriseSearch.content.newIndex.types.connector": "コネクター",
|
||||
"xpack.enterpriseSearch.content.newIndex.types.crawler": "Webクローラー",
|
||||
"xpack.enterpriseSearch.content.newIndex.types.elasticsearch": "Elasticsearchインデックス",
|
||||
"xpack.enterpriseSearch.content.newIndex.types.json": "JSON",
|
||||
"xpack.enterpriseSearch.content.overview.documementExample.generateApiKeyButton.createNew": "新規",
|
||||
"xpack.enterpriseSearch.content.overview.documementExample.generateApiKeyButton.viewAll": "管理",
|
||||
"xpack.enterpriseSearch.content.overview.documentExample.clientLibraries.dotnet": ".NET",
|
||||
|
@ -16582,7 +16409,6 @@
|
|||
"xpack.enterpriseSearch.content.searchIndex.cancelSyncs.successMessage": "同期が正常にキャンセルされました",
|
||||
"xpack.enterpriseSearch.content.searchIndex.configurationTabLabel": "構成",
|
||||
"xpack.enterpriseSearch.content.searchIndex.connectorErrorCallOut.title": "コネクターでエラーが発生しました",
|
||||
"xpack.enterpriseSearch.content.searchIndex.crawlerConfigurationTabLabel": "構成",
|
||||
"xpack.enterpriseSearch.content.searchIndex.documents.noIndex": "ドキュメントレベルのセキュリティを有効にし、最初のアクセス制御同期を実行するまで、アクセス制御インデックスは作成されません。",
|
||||
"xpack.enterpriseSearch.content.searchIndex.documents.noIndex.title": "アクセス制御インデックスが見つかりません",
|
||||
"xpack.enterpriseSearch.content.searchIndex.documents.noMappings": "インデックスドキュメントが見つかりません",
|
||||
|
@ -16595,7 +16421,6 @@
|
|||
"xpack.enterpriseSearch.content.searchIndex.documents.selector.contentIndexSync.title": "コンテンツ同期",
|
||||
"xpack.enterpriseSearch.content.searchIndex.documents.selectorSync.accessControl.title": "アクセス制御同期",
|
||||
"xpack.enterpriseSearch.content.searchIndex.documentsTabLabel": "ドキュメント",
|
||||
"xpack.enterpriseSearch.content.searchIndex.domainManagementTabLabel": "ドメインを管理",
|
||||
"xpack.enterpriseSearch.content.searchIndex.index.accessControlSyncSuccess.message": "アクセス制御同期が正常にスケジュールされました。コネクターによって取得されるのを待機しています",
|
||||
"xpack.enterpriseSearch.content.searchIndex.index.incSyncSuccess.message": "増分同期が正常にスケジュールされました。コネクターによって取得されるのを待機しています",
|
||||
"xpack.enterpriseSearch.content.searchIndex.index.recheckSuccess.message": "コネクターは再チェックされました。",
|
||||
|
@ -16610,19 +16435,13 @@
|
|||
"xpack.enterpriseSearch.content.searchIndex.nativeCloudCallout.connectorClient": "セルフマネージドコネクター",
|
||||
"xpack.enterpriseSearch.content.searchIndex.nativeCloudCallout.content": "独自のインフラで自己管理される{link}に変換します。ネイティブコネクターはElastic Cloudデプロイでのみ使用できます。",
|
||||
"xpack.enterpriseSearch.content.searchIndex.nativeCloudCallout.title": "ネイティブコネクターはElastic Cloud以外ではサポートされなくなりました",
|
||||
"xpack.enterpriseSearch.content.searchIndex.noCrawlerConnectorFound.deleteIndex": "インデックスの削除",
|
||||
"xpack.enterpriseSearch.content.searchIndex.noCrawlerConnectorFound.description": "このクローラーインデックスのコネクター構成が見つかりませんでした。レコードを編集するか、インデックスを削除してください。",
|
||||
"xpack.enterpriseSearch.content.searchIndex.noCrawlerConnectorFound.recreateConnectorRecord": "コネクターレコードを再作成",
|
||||
"xpack.enterpriseSearch.content.searchIndex.noCrawlerConnectorFound.title": "このインデックスのコネクター構成が削除されました",
|
||||
"xpack.enterpriseSearch.content.searchIndex.overviewTabLabel": "概要",
|
||||
"xpack.enterpriseSearch.content.searchIndex.pipelinesTabLabel": "パイプライン",
|
||||
"xpack.enterpriseSearch.content.searchIndex.schedulingTabLabel": "スケジュール",
|
||||
"xpack.enterpriseSearch.content.searchIndex.syncRulesTabLabel": "同期ルール",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.apiIngestionMethodLabel": "API",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.connectorIngestionMethodLabel": "コネクター",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.crawlerIngestionMethodLabel": "Crawler",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.documentCountCardLabel": "ドキュメントカウント",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.domainCountCardLabel": "ドメインカウント",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.ingestionTypeCardLabel": "インジェスチョンタイプ",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.languageLabel": "言語アナライザー",
|
||||
"xpack.enterpriseSearch.content.searchIndex.transform.description": "カスタムフィールドを追加したり、学習済みのMLモデルを使用してインデックスされたドキュメントを分析したり、インデックスされたドキュメントをリッチ化したいですか?インデックス固有のインジェストパイプラインを使用して、ニーズに合わせてドキュメントをカスタマイズします。",
|
||||
|
@ -16660,7 +16479,6 @@
|
|||
"xpack.enterpriseSearch.content.searchIndices.ingestionStatus.configured.label": "構成済み",
|
||||
"xpack.enterpriseSearch.content.searchIndices.ingestionStatus.connected.label": "接続済み",
|
||||
"xpack.enterpriseSearch.content.searchIndices.ingestionStatus.connectorError.label": "コネクター失敗",
|
||||
"xpack.enterpriseSearch.content.searchIndices.ingestionStatus.idle.label": "アイドル",
|
||||
"xpack.enterpriseSearch.content.searchIndices.ingestionStatus.incomplete.label": "未完了",
|
||||
"xpack.enterpriseSearch.content.searchIndices.ingestionStatus.syncError.label": "同期失敗",
|
||||
"xpack.enterpriseSearch.content.searchIndices.name.columnTitle": "インデックス名",
|
||||
|
@ -16701,208 +16519,6 @@
|
|||
"xpack.enterpriseSearch.content.supportedLanguages.spanishLabel": "スペイン語",
|
||||
"xpack.enterpriseSearch.content.supportedLanguages.thaiLabel": "タイ語",
|
||||
"xpack.enterpriseSearch.content.supportedLanguages.universalLabel": "ユニバーサル",
|
||||
"xpack.enterpriseSearch.crawler.action.deleteDomain.confirmationPopupMessage": "ドメイン\"{domainUrl}\"とすべての設定を削除しますか?",
|
||||
"xpack.enterpriseSearch.crawler.addDomainFlyout.description": "複数のドメインをこのインデックスのWebクローラーに追加できます。ここで別のドメインを追加して、[管理]ページからエントリポイントとクロールルールを変更します。",
|
||||
"xpack.enterpriseSearch.crawler.addDomainFlyout.openButtonLabel": "ドメインを追加",
|
||||
"xpack.enterpriseSearch.crawler.addDomainFlyout.title": "新しいドメインを追加",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.contentVerificationLabel": "コンテンツ検証",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.entryPointLabel": "Webクローラーエントリポイントが{entryPointValue}として設定されました",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.errorsTitle": "何か問題が発生しました。エラーを解決して、再試行してください。",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.ignoreValidationDescription": "上記のエラーが解決されるまで、Webクローラーはこのドメインのコンテンツにインデックスを作成できません。",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.ignoreValidationTitle": "検証の失敗を無視して続行",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.indexingRestrictionsLabel": "インデックスの制約",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.initialVaidationLabel": "初期検証",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.networkConnectivityLabel": "ネットワーク接続",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.submitButtonLabel": "ドメインを追加",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.testUrlButtonLabel": "ブラウザーでURLをテスト",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.unexpectedValidationErrorMessage": "予期しないエラー",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.urlHelpText": "ドメインURLにはプロトコルが必要です。パスを含めることはできません。",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.urlLabel": "ドメインURL",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.validateButtonLabel": "ドメインを検証",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.basicAuthenticationLabel": "基本認証",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.configurationSavePanel.description": "クローリング保護されたコンテンツの認証設定が保存されました。認証メカニズムを更新するには、設定を削除して再起動してください。",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.configurationSavePanel.title": "構成設定が保存されました",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.deleteConfirmationModal.deleteButtonLabel": "削除",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.deleteConfirmationModal.description": "これらの設定を削除すると、クローラーがドメインの保護された領域にインデックスを作成できない可能性があります。この操作は元に戻せません。",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.deleteConfirmationModal.title": "これらの設定を削除しますか?",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.description": "認証を設定し、このドメインのクローリング保護されたコンテンツを有効化します。",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.editForm.headerValueLabel": "ヘッダー値",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.emptyPrompt.addAuthenticationButtonLabel": "認証の追加",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.emptyPrompt.description": "{addAuthenticationButtonLabel}をクリックすると、保護されたコンテンツのクローリングに必要な資格情報を提供します",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.emptyPrompt.title": "認証が構成されていません",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.rawAuthenticationLabel": "認証ヘッダー",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.resetToDefaultsButtonLabel": "資格情報の追加",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.title": "認証",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.crawlAutomaticallySwitchLabel": "次のスケジュールで繰り返しクロールを有効にする",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.cronSchedulingDescription": "スケジュールされたクロールの頻度と時刻を定義します。クローラーはタイムゾーンとしてUTCを使用します。",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.cronSchedulingTitle": "特定の時刻スケジュール",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.intervalSchedulingDescription": "スケジュールされたクロールの頻度を定義",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.intervalSchedulingTitle": "間隔スケジュール",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.readMoreLink": "スケジュールの詳細",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleDescription": "クローリングスケジュールは、このインデックスのすべてのドメインに対してフルクローリングを実行します。",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleFrequencyLabel": "スケジュール頻度",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleUnitsLabel": "スケジュール時間単位",
|
||||
"xpack.enterpriseSearch.crawler.components.crawlDetailsSummary.crawlCountOnDomains": "{domainCount, plural, other {# 件のドメイン}}で{crawlType}クロール",
|
||||
"xpack.enterpriseSearch.crawler.components.crawlDetailsSummary.crawlDepthLabel": "最大クロール深度",
|
||||
"xpack.enterpriseSearch.crawler.components.crawlDetailsSummary.crawlTypeLabel": "クロールタイプ",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.crawlTypeGroupLabel": "クロールタイプ",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.cronSchedulingDescription": "スケジュールされたクロールの頻度と時刻を定義します。クローラーはタイムゾーンとしてUTCを使用します。",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.cronSchedulingTitle": "特定の時刻スケジュール",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.customEntryPointUrlsTextboxLabel": "カスタム入力ポイントURL",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.customSitemapUrlsTextboxLabel": "カスタムサイトマップURL",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.domainsAccordionButtonLabel": "ドメインをクロールに追加",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.emptyDomainsMessage": "ドメインを選択してください。",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.entryPointsTabLabel": "エントリポイント",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.flyoutHeaderDescription": "ワンタイムクロールまたは複数回のクロールのカスタム設定を行います。",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.flyoutHeadTitle": "カスタムクロール構成",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.includeSitemapsCheckboxLabel": "{robotsDotTxt}で検出されたサイトマップを含める",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.maxCrawlDepthFieldDescription": "クローラーが走査するページの数を指定する最大クロール深度を設定します。クロールをエントリポイントのみに制限する場合は、値を1に設定します。",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.maxCrawlDepthFieldLabel": "最大クロール深度",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.multiCrawlSchedulingEnabled": "次のスケジュールで繰り返しクロールを有効にする",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.multiCrawlSchedulingFrequency": "クロール頻度",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.multipleCrawlsRadioLabel": "複数のクロール",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.multipleCrawlTabPrefix": "クローリング",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.oneTimeCrawlRadioLabel": "ワンタイムクロール",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.postCrawlerCustomSchedulingSuccess.message": "クローラーのカスタムスケジュールが正常に保存されました。",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.readMoreLink": "スケジュールの詳細",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.saveMultipleCrawlersConfiguration": "構成を保存",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.scheduleDescription": "クローリングスケジュールは、このインデックスのすべてのドメインに対してフルクローリングを実行します。",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.seedUrlsAccordionButtonLabel": "シードURL",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.selectedDescriptor": "選択済み",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.sitemapsTabLabel": "サイトマップ",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.startCrawlButtonLabel": "今すぐ適用してクロール",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsFlyout.previewTabLabel": "プレビュー",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsFlyout.rawJSONTabLabel": "元のJSON",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsFlyout.title": "クロールリクエスト詳細",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsPreview.domainsTitle": "ドメイン",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsPreview.seedUrlsTitle": "シードURL",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsPreview.sitemapUrlsTitle": "サイトマップURL",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.avgResponseTimeLabel": "平均応答",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.clientErrorsLabel": "4xxエラー",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.configLink": "Webクローラーログを有効化",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.durationTooltipTitle": "期間",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.logsDisabledMessage": "{configLink}をenterprise-search.ymlまたはユーザー設定に追加すると、より詳細なクロール統計が得られます。",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesTooltip": "クロール中にアクセスされ抽出されたページ。",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesTooltipTitle": "アクセスされたページ",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesVisitedTooltipTitle": "ページ",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.serverErrorsLabel": "5xxエラー",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.urlsTooltip": "クロール中にクローラーによって検出されたURL(クロール構成のため従われなかったURLを含む)。",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.urlsTooltipTitle": "検出されたURL",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusBanner.changesCalloutTitle": "行った変更は次回のクロールの開始まで適用されません。",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.cancelCrawlMenuItemLabel": "クロールをキャンセル",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.crawlingButtonLabel": "クロール中...",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.pendingButtonLabel": "保留中...",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.retryCrawlButtonLabel": "クローリング",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.showSelectedFieldsButtonLabel": "選択したフィールドのみを表示",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.startACrawlButtonLabel": "クローリング",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.startingButtonLabel": "開始中...",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.stoppingButtonLabel": "停止中...",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.canceled": "キャンセル",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.canceling": "キャンセル中",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.failed": "失敗",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.pending": "保留中",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.running": "実行中",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.skipped": "スキップ",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.starting": "開始中",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.success": "成功",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.suspended": "一時停止",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.suspending": "一時停止中",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsPanel.description": "最近のクロールリクエストはここに記録されます。KibanaのDiscoverまたはログユーザーインターフェースで、進捗状況を追跡し、クロールイベントを検査できます",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsPanel.discoverCrawlerLogsTitle": "すべてのクローラーログ",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsPanel.linkToDiscover": "Discoverに表示",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsPanel.title": "クローリングリクエスト",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsPanel.userAgentDescription": "クローラーからのリクエストは、次のユーザーエージェントで特定できます。これはenterprise-search.ymlファイルで構成されます。",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.column.crawlType": "クロールタイプ",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.column.created": "作成済み",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.column.domains": "ドメイン",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.column.domainURL": "リクエストID",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.column.status": "ステータス",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.emptyPrompt.body": "まだクロールを開始していません。",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.emptyPrompt.title": "最近のクロールリクエストがありません",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.beginsWithLabel": "で開始",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.containsLabel": "を含む",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.endsWithLabel": "で終了",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.regexLabel": "正規表現",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesPolicies.allowLabel": "許可",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesPolicies.disallowLabel": "禁止",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.addButtonLabel": "クロールルールを追加",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.deleteSuccessToastMessage": "クロールルールが削除されました。",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.description": "URLがルールと一致するページを含めるか除外するためのクロールルールを作成します。ルールは連続で実行されます。各URLは最初の一致に従って評価されます。",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.descriptionLinkText": "クロールルールの詳細",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.pathPatternTableHead": "パスパターン",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.pathPatternTooltip": "パスパターンはアスタリスク(*)を除くリテラル文字列です。アスタリスクはいずれかと一致するメタ文字です。",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.policyTableHead": "ポリシー",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.regexPathPatternTooltip": "パスパターンは、Ruby言語正規表現エンジンと互換性がある正規表現です。",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.ruleTableHead": "ルール",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.title": "クロールルール",
|
||||
"xpack.enterpriseSearch.crawler.crawlTypeOptions.full": "完全",
|
||||
"xpack.enterpriseSearch.crawler.crawlTypeOptions.partial": "部分",
|
||||
"xpack.enterpriseSearch.crawler.crawlTypeOptions.reAppliedCrawlRules": "再適用されたクロールルール",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.allFieldsLabel": "すべてのフィールド",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.description": "Webクローラーは一意のページにのみインデックスします。重複するページを検討するときにクローラーが使用するフィールドを選択します。すべてのスキーマフィールドを選択解除して、このドメインで重複するドキュメントを許可します。",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.learnMoreMessage": "コンテンツハッシュの詳細",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.preventDuplicateLabel": "重複するドキュメントの防止",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.resetToDefaultsButtonLabel": "デフォルトにリセット",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.selectedFieldsLabel": "選択したフィールド",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.showAllFieldsButtonLabel": "すべてのフィールドを表示",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.title": "重複ドキュメントの処理",
|
||||
"xpack.enterpriseSearch.crawler.deleteDomainModal.deleteDomainButtonLabel": "ドメインを削除",
|
||||
"xpack.enterpriseSearch.crawler.deleteDomainModal.description": "ドメイン{domainUrl}をクローラーから削除します。これにより、設定したすべてのエントリポイントとクロールルールも削除されます。このドメインに関連するすべてのドキュメントは、次回のクロールで削除されます。{thisCannotBeUndoneMessage}",
|
||||
"xpack.enterpriseSearch.crawler.deleteDomainModal.thisCannotBeUndoneMessage": "これは元に戻せません。",
|
||||
"xpack.enterpriseSearch.crawler.deleteDomainModal.title": "ドメインを削除",
|
||||
"xpack.enterpriseSearch.crawler.domainDetail.allDomainsButtonLabel": "すべてのドメイン",
|
||||
"xpack.enterpriseSearch.crawler.domainDetail.deleteDomainButtonLabel": "ドメインを削除",
|
||||
"xpack.enterpriseSearch.crawler.domainManagement.emptyState": "このインデックスにはドメインがありません。最初のドメインを追加すると、ドキュメントのクローリングとインデックス作成が開始します。",
|
||||
"xpack.enterpriseSearch.crawler.domainManagement.emptyState.addDomainButtonLabel": "最初のドメインを追加",
|
||||
"xpack.enterpriseSearch.crawler.domainManagement.emptyState.description": "クローリングするドメインを構成し、準備が完了したら、最初のクローリングをトリガーします。",
|
||||
"xpack.enterpriseSearch.crawler.domainManagement.emptyState.title": "ドメインをインデックスに追加",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.action.add.successMessage": "ドメイン''{domainUrl}''が正常に追加されました",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.action.delete.buttonLabel": "このドメインを削除",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.action.delete.successMessage": "ドメイン''{domainUrl}''が正常に削除されました",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.action.manage.buttonLabel": "このドメインを管理",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.column.actions": "アクション",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.column.documents": "ドキュメント",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.column.domainURL": "ドメイン",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.column.lastActivity": "前回のアクティビティ",
|
||||
"xpack.enterpriseSearch.crawler.domainsTitle": "ドメイン",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.addButtonLabel": "エントリポイントを追加",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.description": "ここではWebサイトの最も重要なURLを含めます。エントリポイントURLは、他のページへのリンク目的で最初にインデックスおよび処理されるページです。",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageDescription": "クローラーのエントリポイントを指定するには、{link}してください",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageLinkText": "エントリポイントを追加",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageTitle": "既存のエントリポイントがありません。",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.lastItemMessage": "クローラーには1つ以上のエントリポイントが必要です。",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.learnMoreLinkText": "エントリポイントの詳細。",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.title": "エントリポイント",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.urlTableHead": "URL",
|
||||
"xpack.enterpriseSearch.crawler.extractionRules.fieldRulesTable.fieldNameLabel": "フィールド名",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesExtractionFilter.beginsWithLabel": "で開始",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesExtractionFilter.containsLabel": "を含む",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesExtractionFilter.endsWithLabel": "で終了",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesExtractionFilter.regexLabel": "正規表現",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.descriptionTableLabel": "説明",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.editedByLabel": "編集者",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.lastUpdatedLabel": "最終更新",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.rulesLabel": "フィールドルール",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.sourceLabel": "送信元",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.title": "クロールルール",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.urlsLabel": "URL",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.arrayLabel": "配列",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.contentLabel": "コンテンツ",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.extractedLabel": "抽出:",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.fixedLabel": "固定値:",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.HTMLLabel": "HTML:",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.stringLabel": "文字列",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.UrlLabel": "URL:",
|
||||
"xpack.enterpriseSearch.crawler.manageCrawlsPopover.reApplyCrawlRules.successMessage": "クロールルールはバックグラウンドで再適用されています",
|
||||
"xpack.enterpriseSearch.crawler.sitemapsTable.addButtonLabel": "サイトマップを追加",
|
||||
"xpack.enterpriseSearch.crawler.sitemapsTable.deleteSuccessToastMessage": "サイトマップが削除されました。",
|
||||
"xpack.enterpriseSearch.crawler.sitemapsTable.description": "このドメインのカスタムサイトマップURLを追加します。クローラーは自動的に既存のサイトマップを検出します。",
|
||||
"xpack.enterpriseSearch.crawler.sitemapsTable.emptyMessageTitle": "既存のサイトマップがありません。",
|
||||
"xpack.enterpriseSearch.crawler.sitemapsTable.title": "サイトマップ",
|
||||
"xpack.enterpriseSearch.crawler.sitemapsTable.urlTableHead": "URL",
|
||||
"xpack.enterpriseSearch.crawler.startCrawlContextMenu.crawlAllDomainsMenuLabel": "このエンジンのすべてのドメインをクローリング",
|
||||
"xpack.enterpriseSearch.crawler.startCrawlContextMenu.crawlCustomSettingsMenuLabel": "カスタム設定でクロール",
|
||||
"xpack.enterpriseSearch.crawler.startCrawlContextMenu.reapplyCrawlRulesMenuLabel": "クローリングルールを再適用",
|
||||
"xpack.enterpriseSearch.crawler.urlComboBox.invalidUrlErrorMessage": "有効なURLを入力してください",
|
||||
"xpack.enterpriseSearch.crawlers.title": "Elasticsearch Webクローラー",
|
||||
"xpack.enterpriseSearch.createConnector..breadcrumb": "新しいコネクター",
|
||||
|
@ -16982,8 +16598,6 @@
|
|||
"xpack.enterpriseSearch.enabled": "有効",
|
||||
"xpack.enterpriseSearch.exampleConnectorLabel": "例",
|
||||
"xpack.enterpriseSearch.finishUpStep.euiButton.viewInDiscoverLabel": "Discoverに表示",
|
||||
"xpack.enterpriseSearch.getConnectorTypeBadge.connectorClientBadgeLabel": "セルフマネージド",
|
||||
"xpack.enterpriseSearch.getConnectorTypeBadge.nativeBadgeLabel": "Elasticマネージドコネクター",
|
||||
"xpack.enterpriseSearch.gettingStarted.description.ingestPipelinesLink.link": "インジェストパイプライン",
|
||||
"xpack.enterpriseSearch.gettingStarted.pageTitle": "Elastic APIを使いはじめる",
|
||||
"xpack.enterpriseSearch.gettingStarted.pipeline.description": "{ingestPipelinesLink}を使うと、Elasticsearchにインデックス化される前にデータを前処理することができます。通常、これは後処理よりも大幅に簡単です。インジェストプロセッサーを自由に組み合わせて、ドキュメント内のフィールドを追加、削除、変換できます。",
|
||||
|
@ -17385,13 +16999,6 @@
|
|||
"xpack.enterpriseSearch.searchNav.mngt": "スタック管理",
|
||||
"xpack.enterpriseSearch.searchNav.otherTools": "その他のツール",
|
||||
"xpack.enterpriseSearch.searchProvider.aiSearch.name": "検索AI",
|
||||
"xpack.enterpriseSearch.selectConnector.badgeOnClick.ariaLabel": "クリックすると、コネクター説明ポップオーバーが開きます",
|
||||
"xpack.enterpriseSearch.selectConnector.connectorClientBadgeLabel": "セルフマネージド",
|
||||
"xpack.enterpriseSearch.selectConnector.h4.connectorClientsLabel": "セルフマネージドコネクター",
|
||||
"xpack.enterpriseSearch.selectConnector.nativeBadgeLabel": "Elasticマネージド",
|
||||
"xpack.enterpriseSearch.selectConnector.nativeConnectorsTitleLabel": "Elasticマネージドコネクター",
|
||||
"xpack.enterpriseSearch.selectConnector.p.areAvailableDirectlyWithinLabel": "直接Elastic Cloudデプロイ内で使用できます。追加のインフラストラクチャーは必要ありません。また、Elasticマネージドコネクターをセルフマネージドコネクターに変換することもできます。",
|
||||
"xpack.enterpriseSearch.selectConnector.p.deployConnectorsOnYourLabel": "独自のインフラにコネクターをデプロイします。また、既存のセルフマネージドコネクターをカスタマイズしたり、コネクターフレームワークを使用して独自のコネクタークライアントを構築したりすることもできます。",
|
||||
"xpack.enterpriseSearch.SemanticSearch.description": "推論エンドポイントとsemantic_textフィールド型を使用して、簡単にElasticsearchにセマンティック検索を追加し、検索の関連性を強化できます。",
|
||||
"xpack.enterpriseSearch.semanticSearch.guide.createIndex.description": "次に、1つ以上の{semanticText}フィールドを含むインデックスを作成する必要があります。",
|
||||
"xpack.enterpriseSearch.semanticSearch.guide.createIndex.title": "インデックスを作成",
|
||||
|
|
|
@ -15635,9 +15635,6 @@
|
|||
"xpack.enterpriseSearch.apiKeyConfig.newApiKeyCreatedCalloutLabel": "已成功创建新的 API 密钥",
|
||||
"xpack.enterpriseSearch.applications.navTitle": "构建",
|
||||
"xpack.enterpriseSearch.applications.productName": "应用程序",
|
||||
"xpack.enterpriseSearch.appSearch.crawler.addDomainForm.contentVerificationFailureMessage": "因为'索引限制'检查失败,无法验证内容。",
|
||||
"xpack.enterpriseSearch.appSearch.crawler.addDomainForm.indexingRestrictionsFailureMessage": "无法确定索引限制,因为'网络连接性'检查失败。",
|
||||
"xpack.enterpriseSearch.appSearch.crawler.addDomainForm.networkConnectivityFailureMessage": "无法建立网络连接,因为'初始验证'检查失败。",
|
||||
"xpack.enterpriseSearch.appSearch.crawler.simplifiedSelectable.deselectAllButtonLabel": "取消全选",
|
||||
"xpack.enterpriseSearch.appSearch.crawler.simplifiedSelectable.selectAllButtonLabel": "全选",
|
||||
"xpack.enterpriseSearch.attachIndexBox.createSameIndexButtonLabel": "创建并附加名为 {indexName} 的索引",
|
||||
|
@ -15653,7 +15650,6 @@
|
|||
"xpack.enterpriseSearch.attachIndexBox.orPanelLabel": "OR",
|
||||
"xpack.enterpriseSearch.attachIndexBox.saveConfigurationButtonLabel": "保存配置",
|
||||
"xpack.enterpriseSearch.attachIndexBox.thisIndexWillHoldTextLabel": "此索引将存放您的数据源内容,并通过默认字段映射进行优化,以提供相关搜索体验。提供唯一的索引名称,并为索引设置默认的语言分析器(可选)。",
|
||||
"xpack.enterpriseSearch.automaticCrawlSchedule.title": "爬网频率",
|
||||
"xpack.enterpriseSearch.behavioralAnalytics.description": "用于对最终用户行为进行可视化并评估搜索应用程序性能的仪表板和工具",
|
||||
"xpack.enterpriseSearch.behavioralAnalytics.productCardCTA": "了解行为分析",
|
||||
"xpack.enterpriseSearch.betaCalloutTitle": "公测版功能",
|
||||
|
@ -15665,8 +15661,6 @@
|
|||
"xpack.enterpriseSearch.connector.connectorTypePanel.title": "连接器类型",
|
||||
"xpack.enterpriseSearch.connector.connectorTypePanel.unknown.label": "未知",
|
||||
"xpack.enterpriseSearch.connector.ingestionStatus.title": "采集状态",
|
||||
"xpack.enterpriseSearch.connectorCheckable.setupAConnectorClientContextMenuItemLabel": "设置自管型连接器",
|
||||
"xpack.enterpriseSearch.connectorCheckable.setupANativeConnectorContextMenuItemLabel": "设置 Elastic 托管连接器",
|
||||
"xpack.enterpriseSearch.connectorClientLabel": "自管型",
|
||||
"xpack.enterpriseSearch.connectorConfiguration.configymlCodeBlockLabel": "config.yml",
|
||||
"xpack.enterpriseSearch.connectorConfiguration.dockerTextLabel": "通过 Docker 运行",
|
||||
|
@ -15680,11 +15674,6 @@
|
|||
"xpack.enterpriseSearch.connectorDeployment.orLabel": "或",
|
||||
"xpack.enterpriseSearch.connectorDeployment.p.addTheFollowingConfigurationLabel": "克隆此存储库或将其下载到本地计算机",
|
||||
"xpack.enterpriseSearch.connectorDeployment.p.runTheFollowingCommandLabel": "在终端中运行以下命令。确保已在计算机上安装 Docker",
|
||||
"xpack.enterpriseSearch.connectorDescriptionBadge.client.chooseADataSourceLabel": "选择要同步的数据源",
|
||||
"xpack.enterpriseSearch.connectorDescriptionBadge.client.configureConnectorLabel": "通过从源运行或使用 Docker 在您自己的基础设施上部署连接器代码",
|
||||
"xpack.enterpriseSearch.connectorDescriptionBadge.client.enterDetailsLabel": "输入您数据源的访问权限和连接详情",
|
||||
"xpack.enterpriseSearch.connectorDescriptionBadge.native.chooseADataSourceLabel": "选择要同步的数据源",
|
||||
"xpack.enterpriseSearch.connectorDescriptionBadge.native.configureConnectorLabel": "使用 Kibana UI 配置您的连接器",
|
||||
"xpack.enterpriseSearch.connectorDescriptionPopover.connectorDescriptionBadge.client.chooseADataSourceLabel": "选择要同步的数据源",
|
||||
"xpack.enterpriseSearch.connectorDescriptionPopover.connectorDescriptionBadge.client.configureConnectorLabel": "通过从源运行或使用 Docker 在您自己的基础设施上部署连接器代码",
|
||||
"xpack.enterpriseSearch.connectorDescriptionPopover.connectorDescriptionBadge.client.enterDetailsLabel": "输入您数据源的访问权限和连接详情",
|
||||
|
@ -15828,40 +15817,6 @@
|
|||
"xpack.enterpriseSearch.content.connectors.overview.nativeCloudCallout.connectorClient": "自管型连接器",
|
||||
"xpack.enterpriseSearch.content.connectors.overview.nativeCloudCallout.content": "将其转换为将在您自己的基础设施上进行自我托管的 {link}。Elastic 托管连接器只可用于您的 Elastic Cloud 部署。",
|
||||
"xpack.enterpriseSearch.content.connectors.overview.nativeCloudCallout.title": "在 Elastic Cloud 以外不再支持 Elastic 托管连接器(之前的本机连接器)",
|
||||
"xpack.enterpriseSearch.content.crawler.authentication": "身份验证",
|
||||
"xpack.enterpriseSearch.content.crawler.crawlerConfiguration.extractHTML.addExtraFieldDescription": "用正爬取的页面的完整 HTML 的值在所有文档中添加一个附加字段。",
|
||||
"xpack.enterpriseSearch.content.crawler.crawlerConfiguration.extractHTML.extractionSwitchLabel": "存储完整 HTML",
|
||||
"xpack.enterpriseSearch.content.crawler.crawlerConfiguration.extractHTML.increasedSizeWarning": "如果正爬取的站点较大,这可能会显著增加索引大小。",
|
||||
"xpack.enterpriseSearch.content.crawler.crawlerConfiguration.extractHTML.learnMoreLink": "了解有关存储完整 HTML 的详情。",
|
||||
"xpack.enterpriseSearch.content.crawler.crawlerConfiguration.extractHTML.title": "存储完整 HTML",
|
||||
"xpack.enterpriseSearch.content.crawler.crawlRules": "爬网规则",
|
||||
"xpack.enterpriseSearch.content.crawler.deduplication": "重复文档处理",
|
||||
"xpack.enterpriseSearch.content.crawler.domainDetail.title": "管理 {domain}",
|
||||
"xpack.enterpriseSearch.content.crawler.entryPoints": "入口点",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules": "提取规则",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.deleteRule.caption": "删除提取规则",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.deleteRule.title": "删除此提取规则",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.editRule.caption": "编辑此提取规则",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.editRule.title": "编辑此提取规则",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.expandRule.caption": "展开规则",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.expandRule.title": "展开此提取规则",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.actions.label": "操作",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.deleteFieldModal.confirmLabel": "删除规则",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.deleteFieldModal.description": "此操作无法撤消。",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.deleteFieldModal.title": "是否确定要删除此字段规则?",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.deleteModal.confirmLabel": "删除规则",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.deleteModal.description": "移除此规则还会删除{fields, plural, one {一个字段规则} other {# 个字段规则}}。此操作无法撤消。",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.deleteModal.title": "是否确定要删除此提取规则?",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.description": "创建内容提取规则以更改文档在同步期间获取其数据的位置。",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.fieldRulesTable.editRule.caption": "编辑此内容字段规则",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.fieldRulesTable.editRule.title": "编辑此内容字段规则",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.learnMoreLink": "详细了解内容提取规则。",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRules.title": "提取规则",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRulesTable.addRuleLabel": "添加提取规则",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRulesTable.emptyMessageAddRuleLabel": "添加内容提取规则",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRulesTable.emptyMessageDescription": "创建内容提取规则以更改文档字段在同步期间获取其数据的位置。",
|
||||
"xpack.enterpriseSearch.content.crawler.extractionRulesTable.emptyMessageTitle": "没有内容提取规则",
|
||||
"xpack.enterpriseSearch.content.crawler.siteMaps": "站点地图",
|
||||
"xpack.enterpriseSearch.content.crawlers.breadcrumb": "网络爬虫",
|
||||
"xpack.enterpriseSearch.content.crawlers.deleteModal.title": "删除 {connectorCount} 个网络爬虫?",
|
||||
"xpack.enterpriseSearch.content.description": "Enterprise Search 提供了各种方法以便您轻松搜索数据。从网络爬虫、Elasticsearch 索引、API、直接上传或第三方连接器中选择。",
|
||||
|
@ -15967,62 +15922,6 @@
|
|||
"xpack.enterpriseSearch.content.indices.connectorScheduling.unsaved.title": "您尚未保存更改,是否确定要离开?",
|
||||
"xpack.enterpriseSearch.content.indices.defaultPipelines.successToast.title": "已成功更新默认管道",
|
||||
"xpack.enterpriseSearch.content.indices.deleteIndex.successToast.title": "您的索引 {indexName} 和任何关联的采集配置已成功删除",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.addContentField.title": "添加内容字段规则",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.addRule.title": "创建内容提取规则",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.documentField.requiredError": "'字段名称'必填。",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.cancelButton.label": "取消",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.description": "用内容填充字段。",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.extractAs.arrayLabel": "数组",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.extractAs.stringLabel": "字符串",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.extractedLabel": "提取的值",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.fixedLabel": "固定值",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.htmlLabel": "CSS 选择器或 XPath 表达式",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.label": "使用以下来源的内容",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.requiredError": "需要为此内容字段提供值",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.title": "内容",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.content.urlLabel": "URL 模式",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.documentField.description": "选择文档字段以构建规则。",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.documentField.label": "字段名称",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.documentField.title": "文档字段",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.extractAs.label": "将提取的内容存储为",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.fixedValue.helpText": "对此文档字段使用固定值。",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.fixedValue.label": "固定值",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.fixedValue.placeHolder": "例如,'某一值'",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.saveButton.label": "保存",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.selector.cssPlaceholder": "例如,'.main_content'",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.selector.urlLabel": "e.g. /my-url/(.*)",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.source.description": "从中为此字段提取内容的位置。",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.source.htmlLabel": "HTML 元素",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.source.label": "提取以下来源的内容",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.source.requiredError": "需要提供内容源。",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.source.title": "源",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.source.urlLabel": "URL",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editContentField.title": "编辑内容字段规则",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.cancelButtonLabel": "取消",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.contentField.cssSelectorsLink": "详细了解 CSS 选择器和 XPath 表达式",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.contentField.differentContentLink": "详细了解如何存储不同类型内容",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.contentField.urlPatternsLinks": "详细了解 URL 模式",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.descriptionError": "需要为内容提取规则提供描述",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.descriptionLabel": "规则描述",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.fieldRules.addContentFieldRuleLabel": "添加内容字段规则",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.fieldRules.contentFieldDescription": "创建内容字段以确定要从网页的哪些部分拉取数据。",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.fieldRules.emptyMessageAddRuleLabel": "添加内容字段",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.fieldRules.emptyMessageDescription": "创建内容字段以确定要从网页的哪些部分拉取数据。",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.fieldRules.emptyMessageTitle": "此提取规则没有内容字段",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.helpText": "帮助其他人了解此规则会提取哪些数据",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.placeholderLabel": "例如:'文档标题'",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.saveButtonLabel": "保存规则",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.title": "编辑内容提取规则",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.applyAllLabel": "应用于所有 URL",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.specificLabel": "应用于特定 URL",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFilter.": "URL 模式",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFilters.addFilter": "添加 URL 筛选",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFilters.filterHelpText": "应将此项应用于哪些 URL?",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFilters.filterLabel": "URL 筛选",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFilters.patternPlaceholder": "例如,'/blog/*'",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFilters.removeFilter": "移除此筛选",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.url.urlFiltersLink": "详细了解 URL 筛选",
|
||||
"xpack.enterpriseSearch.content.indices.extractionRules.editRule.urlLabel": "URL",
|
||||
"xpack.enterpriseSearch.content.indices.pipelines.addInferencePipelineModal.createErrors": "创建管道时出错",
|
||||
"xpack.enterpriseSearch.content.indices.pipelines.addInferencePipelineModal.steps.configure.description": "构建或重复使用将在您的主管道中用作处理器的子管道。",
|
||||
"xpack.enterpriseSearch.content.indices.pipelines.addInferencePipelineModal.steps.configure.emptyValueError": "'字段'必填。",
|
||||
|
@ -16157,21 +16056,6 @@
|
|||
"xpack.enterpriseSearch.content.indices.pipelines.textExpansionFetchError.title": "提取 ELSER 模型时出错",
|
||||
"xpack.enterpriseSearch.content.indices.pipelines.textExpansionStartError.title": "启动 ELSER 部署时出错",
|
||||
"xpack.enterpriseSearch.content.indices.searchIndex.convertConnector.buttonLabel": "转换连接器",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.allConnectorsLabel": "所有连接器",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.breadcrumb": "选择连接器",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.cloudCallout.description": "Elastic 托管连接器在 Elastic Cloud 上进行托管。开始 14 天免费试用。",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.cloudCallout.title": "Elastic Cloud",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.cloudCallout.trialLink": "Elastic Cloud 试用",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.connectorCheckable.documentationLinkLabel": "文档",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.connectorCheckable.techPreviewLabel": "技术预览",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.connectorClients": "自管型",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.description": "选择要将哪些第三方数据源同步到 Elastic。所有数据源都受自管型连接器支持。通过使用筛选检查 Elastic 托管连接器的可用性。",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.nativeLabel": "Elastic 托管",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.search.ariaLabel": "通过连接器搜索",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.searchPlaceholder": "搜索",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.showBetaLabel": "显示公测版连接器",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.showTechPreviewLabel": "显示技术预览连接器",
|
||||
"xpack.enterpriseSearch.content.indices.selectConnector.title": "选择连接器",
|
||||
"xpack.enterpriseSearch.content.indices.transforms.addInferencePipelineModal.footer.attach": "附加",
|
||||
"xpack.enterpriseSearch.content.indices.transforms.addInferencePipelineModal.footer.create": "创建管道",
|
||||
"xpack.enterpriseSearch.content.indices.transforms.addInferencePipelineModal.steps.configure.title": "配置",
|
||||
|
@ -16202,78 +16086,20 @@
|
|||
"xpack.enterpriseSearch.content.ml_inference.zero_shot_classification": "Zero-Shot 文本分类",
|
||||
"xpack.enterpriseSearch.content.nameAndDescription.name.error.empty": "连接器名称不能为空",
|
||||
"xpack.enterpriseSearch.content.navTitle": "内容",
|
||||
"xpack.enterpriseSearch.content.new_connector_with_service_type.breadcrumbs": "新的 {name} 连接器",
|
||||
"xpack.enterpriseSearch.content.new_connector.breadcrumbs": "新连接器",
|
||||
"xpack.enterpriseSearch.content.new_index.apiDescription": "使用 API 以编程方式将文档添加到 Elasticsearch 索引。首先创建索引。",
|
||||
"xpack.enterpriseSearch.content.new_index.apiTitle": "新搜索索引",
|
||||
"xpack.enterpriseSearch.content.new_index.breadcrumbs": "新搜索索引",
|
||||
"xpack.enterpriseSearch.content.new_index.connectorDescriptionWithServiceType": "使用连接器同步、提取、转换和索引您数据源中的数据。连接器为直接写入到 Elasticsearch 索引中的 Elastic 集成。",
|
||||
"xpack.enterpriseSearch.content.new_index.connectorTitle": "新连接器搜索索引",
|
||||
"xpack.enterpriseSearch.content.new_index.connectorTitleWithServiceType": "新的 {name} 连接器",
|
||||
"xpack.enterpriseSearch.content.new_index.crawlerDescription": "使用网络爬虫以编程方式发现、提取和索引网站和知识库中的可搜索内容。",
|
||||
"xpack.enterpriseSearch.content.new_index.crawlerTitle": "网络爬虫搜索索引",
|
||||
"xpack.enterpriseSearch.content.new_index.defaultDescription": "搜索索引将存储您的数据。",
|
||||
"xpack.enterpriseSearch.content.new_index.genericTitle": "新搜索索引",
|
||||
"xpack.enterpriseSearch.content.new_index.successToast.title": "已成功创建索引",
|
||||
"xpack.enterpriseSearch.content.new_web_crawler.breadcrumbs": "新网络爬虫",
|
||||
"xpack.enterpriseSearch.content.newConnector.newConnectorTemplate.createIndex.buttonText": "创建连接器",
|
||||
"xpack.enterpriseSearch.content.newConnector.newConnectorTemplate.formTitle": "创建连接器",
|
||||
"xpack.enterpriseSearch.content.newConnector.newConnectorTemplate.learnMoreConnectors.linkText": "详细了解连接器",
|
||||
"xpack.enterpriseSearch.content.newConnector.newConnectorTemplate.nameInputHelpText.lineTwo": "名称应为小写,并且不能包含空格或特殊字符。",
|
||||
"xpack.enterpriseSearch.content.newConnector.newConnectorTemplate.nameInputLabel": "连接器名称",
|
||||
"xpack.enterpriseSearch.content.newConnector.newConnectorTemplate.nameInputPlaceholder": "设置连接器的名称",
|
||||
"xpack.enterpriseSearch.content.newIndex.breadcrumb": "新采集方法",
|
||||
"xpack.enterpriseSearch.content.newIndex.emptyState.description": "您在 Search 中添加的数据称为搜索索引,您可在 App Search 和 Workplace Search 中搜索这些数据。现在,您可以在 App Search 中使用连接器,在 Workplace Search 中使用网络爬虫。",
|
||||
"xpack.enterpriseSearch.content.newIndex.emptyState.footer.link": "阅读文档",
|
||||
"xpack.enterpriseSearch.content.newIndex.emptyState.footer.title": "想要详细了解搜索索引?",
|
||||
"xpack.enterpriseSearch.content.newIndex.emptyState.title": "选择采集方法",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.api.description": "使用此 API 直接连接到您的 Elasticsearch 索引终端。",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.api.label": "创建 API 索引",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.api.title": "API",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.connector.description": "提取、转换、索引和同步来自第三方数据源的数据",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.connector.label": "选择源连接器",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.connector.title": "连接器",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.crawler.description": "发现、提取和索引网站和知识库中的可搜索内容",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.crawler.label": "爬网 URL",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.crawler.nocodeLabel": "无代码",
|
||||
"xpack.enterpriseSearch.content.newIndex.methodCard.crawler.title": "网络爬虫",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.alreadyExists.error": "名为 {indexName} 的索引已存在",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.createIndex.buttonText": "创建索引",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.formDescription": "此索引将存放您的数据源内容,并通过默认字段映射进行优化,以提供相关搜索体验。提供唯一的索引名称,并为索引设置默认的 {language_analyzer}(可选)。",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.formDescription.linkText": "语言分析器",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.formTitle": "创建 Elasticsearch 索引",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.isInvalid.error": "{indexName} 为无效索引名称",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.languageInputHelpText": "可以在稍后更改语言,但可能需要重新索引",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.languageInputLabel": "语言分析器",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.learnMoreApis.linkText": "详细了解采集 API",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.learnMoreConnectors.linkText": "详细了解连接器",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.learnMoreCrawler.linkText": "详细了解 Elastic 网络爬虫",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.learnMoreIndices.linkText": "详细了解索引",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.nameInputHelpText.lineOne": "您的索引将命名为:{indexName}",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.nameInputHelpText.lineTwo": "名称应为小写,并且不能包含空格或特殊字符。",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.nameInputLabel": "索引名称",
|
||||
"xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.nameInputPlaceholder": "设置索引的名称",
|
||||
"xpack.enterpriseSearch.content.newIndex.pageDescription": "创建搜索优化的 Elasticsearch 索引来存储内容。首先选择采集方法。",
|
||||
"xpack.enterpriseSearch.content.newIndex.pageTitle": "选择采集方法",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.cloudTrialButton": "Elastic Cloud 试用",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.manageLicenseButtonLabel": "管理许可证",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.openCreateConnectorPopover": "打开菜单以创建 {connectorType} 类型的连接器",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.openNativePopoverLabel": "打开包含 Elastic 托管连接器相关信息的弹出框",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.openPopoverLabel": "打开授权许可弹出框",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.subscriptionButtonLabel": "订阅计划",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.upgradeContent": "将数据发送到自管型 Elasticsearch 实例的自管型连接器至少需要白金级许可证。",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnector.upgradeTitle": "升级到 Elastic 白金级",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnectore.nativePopover.description": "Elastic 托管连接器在 Elastic Cloud 上进行托管。开始 14 天免费试用。",
|
||||
"xpack.enterpriseSearch.content.newIndex.selectConnectore.nativePopover.title": "Elastic Cloud",
|
||||
"xpack.enterpriseSearch.content.newIndex.steps.buildConnector.error.connectorAlreadyExists": "此索引的连接器已存在",
|
||||
"xpack.enterpriseSearch.content.newIndex.steps.buildConnector.error.genericError": "无法创建您的索引",
|
||||
"xpack.enterpriseSearch.content.newIndex.steps.buildConnector.error.indexAlreadyExists": "此索引已存在",
|
||||
"xpack.enterpriseSearch.content.newIndex.steps.buildConnector.error.unauthorizedError": "您无权创建此连接器",
|
||||
"xpack.enterpriseSearch.content.newIndex.types.api": "API 终端",
|
||||
"xpack.enterpriseSearch.content.newIndex.types.connector": "连接器",
|
||||
"xpack.enterpriseSearch.content.newIndex.types.crawler": "网络爬虫",
|
||||
"xpack.enterpriseSearch.content.newIndex.types.elasticsearch": "Elasticsearch 索引",
|
||||
"xpack.enterpriseSearch.content.newIndex.types.json": "JSON",
|
||||
"xpack.enterpriseSearch.content.overview.documementExample.generateApiKeyButton.createNew": "新建",
|
||||
"xpack.enterpriseSearch.content.overview.documementExample.generateApiKeyButton.viewAll": "管理",
|
||||
"xpack.enterpriseSearch.content.overview.documentExample.clientLibraries.dotnet": ".NET",
|
||||
|
@ -16304,7 +16130,6 @@
|
|||
"xpack.enterpriseSearch.content.searchIndex.cancelSyncs.successMessage": "已成功取消同步",
|
||||
"xpack.enterpriseSearch.content.searchIndex.configurationTabLabel": "配置",
|
||||
"xpack.enterpriseSearch.content.searchIndex.connectorErrorCallOut.title": "您的连接器报告了错误",
|
||||
"xpack.enterpriseSearch.content.searchIndex.crawlerConfigurationTabLabel": "配置",
|
||||
"xpack.enterpriseSearch.content.searchIndex.documents.noIndex": "在您启用文档级别安全性并运行首次访问控制同步之前,不会创建访问控制索引。",
|
||||
"xpack.enterpriseSearch.content.searchIndex.documents.noIndex.title": "找不到访问控制索引",
|
||||
"xpack.enterpriseSearch.content.searchIndex.documents.noMappings": "找不到索引的文档",
|
||||
|
@ -16317,7 +16142,6 @@
|
|||
"xpack.enterpriseSearch.content.searchIndex.documents.selector.contentIndexSync.title": "内容同步",
|
||||
"xpack.enterpriseSearch.content.searchIndex.documents.selectorSync.accessControl.title": "访问控制同步",
|
||||
"xpack.enterpriseSearch.content.searchIndex.documentsTabLabel": "文档",
|
||||
"xpack.enterpriseSearch.content.searchIndex.domainManagementTabLabel": "管理域",
|
||||
"xpack.enterpriseSearch.content.searchIndex.index.accessControlSyncSuccess.message": "已成功计划访问控制同步,等待连接器提取",
|
||||
"xpack.enterpriseSearch.content.searchIndex.index.incSyncSuccess.message": "已成功计划增量同步,等待连接器提取",
|
||||
"xpack.enterpriseSearch.content.searchIndex.index.recheckSuccess.message": "已重新检查您的连接器。",
|
||||
|
@ -16332,19 +16156,13 @@
|
|||
"xpack.enterpriseSearch.content.searchIndex.nativeCloudCallout.connectorClient": "自管型连接器",
|
||||
"xpack.enterpriseSearch.content.searchIndex.nativeCloudCallout.content": "将其转换为将在您自己的基础设施上进行自我管理的 {link}。本机连接器只可用于您的 Elastic Cloud 部署。",
|
||||
"xpack.enterpriseSearch.content.searchIndex.nativeCloudCallout.title": "在 Elastic Cloud 以外不再支持本机连接器",
|
||||
"xpack.enterpriseSearch.content.searchIndex.noCrawlerConnectorFound.deleteIndex": "删除索引",
|
||||
"xpack.enterpriseSearch.content.searchIndex.noCrawlerConnectorFound.description": "找不到此网络爬虫索引的连接器配置。应重新创建记录,或应删除索引。",
|
||||
"xpack.enterpriseSearch.content.searchIndex.noCrawlerConnectorFound.recreateConnectorRecord": "重新创建连接器记录",
|
||||
"xpack.enterpriseSearch.content.searchIndex.noCrawlerConnectorFound.title": "已移除此索引的连接器配置",
|
||||
"xpack.enterpriseSearch.content.searchIndex.overviewTabLabel": "概览",
|
||||
"xpack.enterpriseSearch.content.searchIndex.pipelinesTabLabel": "管道",
|
||||
"xpack.enterpriseSearch.content.searchIndex.schedulingTabLabel": "正在计划",
|
||||
"xpack.enterpriseSearch.content.searchIndex.syncRulesTabLabel": "同步规则",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.apiIngestionMethodLabel": "API",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.connectorIngestionMethodLabel": "连接器",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.crawlerIngestionMethodLabel": "网络爬虫",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.documentCountCardLabel": "文档计数",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.domainCountCardLabel": "域计数",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.ingestionTypeCardLabel": "采集类型",
|
||||
"xpack.enterpriseSearch.content.searchIndex.totalStats.languageLabel": "语言分析器",
|
||||
"xpack.enterpriseSearch.content.searchIndex.transform.description": "想要添加定制字段,或使用已训练 ML 模型分析并扩充您的已索引文档?使用特定于索引的采集管道根据您的需求来定制文档。",
|
||||
|
@ -16382,7 +16200,6 @@
|
|||
"xpack.enterpriseSearch.content.searchIndices.ingestionStatus.configured.label": "已配置",
|
||||
"xpack.enterpriseSearch.content.searchIndices.ingestionStatus.connected.label": "已连接",
|
||||
"xpack.enterpriseSearch.content.searchIndices.ingestionStatus.connectorError.label": "连接器故障",
|
||||
"xpack.enterpriseSearch.content.searchIndices.ingestionStatus.idle.label": "空闲",
|
||||
"xpack.enterpriseSearch.content.searchIndices.ingestionStatus.incomplete.label": "不完整",
|
||||
"xpack.enterpriseSearch.content.searchIndices.ingestionStatus.syncError.label": "同步失败",
|
||||
"xpack.enterpriseSearch.content.searchIndices.name.columnTitle": "索引名称",
|
||||
|
@ -16423,205 +16240,6 @@
|
|||
"xpack.enterpriseSearch.content.supportedLanguages.spanishLabel": "西班牙语",
|
||||
"xpack.enterpriseSearch.content.supportedLanguages.thaiLabel": "泰语",
|
||||
"xpack.enterpriseSearch.content.supportedLanguages.universalLabel": "通用",
|
||||
"xpack.enterpriseSearch.crawler.addDomainFlyout.description": "可以将多个域添加到此索引的网络爬虫。在此添加其他域并从'管理'页面修改入口点和爬网规则。",
|
||||
"xpack.enterpriseSearch.crawler.addDomainFlyout.openButtonLabel": "添加域",
|
||||
"xpack.enterpriseSearch.crawler.addDomainFlyout.title": "添加新域",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.contentVerificationLabel": "内容验证",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.entryPointLabel": "网络爬虫入口点已设置为 {entryPointValue}",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.errorsTitle": "出问题了。请解决这些错误,然后重试。",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.ignoreValidationDescription": "在解决以上错误之前,网络爬虫将无法索引此域上的任何内容。",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.ignoreValidationTitle": "忽略验证失败并继续",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.indexingRestrictionsLabel": "索引限制",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.initialVaidationLabel": "初始验证",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.networkConnectivityLabel": "网络连接性",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.submitButtonLabel": "添加域",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.testUrlButtonLabel": "在浏览器中测试 URL",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.unexpectedValidationErrorMessage": "意外错误",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.urlHelpText": "域 URL 需要协议,且不能包含任何路径。",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.urlLabel": "域 URL",
|
||||
"xpack.enterpriseSearch.crawler.addDomainForm.validateButtonLabel": "验证域",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.basicAuthenticationLabel": "基本身份验证",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.configurationSavePanel.description": "已保存用于爬网受保护内容的身份验证设置。要更新身份验证机制,请删除设置并重启。",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.configurationSavePanel.title": "已保存配置设置",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.deleteConfirmationModal.deleteButtonLabel": "删除",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.deleteConfirmationModal.description": "删除这些设置可能导致网络爬虫无法索引域的受保护区域。此操作无法撤消。",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.deleteConfirmationModal.title": "是否确定要删除这些设置?",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.description": "设置身份验证以为此域启用爬网受保护内容。",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.editForm.headerValueLabel": "标头值",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.emptyPrompt.addAuthenticationButtonLabel": "添加身份验证",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.emptyPrompt.description": "单击{addAuthenticationButtonLabel}以提供爬网受保护内容所需的凭据",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.emptyPrompt.title": "未配置身份验证",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.rawAuthenticationLabel": "身份验证标头",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.resetToDefaultsButtonLabel": "添加凭据",
|
||||
"xpack.enterpriseSearch.crawler.authenticationPanel.title": "身份验证",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.crawlAutomaticallySwitchLabel": "通过以下计划启用重复爬网",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.cronSchedulingDescription": "定义计划爬网的频率和时间。网络爬虫使用 UTC 作为其时区。",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.cronSchedulingTitle": "特定时间计划",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.intervalSchedulingDescription": "定义计划爬网的频率",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.intervalSchedulingTitle": "时间间隔计划",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.readMoreLink": "详细了解计划",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleDescription": "爬网计划将对此索引上的每个域执行全面爬网。",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleFrequencyLabel": "计划频率",
|
||||
"xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleUnitsLabel": "计划时间单位",
|
||||
"xpack.enterpriseSearch.crawler.components.crawlDetailsSummary.crawlCountOnDomains": "在 {domainCount, plural, other {# 个域}}上进行 {crawlType} 爬网",
|
||||
"xpack.enterpriseSearch.crawler.components.crawlDetailsSummary.crawlDepthLabel": "最大爬网深度",
|
||||
"xpack.enterpriseSearch.crawler.components.crawlDetailsSummary.crawlTypeLabel": "爬网类型",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.crawlTypeGroupLabel": "爬网类型",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.cronSchedulingDescription": "定义计划爬网的频率和时间。网络爬虫使用 UTC 作为其时区。",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.cronSchedulingTitle": "特定时间计划",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.customEntryPointUrlsTextboxLabel": "定制入口点 URL",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.customSitemapUrlsTextboxLabel": "定制站点地图 URL",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.domainsAccordionButtonLabel": "添加域到您的爬网",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.emptyDomainsMessage": "请选择域。",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.entryPointsTabLabel": "入口点",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.flyoutHeaderDescription": "设置一次性爬网或多次爬网定制设置。",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.flyoutHeadTitle": "定制爬网配置",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.includeSitemapsCheckboxLabel": "包括在 {robotsDotTxt} 中发现的站点地图",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.maxCrawlDepthFieldDescription": "设置最大爬网深度以指定网络爬虫应遍历的页面深度。将该值设置为一 (1) 可将爬网仅限定为入口点。",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.maxCrawlDepthFieldLabel": "最大爬网深度",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.multiCrawlSchedulingEnabled": "通过以下计划启用重复爬网",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.multiCrawlSchedulingFrequency": "爬网频率",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.multipleCrawlsRadioLabel": "多次爬网",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.multipleCrawlTabPrefix": "爬网",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.oneTimeCrawlRadioLabel": "一次性爬网",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.postCrawlerCustomSchedulingSuccess.message": "已成功保存网络爬虫定制计划。",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.readMoreLink": "详细了解计划",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.saveMultipleCrawlersConfiguration": "保存配置",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.scheduleDescription": "爬网计划将对此索引上的每个域执行全面爬网。",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.seedUrlsAccordionButtonLabel": "种子 URL",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.selectedDescriptor": "已选定",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.sitemapsTabLabel": "站点地图",
|
||||
"xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.startCrawlButtonLabel": "立即应用并爬网",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsFlyout.previewTabLabel": "预览",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsFlyout.rawJSONTabLabel": "原始 JSON",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsFlyout.title": "爬网请求详情",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsPreview.domainsTitle": "域",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsPreview.seedUrlsTitle": "种子 URL",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsPreview.sitemapUrlsTitle": "站点地图 URL",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.avgResponseTimeLabel": "平均响应",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.clientErrorsLabel": "4xx 错误",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.configLink": "启用网络爬虫日志",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.durationTooltipTitle": "持续时间",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.logsDisabledMessage": "请访问 enterprise-search.yml 或用户设置中的 {configLink} 以获取更详细的爬网统计信息。",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesTooltip": "在爬网期间访问并提取的 URL。",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesTooltipTitle": "访问的页面",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesVisitedTooltipTitle": "页面",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.serverErrorsLabel": "5xx 错误",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.urlsTooltip": "网络爬虫在爬网期间发现的 URL,包括那些由于爬网配置未跟踪的 URL。",
|
||||
"xpack.enterpriseSearch.crawler.crawlDetailsSummary.urlsTooltipTitle": "看到的 URL",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusBanner.changesCalloutTitle": "所做的更改不会立即生效,直到下一次爬网开始。",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.cancelCrawlMenuItemLabel": "取消爬网",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.crawlingButtonLabel": "正在爬网.....",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.pendingButtonLabel": "待处理......",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.retryCrawlButtonLabel": "爬网",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.showSelectedFieldsButtonLabel": "仅显示选定字段",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.startACrawlButtonLabel": "爬网",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.startingButtonLabel": "正在启动......",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusIndicator.stoppingButtonLabel": "正在停止......",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.canceled": "已取消",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.canceling": "正在取消",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.failed": "失败",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.pending": "待处理",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.running": "正在运行",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.skipped": "已跳过",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.starting": "正在启动",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.success": "成功",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.suspended": "已挂起",
|
||||
"xpack.enterpriseSearch.crawler.crawlerStatusOptions.suspending": "正在挂起",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsPanel.description": "此处记录了最近的爬网请求。您可以在 Kibana 的 Discover 或 Logs 用户界面中跟踪进度并检查爬网事件",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsPanel.discoverCrawlerLogsTitle": "所有网络爬虫日志",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsPanel.linkToDiscover": "在 Discover 中查看",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsPanel.title": "爬网请求",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsPanel.userAgentDescription": "可以通过以下用户代理识别源自网络爬虫的请求。这在 enterprise-search.yml 文件中进行配置。",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.column.crawlType": "爬网类型",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.column.created": "创建时间",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.column.domains": "域",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.column.domainURL": "请求 ID",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.column.status": "状态",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.emptyPrompt.body": "您尚未开始任何爬网。",
|
||||
"xpack.enterpriseSearch.crawler.crawlRequestsTable.emptyPrompt.title": "最近没有爬网请求",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.beginsWithLabel": "开始于",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.containsLabel": "包含",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.endsWithLabel": "结束于",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.regexLabel": "Regex",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesPolicies.allowLabel": "允许",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesPolicies.disallowLabel": "不允许",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.addButtonLabel": "添加爬网规则",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.deleteSuccessToastMessage": "爬网规则已删除。",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.description": "创建爬网规则以包括或排除 URL 匹配规则的页面。规则按顺序运行,每个 URL 根据第一个匹配进行评估。",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.descriptionLinkText": "详细了解爬网规则",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.pathPatternTableHead": "路径模式",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.pathPatternTooltip": "路径模式为文本字符串,但星号 (*) 字符除外,它是将匹配任何内容的元字符。",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.policyTableHead": "策略",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.regexPathPatternTooltip": "路径模式是与 Ruby 语言正则表达式引擎兼容的正则表达式。",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.ruleTableHead": "规则",
|
||||
"xpack.enterpriseSearch.crawler.crawlRulesTable.title": "爬网规则",
|
||||
"xpack.enterpriseSearch.crawler.crawlTypeOptions.full": "完整",
|
||||
"xpack.enterpriseSearch.crawler.crawlTypeOptions.partial": "部分",
|
||||
"xpack.enterpriseSearch.crawler.crawlTypeOptions.reAppliedCrawlRules": "已重新应用爬网规则",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.allFieldsLabel": "所有字段",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.description": "网络爬虫仅索引唯一的页面。选择网络爬虫在考虑哪些网页重复时应使用的字段。取消选择所有架构字段以在此域上允许重复的文档。",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.learnMoreMessage": "详细了解内容哈希",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.preventDuplicateLabel": "阻止重复文档",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.resetToDefaultsButtonLabel": "重置为默认值",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.selectedFieldsLabel": "选定字段",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.showAllFieldsButtonLabel": "显示所有字段",
|
||||
"xpack.enterpriseSearch.crawler.deduplicationPanel.title": "重复文档处理",
|
||||
"xpack.enterpriseSearch.crawler.deleteDomainModal.deleteDomainButtonLabel": "删除域",
|
||||
"xpack.enterpriseSearch.crawler.deleteDomainModal.description": "从网络爬虫中移除域 {domainUrl}。这还会删除您已设置的所有入口点和爬网规则。将在下次爬网时移除与此域相关的任何文档。{thisCannotBeUndoneMessage}",
|
||||
"xpack.enterpriseSearch.crawler.deleteDomainModal.thisCannotBeUndoneMessage": "此操作无法撤消。",
|
||||
"xpack.enterpriseSearch.crawler.deleteDomainModal.title": "删除域",
|
||||
"xpack.enterpriseSearch.crawler.domainDetail.allDomainsButtonLabel": "所有域",
|
||||
"xpack.enterpriseSearch.crawler.domainDetail.deleteDomainButtonLabel": "删除域",
|
||||
"xpack.enterpriseSearch.crawler.domainManagement.emptyState": "此索引上没有任何域。添加第一个域以开始爬网并索引文档。",
|
||||
"xpack.enterpriseSearch.crawler.domainManagement.emptyState.addDomainButtonLabel": "添加第一个域",
|
||||
"xpack.enterpriseSearch.crawler.domainManagement.emptyState.description": "配置您要爬网的域,并在准备就绪后触发第一次爬网。",
|
||||
"xpack.enterpriseSearch.crawler.domainManagement.emptyState.title": "添加域到您的索引",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.action.delete.buttonLabel": "删除此域",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.action.manage.buttonLabel": "管理此域",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.column.actions": "操作",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.column.documents": "文档",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.column.domainURL": "域",
|
||||
"xpack.enterpriseSearch.crawler.domainsTable.column.lastActivity": "上次活动",
|
||||
"xpack.enterpriseSearch.crawler.domainsTitle": "域",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.addButtonLabel": "添加入口点",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.description": "在此加入您的网站最重要的 URL。入口点 URL 将是要为其他页面的链接索引和处理的首批页面。",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageDescription": "{link}以指定网络爬虫的入口点",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageLinkText": "添加入口点",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageTitle": "当前没有入口点。",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.lastItemMessage": "网络爬虫需要至少一个入口点。",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.learnMoreLinkText": "详细了解入口点。",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.title": "入口点",
|
||||
"xpack.enterpriseSearch.crawler.entryPointsTable.urlTableHead": "URL",
|
||||
"xpack.enterpriseSearch.crawler.extractionRules.fieldRulesTable.fieldNameLabel": "字段名称",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesExtractionFilter.beginsWithLabel": "开始于",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesExtractionFilter.containsLabel": "包含",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesExtractionFilter.endsWithLabel": "结束于",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesExtractionFilter.regexLabel": "Regex",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.descriptionTableLabel": "描述",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.editedByLabel": "编辑者",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.lastUpdatedLabel": "上次更新时间",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.rulesLabel": "字段规则",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.sourceLabel": "源",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.title": "爬网规则",
|
||||
"xpack.enterpriseSearch.crawler.extractionRulesTable.urlsLabel": "URL",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.arrayLabel": "数组",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.contentLabel": "内容",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.extractedLabel": "已提取为:",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.fixedLabel": "固定值:",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.HTMLLabel": "HTML:",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.stringLabel": "字符串",
|
||||
"xpack.enterpriseSearch.crawler.fieldRulesTable.UrlLabel": "URL:",
|
||||
"xpack.enterpriseSearch.crawler.manageCrawlsPopover.reApplyCrawlRules.successMessage": "正在后台重新应用爬网规则",
|
||||
"xpack.enterpriseSearch.crawler.sitemapsTable.addButtonLabel": "添加站点地图",
|
||||
"xpack.enterpriseSearch.crawler.sitemapsTable.deleteSuccessToastMessage": "站点地图已删除。",
|
||||
"xpack.enterpriseSearch.crawler.sitemapsTable.description": "为此域添加定制站点地图 URL。网络爬虫会自动检测现有站点地图。",
|
||||
"xpack.enterpriseSearch.crawler.sitemapsTable.emptyMessageTitle": "当前没有站点地图。",
|
||||
"xpack.enterpriseSearch.crawler.sitemapsTable.title": "站点地图",
|
||||
"xpack.enterpriseSearch.crawler.sitemapsTable.urlTableHead": "URL",
|
||||
"xpack.enterpriseSearch.crawler.startCrawlContextMenu.crawlAllDomainsMenuLabel": "爬网此索引上的所有域",
|
||||
"xpack.enterpriseSearch.crawler.startCrawlContextMenu.crawlCustomSettingsMenuLabel": "使用定制设置执行爬网",
|
||||
"xpack.enterpriseSearch.crawler.startCrawlContextMenu.reapplyCrawlRulesMenuLabel": "重新应用爬网规则",
|
||||
"xpack.enterpriseSearch.crawler.urlComboBox.invalidUrlErrorMessage": "请输入有效 URL",
|
||||
"xpack.enterpriseSearch.crawlers.title": "Elasticsearch 网络爬虫",
|
||||
"xpack.enterpriseSearch.createConnector..breadcrumb": "新连接器",
|
||||
|
@ -16701,8 +16319,6 @@
|
|||
"xpack.enterpriseSearch.enabled": "已启用",
|
||||
"xpack.enterpriseSearch.exampleConnectorLabel": "示例",
|
||||
"xpack.enterpriseSearch.finishUpStep.euiButton.viewInDiscoverLabel": "在 Discover 中查看",
|
||||
"xpack.enterpriseSearch.getConnectorTypeBadge.connectorClientBadgeLabel": "自管型",
|
||||
"xpack.enterpriseSearch.getConnectorTypeBadge.nativeBadgeLabel": "Elastic 托管连接器",
|
||||
"xpack.enterpriseSearch.gettingStarted.description.ingestPipelinesLink.link": "采集管道",
|
||||
"xpack.enterpriseSearch.gettingStarted.pageTitle": "开始使用 Elastic API",
|
||||
"xpack.enterpriseSearch.gettingStarted.pipeline.description": "使用 {ingestPipelinesLink} 在将您的数据索引到 Elasticsearch 之前预处理这些数据,这通常比后期处理更加方便。使用采集处理器的任意组合在您的文档中添加、删除或转换字段。",
|
||||
|
@ -17106,13 +16722,6 @@
|
|||
"xpack.enterpriseSearch.searchNav.otherTools": "其他工具",
|
||||
"xpack.enterpriseSearch.searchNav.relevance": "相关性",
|
||||
"xpack.enterpriseSearch.searchProvider.aiSearch.name": "搜索 AI",
|
||||
"xpack.enterpriseSearch.selectConnector.badgeOnClick.ariaLabel": "单击以打开连接器说明弹出框",
|
||||
"xpack.enterpriseSearch.selectConnector.connectorClientBadgeLabel": "自管型",
|
||||
"xpack.enterpriseSearch.selectConnector.h4.connectorClientsLabel": "自管型连接器",
|
||||
"xpack.enterpriseSearch.selectConnector.nativeBadgeLabel": "Elastic 托管",
|
||||
"xpack.enterpriseSearch.selectConnector.nativeConnectorsTitleLabel": "Elastic 托管连接器",
|
||||
"xpack.enterpriseSearch.selectConnector.p.areAvailableDirectlyWithinLabel": "可直接用在 Elastic Cloud 部署中。无需额外的基础设施。您也可以将 Elastic 托管连接器转换为自管型连接器。",
|
||||
"xpack.enterpriseSearch.selectConnector.p.deployConnectorsOnYourLabel": "在您自己的基础设施上部署连接器。还可以定制现有自管型连接器,或使用我们的连接器框架构建自己的连接器。",
|
||||
"xpack.enterpriseSearch.SemanticSearch.description": "使用推理终端和 semantic_text 字段类型将语义搜索轻松添加到 Elasticsearch,以提高搜索相关性。",
|
||||
"xpack.enterpriseSearch.semanticSearch.guide.createIndex.description": "现在您需要使用一个或多个 {semanticText} 字段创建索引。",
|
||||
"xpack.enterpriseSearch.semanticSearch.guide.createIndex.title": "创建索引",
|
||||
|
|
|
@ -208,6 +208,7 @@ export const MANAGE_API_KEYS_URL = '/app/management/security/api_keys';
|
|||
|
||||
export const ENTERPRISE_SEARCH_DOCUMENTS_DEFAULT_DOC_COUNT = 25;
|
||||
|
||||
// TODO: Remove?
|
||||
export const ENTERPRISE_SEARCH_CONNECTOR_CRAWLER_SERVICE_TYPE = 'elastic-crawler';
|
||||
|
||||
export const DEFAULT_PIPELINE_NAME = 'search-default-ingestion';
|
||||
|
@ -235,7 +236,6 @@ export const defaultConnectorsPipelineMeta: DefaultConnectorsPipelineMeta = {
|
|||
export enum INGESTION_METHOD_IDS {
|
||||
API = 'api',
|
||||
CONNECTOR = 'connector',
|
||||
CRAWLER = 'crawler',
|
||||
}
|
||||
|
||||
export const DEFAULT_PRODUCT_FEATURES: ProductFeatures = {
|
||||
|
|
|
@ -14,6 +14,7 @@ export interface AlwaysShowPattern {
|
|||
index_pattern: string;
|
||||
}
|
||||
|
||||
// TODO: Remove this type
|
||||
export interface CrawlerIndex extends ElasticsearchIndex {
|
||||
connector: Connector;
|
||||
crawler: Crawler;
|
||||
|
|
|
@ -7,8 +7,8 @@
|
|||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { HttpError } from '../../../../../../common/types/api';
|
||||
import { ErrorCode } from '../../../../../../common/types/error_codes';
|
||||
import { HttpError } from '../types/api';
|
||||
import { ErrorCode } from '../types/error_codes';
|
||||
|
||||
export const errorToText = (error?: HttpError): string | undefined => {
|
||||
if (!error) {
|
|
@ -22,11 +22,7 @@ import { EuiContainedStepProps } from '@elastic/eui/src/components/steps/steps';
|
|||
import { i18n } from '@kbn/i18n';
|
||||
import { FormattedMessage } from '@kbn/i18n-react';
|
||||
|
||||
import { ENTERPRISE_SEARCH_CONTENT_PLUGIN } from '../../../../../common/constants';
|
||||
import {
|
||||
ML_MANAGE_TRAINED_MODELS_PATH,
|
||||
NEW_INDEX_PATH,
|
||||
} from '../../../enterprise_search_content/routes';
|
||||
import { ML_MANAGE_TRAINED_MODELS_PATH } from '../../../enterprise_search_content/routes';
|
||||
import { docLinks } from '../../../shared/doc_links';
|
||||
import { EuiLinkTo } from '../../../shared/react_router_helpers';
|
||||
|
||||
|
@ -90,7 +86,7 @@ const steps: EuiContainedStepProps[] = [
|
|||
children: (
|
||||
<EuiLinkTo
|
||||
data-telemetry-id="entSearch-aiSearch-semanticSearch-nlpEnrichmentPanel-createIndexButton"
|
||||
to={generatePath(ENTERPRISE_SEARCH_CONTENT_PLUGIN.URL + NEW_INDEX_PATH)}
|
||||
to={generatePath('/app/elasticsearch/indices/create')}
|
||||
shouldNotCreateHref
|
||||
>
|
||||
<EuiButton iconType="plusInCircle">
|
||||
|
|
|
@ -16,9 +16,7 @@ import {
|
|||
IngestionMethod,
|
||||
} from '@kbn/search-connectors';
|
||||
|
||||
import { ENTERPRISE_SEARCH_CONNECTOR_CRAWLER_SERVICE_TYPE } from '../../../../common/constants';
|
||||
|
||||
import { ApiViewIndex, CrawlerViewIndex } from '../types';
|
||||
import { ApiViewIndex } from '../types';
|
||||
|
||||
export const apiIndex: ApiViewIndex = {
|
||||
count: 1,
|
||||
|
@ -170,142 +168,4 @@ export const connectorIndex: ConnectorViewIndex = {
|
|||
},
|
||||
};
|
||||
|
||||
export const crawlerIndex: CrawlerViewIndex = {
|
||||
connector: {
|
||||
api_key_id: null,
|
||||
api_key_secret_id: null,
|
||||
configuration: {
|
||||
foo: {
|
||||
default_value: '',
|
||||
depends_on: [],
|
||||
display: DisplayType.TEXTBOX,
|
||||
label: 'bar',
|
||||
options: [],
|
||||
order: 1,
|
||||
required: false,
|
||||
sensitive: false,
|
||||
tooltip: '',
|
||||
type: FieldType.STRING,
|
||||
ui_restrictions: [],
|
||||
validations: [],
|
||||
value: 'barbar',
|
||||
},
|
||||
},
|
||||
custom_scheduling: {
|
||||
foo: {
|
||||
configuration_overrides: {},
|
||||
enabled: false,
|
||||
interval: '',
|
||||
last_synced: null,
|
||||
name: '',
|
||||
},
|
||||
},
|
||||
deleted: false,
|
||||
description: null,
|
||||
error: null,
|
||||
features: null,
|
||||
filtering: [
|
||||
{
|
||||
active: {
|
||||
advanced_snippet: {
|
||||
created_at: expect.any(String),
|
||||
updated_at: expect.any(String),
|
||||
value: {},
|
||||
},
|
||||
rules: [
|
||||
{
|
||||
created_at: expect.any(String),
|
||||
field: '_',
|
||||
id: 'DEFAULT',
|
||||
order: 0,
|
||||
policy: 'include',
|
||||
rule: 'regex',
|
||||
updated_at: expect.any(String),
|
||||
value: '.*',
|
||||
},
|
||||
],
|
||||
validation: {
|
||||
errors: [],
|
||||
state: FilteringValidationState.VALID,
|
||||
},
|
||||
},
|
||||
domain: 'DEFAULT',
|
||||
draft: {
|
||||
advanced_snippet: {
|
||||
created_at: expect.any(String),
|
||||
updated_at: expect.any(String),
|
||||
value: {},
|
||||
},
|
||||
rules: [
|
||||
{
|
||||
created_at: expect.any(String),
|
||||
field: '_',
|
||||
id: 'DEFAULT',
|
||||
order: 0,
|
||||
policy: 'include',
|
||||
rule: 'regex',
|
||||
updated_at: expect.any(String),
|
||||
value: '.*',
|
||||
},
|
||||
],
|
||||
validation: {
|
||||
errors: [],
|
||||
state: FilteringValidationState.VALID,
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
id: '4',
|
||||
index_name: 'crawler',
|
||||
is_native: true,
|
||||
language: 'en',
|
||||
last_access_control_sync_error: null,
|
||||
last_access_control_sync_scheduled_at: null,
|
||||
last_access_control_sync_status: SyncStatus.COMPLETED,
|
||||
last_deleted_document_count: null,
|
||||
last_incremental_sync_scheduled_at: null,
|
||||
last_indexed_document_count: null,
|
||||
last_seen: null,
|
||||
last_sync_error: null,
|
||||
last_sync_scheduled_at: null,
|
||||
last_sync_status: SyncStatus.COMPLETED,
|
||||
last_synced: null,
|
||||
name: 'crawler',
|
||||
scheduling: {
|
||||
access_control: {
|
||||
enabled: false,
|
||||
interval: '',
|
||||
},
|
||||
full: {
|
||||
enabled: false,
|
||||
interval: '',
|
||||
},
|
||||
incremental: {
|
||||
enabled: false,
|
||||
interval: '',
|
||||
},
|
||||
},
|
||||
service_type: ENTERPRISE_SEARCH_CONNECTOR_CRAWLER_SERVICE_TYPE,
|
||||
status: ConnectorStatus.CONFIGURED,
|
||||
sync_now: false,
|
||||
},
|
||||
count: 1,
|
||||
crawler: {
|
||||
id: '5',
|
||||
index_name: 'connector-crawler',
|
||||
},
|
||||
hidden: false,
|
||||
ingestionMethod: IngestionMethod.CRAWLER,
|
||||
ingestionStatus: IngestionStatus.CONFIGURED,
|
||||
lastUpdated: null,
|
||||
name: 'crawler',
|
||||
total: {
|
||||
docs: {
|
||||
count: 1,
|
||||
deleted: 0,
|
||||
},
|
||||
store: { size_in_bytes: '8024' },
|
||||
},
|
||||
};
|
||||
|
||||
export const elasticsearchViewIndices = [apiIndex, connectorIndex, crawlerIndex];
|
||||
export const elasticsearchViewIndices = [apiIndex, connectorIndex];
|
||||
|
|
|
@ -23,22 +23,7 @@ describe('startSync', () => {
|
|||
const result = startSync({ connectorId: 'connectorId' });
|
||||
await nextTick();
|
||||
expect(http.post).toHaveBeenCalledWith(
|
||||
'/internal/enterprise_search/connectors/connectorId/start_sync',
|
||||
{ body: '{}' }
|
||||
);
|
||||
await expect(result).resolves.toEqual('result');
|
||||
});
|
||||
|
||||
it('calls correct api with nextSyncConfig', async () => {
|
||||
const promise = Promise.resolve('result');
|
||||
http.post.mockReturnValue(promise);
|
||||
const nextSyncConfig = { max_crawl_depth: 3 };
|
||||
const result = startSync({ connectorId: 'connectorId', nextSyncConfig });
|
||||
const body = JSON.stringify({ nextSyncConfig: JSON.stringify(nextSyncConfig) });
|
||||
await nextTick();
|
||||
expect(http.post).toHaveBeenCalledWith(
|
||||
'/internal/enterprise_search/connectors/connectorId/start_sync',
|
||||
{ body }
|
||||
'/internal/enterprise_search/connectors/connectorId/start_sync'
|
||||
);
|
||||
await expect(result).resolves.toEqual('result');
|
||||
});
|
||||
|
|
|
@ -9,20 +9,14 @@ import { i18n } from '@kbn/i18n';
|
|||
|
||||
import { createApiLogic } from '../../../shared/api_logic/create_api_logic';
|
||||
import { HttpLogic } from '../../../shared/http';
|
||||
import { CrawlRequestOverrides } from '../../components/search_index/crawler/crawler_logic';
|
||||
|
||||
export interface StartSyncArgs {
|
||||
connectorId: string;
|
||||
nextSyncConfig?: CrawlRequestOverrides;
|
||||
}
|
||||
|
||||
export const startSync = async ({ connectorId, nextSyncConfig }: StartSyncArgs) => {
|
||||
export const startSync = async ({ connectorId }: StartSyncArgs) => {
|
||||
const route = `/internal/enterprise_search/connectors/${connectorId}/start_sync`;
|
||||
return await HttpLogic.values.http.post(route, {
|
||||
// ConnectorConfiguration type is a record of key-value pair where value is a string
|
||||
// To store nextSyncConfig into ConnectorConfiguration the object should be casted to string
|
||||
body: JSON.stringify({ nextSyncConfig: JSON.stringify(nextSyncConfig) }),
|
||||
});
|
||||
return await HttpLogic.values.http.post(route);
|
||||
};
|
||||
|
||||
export const StartSyncApiLogic = createApiLogic(['start_sync_api_logic'], startSync, {
|
||||
|
|
|
@ -1,109 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import {
|
||||
CrawlConfig,
|
||||
CrawlConfigFromServer,
|
||||
CrawlerStatus,
|
||||
CrawlEvent,
|
||||
CrawlEventFromServer,
|
||||
CrawlRequest,
|
||||
CrawlRequestFromServer,
|
||||
CrawlRequestStats,
|
||||
CrawlRequestStatsFromServer,
|
||||
CrawlRequestWithDetails,
|
||||
CrawlRequestWithDetailsFromServer,
|
||||
CrawlType,
|
||||
} from '../types';
|
||||
|
||||
// Server
|
||||
|
||||
export const CRAWL_CONFIG_FROM_SERVER: CrawlConfigFromServer = {
|
||||
domain_allowlist: ['https://elastic.co'],
|
||||
max_crawl_depth: 2,
|
||||
seed_urls: ['https://elastic.co/guide', 'https://elastic.co/blogs'],
|
||||
sitemap_urls: ['https://elastic.co/sitemap.txt'],
|
||||
};
|
||||
|
||||
export const CRAWL_REQUEST_FROM_SERVER: CrawlRequestFromServer = {
|
||||
began_at: '1657235281',
|
||||
completed_at: '1657235291',
|
||||
created_at: '1657235271',
|
||||
id: 'crawl-request-1',
|
||||
status: CrawlerStatus.Success,
|
||||
};
|
||||
|
||||
export const CRAWL_REQUEST_STATS_FROM_SERVER: CrawlRequestStatsFromServer = {
|
||||
status: {
|
||||
avg_response_time_msec: 100,
|
||||
crawl_duration_msec: 5000,
|
||||
pages_visited: 20,
|
||||
status_codes: {
|
||||
'200': 20,
|
||||
},
|
||||
urls_allowed: 10,
|
||||
},
|
||||
};
|
||||
|
||||
export const CRAWL_REQUEST_WITH_DETAILS_FROM_SERVER: CrawlRequestWithDetailsFromServer = {
|
||||
...CRAWL_REQUEST_FROM_SERVER,
|
||||
crawl_config: CRAWL_CONFIG_FROM_SERVER,
|
||||
stats: CRAWL_REQUEST_STATS_FROM_SERVER,
|
||||
type: CrawlType.Full,
|
||||
};
|
||||
|
||||
export const CRAWL_EVENT_FROM_SERVER: CrawlEventFromServer = {
|
||||
...CRAWL_REQUEST_FROM_SERVER,
|
||||
crawl_config: CRAWL_CONFIG_FROM_SERVER,
|
||||
id: 'crawl-event-1',
|
||||
stage: 'crawl',
|
||||
type: CrawlType.Full,
|
||||
};
|
||||
|
||||
// Client
|
||||
|
||||
export const CRAWL_CONFIG: CrawlConfig = {
|
||||
domainAllowlist: ['https://elastic.co'],
|
||||
maxCrawlDepth: 2,
|
||||
seedUrls: ['https://elastic.co/guide', 'https://elastic.co/blogs'],
|
||||
sitemapUrls: ['https://elastic.co/sitemap.txt'],
|
||||
};
|
||||
|
||||
export const CRAWL_REQUEST: CrawlRequest = {
|
||||
beganAt: '1657235281',
|
||||
completedAt: '1657235291',
|
||||
createdAt: '1657235271',
|
||||
id: 'crawl-request-1',
|
||||
status: CrawlerStatus.Success,
|
||||
};
|
||||
|
||||
export const CRAWL_REQUEST_STATS: CrawlRequestStats = {
|
||||
status: {
|
||||
avgResponseTimeMSec: 100,
|
||||
crawlDurationMSec: 5000,
|
||||
pagesVisited: 20,
|
||||
statusCodes: {
|
||||
'200': 20,
|
||||
},
|
||||
urlsAllowed: 10,
|
||||
},
|
||||
};
|
||||
|
||||
export const CRAWL_REQUEST_WITH_DETAILS: CrawlRequestWithDetails = {
|
||||
...CRAWL_REQUEST,
|
||||
crawlConfig: CRAWL_CONFIG,
|
||||
stats: CRAWL_REQUEST_STATS,
|
||||
type: CrawlType.Full,
|
||||
};
|
||||
|
||||
export const CRAWL_EVENT: CrawlEvent = {
|
||||
...CRAWL_REQUEST,
|
||||
crawlConfig: CRAWL_CONFIG,
|
||||
id: 'crawl-event-1',
|
||||
stage: 'crawl',
|
||||
type: CrawlType.Full,
|
||||
};
|
|
@ -1,30 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import {
|
||||
CRAWL_EVENT,
|
||||
CRAWL_EVENT_FROM_SERVER,
|
||||
CRAWL_REQUEST,
|
||||
CRAWL_REQUEST_FROM_SERVER,
|
||||
} from './crawl_events.mock';
|
||||
import { CRAWLER_DOMAIN, CRAWLER_DOMAIN_FROM_SERVER } from './crawler_domains.mock';
|
||||
|
||||
import { CrawlerData, CrawlerDataFromServer } from '../types';
|
||||
|
||||
export const CRAWLER_DATA: CrawlerData = {
|
||||
domains: [CRAWLER_DOMAIN],
|
||||
events: [CRAWL_EVENT],
|
||||
mostRecentCrawlRequest: CRAWL_REQUEST,
|
||||
userAgent: 'Elastic Crawler (0.0.1)',
|
||||
};
|
||||
|
||||
export const CRAWLER_DATA_FROM_SERVER: CrawlerDataFromServer = {
|
||||
domains: [CRAWLER_DOMAIN_FROM_SERVER],
|
||||
events: [CRAWL_EVENT_FROM_SERVER],
|
||||
most_recent_crawl_request: CRAWL_REQUEST_FROM_SERVER,
|
||||
user_agent: 'Elastic Crawler (0.0.1)',
|
||||
};
|
|
@ -1,114 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { Meta } from '../../../../../../common/types';
|
||||
|
||||
import {
|
||||
CrawlerDomain,
|
||||
CrawlerPolicies,
|
||||
CrawlRule,
|
||||
CrawlerRules,
|
||||
EntryPoint,
|
||||
Sitemap,
|
||||
CrawlerDomainFromServer,
|
||||
DomainConfigFromServer,
|
||||
DomainConfig,
|
||||
CrawlerDomainsWithMeta,
|
||||
CrawlerDomainsWithMetaFromServer,
|
||||
} from '../types';
|
||||
|
||||
export const CRAWL_RULE: CrawlRule = {
|
||||
id: 'crawl-rule-1',
|
||||
pattern: 'elasticsearch',
|
||||
policy: CrawlerPolicies.allow,
|
||||
rule: CrawlerRules.contains,
|
||||
};
|
||||
|
||||
export const ENTRY_POINT: EntryPoint = {
|
||||
id: 'entry-point-1',
|
||||
value: '/guide',
|
||||
};
|
||||
|
||||
export const SITEMAP: Sitemap = {
|
||||
id: 'sitemap-1',
|
||||
url: '/sitemap.txt',
|
||||
};
|
||||
|
||||
export const META: Meta = {
|
||||
page: {
|
||||
current: 1,
|
||||
size: 10,
|
||||
total_pages: 1,
|
||||
total_results: 8,
|
||||
},
|
||||
};
|
||||
|
||||
// Server
|
||||
|
||||
export const CRAWLER_DOMAIN_CONFIG_FROM_SERVER: DomainConfigFromServer = {
|
||||
id: 'crawler-domain-config-1',
|
||||
name: 'https://www.elastic.co',
|
||||
seed_urls: ['https://www.elastic.co/guide', 'https://www.elastic.co/blogs'],
|
||||
sitemap_urls: ['https://www.elastic.co/sitemap.txt'],
|
||||
};
|
||||
|
||||
export const CRAWLER_DOMAIN_FROM_SERVER: CrawlerDomainFromServer = {
|
||||
auth: {
|
||||
type: 'basic',
|
||||
username: 'username',
|
||||
password: 'password',
|
||||
},
|
||||
available_deduplication_fields: ['title', 'url'],
|
||||
crawl_rules: [CRAWL_RULE],
|
||||
created_on: '1657234422',
|
||||
deduplication_enabled: true,
|
||||
deduplication_fields: ['url'],
|
||||
document_count: 400,
|
||||
entry_points: [ENTRY_POINT],
|
||||
extraction_rules: [],
|
||||
id: '123abc',
|
||||
name: 'https://www.elastic.co',
|
||||
sitemaps: [SITEMAP],
|
||||
};
|
||||
|
||||
export const CRAWLER_DOMAINS_WITH_META_FROM_SERVER: CrawlerDomainsWithMetaFromServer = {
|
||||
meta: META,
|
||||
results: [CRAWLER_DOMAIN_FROM_SERVER],
|
||||
};
|
||||
|
||||
// Client
|
||||
|
||||
export const CRAWLER_DOMAIN_CONFIG: DomainConfig = {
|
||||
id: 'crawler-domain-config-1',
|
||||
name: 'https://www.elastic.co',
|
||||
seedUrls: ['https://www.elastic.co/guide', 'https://www.elastic.co/blogs'],
|
||||
sitemapUrls: ['https://www.elastic.co/sitemap.txt'],
|
||||
};
|
||||
|
||||
export const CRAWLER_DOMAIN: CrawlerDomain = {
|
||||
auth: {
|
||||
type: 'basic',
|
||||
username: 'username',
|
||||
password: 'password',
|
||||
},
|
||||
availableDeduplicationFields: ['title', 'url'],
|
||||
crawlRules: [CRAWL_RULE],
|
||||
createdOn: '1657234422',
|
||||
deduplicationEnabled: true,
|
||||
deduplicationFields: ['url'],
|
||||
documentCount: 400,
|
||||
entryPoints: [ENTRY_POINT],
|
||||
extractionRules: [],
|
||||
id: '123abc',
|
||||
sitemaps: [SITEMAP],
|
||||
url: 'https://www.elastic.co',
|
||||
};
|
||||
|
||||
export const CRAWLER_DOMAINS_WITH_META: CrawlerDomainsWithMeta = {
|
||||
domains: [CRAWLER_DOMAIN],
|
||||
meta: META,
|
||||
};
|
|
@ -1,34 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { mockHttpValues } from '../../../__mocks__/kea_logic';
|
||||
|
||||
import { nextTick } from '@kbn/test-jest-helpers';
|
||||
|
||||
import { createCrawlerIndex } from './create_crawler_index_api_logic';
|
||||
|
||||
describe('CreateCrawlerIndexApiLogic', () => {
|
||||
const { http } = mockHttpValues;
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
describe('createCrawlerIndex', () => {
|
||||
it('calls correct api', async () => {
|
||||
const indexName = 'elastic-co-crawler';
|
||||
const language = 'Universal';
|
||||
http.post.mockReturnValue(Promise.resolve({ created: indexName }));
|
||||
|
||||
const result = createCrawlerIndex({ indexName, language });
|
||||
await nextTick();
|
||||
|
||||
expect(http.post).toHaveBeenCalledWith('/internal/enterprise_search/crawler', {
|
||||
body: JSON.stringify({ index_name: indexName, language }),
|
||||
});
|
||||
await expect(result).resolves.toEqual({ created: indexName });
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,42 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { createApiLogic } from '../../../shared/api_logic/create_api_logic';
|
||||
import { HttpLogic } from '../../../shared/http';
|
||||
import { LanguageForOptimization } from '../../components/new_index/types';
|
||||
|
||||
export interface CreateCrawlerIndexArgs {
|
||||
indexName: string;
|
||||
language: LanguageForOptimization;
|
||||
}
|
||||
|
||||
interface CreateCrawlerIndexRequest {
|
||||
index_name: string;
|
||||
language: LanguageForOptimization;
|
||||
}
|
||||
|
||||
export interface CreateCrawlerIndexResponse {
|
||||
created: string; // the name of the newly created index
|
||||
}
|
||||
|
||||
export const createCrawlerIndex = async ({ indexName, language }: CreateCrawlerIndexArgs) => {
|
||||
const route = '/internal/enterprise_search/crawler';
|
||||
|
||||
const params: CreateCrawlerIndexRequest = {
|
||||
index_name: indexName,
|
||||
language,
|
||||
};
|
||||
|
||||
return await HttpLogic.values.http.post<CreateCrawlerIndexResponse>(route, {
|
||||
body: JSON.stringify(params),
|
||||
});
|
||||
};
|
||||
|
||||
export const CreateCrawlerIndexApiLogic = createApiLogic(
|
||||
['create_crawler_index_api_logic'],
|
||||
createCrawlerIndex
|
||||
);
|
|
@ -1,33 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { mockHttpValues } from '../../../__mocks__/kea_logic';
|
||||
|
||||
import { nextTick } from '@kbn/test-jest-helpers';
|
||||
|
||||
import { CRAWLER_DOMAIN } from './_mocks_/crawler_domains.mock';
|
||||
import { deleteCrawlerDomain } from './delete_crawler_domain_api_logic';
|
||||
|
||||
describe('DeleteCrawlerDomainApiLogic', () => {
|
||||
const { http } = mockHttpValues;
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
describe('deleteCrawlerDomain', () => {
|
||||
it('calls correct api', async () => {
|
||||
const indexName = 'elastic-co-crawler';
|
||||
http.post.mockReturnValue(Promise.resolve());
|
||||
|
||||
const result = deleteCrawlerDomain({ domain: CRAWLER_DOMAIN, indexName });
|
||||
await nextTick();
|
||||
expect(http.delete).toHaveBeenCalledWith(
|
||||
`/internal/enterprise_search/indices/elastic-co-crawler/crawler/domains/${CRAWLER_DOMAIN.id}`
|
||||
);
|
||||
await expect(result).resolves.toEqual({ domain: CRAWLER_DOMAIN });
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,49 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { createApiLogic } from '../../../shared/api_logic/create_api_logic';
|
||||
import { HttpLogic } from '../../../shared/http';
|
||||
|
||||
import { CrawlerDomain } from './types';
|
||||
|
||||
export interface DeleteCrawlerDomainArgs {
|
||||
domain: CrawlerDomain;
|
||||
indexName: string;
|
||||
}
|
||||
|
||||
export interface DeleteCrawlerDomainResponse {
|
||||
domain: CrawlerDomain;
|
||||
}
|
||||
|
||||
export const deleteCrawlerDomain = async ({
|
||||
domain,
|
||||
indexName,
|
||||
}: DeleteCrawlerDomainArgs): Promise<DeleteCrawlerDomainResponse> => {
|
||||
await HttpLogic.values.http.delete(
|
||||
`/internal/enterprise_search/indices/${indexName}/crawler/domains/${domain.id}`
|
||||
);
|
||||
|
||||
return {
|
||||
domain,
|
||||
};
|
||||
};
|
||||
|
||||
export const DeleteCrawlerDomainApiLogic = createApiLogic(
|
||||
['delete_crawler_domain'],
|
||||
deleteCrawlerDomain,
|
||||
{
|
||||
showSuccessFlashFn: ({ domain }) =>
|
||||
i18n.translate('xpack.enterpriseSearch.crawler.domainsTable.action.delete.successMessage', {
|
||||
defaultMessage: "Successfully deleted domain ''{domainUrl}''",
|
||||
values: {
|
||||
domainUrl: domain.url,
|
||||
},
|
||||
}),
|
||||
}
|
||||
);
|
|
@ -1,42 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { mockHttpValues } from '../../../../__mocks__/kea_logic';
|
||||
|
||||
import { addExtractionRule } from './add_extraction_rule_api_logic';
|
||||
|
||||
describe('AddExtractionRuleApiLogic', () => {
|
||||
const { http } = mockHttpValues;
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
describe('addExtractionRule', () => {
|
||||
it('calls correct api', async () => {
|
||||
const domainId = 'domain-id';
|
||||
const indexName = 'elastic-crawler';
|
||||
const rule = { rules: 'fake' } as any;
|
||||
http.post.mockReturnValue(Promise.resolve('result'));
|
||||
|
||||
const result = addExtractionRule({
|
||||
domainId,
|
||||
indexName,
|
||||
rule,
|
||||
});
|
||||
expect(http.post).toHaveBeenCalledWith(
|
||||
`/internal/enterprise_search/indices/${indexName}/crawler/domains/${domainId}/extraction_rules`,
|
||||
{ body: JSON.stringify({ extraction_rule: rule }) }
|
||||
);
|
||||
await expect(result).resolves.toEqual('result');
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,52 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import {
|
||||
ExtractionRule,
|
||||
ExtractionRuleBase,
|
||||
} from '../../../../../../common/types/extraction_rules';
|
||||
import { Actions } from '../../../../shared/api_logic/create_api_logic';
|
||||
|
||||
import { createApiLogic } from '../../../../shared/api_logic/create_api_logic';
|
||||
import { HttpLogic } from '../../../../shared/http';
|
||||
|
||||
export interface AddExtractionRuleArgs {
|
||||
domainId: string;
|
||||
indexName: string;
|
||||
rule: ExtractionRuleBase;
|
||||
}
|
||||
|
||||
export interface AddExtractionRuleResponse {
|
||||
extraction_rules: ExtractionRule[];
|
||||
}
|
||||
|
||||
export const addExtractionRule = async ({
|
||||
domainId,
|
||||
indexName,
|
||||
rule: { description, rules, url_filters: urlFilters },
|
||||
}: AddExtractionRuleArgs) => {
|
||||
const route = `/internal/enterprise_search/indices/${indexName}/crawler/domains/${domainId}/extraction_rules`;
|
||||
|
||||
const params = {
|
||||
extraction_rule: {
|
||||
description,
|
||||
rules,
|
||||
url_filters: urlFilters,
|
||||
},
|
||||
};
|
||||
|
||||
return await HttpLogic.values.http.post<AddExtractionRuleResponse>(route, {
|
||||
body: JSON.stringify(params),
|
||||
});
|
||||
};
|
||||
|
||||
export const AddExtractionRuleApiLogic = createApiLogic(
|
||||
['add_extraction_rule_api_logic'],
|
||||
addExtractionRule
|
||||
);
|
||||
|
||||
export type AddExtractionRuleActions = Actions<AddExtractionRuleArgs, AddExtractionRuleResponse>;
|
|
@ -1,41 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { mockHttpValues } from '../../../../__mocks__/kea_logic';
|
||||
|
||||
import { deleteExtractionRule } from './delete_extraction_rule_api_logic';
|
||||
|
||||
describe('DeleteExtractionRuleApiLogic', () => {
|
||||
const { http } = mockHttpValues;
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
describe('deleteExtractionRule', () => {
|
||||
it('calls correct api', async () => {
|
||||
const domainId = 'domain-id';
|
||||
const indexName = 'elastic-crawler';
|
||||
const extractionRuleId = 'extraction_rule_id';
|
||||
http.delete.mockReturnValue(Promise.resolve('result'));
|
||||
|
||||
const result = deleteExtractionRule({
|
||||
domainId,
|
||||
extractionRuleId,
|
||||
indexName,
|
||||
});
|
||||
expect(http.delete).toHaveBeenCalledWith(
|
||||
`/internal/enterprise_search/indices/${indexName}/crawler/domains/${domainId}/extraction_rules/${extractionRuleId}`
|
||||
);
|
||||
await expect(result).resolves.toEqual('result');
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,42 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { ExtractionRule } from '../../../../../../common/types/extraction_rules';
|
||||
import { Actions } from '../../../../shared/api_logic/create_api_logic';
|
||||
|
||||
import { createApiLogic } from '../../../../shared/api_logic/create_api_logic';
|
||||
import { HttpLogic } from '../../../../shared/http';
|
||||
|
||||
export interface DeleteExtractionRuleArgs {
|
||||
domainId: string;
|
||||
extractionRuleId: string;
|
||||
indexName: string;
|
||||
}
|
||||
|
||||
export interface DeleteExtractionRuleResponse {
|
||||
extraction_rules: ExtractionRule[];
|
||||
}
|
||||
|
||||
export const deleteExtractionRule = async ({
|
||||
domainId,
|
||||
extractionRuleId,
|
||||
indexName,
|
||||
}: DeleteExtractionRuleArgs) => {
|
||||
const route = `/internal/enterprise_search/indices/${indexName}/crawler/domains/${domainId}/extraction_rules/${extractionRuleId}`;
|
||||
|
||||
return await HttpLogic.values.http.delete<DeleteExtractionRuleResponse>(route);
|
||||
};
|
||||
|
||||
export const DeleteExtractionRuleApiLogic = createApiLogic(
|
||||
['delete_extraction_rule_api_logic'],
|
||||
deleteExtractionRule
|
||||
);
|
||||
|
||||
export type DeleteExtractionRuleActions = Actions<
|
||||
DeleteExtractionRuleArgs,
|
||||
DeleteExtractionRuleResponse
|
||||
>;
|
|
@ -1,33 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { mockHttpValues } from '../../../../__mocks__/kea_logic';
|
||||
|
||||
import { fetchExtractionRules } from './fetch_extraction_rules_api_logic';
|
||||
|
||||
describe('FetchExtractionRuleApiLogic', () => {
|
||||
const { http } = mockHttpValues;
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
describe('fetchExtractionRule', () => {
|
||||
it('calls correct api', async () => {
|
||||
const domainId = 'domain-id';
|
||||
const indexName = 'elastic-crawler';
|
||||
http.get.mockReturnValue(Promise.resolve('result'));
|
||||
|
||||
const result = fetchExtractionRules({
|
||||
domainId,
|
||||
indexName,
|
||||
});
|
||||
expect(http.get).toHaveBeenCalledWith(
|
||||
`/internal/enterprise_search/indices/${indexName}/crawler/domains/${domainId}/extraction_rules`
|
||||
);
|
||||
await expect(result).resolves.toEqual('result');
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,37 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { ExtractionRule } from '../../../../../../common/types/extraction_rules';
|
||||
import { Actions } from '../../../../shared/api_logic/create_api_logic';
|
||||
|
||||
import { createApiLogic } from '../../../../shared/api_logic/create_api_logic';
|
||||
import { HttpLogic } from '../../../../shared/http';
|
||||
|
||||
export interface FetchExtractionRulesArgs {
|
||||
domainId: string;
|
||||
indexName: string;
|
||||
}
|
||||
|
||||
export interface FetchExtractionRulesResponse {
|
||||
extraction_rules: ExtractionRule[];
|
||||
}
|
||||
|
||||
export const fetchExtractionRules = async ({ domainId, indexName }: FetchExtractionRulesArgs) => {
|
||||
const route = `/internal/enterprise_search/indices/${indexName}/crawler/domains/${domainId}/extraction_rules`;
|
||||
|
||||
return await HttpLogic.values.http.get<FetchExtractionRulesResponse>(route);
|
||||
};
|
||||
|
||||
export const FetchExtractionRulesApiLogic = createApiLogic(
|
||||
['fetch_extraction_rule_api_logic'],
|
||||
fetchExtractionRules
|
||||
);
|
||||
|
||||
export type FetchExtractionRulesActions = Actions<
|
||||
FetchExtractionRulesArgs,
|
||||
FetchExtractionRulesResponse
|
||||
>;
|
|
@ -1,52 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { mockHttpValues } from '../../../../__mocks__/kea_logic';
|
||||
|
||||
import { updateExtractionRule } from './update_extraction_rule_api_logic';
|
||||
|
||||
describe('UpdateExtractionRuleApiLogic', () => {
|
||||
const { http } = mockHttpValues;
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
describe('updateExtractionRule', () => {
|
||||
it('calls correct api', async () => {
|
||||
const domainId = 'domain-id';
|
||||
const extractionRuleId = 'extraction_rule_id';
|
||||
const indexName = 'elastic-crawler';
|
||||
const rule = {
|
||||
description: 'haha',
|
||||
id: extractionRuleId,
|
||||
rules: ['a'],
|
||||
url_filters: ['b'],
|
||||
} as any;
|
||||
http.put.mockReturnValue(Promise.resolve('result'));
|
||||
|
||||
const result = updateExtractionRule({
|
||||
domainId,
|
||||
indexName,
|
||||
rule,
|
||||
});
|
||||
expect(http.put).toHaveBeenCalledWith(
|
||||
`/internal/enterprise_search/indices/${indexName}/crawler/domains/${domainId}/extraction_rules/${extractionRuleId}`,
|
||||
{
|
||||
body: JSON.stringify({
|
||||
extraction_rule: { description: 'haha', rules: ['a'], url_filters: ['b'] },
|
||||
}),
|
||||
}
|
||||
);
|
||||
await expect(result).resolves.toEqual('result');
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,55 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import {
|
||||
ExtractionRule,
|
||||
ExtractionRuleBase,
|
||||
} from '../../../../../../common/types/extraction_rules';
|
||||
import { Actions } from '../../../../shared/api_logic/create_api_logic';
|
||||
|
||||
import { createApiLogic } from '../../../../shared/api_logic/create_api_logic';
|
||||
import { HttpLogic } from '../../../../shared/http';
|
||||
|
||||
export interface UpdateExtractionRuleArgs {
|
||||
domainId: string;
|
||||
indexName: string;
|
||||
rule: ExtractionRule;
|
||||
}
|
||||
|
||||
export interface UpdateExtractionRuleResponse {
|
||||
extraction_rules: ExtractionRule[];
|
||||
}
|
||||
|
||||
export const updateExtractionRule = async ({
|
||||
domainId,
|
||||
indexName,
|
||||
rule,
|
||||
}: UpdateExtractionRuleArgs) => {
|
||||
const route = `/internal/enterprise_search/indices/${indexName}/crawler/domains/${domainId}/extraction_rules/${rule.id}`;
|
||||
|
||||
const params: { extraction_rule: ExtractionRuleBase } = {
|
||||
extraction_rule: {
|
||||
description: rule.description,
|
||||
rules: rule.rules,
|
||||
url_filters: rule.url_filters,
|
||||
},
|
||||
};
|
||||
|
||||
return await HttpLogic.values.http.put<UpdateExtractionRuleResponse>(route, {
|
||||
body: JSON.stringify(params),
|
||||
});
|
||||
};
|
||||
|
||||
export const UpdateExtractionRuleApiLogic = createApiLogic(
|
||||
['update_extraction_rule_api_logic'],
|
||||
updateExtractionRule
|
||||
);
|
||||
|
||||
export type UpdateExtractionRuleActions = Actions<
|
||||
UpdateExtractionRuleArgs,
|
||||
UpdateExtractionRuleResponse
|
||||
>;
|
|
@ -1,35 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { mockHttpValues } from '../../../__mocks__/kea_logic';
|
||||
|
||||
import { nextTick } from '@kbn/test-jest-helpers';
|
||||
|
||||
import { CRAWLER_DATA_FROM_SERVER } from './_mocks_/crawler.mock';
|
||||
import { getCrawler } from './get_crawler_api_logic';
|
||||
import { crawlerDataServerToClient } from './utils';
|
||||
|
||||
describe('GetCrawlerApiLogic', () => {
|
||||
const { http } = mockHttpValues;
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
describe('getCrawler', () => {
|
||||
it('calls correct api', async () => {
|
||||
const indexName = 'elastic-co-crawler';
|
||||
http.get.mockReturnValue(Promise.resolve(CRAWLER_DATA_FROM_SERVER));
|
||||
|
||||
const result = getCrawler({ indexName });
|
||||
await nextTick();
|
||||
|
||||
expect(http.get).toHaveBeenCalledWith(
|
||||
`/internal/enterprise_search/indices/${indexName}/crawler`
|
||||
);
|
||||
await expect(result).resolves.toEqual(crawlerDataServerToClient(CRAWLER_DATA_FROM_SERVER));
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,27 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { createApiLogic } from '../../../shared/api_logic/create_api_logic';
|
||||
import { HttpLogic } from '../../../shared/http';
|
||||
|
||||
import { CrawlerData, CrawlerDataFromServer } from './types';
|
||||
|
||||
import { crawlerDataServerToClient } from './utils';
|
||||
|
||||
export interface GetCrawlerArgs {
|
||||
indexName: string;
|
||||
}
|
||||
|
||||
export const getCrawler = async ({ indexName }: GetCrawlerArgs): Promise<CrawlerData> => {
|
||||
const response = await HttpLogic.values.http.get<CrawlerDataFromServer>(
|
||||
`/internal/enterprise_search/indices/${indexName}/crawler`
|
||||
);
|
||||
|
||||
return crawlerDataServerToClient(response);
|
||||
};
|
||||
|
||||
export const GetCrawlerApiLogic = createApiLogic(['get_crawler_domain'], getCrawler);
|
|
@ -1,39 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { mockHttpValues } from '../../../__mocks__/kea_logic';
|
||||
|
||||
import { nextTick } from '@kbn/test-jest-helpers';
|
||||
|
||||
import { CRAWLER_DOMAIN_FROM_SERVER } from './_mocks_/crawler_domains.mock';
|
||||
import { getCrawlerDomain } from './get_crawler_domain_api_logic';
|
||||
import { crawlerDomainServerToClient } from './utils';
|
||||
|
||||
describe('GetCrawlerDomainApiLogic', () => {
|
||||
const { http } = mockHttpValues;
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
describe('getCrawlerDomain', () => {
|
||||
it('calls correct api', async () => {
|
||||
const indexName = 'elastic-co-crawler';
|
||||
const domainId = CRAWLER_DOMAIN_FROM_SERVER.id;
|
||||
|
||||
http.get.mockReturnValue(Promise.resolve(CRAWLER_DOMAIN_FROM_SERVER));
|
||||
|
||||
const result = getCrawlerDomain({ domainId, indexName });
|
||||
await nextTick();
|
||||
|
||||
expect(http.get).toHaveBeenCalledWith(
|
||||
`/internal/enterprise_search/indices/${indexName}/crawler/domains/${domainId}`
|
||||
);
|
||||
await expect(result).resolves.toEqual(
|
||||
crawlerDomainServerToClient(CRAWLER_DOMAIN_FROM_SERVER)
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,31 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { createApiLogic } from '../../../shared/api_logic/create_api_logic';
|
||||
import { HttpLogic } from '../../../shared/http';
|
||||
|
||||
import { CrawlerDomain, CrawlerDomainFromServer } from './types';
|
||||
|
||||
import { crawlerDomainServerToClient } from './utils';
|
||||
|
||||
export interface GetCrawlerDomainArgs {
|
||||
domainId: string;
|
||||
indexName: string;
|
||||
}
|
||||
|
||||
export const getCrawlerDomain = async ({
|
||||
indexName,
|
||||
domainId,
|
||||
}: GetCrawlerDomainArgs): Promise<CrawlerDomain> => {
|
||||
const response = await HttpLogic.values.http.get<CrawlerDomainFromServer>(
|
||||
`/internal/enterprise_search/indices/${indexName}/crawler/domains/${domainId}`
|
||||
);
|
||||
|
||||
return crawlerDomainServerToClient(response);
|
||||
};
|
||||
|
||||
export const GetCrawlerDomainApiLogic = createApiLogic(['get_crawler_domain'], getCrawlerDomain);
|
|
@ -1,44 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { mockHttpValues } from '../../../__mocks__/kea_logic';
|
||||
|
||||
import { nextTick } from '@kbn/test-jest-helpers';
|
||||
|
||||
import { CRAWLER_DOMAINS_WITH_META_FROM_SERVER, META } from './_mocks_/crawler_domains.mock';
|
||||
import { getCrawlerDomains } from './get_crawler_domains_api_logic';
|
||||
import { crawlerDomainsWithMetaServerToClient } from './utils';
|
||||
|
||||
describe('GetCrawlerDomainsApiLogic', () => {
|
||||
const { http } = mockHttpValues;
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
describe('getCrawlerDomains', () => {
|
||||
it('calls correct api', async () => {
|
||||
const indexName = 'elastic-co-crawler';
|
||||
|
||||
http.get.mockReturnValue(Promise.resolve(CRAWLER_DOMAINS_WITH_META_FROM_SERVER));
|
||||
|
||||
const result = getCrawlerDomains({ indexName, meta: META });
|
||||
await nextTick();
|
||||
|
||||
expect(http.get).toHaveBeenCalledWith(
|
||||
`/internal/enterprise_search/indices/${indexName}/crawler/domains`,
|
||||
{
|
||||
query: {
|
||||
'page[current]': META.page.current,
|
||||
'page[size]': META.page.size,
|
||||
},
|
||||
}
|
||||
);
|
||||
await expect(result).resolves.toEqual(
|
||||
crawlerDomainsWithMetaServerToClient(CRAWLER_DOMAINS_WITH_META_FROM_SERVER)
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,37 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { Meta } from '../../../../../common/types';
|
||||
import { createApiLogic } from '../../../shared/api_logic/create_api_logic';
|
||||
import { HttpLogic } from '../../../shared/http';
|
||||
|
||||
import { CrawlerDomainsWithMetaFromServer } from './types';
|
||||
|
||||
import { crawlerDomainsWithMetaServerToClient } from './utils';
|
||||
|
||||
export interface GetCrawlerDomainsArgs {
|
||||
indexName: string;
|
||||
meta: Meta;
|
||||
}
|
||||
|
||||
export const getCrawlerDomains = async ({ indexName, meta }: GetCrawlerDomainsArgs) => {
|
||||
const query = {
|
||||
'page[current]': meta.page.current,
|
||||
'page[size]': meta.page.size,
|
||||
};
|
||||
|
||||
const response = await HttpLogic.values.http.get<CrawlerDomainsWithMetaFromServer>(
|
||||
`/internal/enterprise_search/indices/${indexName}/crawler/domains`,
|
||||
{
|
||||
query,
|
||||
}
|
||||
);
|
||||
|
||||
return crawlerDomainsWithMetaServerToClient(response);
|
||||
};
|
||||
|
||||
export const GetCrawlerDomainsApiLogic = createApiLogic(['get_crawler_domains'], getCrawlerDomains);
|
|
@ -1,33 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { mockHttpValues } from '../../../__mocks__/kea_logic';
|
||||
|
||||
import { nextTick } from '@kbn/test-jest-helpers';
|
||||
|
||||
import { recreateCrawlerConnector } from './recreate_crawler_connector_api_logic';
|
||||
|
||||
describe('CreateCrawlerIndexApiLogic', () => {
|
||||
const { http } = mockHttpValues;
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
describe('createCrawlerIndex', () => {
|
||||
it('calls correct api', async () => {
|
||||
const indexName = 'elastic-co-crawler';
|
||||
http.post.mockReturnValue(Promise.resolve({ connector_id: 'connectorId' }));
|
||||
|
||||
const result = recreateCrawlerConnector({ indexName });
|
||||
await nextTick();
|
||||
|
||||
expect(http.post).toHaveBeenCalledWith(
|
||||
'/internal/enterprise_search/indices/elastic-co-crawler/crawler/connector'
|
||||
);
|
||||
await expect(result).resolves.toEqual({ connector_id: 'connectorId' });
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,33 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { Actions, createApiLogic } from '../../../shared/api_logic/create_api_logic';
|
||||
import { HttpLogic } from '../../../shared/http';
|
||||
|
||||
export interface RecreateCrawlerConnectorArgs {
|
||||
indexName: string;
|
||||
}
|
||||
|
||||
export interface RecreateCrawlerConnectorResponse {
|
||||
created: string; // the name of the newly created index
|
||||
}
|
||||
|
||||
export const recreateCrawlerConnector = async ({ indexName }: RecreateCrawlerConnectorArgs) => {
|
||||
const route = `/internal/enterprise_search/indices/${indexName}/crawler/connector`;
|
||||
|
||||
return await HttpLogic.values.http.post<RecreateCrawlerConnectorResponse>(route);
|
||||
};
|
||||
|
||||
export const RecreateCrawlerConnectorApiLogic = createApiLogic(
|
||||
['recreate_crawler_connector_api_logic'],
|
||||
recreateCrawlerConnector
|
||||
);
|
||||
|
||||
export type RecreateCrawlerConnectorActions = Actions<
|
||||
RecreateCrawlerConnectorArgs,
|
||||
RecreateCrawlerConnectorResponse
|
||||
>;
|
|
@ -1,294 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { Meta } from '../../../../../common/types';
|
||||
import { CrawlerStatus } from '../../../../../common/types/crawler';
|
||||
import { ExtractionRule } from '../../../../../common/types/extraction_rules';
|
||||
|
||||
// TODO remove this proxy export, which will affect a lot of files
|
||||
export { CrawlerStatus };
|
||||
|
||||
export enum CrawlerPolicies {
|
||||
allow = 'allow',
|
||||
deny = 'deny',
|
||||
}
|
||||
|
||||
export enum CrawlerRules {
|
||||
beginsWith = 'begins',
|
||||
endsWith = 'ends',
|
||||
contains = 'contains',
|
||||
regex = 'regex',
|
||||
}
|
||||
|
||||
export interface CrawlRule {
|
||||
id: string;
|
||||
pattern: string;
|
||||
policy: CrawlerPolicies;
|
||||
rule: CrawlerRules;
|
||||
}
|
||||
|
||||
export interface EntryPoint {
|
||||
id: string;
|
||||
value: string;
|
||||
}
|
||||
|
||||
export interface Sitemap {
|
||||
id: string;
|
||||
url: string;
|
||||
}
|
||||
|
||||
export type CrawlerDomainValidationStepState = '' | 'loading' | 'valid' | 'warning' | 'invalid';
|
||||
|
||||
// The BE uses a singular form of each unit
|
||||
// See shared_togo/app/models/shared_togo/crawler/crawl_schedule.rb
|
||||
export enum CrawlUnits {
|
||||
hours = 'hour',
|
||||
days = 'day',
|
||||
weeks = 'week',
|
||||
months = 'month',
|
||||
}
|
||||
|
||||
export type CrawlerDomainValidationStepName =
|
||||
| 'initialValidation'
|
||||
| 'networkConnectivity'
|
||||
| 'indexingRestrictions'
|
||||
| 'contentVerification';
|
||||
|
||||
export type CrawlEventStage = 'crawl' | 'process';
|
||||
|
||||
export enum CrawlType {
|
||||
Full = 'full',
|
||||
Partial = 'partial',
|
||||
}
|
||||
|
||||
export interface BasicCrawlerAuth {
|
||||
password: string;
|
||||
type: 'basic';
|
||||
username: string;
|
||||
}
|
||||
|
||||
export interface RawCrawlerAuth {
|
||||
header: string;
|
||||
type: 'raw';
|
||||
}
|
||||
|
||||
export type CrawlerAuth = BasicCrawlerAuth | RawCrawlerAuth | null;
|
||||
|
||||
// Server
|
||||
|
||||
export interface CrawlerDomainFromServer {
|
||||
auth: CrawlerAuth;
|
||||
available_deduplication_fields: string[];
|
||||
crawl_rules: CrawlRule[];
|
||||
created_on: string;
|
||||
deduplication_enabled: boolean;
|
||||
deduplication_fields: string[];
|
||||
default_crawl_rule?: CrawlRule;
|
||||
document_count: number;
|
||||
entry_points: EntryPoint[];
|
||||
extraction_rules: ExtractionRule[];
|
||||
id: string;
|
||||
last_visited_at?: string;
|
||||
name: string;
|
||||
sitemaps: Sitemap[];
|
||||
}
|
||||
|
||||
export interface CrawlerDomainsWithMetaFromServer {
|
||||
meta: Meta;
|
||||
results: CrawlerDomainFromServer[];
|
||||
}
|
||||
|
||||
export interface CrawlerDataFromServer {
|
||||
domains: CrawlerDomainFromServer[];
|
||||
events: CrawlEventFromServer[];
|
||||
most_recent_crawl_request: CrawlRequestFromServer | null;
|
||||
user_agent: string;
|
||||
}
|
||||
|
||||
export interface CrawlerDomainValidationResultFromServer {
|
||||
results: Array<{
|
||||
comment: string;
|
||||
name: string;
|
||||
result: 'ok' | 'warning' | 'failure';
|
||||
}>;
|
||||
valid: boolean;
|
||||
}
|
||||
|
||||
export interface CrawlRequestFromServer {
|
||||
began_at: string | null;
|
||||
completed_at: string | null;
|
||||
created_at: string;
|
||||
id: string;
|
||||
status: CrawlerStatus;
|
||||
}
|
||||
|
||||
export interface CrawlRequestStatsFromServer {
|
||||
status: {
|
||||
avg_response_time_msec?: number;
|
||||
crawl_duration_msec?: number;
|
||||
pages_visited?: number;
|
||||
status_codes?: {
|
||||
[code: string]: number;
|
||||
};
|
||||
urls_allowed?: number;
|
||||
};
|
||||
}
|
||||
|
||||
export interface CrawlConfigFromServer {
|
||||
domain_allowlist: string[];
|
||||
max_crawl_depth: number;
|
||||
seed_urls: string[];
|
||||
sitemap_urls: string[];
|
||||
}
|
||||
|
||||
export type CrawlRequestWithDetailsFromServer = CrawlRequestFromServer & {
|
||||
crawl_config: CrawlConfigFromServer;
|
||||
stats: CrawlRequestStatsFromServer;
|
||||
type: CrawlType;
|
||||
};
|
||||
|
||||
export type CrawlEventFromServer = CrawlRequestFromServer & {
|
||||
crawl_config: CrawlConfigFromServer;
|
||||
stage: CrawlEventStage;
|
||||
type: CrawlType;
|
||||
};
|
||||
|
||||
export interface DomainConfigFromServer {
|
||||
id: string;
|
||||
name: string;
|
||||
seed_urls: string[];
|
||||
sitemap_urls: string[];
|
||||
}
|
||||
|
||||
export interface CrawlScheduleFromServer {
|
||||
frequency: number;
|
||||
unit: CrawlUnits;
|
||||
use_connector_schedule: boolean;
|
||||
}
|
||||
|
||||
// Client
|
||||
|
||||
export interface CrawlerCustomSchedule {
|
||||
scheduleKey: string;
|
||||
name: string;
|
||||
customEntryPointUrls: string[];
|
||||
customSitemapUrls: string[];
|
||||
includeSitemapsInRobotsTxt: boolean;
|
||||
maxCrawlDepth: number;
|
||||
selectedDomainUrls: string[];
|
||||
selectedEntryPointUrls: string[];
|
||||
selectedSitemapUrls: string[];
|
||||
interval: string; // interval has crontab syntax
|
||||
enabled: boolean;
|
||||
entryPointUrls: string[];
|
||||
sitemapUrls: string[];
|
||||
}
|
||||
|
||||
export enum CustomCrawlType {
|
||||
ONE_TIME = 'one-time',
|
||||
MULTIPLE = 'multiple',
|
||||
}
|
||||
|
||||
export interface CrawlerDomain {
|
||||
auth: CrawlerAuth;
|
||||
availableDeduplicationFields: string[];
|
||||
crawlRules: CrawlRule[];
|
||||
createdOn: string;
|
||||
deduplicationEnabled: boolean;
|
||||
deduplicationFields: string[];
|
||||
defaultCrawlRule?: CrawlRule;
|
||||
documentCount: number;
|
||||
entryPoints: EntryPoint[];
|
||||
extractionRules: ExtractionRule[];
|
||||
id: string;
|
||||
lastCrawl?: string;
|
||||
sitemaps: Sitemap[];
|
||||
url: string;
|
||||
}
|
||||
|
||||
export interface CrawlerDomainsWithMeta {
|
||||
domains: CrawlerDomain[];
|
||||
meta: Meta;
|
||||
}
|
||||
|
||||
export interface CrawlerData {
|
||||
domains: CrawlerDomain[];
|
||||
events: CrawlEvent[];
|
||||
mostRecentCrawlRequest: CrawlRequest | null;
|
||||
userAgent: string;
|
||||
}
|
||||
|
||||
export interface CrawlerDomainValidationStep {
|
||||
blockingFailure?: boolean;
|
||||
message?: string;
|
||||
state: CrawlerDomainValidationStepState;
|
||||
}
|
||||
|
||||
interface CrawlerDomainValidationState {
|
||||
contentVerification: CrawlerDomainValidationStep;
|
||||
indexingRestrictions: CrawlerDomainValidationStep;
|
||||
initialValidation: CrawlerDomainValidationStep;
|
||||
networkConnectivity: CrawlerDomainValidationStep;
|
||||
}
|
||||
|
||||
export interface CrawlerDomainValidationResult {
|
||||
steps: CrawlerDomainValidationState;
|
||||
}
|
||||
|
||||
export type CrawlerDomainValidationResultChange = Partial<CrawlerDomainValidationState>;
|
||||
|
||||
export interface CrawlRequest {
|
||||
beganAt: string | null;
|
||||
completedAt: string | null;
|
||||
createdAt: string;
|
||||
id: string;
|
||||
status: CrawlerStatus;
|
||||
}
|
||||
|
||||
export interface CrawlRequestStats {
|
||||
status: {
|
||||
avgResponseTimeMSec?: number;
|
||||
crawlDurationMSec?: number;
|
||||
pagesVisited?: number;
|
||||
statusCodes?: {
|
||||
[code: string]: number;
|
||||
};
|
||||
urlsAllowed?: number;
|
||||
};
|
||||
}
|
||||
|
||||
export interface CrawlConfig {
|
||||
domainAllowlist: string[];
|
||||
maxCrawlDepth: number;
|
||||
seedUrls: string[];
|
||||
sitemapUrls: string[];
|
||||
}
|
||||
|
||||
export type CrawlRequestWithDetails = CrawlRequest & {
|
||||
crawlConfig: CrawlConfig;
|
||||
stats: CrawlRequestStats | null;
|
||||
type: CrawlType;
|
||||
};
|
||||
|
||||
export type CrawlEvent = CrawlRequest & {
|
||||
crawlConfig: CrawlConfig;
|
||||
stage: CrawlEventStage;
|
||||
type: CrawlType;
|
||||
};
|
||||
|
||||
export interface CrawlSchedule {
|
||||
frequency: number;
|
||||
unit: CrawlUnits;
|
||||
useConnectorSchedule: boolean;
|
||||
}
|
||||
|
||||
export interface DomainConfig {
|
||||
id: string;
|
||||
name: string;
|
||||
seedUrls: string[];
|
||||
sitemapUrls: string[];
|
||||
}
|
|
@ -1,36 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { mockHttpValues } from '../../../__mocks__/kea_logic';
|
||||
|
||||
import { updateHtmlExtraction } from './update_html_extraction_api_logic';
|
||||
|
||||
describe('UpdateHtmlExtractionApiLogic', () => {
|
||||
const { http } = mockHttpValues;
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
describe('updateHtmlExtraction', () => {
|
||||
it('calls correct api', async () => {
|
||||
const indexName = 'elastic-co-crawler';
|
||||
|
||||
http.get.mockReturnValue(Promise.resolve());
|
||||
|
||||
const result = updateHtmlExtraction({ htmlExtraction: true, indexName });
|
||||
|
||||
expect(http.put).toHaveBeenCalledWith(
|
||||
`/internal/enterprise_search/indices/${indexName}/crawler/html_extraction`,
|
||||
{
|
||||
body: JSON.stringify({
|
||||
extract_full_html: true,
|
||||
}),
|
||||
}
|
||||
);
|
||||
await expect(result).resolves.toEqual({ htmlExtraction: true });
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,44 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { Actions } from '../../../shared/api_logic/create_api_logic';
|
||||
|
||||
import { createApiLogic } from '../../../shared/api_logic/create_api_logic';
|
||||
import { HttpLogic } from '../../../shared/http';
|
||||
|
||||
export interface UpdateHtmlExtractionArgs {
|
||||
htmlExtraction: boolean;
|
||||
indexName: string;
|
||||
}
|
||||
|
||||
export interface UpdateHtmlExtractionResponse {
|
||||
htmlExtraction: boolean;
|
||||
}
|
||||
|
||||
export const updateHtmlExtraction = async ({
|
||||
htmlExtraction,
|
||||
indexName,
|
||||
}: UpdateHtmlExtractionArgs) => {
|
||||
const route = `/internal/enterprise_search/indices/${indexName}/crawler/html_extraction`;
|
||||
|
||||
const params = { extract_full_html: htmlExtraction };
|
||||
|
||||
await HttpLogic.values.http.put(route, {
|
||||
body: JSON.stringify(params),
|
||||
});
|
||||
return { htmlExtraction };
|
||||
};
|
||||
|
||||
export const UpdateHtmlExtractionApiLogic = createApiLogic(
|
||||
['update_html_extraction_api_logic'],
|
||||
updateHtmlExtraction
|
||||
);
|
||||
|
||||
export type UpdateHtmlExtractionActions = Actions<
|
||||
UpdateHtmlExtractionArgs,
|
||||
UpdateHtmlExtractionResponse
|
||||
>;
|
|
@ -1,194 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import {
|
||||
CRAWL_EVENT,
|
||||
CRAWL_EVENT_FROM_SERVER,
|
||||
CRAWL_REQUEST,
|
||||
CRAWL_REQUEST_FROM_SERVER,
|
||||
CRAWL_REQUEST_WITH_DETAILS,
|
||||
CRAWL_REQUEST_WITH_DETAILS_FROM_SERVER,
|
||||
} from './_mocks_/crawl_events.mock';
|
||||
import { CRAWLER_DATA, CRAWLER_DATA_FROM_SERVER } from './_mocks_/crawler.mock';
|
||||
import {
|
||||
CRAWLER_DOMAIN,
|
||||
CRAWLER_DOMAINS_WITH_META,
|
||||
CRAWLER_DOMAINS_WITH_META_FROM_SERVER,
|
||||
CRAWLER_DOMAIN_CONFIG,
|
||||
CRAWLER_DOMAIN_CONFIG_FROM_SERVER,
|
||||
CRAWLER_DOMAIN_FROM_SERVER,
|
||||
CRAWL_RULE,
|
||||
} from './_mocks_/crawler_domains.mock';
|
||||
|
||||
import { CrawlerDomainValidationStep, CrawlerDomainValidationResultFromServer } from './types';
|
||||
|
||||
import {
|
||||
crawlerDomainServerToClient,
|
||||
crawlerDataServerToClient,
|
||||
crawlDomainValidationToResult,
|
||||
crawlEventServerToClient,
|
||||
crawlRequestServerToClient,
|
||||
crawlRequestWithDetailsServerToClient,
|
||||
domainConfigServerToClient,
|
||||
crawlerDomainsWithMetaServerToClient,
|
||||
} from './utils';
|
||||
|
||||
describe('crawlerDomainServerToClient', () => {
|
||||
it('converts the API payload into properties matching our code style', () => {
|
||||
expect(crawlerDomainServerToClient(CRAWLER_DOMAIN_FROM_SERVER)).toStrictEqual(CRAWLER_DOMAIN);
|
||||
expect(
|
||||
crawlerDomainServerToClient({
|
||||
...CRAWLER_DOMAIN_FROM_SERVER,
|
||||
last_visited_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
|
||||
})
|
||||
).toStrictEqual({ ...CRAWLER_DOMAIN, lastCrawl: 'Mon, 31 Aug 2020 17:00:00 +0000' });
|
||||
expect(
|
||||
crawlerDomainServerToClient({
|
||||
...CRAWLER_DOMAIN_FROM_SERVER,
|
||||
default_crawl_rule: CRAWL_RULE,
|
||||
})
|
||||
).toStrictEqual({ ...CRAWLER_DOMAIN, defaultCrawlRule: CRAWL_RULE });
|
||||
});
|
||||
});
|
||||
|
||||
describe('crawlRequestServerToClient', () => {
|
||||
it('converts the API payload into properties matching our code style', () => {
|
||||
expect(crawlRequestServerToClient(CRAWL_REQUEST_FROM_SERVER)).toStrictEqual(CRAWL_REQUEST);
|
||||
expect(
|
||||
crawlRequestServerToClient({
|
||||
...CRAWL_REQUEST_FROM_SERVER,
|
||||
began_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
|
||||
})
|
||||
).toStrictEqual({ ...CRAWL_REQUEST, beganAt: 'Mon, 31 Aug 2020 17:00:00 +0000' });
|
||||
expect(
|
||||
crawlRequestServerToClient({
|
||||
...CRAWL_REQUEST_FROM_SERVER,
|
||||
completed_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
|
||||
})
|
||||
).toStrictEqual({ ...CRAWL_REQUEST, completedAt: 'Mon, 31 Aug 2020 17:00:00 +0000' });
|
||||
});
|
||||
});
|
||||
|
||||
describe('crawlRequestWithDetailsServerToClient', () => {
|
||||
it('converts the API payload into properties matching our code style', () => {
|
||||
expect(
|
||||
crawlRequestWithDetailsServerToClient(CRAWL_REQUEST_WITH_DETAILS_FROM_SERVER)
|
||||
).toStrictEqual(CRAWL_REQUEST_WITH_DETAILS);
|
||||
expect(
|
||||
crawlRequestWithDetailsServerToClient({
|
||||
...CRAWL_REQUEST_WITH_DETAILS_FROM_SERVER,
|
||||
began_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
|
||||
})
|
||||
).toStrictEqual({ ...CRAWL_REQUEST_WITH_DETAILS, beganAt: 'Mon, 31 Aug 2020 17:00:00 +0000' });
|
||||
expect(
|
||||
crawlRequestWithDetailsServerToClient({
|
||||
...CRAWL_REQUEST_WITH_DETAILS_FROM_SERVER,
|
||||
completed_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
|
||||
})
|
||||
).toStrictEqual({
|
||||
...CRAWL_REQUEST_WITH_DETAILS,
|
||||
completedAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('crawlEventServerToClient', () => {
|
||||
it('converts the API payload into properties matching our code style', () => {
|
||||
expect(crawlEventServerToClient(CRAWL_EVENT_FROM_SERVER)).toStrictEqual(CRAWL_EVENT);
|
||||
expect(
|
||||
crawlEventServerToClient({
|
||||
...CRAWL_EVENT_FROM_SERVER,
|
||||
began_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
|
||||
})
|
||||
).toStrictEqual({ ...CRAWL_EVENT, beganAt: 'Mon, 31 Aug 2020 17:00:00 +0000' });
|
||||
expect(
|
||||
crawlEventServerToClient({
|
||||
...CRAWL_EVENT_FROM_SERVER,
|
||||
completed_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
|
||||
})
|
||||
).toStrictEqual({ ...CRAWL_EVENT, completedAt: 'Mon, 31 Aug 2020 17:00:00 +0000' });
|
||||
});
|
||||
});
|
||||
|
||||
describe('crawlerDataServerToClient', () => {
|
||||
it('converts all data from the server form to their client form', () => {
|
||||
expect(crawlerDataServerToClient(CRAWLER_DATA_FROM_SERVER)).toStrictEqual(CRAWLER_DATA);
|
||||
});
|
||||
});
|
||||
|
||||
describe('crawlDomainValidationToResult', () => {
|
||||
it('handles results with warnings', () => {
|
||||
const data: CrawlerDomainValidationResultFromServer = {
|
||||
results: [
|
||||
{
|
||||
comment: 'A warning, not failure',
|
||||
name: '-',
|
||||
result: 'warning',
|
||||
},
|
||||
],
|
||||
valid: true,
|
||||
};
|
||||
|
||||
expect(crawlDomainValidationToResult(data)).toEqual({
|
||||
blockingFailure: false,
|
||||
message: 'A warning, not failure',
|
||||
state: 'warning',
|
||||
} as CrawlerDomainValidationStep);
|
||||
});
|
||||
|
||||
it('handles valid results, without warnings', () => {
|
||||
const data: CrawlerDomainValidationResultFromServer = {
|
||||
results: [
|
||||
{
|
||||
comment: 'Something happened',
|
||||
name: '-',
|
||||
result: 'ok',
|
||||
},
|
||||
],
|
||||
valid: true,
|
||||
};
|
||||
|
||||
expect(crawlDomainValidationToResult(data)).toEqual({
|
||||
state: 'valid',
|
||||
} as CrawlerDomainValidationStep);
|
||||
});
|
||||
|
||||
it('handes invalid results', () => {
|
||||
const data: CrawlerDomainValidationResultFromServer = {
|
||||
results: [
|
||||
{
|
||||
comment: 'Something unexpected happened',
|
||||
name: '-',
|
||||
result: 'failure',
|
||||
},
|
||||
],
|
||||
valid: false,
|
||||
};
|
||||
|
||||
expect(crawlDomainValidationToResult(data)).toEqual({
|
||||
blockingFailure: true,
|
||||
message: 'Something unexpected happened',
|
||||
state: 'invalid',
|
||||
} as CrawlerDomainValidationStep);
|
||||
});
|
||||
});
|
||||
|
||||
describe('domainConfigServerToClient', () => {
|
||||
it('converts the domain config payload into properties matching our code style', () => {
|
||||
expect(domainConfigServerToClient(CRAWLER_DOMAIN_CONFIG_FROM_SERVER)).toEqual(
|
||||
CRAWLER_DOMAIN_CONFIG
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('crawlerDomainsWithMetaServerToClient', () => {
|
||||
it('converts the domain config payload into properties matching our code style', () => {
|
||||
expect(crawlerDomainsWithMetaServerToClient(CRAWLER_DOMAINS_WITH_META_FROM_SERVER)).toEqual(
|
||||
CRAWLER_DOMAINS_WITH_META
|
||||
);
|
||||
});
|
||||
});
|
|
@ -1,344 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import {
|
||||
CrawlerCustomScheduleMappingClient,
|
||||
CrawlerCustomSchedulesServer,
|
||||
CrawlerCustomScheduleClient,
|
||||
CrawlerCustomScheduleConfigOverridesClient,
|
||||
} from '../../../../../common/types/crawler';
|
||||
|
||||
import {
|
||||
CrawlerDomain,
|
||||
CrawlerDomainFromServer,
|
||||
CrawlerData,
|
||||
CrawlerDataFromServer,
|
||||
CrawlerDomainValidationResultFromServer,
|
||||
CrawlerDomainValidationStep,
|
||||
CrawlRequestFromServer,
|
||||
CrawlRequest,
|
||||
CrawlRequestStats,
|
||||
CrawlRequestStatsFromServer,
|
||||
CrawlEventFromServer,
|
||||
CrawlEvent,
|
||||
CrawlConfigFromServer,
|
||||
CrawlConfig,
|
||||
CrawlRequestWithDetailsFromServer,
|
||||
CrawlRequestWithDetails,
|
||||
DomainConfig,
|
||||
DomainConfigFromServer,
|
||||
CrawlerDomainsWithMetaFromServer,
|
||||
CrawlerDomainsWithMeta,
|
||||
BasicCrawlerAuth,
|
||||
CrawlerAuth,
|
||||
RawCrawlerAuth,
|
||||
CrawlScheduleFromServer,
|
||||
CrawlSchedule,
|
||||
CrawlerCustomSchedule,
|
||||
} from './types';
|
||||
|
||||
export function crawlerDomainServerToClient(payload: CrawlerDomainFromServer): CrawlerDomain {
|
||||
const {
|
||||
auth,
|
||||
available_deduplication_fields: availableDeduplicationFields,
|
||||
crawl_rules: crawlRules,
|
||||
created_on: createdOn,
|
||||
deduplication_enabled: deduplicationEnabled,
|
||||
deduplication_fields: deduplicationFields,
|
||||
default_crawl_rule: defaultCrawlRule,
|
||||
document_count: documentCount,
|
||||
entry_points: entryPoints,
|
||||
extraction_rules: extractionRules,
|
||||
id,
|
||||
last_visited_at: lastCrawl,
|
||||
name,
|
||||
sitemaps,
|
||||
} = payload;
|
||||
|
||||
const clientPayload: CrawlerDomain = {
|
||||
auth,
|
||||
availableDeduplicationFields,
|
||||
crawlRules,
|
||||
createdOn,
|
||||
deduplicationEnabled,
|
||||
deduplicationFields,
|
||||
documentCount,
|
||||
entryPoints,
|
||||
extractionRules,
|
||||
id,
|
||||
sitemaps,
|
||||
url: name,
|
||||
};
|
||||
|
||||
if (lastCrawl) {
|
||||
clientPayload.lastCrawl = lastCrawl;
|
||||
}
|
||||
|
||||
if (defaultCrawlRule) {
|
||||
clientPayload.defaultCrawlRule = defaultCrawlRule;
|
||||
}
|
||||
|
||||
return clientPayload;
|
||||
}
|
||||
|
||||
export function crawlRequestStatsServerToClient(
|
||||
crawlStats: CrawlRequestStatsFromServer
|
||||
): CrawlRequestStats {
|
||||
const {
|
||||
status: {
|
||||
avg_response_time_msec: avgResponseTimeMSec,
|
||||
crawl_duration_msec: crawlDurationMSec,
|
||||
pages_visited: pagesVisited,
|
||||
urls_allowed: urlsAllowed,
|
||||
status_codes: statusCodes,
|
||||
},
|
||||
} = crawlStats;
|
||||
|
||||
return {
|
||||
status: {
|
||||
avgResponseTimeMSec,
|
||||
crawlDurationMSec,
|
||||
pagesVisited,
|
||||
statusCodes,
|
||||
urlsAllowed,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function crawlRequestServerToClient(crawlRequest: CrawlRequestFromServer): CrawlRequest {
|
||||
const {
|
||||
id,
|
||||
status,
|
||||
created_at: createdAt,
|
||||
began_at: beganAt,
|
||||
completed_at: completedAt,
|
||||
} = crawlRequest;
|
||||
|
||||
return {
|
||||
beganAt,
|
||||
completedAt,
|
||||
createdAt,
|
||||
id,
|
||||
status,
|
||||
};
|
||||
}
|
||||
|
||||
export function crawlConfigServerToClient(crawlConfig: CrawlConfigFromServer): CrawlConfig {
|
||||
const {
|
||||
domain_allowlist: domainAllowlist,
|
||||
seed_urls: seedUrls,
|
||||
sitemap_urls: sitemapUrls,
|
||||
max_crawl_depth: maxCrawlDepth,
|
||||
} = crawlConfig;
|
||||
|
||||
return {
|
||||
domainAllowlist,
|
||||
maxCrawlDepth,
|
||||
seedUrls,
|
||||
sitemapUrls,
|
||||
};
|
||||
}
|
||||
|
||||
export function crawlEventServerToClient(event: CrawlEventFromServer): CrawlEvent {
|
||||
const {
|
||||
id,
|
||||
stage,
|
||||
status,
|
||||
created_at: createdAt,
|
||||
began_at: beganAt,
|
||||
completed_at: completedAt,
|
||||
type,
|
||||
crawl_config: crawlConfig,
|
||||
} = event;
|
||||
|
||||
return {
|
||||
beganAt,
|
||||
completedAt,
|
||||
crawlConfig: crawlConfigServerToClient(crawlConfig),
|
||||
createdAt,
|
||||
id,
|
||||
stage,
|
||||
status,
|
||||
type,
|
||||
};
|
||||
}
|
||||
|
||||
export function crawlRequestWithDetailsServerToClient(
|
||||
event: CrawlRequestWithDetailsFromServer
|
||||
): CrawlRequestWithDetails {
|
||||
const {
|
||||
began_at: beganAt,
|
||||
completed_at: completedAt,
|
||||
crawl_config: crawlConfig,
|
||||
created_at: createdAt,
|
||||
id,
|
||||
stats: crawlStats,
|
||||
status,
|
||||
type,
|
||||
} = event;
|
||||
|
||||
return {
|
||||
beganAt,
|
||||
completedAt,
|
||||
crawlConfig: crawlConfigServerToClient(crawlConfig),
|
||||
createdAt,
|
||||
id,
|
||||
stats: crawlStats && crawlRequestStatsServerToClient(crawlStats),
|
||||
status,
|
||||
type,
|
||||
};
|
||||
}
|
||||
|
||||
export function crawlerDataServerToClient(payload: CrawlerDataFromServer): CrawlerData {
|
||||
const {
|
||||
domains,
|
||||
events,
|
||||
most_recent_crawl_request: mostRecentCrawlRequest,
|
||||
user_agent: userAgent,
|
||||
} = payload;
|
||||
|
||||
return {
|
||||
domains: domains.map((domain) => crawlerDomainServerToClient(domain)),
|
||||
events: events.map((event) => crawlEventServerToClient(event)),
|
||||
mostRecentCrawlRequest:
|
||||
mostRecentCrawlRequest && crawlRequestServerToClient(mostRecentCrawlRequest),
|
||||
userAgent,
|
||||
};
|
||||
}
|
||||
|
||||
export function crawlDomainValidationToResult(
|
||||
data: CrawlerDomainValidationResultFromServer
|
||||
): CrawlerDomainValidationStep {
|
||||
if (!data.valid) {
|
||||
return {
|
||||
blockingFailure: true,
|
||||
message: data.results.find((result) => result.result === 'failure')?.comment,
|
||||
state: 'invalid',
|
||||
};
|
||||
}
|
||||
|
||||
const warningResult = data.results.find((result) => result.result === 'warning');
|
||||
|
||||
if (warningResult) {
|
||||
return {
|
||||
blockingFailure: !data.valid,
|
||||
message: warningResult.comment,
|
||||
state: 'warning',
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
state: 'valid',
|
||||
};
|
||||
}
|
||||
|
||||
export const domainConfigServerToClient = (
|
||||
domainConfigFromServer: DomainConfigFromServer
|
||||
): DomainConfig => ({
|
||||
id: domainConfigFromServer.id,
|
||||
name: domainConfigFromServer.name,
|
||||
seedUrls: domainConfigFromServer.seed_urls,
|
||||
sitemapUrls: domainConfigFromServer.sitemap_urls,
|
||||
});
|
||||
|
||||
export const crawlerCustomSchedulingServerToClient = (
|
||||
customSchedulingFromServer: CrawlerCustomSchedulesServer
|
||||
): CrawlerCustomSchedule[] =>
|
||||
Object.entries(customSchedulingFromServer.custom_scheduling).map(
|
||||
([scheduleKey, scheduleMapping]) => {
|
||||
const {
|
||||
name,
|
||||
interval,
|
||||
configuration_overrides: configurationOverrides,
|
||||
enabled,
|
||||
} = scheduleMapping;
|
||||
const {
|
||||
max_crawl_depth: maxCrawlDepth = 2,
|
||||
sitemap_discovery_disabled: notIncludeSitemapsInRobotsTxt = false,
|
||||
domain_allowlist: selectedDomainUrls = [],
|
||||
sitemap_urls: customSitemapUrls = [],
|
||||
seed_urls: customEntryPointUrls = [],
|
||||
} = configurationOverrides;
|
||||
|
||||
return {
|
||||
scheduleKey,
|
||||
name,
|
||||
interval,
|
||||
enabled,
|
||||
maxCrawlDepth,
|
||||
includeSitemapsInRobotsTxt: !notIncludeSitemapsInRobotsTxt,
|
||||
selectedDomainUrls,
|
||||
selectedEntryPointUrls: [],
|
||||
selectedSitemapUrls: [],
|
||||
customEntryPointUrls,
|
||||
customSitemapUrls,
|
||||
entryPointUrls: [],
|
||||
sitemapUrls: [],
|
||||
};
|
||||
}
|
||||
);
|
||||
|
||||
export const crawlerCustomSchedulingClientToServer = (
|
||||
crawlerCustomSchedules: CrawlerCustomSchedule[]
|
||||
): CrawlerCustomScheduleMappingClient => {
|
||||
const mapToServerFormat = (
|
||||
crawlerSchedule: CrawlerCustomSchedule
|
||||
): CrawlerCustomScheduleClient => {
|
||||
const configurationOverrides: CrawlerCustomScheduleConfigOverridesClient = {
|
||||
maxCrawlDepth: crawlerSchedule.maxCrawlDepth,
|
||||
sitemapDiscoveryDisabled: !crawlerSchedule.includeSitemapsInRobotsTxt,
|
||||
domainAllowlist: crawlerSchedule.selectedDomainUrls,
|
||||
sitemapUrls: [...crawlerSchedule.selectedSitemapUrls, ...crawlerSchedule.customSitemapUrls],
|
||||
seedUrls: [
|
||||
...crawlerSchedule.selectedEntryPointUrls,
|
||||
...crawlerSchedule.customEntryPointUrls,
|
||||
],
|
||||
};
|
||||
|
||||
return {
|
||||
name: crawlerSchedule.name,
|
||||
interval: crawlerSchedule.interval,
|
||||
configurationOverrides,
|
||||
enabled: crawlerSchedule.enabled,
|
||||
};
|
||||
};
|
||||
|
||||
const customSchedules: CrawlerCustomScheduleMappingClient = crawlerCustomSchedules.reduce(
|
||||
(map, schedule) => {
|
||||
map.set(schedule.scheduleKey, mapToServerFormat(schedule));
|
||||
return map;
|
||||
},
|
||||
new Map()
|
||||
);
|
||||
return customSchedules;
|
||||
};
|
||||
|
||||
export const crawlerDomainsWithMetaServerToClient = ({
|
||||
results,
|
||||
meta,
|
||||
}: CrawlerDomainsWithMetaFromServer): CrawlerDomainsWithMeta => ({
|
||||
domains: results.map(crawlerDomainServerToClient),
|
||||
meta,
|
||||
});
|
||||
|
||||
export const crawlScheduleServerToClient = ({
|
||||
frequency,
|
||||
unit,
|
||||
use_connector_schedule: useConnectorSchedule,
|
||||
}: CrawlScheduleFromServer): CrawlSchedule => ({
|
||||
frequency,
|
||||
unit,
|
||||
useConnectorSchedule,
|
||||
});
|
||||
|
||||
export function isBasicCrawlerAuth(auth: CrawlerAuth): auth is BasicCrawlerAuth {
|
||||
return auth !== null && (auth as BasicCrawlerAuth).type === 'basic';
|
||||
}
|
||||
|
||||
export function isRawCrawlerAuth(auth: CrawlerAuth): auth is RawCrawlerAuth {
|
||||
return auth !== null && (auth as RawCrawlerAuth).type === 'raw';
|
||||
}
|
|
@ -12,12 +12,10 @@ import { Routes, Route } from '@kbn/shared-ux-router';
|
|||
import {
|
||||
CONNECTORS_PATH,
|
||||
NEW_INDEX_SELECT_CONNECTOR_PATH,
|
||||
NEW_CONNECTOR_PATH,
|
||||
NEW_CONNECTOR_FLOW_PATH,
|
||||
CONNECTOR_DETAIL_PATH,
|
||||
} from '../../routes';
|
||||
import { ConnectorDetailRouter } from '../connector_detail/connector_detail_router';
|
||||
import { NewSearchIndexPage } from '../new_index/new_search_index_page';
|
||||
|
||||
import { Connectors } from './connectors';
|
||||
import { CreateConnector } from './create_connector';
|
||||
|
@ -28,9 +26,6 @@ export const ConnectorsRouter: React.FC = () => {
|
|||
<Route path={NEW_INDEX_SELECT_CONNECTOR_PATH}>
|
||||
<CreateConnector />
|
||||
</Route>
|
||||
<Route path={NEW_CONNECTOR_PATH}>
|
||||
<NewSearchIndexPage type="connector" />
|
||||
</Route>
|
||||
<Route path={NEW_CONNECTOR_FLOW_PATH}>
|
||||
<CreateConnector />
|
||||
</Route>
|
||||
|
|
|
@ -9,17 +9,13 @@ import React from 'react';
|
|||
|
||||
import { Routes, Route } from '@kbn/shared-ux-router';
|
||||
|
||||
import { CRAWLERS_PATH, CRAWLERS_ELASTIC_MANAGED_PATH, NEW_CRAWLER_PATH } from '../../routes';
|
||||
import { NewSearchIndexPage } from '../new_index/new_search_index_page';
|
||||
import { CRAWLERS_PATH, CRAWLERS_ELASTIC_MANAGED_PATH } from '../../routes';
|
||||
|
||||
import { Connectors } from './connectors';
|
||||
|
||||
export const CrawlersRouter: React.FC = () => {
|
||||
return (
|
||||
<Routes>
|
||||
<Route path={NEW_CRAWLER_PATH}>
|
||||
<NewSearchIndexPage type="crawler" />
|
||||
</Route>
|
||||
<Route exact path={CRAWLERS_PATH}>
|
||||
<Connectors isCrawler isCrawlerSelfManaged />
|
||||
</Route>
|
||||
|
|
|
@ -31,13 +31,13 @@ import { i18n } from '@kbn/i18n';
|
|||
import { useKibana } from '@kbn/kibana-react-plugin/public';
|
||||
import { useUnsavedChangesPrompt } from '@kbn/unsaved-changes-prompt';
|
||||
|
||||
import { errorToText } from '../../../../../../common/utils/error_to_text';
|
||||
import { HttpLogic } from '../../../../shared/http';
|
||||
import { KibanaLogic } from '../../../../shared/kibana';
|
||||
|
||||
import { AddConnectorApiLogic } from '../../../api/connector/add_connector_api_logic';
|
||||
import { EnterpriseSearchContentPageTemplate } from '../../layout';
|
||||
import { NewConnectorLogic } from '../../new_index/method_connector/new_connector_logic';
|
||||
import { errorToText } from '../../new_index/utils/error_to_text';
|
||||
import { connectorsBreadcrumbs } from '../connectors';
|
||||
|
||||
import { generateStepState } from '../utils/generate_step_state';
|
||||
|
|
|
@ -1,3 +0,0 @@
|
|||
.connectorCheckable {
|
||||
flex-grow: 1;
|
||||
}
|
|
@ -1,318 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React, { MouseEvent, useState } from 'react';
|
||||
|
||||
import { css } from '@emotion/react';
|
||||
|
||||
import {
|
||||
EuiBadge,
|
||||
EuiButtonIcon,
|
||||
EuiContextMenuItem,
|
||||
EuiContextMenuPanel,
|
||||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiIcon,
|
||||
EuiLink,
|
||||
EuiPanel,
|
||||
EuiPopover,
|
||||
EuiSpacer,
|
||||
EuiText,
|
||||
EuiThemeComputed,
|
||||
EuiTitle,
|
||||
useEuiTheme,
|
||||
} from '@elastic/eui';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { EXAMPLE_CONNECTOR_SERVICE_TYPES } from '../../../../../../common/constants';
|
||||
|
||||
import {
|
||||
BETA_LABEL,
|
||||
NATIVE_LABEL,
|
||||
CONNECTOR_CLIENT_LABEL,
|
||||
EXAMPLE_CONNECTOR_LABEL,
|
||||
} from '../../../../shared/constants';
|
||||
|
||||
import { PlatinumLicensePopover } from '../../shared/platinum_license_popover/platinum_license_popover';
|
||||
|
||||
import { NativePopover } from './native_popover';
|
||||
|
||||
export interface ConnectorCheckableProps {
|
||||
documentationUrl: string | undefined;
|
||||
iconType: string;
|
||||
isBeta: boolean;
|
||||
isDisabled: boolean;
|
||||
isTechPreview: boolean;
|
||||
name: string;
|
||||
onConnectorSelect: (isNative?: boolean) => void;
|
||||
serviceType: string;
|
||||
showLicensePopover?: boolean;
|
||||
showNativeBadge: boolean;
|
||||
showNativePopover?: boolean;
|
||||
}
|
||||
|
||||
const getCss = (
|
||||
euiTheme: EuiThemeComputed,
|
||||
isDisabled: boolean,
|
||||
showNativeBadge: ConnectorCheckableProps['showNativeBadge']
|
||||
) => {
|
||||
return css`
|
||||
${showNativeBadge &&
|
||||
`box-shadow: 8px 9px 0px -1px ${euiTheme.colors.lightestShade},
|
||||
8px 9px 0px 0px ${euiTheme.colors.lightShade};`}
|
||||
${isDisabled &&
|
||||
`background-color: ${euiTheme.colors.lightestShade};
|
||||
color: ${euiTheme.colors.disabledText};
|
||||
`}
|
||||
`;
|
||||
};
|
||||
|
||||
export const ConnectorCheckable: React.FC<ConnectorCheckableProps> = ({
|
||||
isDisabled,
|
||||
documentationUrl,
|
||||
iconType,
|
||||
isBeta,
|
||||
isTechPreview,
|
||||
name,
|
||||
onConnectorSelect,
|
||||
serviceType,
|
||||
showNativeBadge,
|
||||
showLicensePopover = false,
|
||||
showNativePopover = false,
|
||||
}) => {
|
||||
const { euiTheme } = useEuiTheme();
|
||||
const [isLicensePopoverOpen, setIsLicensePopoverOpen] = useState(false);
|
||||
const [isNativeInfoPopoverOpen, setIsNativeInfoPopoverOpen] = useState(false);
|
||||
const [isNativePopoverOpen, setIsNativePopoverOpen] = useState(false);
|
||||
return (
|
||||
<EuiPanel
|
||||
element="div"
|
||||
onClick={() => {
|
||||
if (isDisabled && showNativeBadge) return;
|
||||
onConnectorSelect(showNativeBadge);
|
||||
}}
|
||||
id={`checkableCard-${serviceType}`}
|
||||
css={getCss(euiTheme, isDisabled || showLicensePopover, showNativeBadge)}
|
||||
hasBorder
|
||||
data-telemetry-id={`entSearchContent-connector-selectConnector-${serviceType}-select`}
|
||||
>
|
||||
<EuiFlexGroup>
|
||||
<EuiFlexItem grow={false}>
|
||||
{iconType ? <EuiIcon type={iconType} size="l" /> : null}
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem>
|
||||
<EuiFlexGroup direction="column" gutterSize="s">
|
||||
<EuiFlexItem>
|
||||
<EuiFlexGroup gutterSize="s" responsive={false} justifyContent="spaceAround">
|
||||
<EuiFlexItem grow>
|
||||
<EuiFlexGroup gutterSize="s" alignItems="flexStart" responsive={false}>
|
||||
<EuiFlexItem grow={false}>
|
||||
{isDisabled ? (
|
||||
<EuiText color="disabledText" size="xs">
|
||||
<h3>{name}</h3>
|
||||
</EuiText>
|
||||
) : (
|
||||
<EuiTitle size="xs">
|
||||
<h2>{name}</h2>
|
||||
</EuiTitle>
|
||||
)}
|
||||
</EuiFlexItem>
|
||||
{!showNativePopover && showLicensePopover && (
|
||||
<EuiFlexItem grow={false}>
|
||||
<PlatinumLicensePopover
|
||||
button={
|
||||
<EuiButtonIcon
|
||||
data-test-subj="entSearchContent-connectors-selectConnector-licensePopoverButton"
|
||||
data-telemetry-id="entSearchContent-connectors-selectConnector-licensePopoverButton"
|
||||
aria-label={i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.selectConnector.openPopoverLabel',
|
||||
{
|
||||
defaultMessage: 'Open licensing popover',
|
||||
}
|
||||
)}
|
||||
iconType="questionInCircle"
|
||||
onClick={(event: MouseEvent) => {
|
||||
event.preventDefault();
|
||||
event.stopPropagation();
|
||||
setIsLicensePopoverOpen(!isLicensePopoverOpen);
|
||||
}}
|
||||
/>
|
||||
}
|
||||
closePopover={() => setIsLicensePopoverOpen(false)}
|
||||
isPopoverOpen={isLicensePopoverOpen}
|
||||
/>
|
||||
</EuiFlexItem>
|
||||
)}
|
||||
{showNativePopover && (
|
||||
<EuiFlexItem grow={false}>
|
||||
<NativePopover
|
||||
button={
|
||||
<EuiButtonIcon
|
||||
data-test-subj="entSearchContent-connectors-selectConnector-nativeInfoPopoverButton"
|
||||
data-telemetry-id="entSearchContent-connectors-selectConnector-nativeInfoPopoverButton"
|
||||
aria-label={i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.selectConnector.openNativePopoverLabel',
|
||||
{
|
||||
defaultMessage:
|
||||
'Open popover with information about Elastic managed connectors',
|
||||
}
|
||||
)}
|
||||
iconType="questionInCircle"
|
||||
onClick={(event: MouseEvent) => {
|
||||
event.preventDefault();
|
||||
event.stopPropagation();
|
||||
setIsNativeInfoPopoverOpen(!isNativeInfoPopoverOpen);
|
||||
}}
|
||||
/>
|
||||
}
|
||||
closePopover={() => setIsNativeInfoPopoverOpen(false)}
|
||||
isPopoverOpen={isNativeInfoPopoverOpen}
|
||||
/>
|
||||
</EuiFlexItem>
|
||||
)}
|
||||
</EuiFlexGroup>
|
||||
</EuiFlexItem>
|
||||
{showNativeBadge && (
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiPopover
|
||||
button={
|
||||
<EuiButtonIcon
|
||||
aria-label={i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.selectConnector.openCreateConnectorPopover',
|
||||
{
|
||||
defaultMessage:
|
||||
'Open menu to create a connector of type {connectorType}',
|
||||
values: { connectorType: name },
|
||||
}
|
||||
)}
|
||||
data-test-subj="entSearchContent-connectors-selectConnector-nativePopoverButton"
|
||||
data-telemetry-id="entSearchContent-connectors-selectConnector-nativePopoverButton"
|
||||
display="base"
|
||||
color="primary"
|
||||
iconType="boxesHorizontal"
|
||||
onClick={(e: MouseEvent) => {
|
||||
e.stopPropagation();
|
||||
e.preventDefault();
|
||||
setIsNativePopoverOpen(true);
|
||||
}}
|
||||
/>
|
||||
}
|
||||
isOpen={isNativePopoverOpen}
|
||||
closePopover={() => {
|
||||
setIsNativePopoverOpen(false);
|
||||
}}
|
||||
>
|
||||
<EuiContextMenuPanel
|
||||
size="s"
|
||||
items={[
|
||||
<EuiContextMenuItem
|
||||
key="native"
|
||||
disabled={isDisabled}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onConnectorSelect(true);
|
||||
}}
|
||||
>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.connectorCheckable.setupANativeConnectorContextMenuItemLabel',
|
||||
{ defaultMessage: 'Set up an Elastic managed connector' }
|
||||
)}
|
||||
</EuiContextMenuItem>,
|
||||
<EuiSpacer key="spacer" size="s" />,
|
||||
<EuiContextMenuItem
|
||||
key="client"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onConnectorSelect(false);
|
||||
}}
|
||||
>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.connectorCheckable.setupAConnectorClientContextMenuItemLabel',
|
||||
{ defaultMessage: 'Set up a self-managed connector' }
|
||||
)}
|
||||
</EuiContextMenuItem>,
|
||||
]}
|
||||
/>
|
||||
</EuiPopover>
|
||||
</EuiFlexItem>
|
||||
)}
|
||||
</EuiFlexGroup>
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem>
|
||||
<EuiFlexGroup direction="column" gutterSize="xs">
|
||||
<EuiFlexItem>
|
||||
<EuiFlexGroup
|
||||
direction="row"
|
||||
gutterSize="s"
|
||||
justifyContent="flexStart"
|
||||
responsive={false}
|
||||
>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiBadge>
|
||||
<EuiText size="xs">
|
||||
{showNativeBadge ? NATIVE_LABEL : CONNECTOR_CLIENT_LABEL}
|
||||
</EuiText>
|
||||
</EuiBadge>
|
||||
</EuiFlexItem>
|
||||
{isBeta && (
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiBadge color="hollow">
|
||||
<EuiText size="xs">{BETA_LABEL}</EuiText>
|
||||
</EuiBadge>
|
||||
</EuiFlexItem>
|
||||
)}
|
||||
{EXAMPLE_CONNECTOR_SERVICE_TYPES.includes(serviceType) && (
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiBadge color="hollow" iconType="beaker">
|
||||
<EuiText size="xs">{EXAMPLE_CONNECTOR_LABEL}</EuiText>
|
||||
</EuiBadge>
|
||||
</EuiFlexItem>
|
||||
)}
|
||||
{isTechPreview && !EXAMPLE_CONNECTOR_SERVICE_TYPES.includes(serviceType) && (
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiBadge color="hollow" iconType="beaker">
|
||||
<EuiText size="xs">
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.content.indices.selectConnector.connectorCheckable.techPreviewLabel',
|
||||
{
|
||||
defaultMessage: 'Tech preview',
|
||||
}
|
||||
)}
|
||||
</EuiText>
|
||||
</EuiBadge>
|
||||
</EuiFlexItem>
|
||||
)}
|
||||
</EuiFlexGroup>
|
||||
</EuiFlexItem>
|
||||
{documentationUrl && (
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiText size="xs">
|
||||
<EuiLink
|
||||
data-test-subj="entSearchContent-connectors-selectConnector-documentationLink"
|
||||
data-telemetry-id="entSearchContent-connectors-selectConnector-documentationLink"
|
||||
target="_blank"
|
||||
href={documentationUrl}
|
||||
>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.content.indices.selectConnector.connectorCheckable.documentationLinkLabel',
|
||||
{
|
||||
defaultMessage: 'Documentation',
|
||||
}
|
||||
)}
|
||||
</EuiLink>
|
||||
</EuiText>
|
||||
</EuiFlexItem>
|
||||
)}
|
||||
</EuiFlexGroup>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</EuiPanel>
|
||||
);
|
||||
};
|
|
@ -1,153 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import React, { useState } from 'react';
|
||||
|
||||
import {
|
||||
EuiBadge,
|
||||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiIcon,
|
||||
EuiPanel,
|
||||
EuiPopover,
|
||||
EuiText,
|
||||
} from '@elastic/eui';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import connectorLogo from '../../../../../assets/images/connector_logo_network_drive_version.svg';
|
||||
|
||||
const nativePopoverPanels = [
|
||||
{
|
||||
description: i18n.translate(
|
||||
'xpack.enterpriseSearch.connectorDescriptionBadge.native.chooseADataSourceLabel',
|
||||
{ defaultMessage: "Choose a data source you'd like to sync" }
|
||||
),
|
||||
icons: [<EuiIcon type="documents" />],
|
||||
id: 'native-choose-source',
|
||||
},
|
||||
{
|
||||
description: i18n.translate(
|
||||
'xpack.enterpriseSearch.connectorDescriptionBadge.native.configureConnectorLabel',
|
||||
{ defaultMessage: 'Configure your connector using our Kibana UI' }
|
||||
),
|
||||
icons: [<EuiIcon type={connectorLogo} />, <EuiIcon type="logoElastic" />],
|
||||
id: 'native-configure-connector',
|
||||
},
|
||||
];
|
||||
|
||||
const connectorClientPopoverPanels = [
|
||||
{
|
||||
description: i18n.translate(
|
||||
'xpack.enterpriseSearch.connectorDescriptionBadge.client.chooseADataSourceLabel',
|
||||
{ defaultMessage: "Choose a data source you'd like to sync" }
|
||||
),
|
||||
icons: [<EuiIcon type="documents" />],
|
||||
id: 'client-choose-source',
|
||||
},
|
||||
{
|
||||
description: i18n.translate(
|
||||
'xpack.enterpriseSearch.connectorDescriptionBadge.client.configureConnectorLabel',
|
||||
{
|
||||
defaultMessage:
|
||||
'Deploy connector code on your own infrastructure by running from source, or using Docker',
|
||||
}
|
||||
),
|
||||
icons: [
|
||||
<EuiIcon type={connectorLogo} />,
|
||||
<EuiIcon type="sortRight" />,
|
||||
<EuiIcon type="launch" />,
|
||||
],
|
||||
id: 'client-deploy',
|
||||
},
|
||||
{
|
||||
description: i18n.translate(
|
||||
'xpack.enterpriseSearch.connectorDescriptionBadge.client.enterDetailsLabel',
|
||||
{
|
||||
defaultMessage: 'Enter access and connection details for your data source',
|
||||
}
|
||||
),
|
||||
icons: [
|
||||
<EuiIcon type="documents" />,
|
||||
<EuiIcon type="sortRight" />,
|
||||
<EuiIcon type={connectorLogo} />,
|
||||
<EuiIcon type="sortRight" />,
|
||||
<EuiIcon type="logoElastic" />,
|
||||
],
|
||||
id: 'client-configure-connector',
|
||||
},
|
||||
];
|
||||
|
||||
export interface ConnectorDescriptionBadgeProps {
|
||||
isNative: boolean;
|
||||
}
|
||||
|
||||
export const ConnectorDescriptionBadge: React.FC<ConnectorDescriptionBadgeProps> = ({
|
||||
isNative,
|
||||
}) => {
|
||||
const [isPopoverOpen, setIsPopoverOpen] = useState(false);
|
||||
const panels = isNative ? nativePopoverPanels : connectorClientPopoverPanels;
|
||||
return (
|
||||
<EuiPopover
|
||||
button={
|
||||
<EuiBadge
|
||||
iconSide="right"
|
||||
iconType="iInCircle"
|
||||
onClick={() => setIsPopoverOpen(true)}
|
||||
onClickAriaLabel={i18n.translate(
|
||||
'xpack.enterpriseSearch.selectConnector.badgeOnClick.ariaLabel',
|
||||
{
|
||||
defaultMessage: 'Click to open connector explanation popover',
|
||||
}
|
||||
)}
|
||||
>
|
||||
{isNative
|
||||
? i18n.translate('xpack.enterpriseSearch.selectConnector.nativeBadgeLabel', {
|
||||
defaultMessage: 'Elastic managed',
|
||||
})
|
||||
: i18n.translate('xpack.enterpriseSearch.selectConnector.connectorClientBadgeLabel', {
|
||||
defaultMessage: 'Self-managed',
|
||||
})}
|
||||
</EuiBadge>
|
||||
}
|
||||
isOpen={isPopoverOpen}
|
||||
closePopover={() => {
|
||||
setIsPopoverOpen(false);
|
||||
}}
|
||||
>
|
||||
<EuiPanel hasBorder={false} hasShadow={false}>
|
||||
<EuiFlexGroup>
|
||||
{panels.map((panel) => {
|
||||
return (
|
||||
<EuiFlexItem grow={false} key={panel.id}>
|
||||
<EuiFlexGroup
|
||||
direction="column"
|
||||
alignItems="center"
|
||||
gutterSize="s"
|
||||
style={{ maxWidth: 240 }}
|
||||
>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiFlexGroup responsive={false} gutterSize="s">
|
||||
{panel.icons.map((icon, index) => (
|
||||
<EuiFlexItem grow={false} key={index}>
|
||||
{icon}
|
||||
</EuiFlexItem>
|
||||
))}
|
||||
</EuiFlexGroup>
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiText size="s" grow={false} textAlign="center">
|
||||
<p>{panel.description}</p>
|
||||
</EuiText>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</EuiFlexItem>
|
||||
);
|
||||
})}
|
||||
</EuiFlexGroup>
|
||||
</EuiPanel>
|
||||
</EuiPopover>
|
||||
);
|
||||
};
|
|
@ -1,94 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { css } from '@emotion/react';
|
||||
|
||||
import {
|
||||
EuiPopover,
|
||||
EuiPopoverTitle,
|
||||
EuiText,
|
||||
EuiPopoverFooter,
|
||||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiButton,
|
||||
EuiPopoverProps,
|
||||
useEuiTheme,
|
||||
} from '@elastic/eui';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
interface NativePopoverProps {
|
||||
button: EuiPopoverProps['button'];
|
||||
closePopover: () => void;
|
||||
isPopoverOpen: boolean;
|
||||
}
|
||||
|
||||
export const NativePopover: React.FC<NativePopoverProps> = ({
|
||||
button,
|
||||
isPopoverOpen,
|
||||
closePopover,
|
||||
}) => {
|
||||
const { euiTheme } = useEuiTheme();
|
||||
return (
|
||||
<EuiPopover
|
||||
button={button}
|
||||
isOpen={isPopoverOpen}
|
||||
closePopover={closePopover}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
}}
|
||||
>
|
||||
<EuiPopoverTitle>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.selectConnectore.nativePopover.title',
|
||||
{
|
||||
defaultMessage: 'Elastic Cloud',
|
||||
}
|
||||
)}
|
||||
</EuiPopoverTitle>
|
||||
<EuiText
|
||||
grow={false}
|
||||
size="s"
|
||||
css={css`
|
||||
max-width: calc(${euiTheme.size.xl} * 10);
|
||||
`}
|
||||
>
|
||||
<p>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.selectConnectore.nativePopover.description',
|
||||
{
|
||||
defaultMessage:
|
||||
'Elastic managed connectors are hosted on Elastic Cloud. Get started with a free 14-day trial.',
|
||||
}
|
||||
)}
|
||||
</p>
|
||||
</EuiText>
|
||||
<EuiPopoverFooter>
|
||||
<EuiFlexGroup justifyContent="spaceBetween" alignItems="center">
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiButton
|
||||
data-test-subj="entSearchContent-connectors-nativePopover-trialButton"
|
||||
data-telemetry-id="entSearchContent-connectors-nativePopover-trialButton"
|
||||
iconType="popout"
|
||||
target="_blank"
|
||||
href="https://www.elastic.co/cloud/cloud-trial-overview"
|
||||
>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.selectConnector.cloudTrialButton',
|
||||
{
|
||||
defaultMessage: 'Elastic Cloud Trial',
|
||||
}
|
||||
)}
|
||||
</EuiButton>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</EuiPopoverFooter>
|
||||
</EuiPopover>
|
||||
);
|
||||
};
|
|
@ -1,392 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React, { useMemo, useState } from 'react';
|
||||
|
||||
import { useLocation } from 'react-router-dom';
|
||||
|
||||
import { css } from '@emotion/react';
|
||||
import { useValues } from 'kea';
|
||||
|
||||
import {
|
||||
EuiButton,
|
||||
EuiCallOut,
|
||||
EuiFacetButton,
|
||||
EuiFacetGroup,
|
||||
EuiFieldSearch,
|
||||
EuiFlexGrid,
|
||||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiHorizontalRule,
|
||||
EuiIcon,
|
||||
EuiPanel,
|
||||
EuiSpacer,
|
||||
EuiSwitch,
|
||||
EuiText,
|
||||
EuiTitle,
|
||||
useEuiTheme,
|
||||
} from '@elastic/eui';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import { FormattedMessage } from '@kbn/i18n-react';
|
||||
|
||||
import { CONNECTOR_CLIENTS_TYPE, CONNECTOR_NATIVE_TYPE } from '../../../../../../common/constants';
|
||||
|
||||
import connectorLogo from '../../../../../assets/images/connector_logo_network_drive_version.svg';
|
||||
|
||||
import { KibanaLogic } from '../../../../shared/kibana';
|
||||
import { LicensingLogic } from '../../../../shared/licensing';
|
||||
import { parseQueryParams } from '../../../../shared/query_params';
|
||||
|
||||
import { NEW_CONNECTOR_PATH } from '../../../routes';
|
||||
import { EnterpriseSearchContentPageTemplate } from '../../layout';
|
||||
|
||||
import { connectorsBreadcrumbs } from '../connectors';
|
||||
|
||||
import { ConnectorCheckable } from './connector_checkable';
|
||||
import { ConnectorDescriptionBadge } from './connector_description_badge_popout';
|
||||
|
||||
export type ConnectorFilter = typeof CONNECTOR_NATIVE_TYPE | typeof CONNECTOR_CLIENTS_TYPE;
|
||||
|
||||
export const parseConnectorFilter = (filter: string | string[] | null): ConnectorFilter | null => {
|
||||
const temp = Array.isArray(filter) ? filter[0] : filter ?? null;
|
||||
if (!temp) return null;
|
||||
if (temp === CONNECTOR_CLIENTS_TYPE) {
|
||||
return CONNECTOR_CLIENTS_TYPE;
|
||||
}
|
||||
if (temp === CONNECTOR_NATIVE_TYPE) {
|
||||
return CONNECTOR_NATIVE_TYPE;
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
export const SelectConnector: React.FC = () => {
|
||||
const { search } = useLocation();
|
||||
const { connectorTypes, isCloud } = useValues(KibanaLogic);
|
||||
const { hasPlatinumLicense } = useValues(LicensingLogic);
|
||||
const hasNativeAccess = isCloud;
|
||||
const { filter } = parseQueryParams(search);
|
||||
const [selectedConnectorFilter, setSelectedConnectorFilter] = useState<ConnectorFilter | null>(
|
||||
parseConnectorFilter(filter)
|
||||
);
|
||||
const useNativeFilter = selectedConnectorFilter === CONNECTOR_NATIVE_TYPE;
|
||||
const useClientsFilter = selectedConnectorFilter === CONNECTOR_CLIENTS_TYPE;
|
||||
const [showTechPreview, setShowTechPreview] = useState(true);
|
||||
const [showBeta, setShowBeta] = useState(true);
|
||||
const [searchTerm, setSearchTerm] = useState('');
|
||||
const filteredConnectors = useMemo(() => {
|
||||
const nativeConnectors = hasNativeAccess
|
||||
? connectorTypes
|
||||
.filter((connector) => connector.isNative)
|
||||
.sort((a, b) => a.name.localeCompare(b.name))
|
||||
: [];
|
||||
const nonNativeConnectors = hasNativeAccess
|
||||
? connectorTypes
|
||||
.filter((connector) => !connector.isNative)
|
||||
.sort((a, b) => a.name.localeCompare(b.name))
|
||||
: connectorTypes.sort((a, b) => a.name.localeCompare(b.name));
|
||||
const connectors =
|
||||
!hasNativeAccess || useClientsFilter
|
||||
? connectorTypes.sort((a, b) => a.name.localeCompare(b.name))
|
||||
: [...nativeConnectors, ...nonNativeConnectors];
|
||||
|
||||
return connectors
|
||||
.filter((connector) => (showBeta ? true : !connector.isBeta))
|
||||
.filter((connector) => (showTechPreview ? true : !connector.isTechPreview))
|
||||
.filter((connector) => (useNativeFilter ? connector.isNative : true))
|
||||
.filter((connector) =>
|
||||
searchTerm ? connector.name.toLowerCase().includes(searchTerm.toLowerCase()) : true
|
||||
);
|
||||
}, [hasNativeAccess, useClientsFilter, showBeta, showTechPreview, useNativeFilter, searchTerm]);
|
||||
const { euiTheme } = useEuiTheme();
|
||||
|
||||
return (
|
||||
<EnterpriseSearchContentPageTemplate
|
||||
pageChrome={[
|
||||
...connectorsBreadcrumbs,
|
||||
i18n.translate('xpack.enterpriseSearch.content.indices.selectConnector.breadcrumb', {
|
||||
defaultMessage: 'Select connector',
|
||||
}),
|
||||
]}
|
||||
pageViewTelemetry="select_connector"
|
||||
isLoading={false}
|
||||
pageHeader={{
|
||||
description: i18n.translate(
|
||||
'xpack.enterpriseSearch.content.indices.selectConnector.description',
|
||||
{
|
||||
defaultMessage:
|
||||
"Select which third-party data source you'd like to sync to Elastic. All data sources are supported by self-managed connectors. Check the availability for Elastic managed connectors by using the filters.",
|
||||
}
|
||||
),
|
||||
pageTitle: i18n.translate('xpack.enterpriseSearch.content.indices.selectConnector.title', {
|
||||
defaultMessage: 'Select a connector',
|
||||
}),
|
||||
}}
|
||||
>
|
||||
<EuiFlexGroup>
|
||||
<EuiFlexItem
|
||||
grow={false}
|
||||
css={css`
|
||||
max-width: calc(${euiTheme.size.xxl} * 5);
|
||||
`}
|
||||
>
|
||||
<EuiFlexGroup direction="column" gutterSize="none">
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiFacetGroup>
|
||||
{hasNativeAccess && (
|
||||
<EuiFacetButton
|
||||
quantity={connectorTypes.length}
|
||||
isSelected={!useNativeFilter && !useClientsFilter}
|
||||
onClick={() => setSelectedConnectorFilter(null)}
|
||||
>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.content.indices.selectConnector.allConnectorsLabel',
|
||||
{ defaultMessage: 'All connectors' }
|
||||
)}
|
||||
</EuiFacetButton>
|
||||
)}
|
||||
|
||||
{hasNativeAccess && (
|
||||
<EuiFacetButton
|
||||
key="native"
|
||||
quantity={connectorTypes.filter((connector) => connector.isNative).length}
|
||||
isSelected={useNativeFilter}
|
||||
onClick={() =>
|
||||
setSelectedConnectorFilter(!useNativeFilter ? CONNECTOR_NATIVE_TYPE : null)
|
||||
}
|
||||
>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.content.indices.selectConnector.nativeLabel',
|
||||
{
|
||||
defaultMessage: 'Elastic managed',
|
||||
}
|
||||
)}
|
||||
</EuiFacetButton>
|
||||
)}
|
||||
|
||||
<EuiFacetButton
|
||||
quantity={connectorTypes.length}
|
||||
isSelected={(!hasNativeAccess && !useNativeFilter) || useClientsFilter}
|
||||
onClick={() =>
|
||||
setSelectedConnectorFilter(!useClientsFilter ? CONNECTOR_CLIENTS_TYPE : null)
|
||||
}
|
||||
>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.content.indices.selectConnector.connectorClients',
|
||||
{
|
||||
defaultMessage: 'Self-managed',
|
||||
}
|
||||
)}
|
||||
</EuiFacetButton>
|
||||
{!hasNativeAccess && (
|
||||
<EuiFacetButton
|
||||
key="native"
|
||||
quantity={connectorTypes.filter((connector) => connector.isNative).length}
|
||||
isSelected={useNativeFilter}
|
||||
onClick={() =>
|
||||
setSelectedConnectorFilter(!useNativeFilter ? CONNECTOR_NATIVE_TYPE : null)
|
||||
}
|
||||
>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.content.indices.selectConnector.nativeLabel',
|
||||
{
|
||||
defaultMessage: 'Elastic managed',
|
||||
}
|
||||
)}
|
||||
</EuiFacetButton>
|
||||
)}
|
||||
</EuiFacetGroup>
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiHorizontalRule margin="s" />
|
||||
<EuiPanel paddingSize="s" hasShadow={false}>
|
||||
<EuiSwitch
|
||||
checked={showBeta}
|
||||
label={i18n.translate(
|
||||
'xpack.enterpriseSearch.content.indices.selectConnector.showBetaLabel',
|
||||
{ defaultMessage: 'Display Beta connectors' }
|
||||
)}
|
||||
onChange={(e) => setShowBeta(e.target.checked)}
|
||||
/>
|
||||
<EuiSwitch
|
||||
checked={showTechPreview}
|
||||
label={i18n.translate(
|
||||
'xpack.enterpriseSearch.content.indices.selectConnector.showTechPreviewLabel',
|
||||
{ defaultMessage: 'Display Tech Preview connectors' }
|
||||
)}
|
||||
onChange={(e) => setShowTechPreview(e.target.checked)}
|
||||
/>
|
||||
</EuiPanel>
|
||||
<EuiSpacer size="s" />
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiHorizontalRule margin="s" />
|
||||
<EuiPanel paddingSize="s" hasShadow={false} grow={false}>
|
||||
<EuiFlexGroup gutterSize="xs" alignItems="center" responsive={false}>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiIcon type="logoCloud" />
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem>
|
||||
<EuiTitle size="xs">
|
||||
<h4>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.selectConnector.nativeConnectorsTitleLabel',
|
||||
{ defaultMessage: 'Elastic managed connectors' }
|
||||
)}
|
||||
</h4>
|
||||
</EuiTitle>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
<EuiSpacer size="s" />
|
||||
<ConnectorDescriptionBadge isNative />
|
||||
<EuiSpacer size="s" />
|
||||
<EuiText size="xs" grow={false}>
|
||||
<p>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.selectConnector.p.areAvailableDirectlyWithinLabel',
|
||||
{
|
||||
defaultMessage:
|
||||
'Available directly within Elastic Cloud deployments. No additional infrastructure is required. You can also convert Elastic managed connectors to self-managed connectors.',
|
||||
}
|
||||
)}
|
||||
</p>
|
||||
</EuiText>
|
||||
</EuiPanel>
|
||||
<EuiSpacer size="s" />
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiPanel paddingSize="s" hasShadow={false} grow={false}>
|
||||
<EuiFlexGroup gutterSize="xs" alignItems="center" responsive={false}>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiIcon type={connectorLogo} />
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem>
|
||||
<EuiTitle size="xs">
|
||||
<h4>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.selectConnector.h4.connectorClientsLabel',
|
||||
{ defaultMessage: 'Self-managed connectors' }
|
||||
)}
|
||||
</h4>
|
||||
</EuiTitle>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
<EuiSpacer size="s" />
|
||||
<ConnectorDescriptionBadge isNative={false} />
|
||||
<EuiSpacer size="s" />
|
||||
<EuiText size="xs" grow={false}>
|
||||
<p>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.selectConnector.p.deployConnectorsOnYourLabel',
|
||||
{
|
||||
defaultMessage:
|
||||
'Deploy connectors on your own infrastructure. You can also customize existing self-managed connectors, or build your own using our connector framework.',
|
||||
}
|
||||
)}
|
||||
</p>
|
||||
</EuiText>
|
||||
</EuiPanel>
|
||||
<EuiSpacer size="s" />
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem>
|
||||
<EuiFieldSearch
|
||||
data-test-subj="entSearchContent-connectors-selectConnector-searchInput"
|
||||
data-telemetry-id="entSearchContent-connectors-selectConnector-searchInput"
|
||||
aria-label={i18n.translate(
|
||||
'xpack.enterpriseSearch.content.indices.selectConnector.search.ariaLabel',
|
||||
{ defaultMessage: 'Search through connectors' }
|
||||
)}
|
||||
isClearable
|
||||
onChange={(event) => setSearchTerm(event.target.value)}
|
||||
placeholder={i18n.translate(
|
||||
'xpack.enterpriseSearch.content.indices.selectConnector.searchPlaceholder',
|
||||
{ defaultMessage: 'Search' }
|
||||
)}
|
||||
value={searchTerm}
|
||||
fullWidth
|
||||
/>
|
||||
<EuiSpacer size="s" />
|
||||
<EuiFlexGrid columns={3}>
|
||||
{filteredConnectors.map((connector) => (
|
||||
<EuiFlexItem key={connector.name} grow>
|
||||
<ConnectorCheckable
|
||||
showNativePopover={(!hasNativeAccess && useNativeFilter) ?? false}
|
||||
showLicensePopover={connector.platinumOnly && !hasPlatinumLicense && !isCloud}
|
||||
isDisabled={(!hasNativeAccess && useNativeFilter) ?? false}
|
||||
iconType={connector.iconPath}
|
||||
isBeta={connector.isBeta}
|
||||
isTechPreview={Boolean(connector.isTechPreview)}
|
||||
showNativeBadge={
|
||||
(hasNativeAccess && connector.isNative && !useClientsFilter) ||
|
||||
(!hasNativeAccess && useNativeFilter)
|
||||
}
|
||||
name={connector.name}
|
||||
serviceType={connector.serviceType}
|
||||
onConnectorSelect={(isNative?: boolean) => {
|
||||
const queryParam = new URLSearchParams();
|
||||
queryParam.append('service_type', connector.serviceType);
|
||||
if (isNative !== undefined) {
|
||||
queryParam.append(
|
||||
'connector_type',
|
||||
isNative && !useClientsFilter
|
||||
? CONNECTOR_NATIVE_TYPE
|
||||
: CONNECTOR_CLIENTS_TYPE
|
||||
);
|
||||
}
|
||||
KibanaLogic.values.navigateToUrl(
|
||||
`${NEW_CONNECTOR_PATH}?${queryParam.toString()}`
|
||||
);
|
||||
}}
|
||||
documentationUrl={connector.docsUrl}
|
||||
/>
|
||||
</EuiFlexItem>
|
||||
))}
|
||||
</EuiFlexGrid>
|
||||
{!hasNativeAccess && useNativeFilter && (
|
||||
<>
|
||||
<EuiSpacer />
|
||||
<EuiCallOut
|
||||
size="m"
|
||||
title={i18n.translate(
|
||||
'xpack.enterpriseSearch.content.indices.selectConnector.cloudCallout.title',
|
||||
{
|
||||
defaultMessage: 'Elastic Cloud',
|
||||
}
|
||||
)}
|
||||
iconType="iInCircle"
|
||||
>
|
||||
<p>
|
||||
<FormattedMessage
|
||||
id="xpack.enterpriseSearch.content.indices.selectConnector.cloudCallout.description"
|
||||
defaultMessage="Elastic managed connectors are hosted on Elastic Cloud. Get started with a free 14-day trial."
|
||||
/>
|
||||
</p>
|
||||
<EuiButton
|
||||
data-test-subj="entSearchContent-connectors-selectConnector-cloudCallout-trialButton"
|
||||
data-telemetry-id="entSearchContent-connectors-selectConnector-cloudCallout-trialButton"
|
||||
color="primary"
|
||||
fill
|
||||
href="https://www.elastic.co/cloud/cloud-trial-overview"
|
||||
iconType="popout"
|
||||
iconSide="right"
|
||||
target="_blank"
|
||||
>
|
||||
<FormattedMessage
|
||||
id="xpack.enterpriseSearch.content.indices.selectConnector.cloudCallout.trialLink"
|
||||
defaultMessage="Elastic Cloud Trial"
|
||||
/>
|
||||
</EuiButton>
|
||||
</EuiCallOut>
|
||||
</>
|
||||
)}
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</EnterpriseSearchContentPageTemplate>
|
||||
);
|
||||
};
|
|
@ -1,12 +0,0 @@
|
|||
.buttonGroup {
|
||||
.buttonGroupOption {
|
||||
cursor: pointer;
|
||||
|
||||
&--selected {
|
||||
.buttonGroupOption-panel {
|
||||
border: 1px $euiColorSuccess solid;
|
||||
box-shadow: 0 0 0 1px $euiColorSuccess;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,103 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import classNames from 'classnames';
|
||||
|
||||
import {
|
||||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiTitle,
|
||||
EuiText,
|
||||
EuiSplitPanel,
|
||||
EuiButtonIcon,
|
||||
EuiSpacer,
|
||||
} from '@elastic/eui';
|
||||
|
||||
import './button_group.scss';
|
||||
|
||||
export interface ButtonGroupOption {
|
||||
badge?: React.ReactNode;
|
||||
description: string;
|
||||
footer: string;
|
||||
icon: string;
|
||||
id: string;
|
||||
label: string;
|
||||
}
|
||||
|
||||
interface Props {
|
||||
onChange(option: ButtonGroupOption): void;
|
||||
options: ButtonGroupOption[];
|
||||
selected?: ButtonGroupOption;
|
||||
}
|
||||
|
||||
export const ButtonGroup: React.FC<Props> = ({ onChange, options, selected }) => (
|
||||
<EuiFlexGroup className="buttonGroup" direction="column" gutterSize="m" role="radiogroup">
|
||||
{options.map((option, index) => {
|
||||
const isSelected = option === selected;
|
||||
return (
|
||||
<EuiFlexItem
|
||||
className={classNames('buttonGroupOption', {
|
||||
'buttonGroupOption--selected': isSelected,
|
||||
})}
|
||||
grow={false}
|
||||
key={index}
|
||||
onClick={() => {
|
||||
onChange(option);
|
||||
}}
|
||||
>
|
||||
<EuiSplitPanel.Outer
|
||||
borderRadius="m"
|
||||
grow
|
||||
hasBorder
|
||||
hasShadow={false}
|
||||
className="buttonGroupOption-panel"
|
||||
>
|
||||
<EuiSplitPanel.Inner color="plain" paddingSize="s">
|
||||
<EuiFlexGroup alignItems="center" responsive={false}>
|
||||
<EuiFlexItem>
|
||||
{option.badge && (
|
||||
<>
|
||||
<div>{option.badge}</div>
|
||||
<EuiSpacer size="xs" />
|
||||
</>
|
||||
)}
|
||||
<EuiTitle size="xs">
|
||||
<h4>{option.label}</h4>
|
||||
</EuiTitle>
|
||||
<EuiSpacer size="s" />
|
||||
<EuiText size="s" color="subdued">
|
||||
<p>{option.description}</p>
|
||||
</EuiText>
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiButtonIcon
|
||||
display="base"
|
||||
iconType={isSelected ? 'check' : 'arrowRight'}
|
||||
color={isSelected ? 'success' : 'primary'}
|
||||
aria-label={option.label}
|
||||
aria-checked={isSelected}
|
||||
role="radio"
|
||||
autoFocus={index === 0}
|
||||
/>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</EuiSplitPanel.Inner>
|
||||
<EuiSplitPanel.Inner color={isSelected ? 'success' : 'subdued'} paddingSize="s">
|
||||
<EuiText size="s" color={isSelected ? 'success' : 'subdued'}>
|
||||
<p>
|
||||
<strong>{option.footer}</strong>
|
||||
</p>
|
||||
</EuiText>
|
||||
</EuiSplitPanel.Inner>
|
||||
</EuiSplitPanel.Outer>
|
||||
</EuiFlexItem>
|
||||
);
|
||||
})}
|
||||
</EuiFlexGroup>
|
||||
);
|
|
@ -1,113 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { EuiSelectOption } from '@elastic/eui';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { languageToText } from '../../utils/language_to_text';
|
||||
|
||||
export const NEW_INDEX_TEMPLATE_TYPES: { [key: string]: string } = {
|
||||
api: i18n.translate('xpack.enterpriseSearch.content.newIndex.types.api', {
|
||||
defaultMessage: 'API endpoint',
|
||||
}),
|
||||
connector: i18n.translate('xpack.enterpriseSearch.content.newIndex.types.connector', {
|
||||
defaultMessage: 'Connector',
|
||||
}),
|
||||
crawler: i18n.translate('xpack.enterpriseSearch.content.newIndex.types.crawler', {
|
||||
defaultMessage: 'Web crawler',
|
||||
}),
|
||||
elasticsearch: i18n.translate('xpack.enterpriseSearch.content.newIndex.types.elasticsearch', {
|
||||
defaultMessage: 'Elasticsearch index',
|
||||
}),
|
||||
json: i18n.translate('xpack.enterpriseSearch.content.newIndex.types.json', {
|
||||
defaultMessage: 'JSON',
|
||||
}),
|
||||
};
|
||||
|
||||
export const DOCUMENTS_API_JSON_EXAMPLE = {
|
||||
id: 'park_rocky-mountain',
|
||||
title: 'Rocky Mountain',
|
||||
description:
|
||||
'Bisected north to south by the Continental Divide, this portion of the Rockies has ecosystems varying from over 150 riparian lakes to montane and subalpine forests to treeless alpine tundra. Wildlife including mule deer, bighorn sheep, black bears, and cougars inhabit its igneous mountains and glacial valleys. Longs Peak, a classic Colorado fourteener, and the scenic Bear Lake are popular destinations, as well as the historic Trail Ridge Road, which reaches an elevation of more than 12,000 feet (3,700 m).',
|
||||
nps_link: 'https://www.nps.gov/romo/index.htm',
|
||||
states: ['Colorado'],
|
||||
visitors: 4517585,
|
||||
world_heritage_site: false,
|
||||
location: '40.4,-105.58',
|
||||
acres: 265795.2,
|
||||
square_km: 1075.6,
|
||||
date_established: '1915-01-26T06:00:00Z',
|
||||
};
|
||||
|
||||
export const UNIVERSAL_LANGUAGE_VALUE = '';
|
||||
|
||||
export const SUPPORTED_LANGUAGES: EuiSelectOption[] = [
|
||||
{
|
||||
text: languageToText(UNIVERSAL_LANGUAGE_VALUE),
|
||||
value: UNIVERSAL_LANGUAGE_VALUE,
|
||||
},
|
||||
{
|
||||
disabled: true,
|
||||
text: '—',
|
||||
},
|
||||
{
|
||||
text: languageToText('zh'),
|
||||
value: 'zh',
|
||||
},
|
||||
{
|
||||
text: languageToText('da'),
|
||||
value: 'da',
|
||||
},
|
||||
{
|
||||
text: languageToText('nl'),
|
||||
value: 'nl',
|
||||
},
|
||||
{
|
||||
text: languageToText('en'),
|
||||
value: 'en',
|
||||
},
|
||||
{
|
||||
text: languageToText('fr'),
|
||||
value: 'fr',
|
||||
},
|
||||
{
|
||||
text: languageToText('de'),
|
||||
value: 'de',
|
||||
},
|
||||
{
|
||||
text: languageToText('it'),
|
||||
value: 'it',
|
||||
},
|
||||
{
|
||||
text: languageToText('ja'),
|
||||
value: 'ja',
|
||||
},
|
||||
{
|
||||
text: languageToText('ko'),
|
||||
value: 'ko',
|
||||
},
|
||||
{
|
||||
text: languageToText('pt'),
|
||||
value: 'pt',
|
||||
},
|
||||
{
|
||||
text: languageToText('pt-br'),
|
||||
value: 'pt-br',
|
||||
},
|
||||
{
|
||||
text: languageToText('ru'),
|
||||
value: 'ru',
|
||||
},
|
||||
{
|
||||
text: languageToText('es'),
|
||||
value: 'es',
|
||||
},
|
||||
{
|
||||
text: languageToText('th'),
|
||||
value: 'th',
|
||||
},
|
||||
];
|
|
@ -1,58 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
/**
|
||||
* TODO:
|
||||
* - Need to add documentation URLs (search for `#`s)
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { EuiEmptyPrompt, EuiLink, EuiPanel, EuiTitle } from '@elastic/eui';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { docLinks } from '../../../shared/doc_links';
|
||||
|
||||
export const SearchIndexEmptyState: React.FC = () => {
|
||||
return (
|
||||
<EuiPanel color="subdued">
|
||||
<EuiEmptyPrompt
|
||||
title={
|
||||
<h3>
|
||||
{i18n.translate('xpack.enterpriseSearch.content.newIndex.emptyState.title', {
|
||||
defaultMessage: 'Select an ingestion method',
|
||||
})}
|
||||
</h3>
|
||||
}
|
||||
body={
|
||||
<p>
|
||||
{i18n.translate('xpack.enterpriseSearch.content.newIndex.emptyState.description', {
|
||||
defaultMessage:
|
||||
'Data you add in Search is called a search index and it’s searchable in both App Search and Workplace Search. Now you can use your connectors in App Search and your web crawlers in Workplace Search.',
|
||||
})}
|
||||
</p>
|
||||
}
|
||||
footer={
|
||||
<>
|
||||
<EuiTitle size="xxs">
|
||||
<h4>
|
||||
{i18n.translate('xpack.enterpriseSearch.content.newIndex.emptyState.footer.title', {
|
||||
defaultMessage: 'Want to learn more about search indices?',
|
||||
})}
|
||||
</h4>
|
||||
</EuiTitle>
|
||||
<EuiLink href={docLinks.start} target="_blank">
|
||||
{i18n.translate('xpack.enterpriseSearch.content.newIndex.emptyState.footer.link', {
|
||||
defaultMessage: 'Read the docs',
|
||||
})}
|
||||
</EuiLink>
|
||||
</>
|
||||
}
|
||||
/>
|
||||
</EuiPanel>
|
||||
);
|
||||
};
|
|
@ -1,8 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
export { NewIndex } from './new_index';
|
|
@ -1,32 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { setMockValues } from '../../../../__mocks__/kea_logic';
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { shallow } from 'enzyme';
|
||||
|
||||
import { Status } from '../../../../../../common/types/api';
|
||||
|
||||
import { NewSearchIndexTemplate } from '../new_search_index_template';
|
||||
|
||||
import { MethodApi } from './method_api';
|
||||
|
||||
describe('MethodApi', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
setMockValues({ status: Status.IDLE });
|
||||
});
|
||||
|
||||
it('renders API ingestion method tab', () => {
|
||||
const wrapper = shallow(<MethodApi />);
|
||||
const template = wrapper.find(NewSearchIndexTemplate);
|
||||
|
||||
expect(template.prop('type')).toEqual('api');
|
||||
});
|
||||
});
|
|
@ -1,28 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { useActions, useValues } from 'kea';
|
||||
|
||||
import { Status } from '../../../../../../common/types/api';
|
||||
|
||||
import { NewSearchIndexTemplate } from '../new_search_index_template';
|
||||
|
||||
import { MethodApiLogic } from './method_api_logic';
|
||||
|
||||
export const MethodApi: React.FC = () => {
|
||||
const { makeRequest } = useActions(MethodApiLogic);
|
||||
const { status } = useValues(MethodApiLogic);
|
||||
return (
|
||||
<NewSearchIndexTemplate
|
||||
type="api"
|
||||
buttonLoading={status === Status.LOADING}
|
||||
onSubmit={(indexName, language) => makeRequest({ indexName, language })}
|
||||
/>
|
||||
);
|
||||
};
|
|
@ -1,30 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { LogicMounter, mockKibanaValues } from '../../../../__mocks__/kea_logic';
|
||||
|
||||
import { MethodApiLogic } from './method_api_logic';
|
||||
|
||||
describe('MethodApiLogic', () => {
|
||||
const { mount } = new LogicMounter(MethodApiLogic);
|
||||
const { navigateToUrl } = mockKibanaValues;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mount();
|
||||
});
|
||||
|
||||
describe('listeners', () => {
|
||||
describe('apiSuccess', () => {
|
||||
it('navigates user to index detail view', () => {
|
||||
MethodApiLogic.actions.apiSuccess({ indexName: 'my-index' });
|
||||
|
||||
expect(navigateToUrl).toHaveBeenCalledWith('/search_indices/my-index/overview');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,47 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { kea, MakeLogicType } from 'kea';
|
||||
|
||||
import { Actions } from '../../../../shared/api_logic/create_api_logic';
|
||||
import { generateEncodedPath } from '../../../../shared/encode_path_params';
|
||||
|
||||
import { KibanaLogic } from '../../../../shared/kibana';
|
||||
import {
|
||||
CreateApiIndexApiLogic,
|
||||
CreateApiIndexApiLogicArgs,
|
||||
CreateApiIndexApiLogicResponse,
|
||||
} from '../../../api/index/create_api_index_api_logic';
|
||||
import { SEARCH_INDEX_TAB_PATH } from '../../../routes';
|
||||
import { SearchIndexTabId } from '../../search_index/search_index';
|
||||
|
||||
type MethodApiActions = Pick<
|
||||
Actions<CreateApiIndexApiLogicArgs, CreateApiIndexApiLogicResponse>,
|
||||
'apiSuccess' | 'makeRequest'
|
||||
>;
|
||||
|
||||
interface MethodApiValues {
|
||||
status: (typeof CreateApiIndexApiLogic.values)['status'];
|
||||
}
|
||||
|
||||
export const MethodApiLogic = kea<MakeLogicType<MethodApiValues, MethodApiActions>>({
|
||||
connect: {
|
||||
actions: [CreateApiIndexApiLogic, ['apiSuccess', 'makeRequest']],
|
||||
values: [CreateApiIndexApiLogic, ['status']],
|
||||
},
|
||||
listeners: {
|
||||
apiSuccess: ({ indexName }) => {
|
||||
KibanaLogic.values.navigateToUrl(
|
||||
generateEncodedPath(SEARCH_INDEX_TAB_PATH, {
|
||||
indexName,
|
||||
tabId: SearchIndexTabId.OVERVIEW,
|
||||
})
|
||||
);
|
||||
},
|
||||
},
|
||||
path: ['enterprise_search', 'method_api'],
|
||||
});
|
|
@ -12,6 +12,7 @@ import { useActions, useValues } from 'kea';
|
|||
import { EuiFlexGroup, EuiFlexItem } from '@elastic/eui';
|
||||
|
||||
import { Status } from '../../../../../../common/types/api';
|
||||
import { errorToText } from '../../../../../../common/utils/error_to_text';
|
||||
import { docLinks } from '../../../../shared/doc_links';
|
||||
import { KibanaLogic } from '../../../../shared/kibana';
|
||||
import { LicensingLogic } from '../../../../shared/licensing';
|
||||
|
@ -23,8 +24,6 @@ import { AddConnectorApiLogic } from '../../../api/connector/add_connector_api_l
|
|||
|
||||
import { FetchCloudHealthApiLogic } from '../../../api/stats/fetch_cloud_health_api_logic';
|
||||
|
||||
import { errorToText } from '../utils/error_to_text';
|
||||
|
||||
import { AddConnectorLogic } from './add_connector_logic';
|
||||
import { NewConnectorTemplate } from './new_connector_template';
|
||||
|
||||
|
|
|
@ -1,37 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { setMockValues, setMockActions } from '../../../../__mocks__/kea_logic';
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { shallow } from 'enzyme';
|
||||
|
||||
import { Status } from '../../../../../../common/types/api';
|
||||
|
||||
import { NewSearchIndexTemplate } from '../new_search_index_template';
|
||||
|
||||
import { MethodCrawler } from './method_crawler';
|
||||
|
||||
describe('MethodCrawler', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
setMockValues({
|
||||
status: Status.IDLE,
|
||||
});
|
||||
setMockActions({
|
||||
makeRequest: jest.fn(),
|
||||
});
|
||||
});
|
||||
|
||||
it('renders API ingestion method tab', () => {
|
||||
const wrapper = shallow(<MethodCrawler />);
|
||||
const template = wrapper.find(NewSearchIndexTemplate);
|
||||
|
||||
expect(template.prop('type')).toEqual('crawler');
|
||||
});
|
||||
});
|
|
@ -1,57 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { useValues, useActions } from 'kea';
|
||||
|
||||
import { EuiFlexGroup, EuiFlexItem } from '@elastic/eui';
|
||||
|
||||
import { Status } from '../../../../../../common/types/api';
|
||||
import { docLinks } from '../../../../shared/doc_links';
|
||||
import { HttpLogic } from '../../../../shared/http';
|
||||
import { KibanaLogic } from '../../../../shared/kibana';
|
||||
import { LicensingLogic } from '../../../../shared/licensing';
|
||||
import {
|
||||
LicensingCallout,
|
||||
LICENSING_FEATURE,
|
||||
} from '../../../../shared/licensing_callout/licensing_callout';
|
||||
import { CreateCrawlerIndexApiLogic } from '../../../api/crawler/create_crawler_index_api_logic';
|
||||
import { NewSearchIndexTemplate } from '../new_search_index_template';
|
||||
|
||||
import { MethodCrawlerLogic } from './method_crawler_logic';
|
||||
|
||||
export const MethodCrawler: React.FC = () => {
|
||||
const { status } = useValues(CreateCrawlerIndexApiLogic);
|
||||
const { makeRequest } = useActions(CreateCrawlerIndexApiLogic);
|
||||
const { isCloud } = useValues(KibanaLogic);
|
||||
const { hasPlatinumLicense } = useValues(LicensingLogic);
|
||||
const { errorConnectingMessage } = useValues(HttpLogic);
|
||||
|
||||
const isGated = !isCloud && !hasPlatinumLicense;
|
||||
|
||||
MethodCrawlerLogic.mount();
|
||||
|
||||
return (
|
||||
<EuiFlexGroup direction="column">
|
||||
{isGated && (
|
||||
<EuiFlexItem>
|
||||
<LicensingCallout feature={LICENSING_FEATURE.CRAWLER} />
|
||||
</EuiFlexItem>
|
||||
)}
|
||||
<EuiFlexItem>
|
||||
<NewSearchIndexTemplate
|
||||
type="crawler"
|
||||
onSubmit={(indexName, language) => makeRequest({ indexName, language })}
|
||||
disabled={isGated || Boolean(errorConnectingMessage)}
|
||||
buttonLoading={status === Status.LOADING}
|
||||
docsUrl={docLinks.crawlerOverview}
|
||||
/>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
);
|
||||
};
|
|
@ -1,62 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import {
|
||||
LogicMounter,
|
||||
mockFlashMessageHelpers,
|
||||
mockKibanaValues,
|
||||
} from '../../../../__mocks__/kea_logic';
|
||||
|
||||
import { HttpError } from '../../../../../../common/types/api';
|
||||
|
||||
import { CreateCrawlerIndexApiLogic } from '../../../api/crawler/create_crawler_index_api_logic';
|
||||
|
||||
import { MethodCrawlerLogic } from './method_crawler_logic';
|
||||
|
||||
describe('MethodCrawlerLogic', () => {
|
||||
const { mount } = new LogicMounter(MethodCrawlerLogic);
|
||||
const { mount: apiLogicMount } = new LogicMounter(CreateCrawlerIndexApiLogic);
|
||||
const { clearFlashMessages, flashAPIErrors } = mockFlashMessageHelpers;
|
||||
const { navigateToUrl } = mockKibanaValues;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
apiLogicMount();
|
||||
mount();
|
||||
});
|
||||
|
||||
describe('listeners', () => {
|
||||
describe('apiSuccess', () => {
|
||||
it('navigates user to index detail view', () => {
|
||||
CreateCrawlerIndexApiLogic.actions.apiSuccess({ created: 'my-index' });
|
||||
|
||||
expect(navigateToUrl).toHaveBeenCalledWith('/search_indices/my-index/domain_management');
|
||||
});
|
||||
});
|
||||
|
||||
describe('makeRequest', () => {
|
||||
it('clears any displayed errors', () => {
|
||||
CreateCrawlerIndexApiLogic.actions.makeRequest({
|
||||
indexName: 'my-index',
|
||||
language: 'Universal',
|
||||
});
|
||||
|
||||
expect(clearFlashMessages).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('apiError', () => {
|
||||
it('displays the error to the user', () => {
|
||||
const error = {} as HttpError;
|
||||
|
||||
CreateCrawlerIndexApiLogic.actions.apiError(error);
|
||||
|
||||
expect(flashAPIErrors).toHaveBeenCalledWith(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,42 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { kea, MakeLogicType } from 'kea';
|
||||
|
||||
import { Actions } from '../../../../shared/api_logic/create_api_logic';
|
||||
import { generateEncodedPath } from '../../../../shared/encode_path_params';
|
||||
|
||||
import { KibanaLogic } from '../../../../shared/kibana';
|
||||
import {
|
||||
CreateCrawlerIndexApiLogic,
|
||||
CreateCrawlerIndexArgs,
|
||||
CreateCrawlerIndexResponse,
|
||||
} from '../../../api/crawler/create_crawler_index_api_logic';
|
||||
import { SEARCH_INDEX_TAB_PATH } from '../../../routes';
|
||||
import { SearchIndexTabId } from '../../search_index/search_index';
|
||||
|
||||
type MethodCrawlerActions = Pick<
|
||||
Actions<CreateCrawlerIndexArgs, CreateCrawlerIndexResponse>,
|
||||
'apiError' | 'apiSuccess' | 'makeRequest'
|
||||
>;
|
||||
|
||||
export const MethodCrawlerLogic = kea<MakeLogicType<{}, MethodCrawlerActions>>({
|
||||
connect: {
|
||||
actions: [CreateCrawlerIndexApiLogic, ['apiError', 'apiSuccess']],
|
||||
},
|
||||
listeners: {
|
||||
apiSuccess: ({ created }) => {
|
||||
KibanaLogic.values.navigateToUrl(
|
||||
generateEncodedPath(SEARCH_INDEX_TAB_PATH, {
|
||||
indexName: created,
|
||||
tabId: SearchIndexTabId.DOMAIN_MANAGEMENT,
|
||||
})
|
||||
);
|
||||
},
|
||||
},
|
||||
path: ['enterprise_search', 'content', 'method_crawler'],
|
||||
});
|
|
@ -1,89 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { useValues } from 'kea';
|
||||
|
||||
import { EuiFlexGroup, EuiFlexItem } from '@elastic/eui';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { INGESTION_METHOD_IDS } from '../../../../../common/constants';
|
||||
|
||||
import { ProductFeatures } from '../../../../../common/types';
|
||||
|
||||
import { HttpLogic } from '../../../shared/http';
|
||||
import { KibanaLogic } from '../../../shared/kibana/kibana_logic';
|
||||
|
||||
import { NEW_API_PATH, NEW_CRAWLER_PATH, NEW_INDEX_SELECT_CONNECTOR_PATH } from '../../routes';
|
||||
import { EnterpriseSearchContentPageTemplate } from '../layout/page_template';
|
||||
import { baseBreadcrumbs } from '../search_indices';
|
||||
|
||||
import { NewIndexCard } from './new_index_card';
|
||||
|
||||
const getAvailableMethodOptions = (productFeatures: ProductFeatures): INGESTION_METHOD_IDS[] => {
|
||||
return [
|
||||
INGESTION_METHOD_IDS.API,
|
||||
...(productFeatures.hasWebCrawler ? [INGESTION_METHOD_IDS.CRAWLER] : []),
|
||||
...(productFeatures.hasConnectors ? [INGESTION_METHOD_IDS.CONNECTOR] : []),
|
||||
];
|
||||
};
|
||||
|
||||
export const NewIndex: React.FC = () => {
|
||||
const { config, productFeatures } = useValues(KibanaLogic);
|
||||
const availableIngestionMethodOptions = getAvailableMethodOptions(productFeatures);
|
||||
const { errorConnectingMessage } = useValues(HttpLogic);
|
||||
|
||||
return (
|
||||
<EnterpriseSearchContentPageTemplate
|
||||
pageChrome={[
|
||||
...baseBreadcrumbs,
|
||||
i18n.translate('xpack.enterpriseSearch.content.newIndex.breadcrumb', {
|
||||
defaultMessage: 'New ingestion method',
|
||||
}),
|
||||
]}
|
||||
pageViewTelemetry="New Index"
|
||||
isLoading={false}
|
||||
pageHeader={{
|
||||
description: i18n.translate('xpack.enterpriseSearch.content.newIndex.pageDescription', {
|
||||
defaultMessage:
|
||||
'Create a search optimized Elasticsearch index to store your content. Start by selecting an ingestion method.',
|
||||
}),
|
||||
pageTitle: i18n.translate('xpack.enterpriseSearch.content.newIndex.pageTitle', {
|
||||
defaultMessage: 'Select an ingestion method',
|
||||
}),
|
||||
}}
|
||||
>
|
||||
<EuiFlexGroup direction="column">
|
||||
<EuiFlexItem>
|
||||
<EuiFlexGroup>
|
||||
{availableIngestionMethodOptions.map((type) => (
|
||||
<EuiFlexItem key={type}>
|
||||
<NewIndexCard
|
||||
disabled={Boolean(
|
||||
type === INGESTION_METHOD_IDS.CRAWLER &&
|
||||
(errorConnectingMessage || !config.host)
|
||||
)}
|
||||
type={type}
|
||||
onSelect={() => {
|
||||
if (type === INGESTION_METHOD_IDS.CONNECTOR) {
|
||||
KibanaLogic.values.navigateToUrl(NEW_INDEX_SELECT_CONNECTOR_PATH);
|
||||
} else if (type === INGESTION_METHOD_IDS.CRAWLER) {
|
||||
KibanaLogic.values.navigateToUrl(NEW_CRAWLER_PATH);
|
||||
} else {
|
||||
KibanaLogic.values.navigateToUrl(NEW_API_PATH);
|
||||
}
|
||||
}}
|
||||
/>
|
||||
</EuiFlexItem>
|
||||
))}
|
||||
</EuiFlexGroup>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</EnterpriseSearchContentPageTemplate>
|
||||
);
|
||||
};
|
|
@ -1,122 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { EuiIconProps } from '@elastic/eui';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { INGESTION_METHOD_IDS } from '../../../../../common/constants';
|
||||
|
||||
import { IngestionCard } from '../shared/ingestion_card/ingestion_card';
|
||||
|
||||
import { getIngestionMethodButtonIcon, getIngestionMethodIconType } from './utils';
|
||||
|
||||
export interface NewIndexCardProps {
|
||||
disabled: boolean;
|
||||
onSelect?: () => void;
|
||||
type: INGESTION_METHOD_IDS;
|
||||
}
|
||||
|
||||
export interface MethodCardOptions {
|
||||
buttonIcon: EuiIconProps['type'];
|
||||
description: string;
|
||||
footer: Record<string, string>;
|
||||
icon: EuiIconProps['type'];
|
||||
title: string;
|
||||
}
|
||||
|
||||
const METHOD_CARD_OPTIONS: Record<INGESTION_METHOD_IDS, MethodCardOptions> = {
|
||||
[INGESTION_METHOD_IDS.CRAWLER]: {
|
||||
buttonIcon: getIngestionMethodButtonIcon(INGESTION_METHOD_IDS.CRAWLER),
|
||||
description: i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.methodCard.crawler.description',
|
||||
{
|
||||
defaultMessage:
|
||||
'Discover, extract, and index searchable content from websites and knowledge bases',
|
||||
}
|
||||
),
|
||||
footer: {
|
||||
buttonLabel: i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.methodCard.crawler.label',
|
||||
{
|
||||
defaultMessage: 'Crawl URL',
|
||||
}
|
||||
),
|
||||
label: i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.methodCard.crawler.nocodeLabel',
|
||||
{
|
||||
defaultMessage: 'No code',
|
||||
}
|
||||
),
|
||||
},
|
||||
icon: getIngestionMethodIconType(INGESTION_METHOD_IDS.CRAWLER),
|
||||
title: i18n.translate('xpack.enterpriseSearch.content.newIndex.methodCard.crawler.title', {
|
||||
defaultMessage: 'Web crawler',
|
||||
}),
|
||||
},
|
||||
[INGESTION_METHOD_IDS.CONNECTOR]: {
|
||||
buttonIcon: getIngestionMethodButtonIcon(INGESTION_METHOD_IDS.CONNECTOR),
|
||||
description: i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.methodCard.connector.description',
|
||||
{
|
||||
defaultMessage: 'Extract, transform, index and sync data from a third-party data source',
|
||||
}
|
||||
),
|
||||
footer: {
|
||||
buttonLabel: i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.methodCard.connector.label',
|
||||
{
|
||||
defaultMessage: 'Choose a source connector',
|
||||
}
|
||||
),
|
||||
},
|
||||
icon: getIngestionMethodIconType(INGESTION_METHOD_IDS.CONNECTOR),
|
||||
title: i18n.translate('xpack.enterpriseSearch.content.newIndex.methodCard.connector.title', {
|
||||
defaultMessage: 'Connectors',
|
||||
}),
|
||||
},
|
||||
[INGESTION_METHOD_IDS.API]: {
|
||||
buttonIcon: getIngestionMethodButtonIcon(INGESTION_METHOD_IDS.API),
|
||||
description: i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.methodCard.api.description',
|
||||
{
|
||||
defaultMessage: 'Use the API to connect directly to your Elasticsearch index endpoint.',
|
||||
}
|
||||
),
|
||||
footer: {
|
||||
buttonLabel: i18n.translate('xpack.enterpriseSearch.content.newIndex.methodCard.api.label', {
|
||||
defaultMessage: 'Create API Index',
|
||||
}),
|
||||
},
|
||||
icon: getIngestionMethodIconType(INGESTION_METHOD_IDS.API),
|
||||
title: i18n.translate('xpack.enterpriseSearch.content.newIndex.methodCard.api.title', {
|
||||
defaultMessage: 'API',
|
||||
}),
|
||||
},
|
||||
};
|
||||
|
||||
export const NewIndexCard: React.FC<NewIndexCardProps> = ({ disabled, onSelect, type }) => {
|
||||
if (!METHOD_CARD_OPTIONS[type]) {
|
||||
return null;
|
||||
}
|
||||
const { buttonIcon, icon, title, description, footer } = METHOD_CARD_OPTIONS[type];
|
||||
|
||||
return (
|
||||
<IngestionCard
|
||||
isDisabled={disabled}
|
||||
data-test-subj="entSearch-content-newIndexCard-cardBody"
|
||||
logo={icon}
|
||||
buttonIcon={buttonIcon}
|
||||
buttonLabel={footer.buttonLabel}
|
||||
title={title}
|
||||
description={description}
|
||||
onClick={onSelect}
|
||||
/>
|
||||
);
|
||||
};
|
|
@ -1,21 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { flashSuccessToast } from '../../../shared/flash_messages';
|
||||
|
||||
export function flashIndexCreatedToast(): void {
|
||||
flashSuccessToast(
|
||||
i18n.translate('xpack.enterpriseSearch.content.new_index.successToast.title', {
|
||||
defaultMessage: 'Index created successfully',
|
||||
}),
|
||||
{
|
||||
iconType: 'cheer',
|
||||
}
|
||||
);
|
||||
}
|
|
@ -1,28 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { Routes, Route } from '@kbn/shared-ux-router';
|
||||
|
||||
import { NEW_INDEX_PATH, NEW_API_PATH } from '../../routes';
|
||||
|
||||
import { NewIndex } from './new_index';
|
||||
import { NewSearchIndexPage } from './new_search_index_page';
|
||||
|
||||
export const NewIndexRouter: React.FC = () => {
|
||||
return (
|
||||
<Routes>
|
||||
<Route path={NEW_INDEX_PATH} exact>
|
||||
<NewIndex />
|
||||
</Route>
|
||||
<Route path={NEW_API_PATH} exact>
|
||||
<NewSearchIndexPage type="api" />
|
||||
</Route>
|
||||
</Routes>
|
||||
);
|
||||
};
|
|
@ -1,138 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { LogicMounter } from '../../../__mocks__/kea_logic';
|
||||
|
||||
import { nextTick } from '@kbn/test-jest-helpers';
|
||||
|
||||
import { IndexExistsApiLogic } from '../../api/index/index_exists_api_logic';
|
||||
|
||||
import { UNIVERSAL_LANGUAGE_VALUE } from './constants';
|
||||
import { flashIndexCreatedToast } from './new_index_created_toast';
|
||||
import { NewSearchIndexLogic, NewSearchIndexValues } from './new_search_index_logic';
|
||||
|
||||
jest.mock('./new_index_created_toast', () => ({ flashIndexCreatedToast: jest.fn() }));
|
||||
jest.mock('../../../shared/kibana/kibana_logic', () => ({
|
||||
KibanaLogic: { values: { productAccess: { hasAppSearchAccess: true } } },
|
||||
}));
|
||||
|
||||
const DEFAULT_VALUES: NewSearchIndexValues = {
|
||||
data: undefined as any,
|
||||
fullIndexName: '',
|
||||
fullIndexNameExists: false,
|
||||
fullIndexNameIsValid: true,
|
||||
hasPrefix: false,
|
||||
language: null,
|
||||
languageSelectValue: UNIVERSAL_LANGUAGE_VALUE,
|
||||
rawName: '',
|
||||
};
|
||||
|
||||
describe('NewSearchIndexLogic', () => {
|
||||
const { mount } = new LogicMounter(NewSearchIndexLogic);
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mount();
|
||||
});
|
||||
|
||||
it('has expected default values', () => {
|
||||
expect(NewSearchIndexLogic.values).toEqual(DEFAULT_VALUES);
|
||||
});
|
||||
|
||||
describe('actions', () => {
|
||||
describe('setLanguageSelectValue', () => {
|
||||
it('sets language to the provided value', () => {
|
||||
NewSearchIndexLogic.actions.setLanguageSelectValue('en');
|
||||
expect(NewSearchIndexLogic.values).toEqual({
|
||||
...DEFAULT_VALUES,
|
||||
language: 'en',
|
||||
languageSelectValue: 'en',
|
||||
});
|
||||
});
|
||||
|
||||
it('sets language to null when the universal language option is picked', () => {
|
||||
mount({
|
||||
language: 'en',
|
||||
languageSelectValue: 'en',
|
||||
});
|
||||
NewSearchIndexLogic.actions.setLanguageSelectValue(UNIVERSAL_LANGUAGE_VALUE);
|
||||
expect(NewSearchIndexLogic.values).toEqual({
|
||||
...DEFAULT_VALUES,
|
||||
language: null,
|
||||
languageSelectValue: UNIVERSAL_LANGUAGE_VALUE,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('setRawName', () => {
|
||||
it('sets correct values for valid index name', () => {
|
||||
NewSearchIndexLogic.actions.setRawName('rawname');
|
||||
expect(NewSearchIndexLogic.values).toEqual({
|
||||
...DEFAULT_VALUES,
|
||||
fullIndexName: 'rawname',
|
||||
fullIndexNameIsValid: true,
|
||||
rawName: 'rawname',
|
||||
});
|
||||
});
|
||||
|
||||
it('sets correct values for invalid index name', () => {
|
||||
NewSearchIndexLogic.actions.setRawName('invalid/name');
|
||||
expect(NewSearchIndexLogic.values).toEqual({
|
||||
...DEFAULT_VALUES,
|
||||
fullIndexName: 'invalid/name',
|
||||
fullIndexNameIsValid: false,
|
||||
rawName: 'invalid/name',
|
||||
});
|
||||
});
|
||||
it('calls makeRequest on whether API exists with a 150ms debounce', async () => {
|
||||
jest.useFakeTimers({ legacyFakeTimers: true });
|
||||
NewSearchIndexLogic.actions.makeRequest = jest.fn();
|
||||
NewSearchIndexLogic.actions.setRawName('indexname');
|
||||
await nextTick();
|
||||
jest.advanceTimersByTime(150);
|
||||
await nextTick();
|
||||
expect(NewSearchIndexLogic.actions.makeRequest).toHaveBeenCalledWith({
|
||||
indexName: 'indexname',
|
||||
});
|
||||
jest.useRealTimers();
|
||||
});
|
||||
});
|
||||
describe('apiSuccess', () => {
|
||||
it('sets correct values for existing index', () => {
|
||||
NewSearchIndexLogic.actions.setRawName('indexname');
|
||||
IndexExistsApiLogic.actions.apiSuccess({ exists: true, indexName: 'indexname' });
|
||||
expect(NewSearchIndexLogic.values).toEqual({
|
||||
...DEFAULT_VALUES,
|
||||
data: { exists: true, indexName: 'indexname' },
|
||||
fullIndexName: 'indexname',
|
||||
fullIndexNameExists: true,
|
||||
rawName: 'indexname',
|
||||
});
|
||||
});
|
||||
});
|
||||
describe('apiIndexCreated', () => {
|
||||
it('calls flash index created toast', () => {
|
||||
NewSearchIndexLogic.actions.apiIndexCreated({ indexName: 'indexName' });
|
||||
expect(flashIndexCreatedToast).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
describe('connectorIndexCreated', () => {
|
||||
it('calls flash index created toast', () => {
|
||||
NewSearchIndexLogic.actions.connectorIndexCreated({
|
||||
id: 'connectorId',
|
||||
indexName: 'indexName',
|
||||
});
|
||||
expect(flashIndexCreatedToast).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
describe('crawlerIndexCreated', () => {
|
||||
it('calls flash index created toast', () => {
|
||||
NewSearchIndexLogic.actions.crawlerIndexCreated({ created: 'indexName' });
|
||||
expect(flashIndexCreatedToast).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,146 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { kea, MakeLogicType } from 'kea';
|
||||
|
||||
import { Actions } from '../../../shared/api_logic/create_api_logic';
|
||||
import {
|
||||
AddConnectorApiLogic,
|
||||
AddConnectorApiLogicArgs,
|
||||
AddConnectorApiLogicResponse,
|
||||
} from '../../api/connector/add_connector_api_logic';
|
||||
import {
|
||||
CreateCrawlerIndexApiLogic,
|
||||
CreateCrawlerIndexArgs,
|
||||
CreateCrawlerIndexResponse,
|
||||
} from '../../api/crawler/create_crawler_index_api_logic';
|
||||
import {
|
||||
CreateApiIndexApiLogic,
|
||||
CreateApiIndexApiLogicArgs,
|
||||
CreateApiIndexApiLogicResponse,
|
||||
} from '../../api/index/create_api_index_api_logic';
|
||||
|
||||
import {
|
||||
IndexExistsApiLogic,
|
||||
IndexExistsApiParams,
|
||||
IndexExistsApiResponse,
|
||||
} from '../../api/index/index_exists_api_logic';
|
||||
|
||||
import { isValidIndexName } from '../../utils/validate_index_name';
|
||||
|
||||
import { UNIVERSAL_LANGUAGE_VALUE } from './constants';
|
||||
import { flashIndexCreatedToast } from './new_index_created_toast';
|
||||
import { LanguageForOptimization } from './types';
|
||||
import { getLanguageForOptimization } from './utils';
|
||||
|
||||
export interface NewSearchIndexValues {
|
||||
data: IndexExistsApiResponse;
|
||||
fullIndexName: string;
|
||||
fullIndexNameExists: boolean;
|
||||
fullIndexNameIsValid: boolean;
|
||||
hasPrefix: boolean;
|
||||
language: LanguageForOptimization;
|
||||
languageSelectValue: string;
|
||||
rawName: string;
|
||||
}
|
||||
|
||||
type NewSearchIndexActions = Pick<
|
||||
Actions<IndexExistsApiParams, IndexExistsApiResponse>,
|
||||
'makeRequest'
|
||||
> & {
|
||||
apiIndexCreated: Actions<
|
||||
CreateApiIndexApiLogicArgs,
|
||||
CreateApiIndexApiLogicResponse
|
||||
>['apiSuccess'];
|
||||
connectorIndexCreated: Actions<
|
||||
AddConnectorApiLogicArgs,
|
||||
AddConnectorApiLogicResponse
|
||||
>['apiSuccess'];
|
||||
crawlerIndexCreated: Actions<CreateCrawlerIndexArgs, CreateCrawlerIndexResponse>['apiSuccess'];
|
||||
setHasPrefix(hasPrefix: boolean): { hasPrefix: boolean };
|
||||
setLanguageSelectValue(language: string): { language: string };
|
||||
setRawName(rawName: string): { rawName: string };
|
||||
};
|
||||
|
||||
export const NewSearchIndexLogic = kea<MakeLogicType<NewSearchIndexValues, NewSearchIndexActions>>({
|
||||
actions: {
|
||||
setHasPrefix: (hasPrefix) => ({ hasPrefix }),
|
||||
setLanguageSelectValue: (language) => ({ language }),
|
||||
setRawName: (rawName) => ({ rawName }),
|
||||
},
|
||||
connect: {
|
||||
actions: [
|
||||
AddConnectorApiLogic,
|
||||
['apiSuccess as connectorIndexCreated'],
|
||||
CreateApiIndexApiLogic,
|
||||
['apiSuccess as apiIndexCreated'],
|
||||
CreateCrawlerIndexApiLogic,
|
||||
['apiSuccess as crawlerIndexCreated'],
|
||||
IndexExistsApiLogic,
|
||||
['makeRequest'],
|
||||
],
|
||||
values: [IndexExistsApiLogic, ['data']],
|
||||
},
|
||||
listeners: ({ actions, values }) => ({
|
||||
apiIndexCreated: () => {
|
||||
flashIndexCreatedToast();
|
||||
},
|
||||
connectorIndexCreated: () => {
|
||||
flashIndexCreatedToast();
|
||||
},
|
||||
crawlerIndexCreated: () => {
|
||||
flashIndexCreatedToast();
|
||||
},
|
||||
setRawName: async (_, breakpoint) => {
|
||||
await breakpoint(150);
|
||||
actions.makeRequest({ indexName: values.fullIndexName });
|
||||
},
|
||||
}),
|
||||
path: ['enterprise_search', 'content', 'new_search_index'],
|
||||
reducers: {
|
||||
hasPrefix: [
|
||||
false,
|
||||
{
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
setHasPrefix: (_, { hasPrefix }) => hasPrefix,
|
||||
},
|
||||
],
|
||||
languageSelectValue: [
|
||||
UNIVERSAL_LANGUAGE_VALUE,
|
||||
{
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
setLanguageSelectValue: (_, { language }) => language ?? null,
|
||||
},
|
||||
],
|
||||
rawName: [
|
||||
'',
|
||||
{
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
setRawName: (_, { rawName }) => rawName,
|
||||
},
|
||||
],
|
||||
},
|
||||
selectors: ({ selectors }) => ({
|
||||
fullIndexName: [
|
||||
() => [selectors.rawName, selectors.hasPrefix],
|
||||
(name: string, hasPrefix: boolean) => (hasPrefix ? `search-${name}` : name),
|
||||
],
|
||||
fullIndexNameExists: [
|
||||
() => [selectors.data, selectors.fullIndexName],
|
||||
(data: IndexExistsApiResponse | undefined, fullIndexName: string) =>
|
||||
data?.exists === true && data.indexName === fullIndexName,
|
||||
],
|
||||
fullIndexNameIsValid: [
|
||||
() => [selectors.fullIndexName],
|
||||
(fullIndexName) => isValidIndexName(fullIndexName),
|
||||
],
|
||||
language: [
|
||||
() => [selectors.languageSelectValue],
|
||||
(languageSelectValue) => getLanguageForOptimization(languageSelectValue),
|
||||
],
|
||||
}),
|
||||
});
|
|
@ -1,216 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { useLocation } from 'react-router-dom';
|
||||
|
||||
import { useValues } from 'kea';
|
||||
|
||||
import { EuiBadge, EuiFlexGroup, EuiFlexItem, EuiIcon } from '@elastic/eui';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import { FormattedMessage } from '@kbn/i18n-react';
|
||||
|
||||
import { ConnectorDefinition } from '@kbn/search-connectors';
|
||||
|
||||
import {
|
||||
CONNECTOR_CLIENTS_TYPE,
|
||||
CONNECTOR_NATIVE_TYPE,
|
||||
INGESTION_METHOD_IDS,
|
||||
} from '../../../../../common/constants';
|
||||
import { KibanaLogic } from '../../../shared/kibana';
|
||||
import { parseQueryParams } from '../../../shared/query_params';
|
||||
|
||||
import { connectorsBreadcrumbs, crawlersBreadcrumbs } from '../connectors/connectors';
|
||||
import { EnterpriseSearchContentPageTemplate } from '../layout/page_template';
|
||||
import { baseBreadcrumbs } from '../search_indices';
|
||||
|
||||
import { MethodApi } from './method_api/method_api';
|
||||
import { MethodConnector } from './method_connector/method_connector';
|
||||
import { MethodCrawler } from './method_crawler/method_crawler';
|
||||
import { getIngestionMethodIconType } from './utils';
|
||||
|
||||
function getTitle(
|
||||
method: string,
|
||||
serviceType: string,
|
||||
connectorTypes: ConnectorDefinition[]
|
||||
): string {
|
||||
switch (method) {
|
||||
case INGESTION_METHOD_IDS.API:
|
||||
return i18n.translate('xpack.enterpriseSearch.content.new_index.apiTitle', {
|
||||
defaultMessage: 'New search index',
|
||||
});
|
||||
case INGESTION_METHOD_IDS.CONNECTOR: {
|
||||
const connector =
|
||||
Boolean(serviceType) && connectorTypes.find((item) => item.serviceType === serviceType);
|
||||
return connector
|
||||
? i18n.translate('xpack.enterpriseSearch.content.new_index.connectorTitleWithServiceType', {
|
||||
defaultMessage: 'New {name} connector',
|
||||
values: {
|
||||
name: connector.name,
|
||||
},
|
||||
})
|
||||
: i18n.translate('xpack.enterpriseSearch.content.new_index.connectorTitle', {
|
||||
defaultMessage: 'New connector search index',
|
||||
});
|
||||
}
|
||||
case INGESTION_METHOD_IDS.CRAWLER:
|
||||
return i18n.translate('xpack.enterpriseSearch.content.new_index.crawlerTitle', {
|
||||
defaultMessage: 'Web crawler search index',
|
||||
});
|
||||
default:
|
||||
return i18n.translate('xpack.enterpriseSearch.content.new_index.genericTitle', {
|
||||
defaultMessage: 'New search index',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function getDescription(method: string): string {
|
||||
switch (method) {
|
||||
case INGESTION_METHOD_IDS.API:
|
||||
return i18n.translate('xpack.enterpriseSearch.content.new_index.apiDescription', {
|
||||
defaultMessage:
|
||||
'Use the API to programatically add documents to an Elasticsearch index. Start by creating your index.',
|
||||
});
|
||||
case INGESTION_METHOD_IDS.CONNECTOR: {
|
||||
return i18n.translate(
|
||||
'xpack.enterpriseSearch.content.new_index.connectorDescriptionWithServiceType',
|
||||
{
|
||||
defaultMessage:
|
||||
'Use a connector to sync, extract, transform and index data from your data source. Connectors are Elastic integrations that write directly to Elasticsearch indices.',
|
||||
}
|
||||
);
|
||||
}
|
||||
case INGESTION_METHOD_IDS.CRAWLER:
|
||||
return i18n.translate('xpack.enterpriseSearch.content.new_index.crawlerDescription', {
|
||||
defaultMessage:
|
||||
'Use the web crawler to programmatically discover, extract, and index searchable content from websites and knowledge bases.',
|
||||
});
|
||||
default:
|
||||
return i18n.translate('xpack.enterpriseSearch.content.new_index.defaultDescription', {
|
||||
defaultMessage: 'A search index stores your data.',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const parseIsNativeParam = (queryString: string | string[] | null): boolean | undefined => {
|
||||
const parsedStr = Array.isArray(queryString) ? queryString[0] : queryString;
|
||||
if (parsedStr === CONNECTOR_NATIVE_TYPE) return true;
|
||||
if (parsedStr === CONNECTOR_CLIENTS_TYPE) return false;
|
||||
return undefined;
|
||||
};
|
||||
|
||||
const getBreadcrumb = (
|
||||
method: string,
|
||||
serviceType: string,
|
||||
connectorTypes: ConnectorDefinition[]
|
||||
): string[] => {
|
||||
switch (method) {
|
||||
case INGESTION_METHOD_IDS.CONNECTOR:
|
||||
const connector =
|
||||
Boolean(serviceType) && connectorTypes.find((item) => item.serviceType === serviceType);
|
||||
|
||||
const thisConnectorBreadcrumb = connector
|
||||
? i18n.translate(
|
||||
'xpack.enterpriseSearch.content.new_connector_with_service_type.breadcrumbs',
|
||||
{
|
||||
defaultMessage: `New {name} connector`,
|
||||
values: {
|
||||
name: connector.name,
|
||||
},
|
||||
}
|
||||
)
|
||||
: i18n.translate('xpack.enterpriseSearch.content.new_connector.breadcrumbs', {
|
||||
defaultMessage: `New connector`,
|
||||
});
|
||||
|
||||
return [...connectorsBreadcrumbs, thisConnectorBreadcrumb];
|
||||
case INGESTION_METHOD_IDS.CRAWLER:
|
||||
return [
|
||||
...crawlersBreadcrumbs,
|
||||
i18n.translate('xpack.enterpriseSearch.content.new_web_crawler.breadcrumbs', {
|
||||
defaultMessage: 'New web crawler',
|
||||
}),
|
||||
];
|
||||
default:
|
||||
return [
|
||||
...baseBreadcrumbs,
|
||||
i18n.translate('xpack.enterpriseSearch.content.new_index.breadcrumbs', {
|
||||
defaultMessage: 'New search index',
|
||||
}),
|
||||
];
|
||||
}
|
||||
};
|
||||
|
||||
const getConnectorModeBadge = (isNative?: boolean) => {
|
||||
if (isNative) {
|
||||
return (
|
||||
<EuiBadge iconSide="right">
|
||||
<FormattedMessage
|
||||
id="xpack.enterpriseSearch.getConnectorTypeBadge.nativeBadgeLabel"
|
||||
defaultMessage="Elastic managed connector"
|
||||
/>
|
||||
</EuiBadge>
|
||||
);
|
||||
}
|
||||
if (!isNative) {
|
||||
return (
|
||||
<EuiBadge iconSide="right">
|
||||
{i18n.translate('xpack.enterpriseSearch.getConnectorTypeBadge.connectorClientBadgeLabel', {
|
||||
defaultMessage: 'Self-managed',
|
||||
})}
|
||||
</EuiBadge>
|
||||
);
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
export interface NewSearchIndexPageProps {
|
||||
type: string;
|
||||
}
|
||||
export const NewSearchIndexPage: React.FC<NewSearchIndexPageProps> = ({ type }) => {
|
||||
const { connectorTypes } = useValues(KibanaLogic);
|
||||
const { search } = useLocation();
|
||||
const { service_type: inputServiceType, connector_type: inputConnectorType } =
|
||||
parseQueryParams(search);
|
||||
const serviceType = Array.isArray(inputServiceType)
|
||||
? inputServiceType[0]
|
||||
: inputServiceType || '';
|
||||
|
||||
const isNative = parseIsNativeParam(inputConnectorType);
|
||||
|
||||
return (
|
||||
<EnterpriseSearchContentPageTemplate
|
||||
pageChrome={getBreadcrumb(type, serviceType, connectorTypes)}
|
||||
pageViewTelemetry="New Index"
|
||||
isLoading={false}
|
||||
pageHeader={{
|
||||
description: getDescription(type),
|
||||
pageTitle: (
|
||||
<EuiFlexGroup alignItems="center">
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiIcon type={getIngestionMethodIconType(type)} size="xxl" />
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem grow={false}>{getTitle(type, serviceType, connectorTypes)}</EuiFlexItem>
|
||||
{type === INGESTION_METHOD_IDS.CONNECTOR && (
|
||||
<EuiFlexItem grow={false}>{getConnectorModeBadge(isNative)}</EuiFlexItem>
|
||||
)}
|
||||
</EuiFlexGroup>
|
||||
),
|
||||
}}
|
||||
>
|
||||
{
|
||||
<>
|
||||
{type === INGESTION_METHOD_IDS.CRAWLER && <MethodCrawler />}
|
||||
{type === INGESTION_METHOD_IDS.API && <MethodApi />}
|
||||
{type === INGESTION_METHOD_IDS.CONNECTOR && (
|
||||
<MethodConnector serviceType={serviceType} isNative={isNative} />
|
||||
)}
|
||||
</>
|
||||
}
|
||||
</EnterpriseSearchContentPageTemplate>
|
||||
);
|
||||
};
|
|
@ -1,46 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { setMockValues, setMockActions } from '../../../__mocks__/kea_logic';
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { shallow } from 'enzyme';
|
||||
|
||||
import { UNIVERSAL_LANGUAGE_VALUE } from './constants';
|
||||
import {
|
||||
NewSearchIndexTemplate,
|
||||
Props as NewSearchIndexTemplateProps,
|
||||
} from './new_search_index_template';
|
||||
|
||||
describe('NewSearchIndexTemplate', () => {
|
||||
const mockProps: NewSearchIndexTemplateProps = {
|
||||
onSubmit: jest.fn(),
|
||||
type: 'api',
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
setMockValues({
|
||||
language: null,
|
||||
languageSelectValue: UNIVERSAL_LANGUAGE_VALUE,
|
||||
name: 'my-name',
|
||||
rawName: 'MY$_RAW_$NAME',
|
||||
});
|
||||
setMockActions({
|
||||
makeRequest: jest.fn(),
|
||||
setHasPrefix: jest.fn(),
|
||||
setLanguageSelectValue: jest.fn(),
|
||||
});
|
||||
});
|
||||
|
||||
it('renders', () => {
|
||||
const wrapper = shallow(<NewSearchIndexTemplate {...mockProps} />);
|
||||
|
||||
expect(wrapper.find('EuiForm')).toHaveLength(1);
|
||||
});
|
||||
});
|
|
@ -1,365 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React, { ChangeEvent } from 'react';
|
||||
|
||||
import { css } from '@emotion/react';
|
||||
import { useValues, useActions } from 'kea';
|
||||
|
||||
import {
|
||||
EuiButton,
|
||||
EuiFieldText,
|
||||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiForm,
|
||||
EuiFormRow,
|
||||
EuiLink,
|
||||
EuiSelect,
|
||||
EuiSpacer,
|
||||
EuiText,
|
||||
EuiTitle,
|
||||
} from '@elastic/eui';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import { FormattedMessage } from '@kbn/i18n-react';
|
||||
|
||||
import { INGESTION_METHOD_IDS } from '../../../../../common/constants';
|
||||
|
||||
import { BetaConnectorCallout } from '../../../shared/beta/beta_connector_callout';
|
||||
|
||||
import { BACK_BUTTON_LABEL } from '../../../shared/constants';
|
||||
import { docLinks } from '../../../shared/doc_links';
|
||||
|
||||
import { SUPPORTED_LANGUAGES } from './constants';
|
||||
import { NewSearchIndexLogic } from './new_search_index_logic';
|
||||
import { LanguageForOptimization } from './types';
|
||||
|
||||
export interface Props {
|
||||
buttonLoading?: boolean;
|
||||
disabled?: boolean;
|
||||
docsUrl?: string;
|
||||
error?: string | React.ReactNode;
|
||||
isBeta?: boolean;
|
||||
onNameChange?(name: string): void;
|
||||
onSubmit(name: string, language: LanguageForOptimization): void;
|
||||
type: string;
|
||||
}
|
||||
|
||||
export const NewSearchIndexTemplate: React.FC<Props> = ({
|
||||
buttonLoading,
|
||||
disabled,
|
||||
error,
|
||||
onNameChange,
|
||||
onSubmit,
|
||||
type,
|
||||
isBeta,
|
||||
}) => {
|
||||
const {
|
||||
fullIndexName,
|
||||
fullIndexNameExists,
|
||||
fullIndexNameIsValid,
|
||||
hasPrefix,
|
||||
language,
|
||||
rawName,
|
||||
languageSelectValue,
|
||||
} = useValues(NewSearchIndexLogic);
|
||||
const { setRawName, setLanguageSelectValue, setHasPrefix } = useActions(NewSearchIndexLogic);
|
||||
setHasPrefix(type === INGESTION_METHOD_IDS.CRAWLER);
|
||||
|
||||
const handleNameChange = (e: ChangeEvent<HTMLInputElement>) => {
|
||||
setRawName(e.target.value);
|
||||
if (onNameChange) {
|
||||
onNameChange(fullIndexName);
|
||||
}
|
||||
};
|
||||
|
||||
const handleLanguageChange = (e: ChangeEvent<HTMLSelectElement>) => {
|
||||
setLanguageSelectValue(e.target.value);
|
||||
};
|
||||
|
||||
const formInvalid = !!error || fullIndexNameExists || !fullIndexNameIsValid;
|
||||
|
||||
const formError = () => {
|
||||
if (fullIndexNameExists) {
|
||||
return i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.alreadyExists.error',
|
||||
{
|
||||
defaultMessage: 'An index with the name {indexName} already exists',
|
||||
values: {
|
||||
indexName: fullIndexName,
|
||||
},
|
||||
}
|
||||
);
|
||||
}
|
||||
if (!fullIndexNameIsValid) {
|
||||
return i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.isInvalid.error',
|
||||
{
|
||||
defaultMessage: '{indexName} is an invalid index name',
|
||||
values: {
|
||||
indexName: fullIndexName,
|
||||
},
|
||||
}
|
||||
);
|
||||
}
|
||||
return error;
|
||||
};
|
||||
const searchHelpTest = i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.nameInputHelpText.lineOne',
|
||||
{
|
||||
defaultMessage: 'Your index will be named: {indexName}',
|
||||
values: {
|
||||
indexName: fullIndexName,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
<EuiForm
|
||||
component="form"
|
||||
id="enterprise-search-create-index"
|
||||
onSubmit={(event) => {
|
||||
event.preventDefault();
|
||||
onSubmit(fullIndexName, language);
|
||||
}}
|
||||
>
|
||||
<EuiFlexGroup direction="column">
|
||||
{isBeta ? (
|
||||
<EuiFlexItem>
|
||||
<BetaConnectorCallout />
|
||||
</EuiFlexItem>
|
||||
) : null}
|
||||
<EuiFlexItem>
|
||||
<EuiTitle size="s">
|
||||
<h3>
|
||||
<FormattedMessage
|
||||
id="xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.formTitle"
|
||||
defaultMessage="Create an Elasticsearch index"
|
||||
/>
|
||||
</h3>
|
||||
</EuiTitle>
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem>
|
||||
<EuiText size="m">
|
||||
<p>
|
||||
<FormattedMessage
|
||||
id="xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.formDescription"
|
||||
defaultMessage="This index will hold your data source content, and is optimized with default field
|
||||
mappings for relevant search experiences. Give your index a unique name and
|
||||
optionally set a default {language_analyzer} for the index."
|
||||
values={{
|
||||
language_analyzer: (
|
||||
<EuiLink
|
||||
data-test-subj="enterpriseSearchNewSearchIndexTemplateLanguageAnalyzerLink"
|
||||
target="_blank"
|
||||
href={docLinks.languageAnalyzers}
|
||||
>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.formDescription.linkText',
|
||||
{
|
||||
defaultMessage: 'language analyzer',
|
||||
}
|
||||
)}
|
||||
</EuiLink>
|
||||
),
|
||||
}}
|
||||
/>
|
||||
</p>
|
||||
</EuiText>
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem grow>
|
||||
<EuiFlexGroup>
|
||||
<EuiFlexItem grow>
|
||||
<EuiFormRow
|
||||
isDisabled={disabled || buttonLoading}
|
||||
label={i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.nameInputLabel',
|
||||
{
|
||||
defaultMessage: 'Index name',
|
||||
}
|
||||
)}
|
||||
isInvalid={formInvalid}
|
||||
error={
|
||||
<EuiText
|
||||
size="xs"
|
||||
css={css`
|
||||
line-break: anywhere;
|
||||
`}
|
||||
>
|
||||
{formError()}
|
||||
</EuiText>
|
||||
}
|
||||
helpText={
|
||||
<EuiText
|
||||
size="xs"
|
||||
css={css`
|
||||
line-break: anywhere;
|
||||
`}
|
||||
>
|
||||
{searchHelpTest}
|
||||
</EuiText>
|
||||
}
|
||||
fullWidth
|
||||
>
|
||||
<EuiFieldText
|
||||
data-test-subj={`entSearchContent-${type}-newIndex-editName`}
|
||||
data-telemetry-id={`entSearchContent-${type}-newIndex-editName`}
|
||||
placeholder={i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.nameInputPlaceholder',
|
||||
{
|
||||
defaultMessage: 'Set a name for your index',
|
||||
}
|
||||
)}
|
||||
fullWidth
|
||||
disabled={disabled}
|
||||
isInvalid={false}
|
||||
value={rawName}
|
||||
onChange={handleNameChange}
|
||||
autoFocus
|
||||
prepend={hasPrefix ? 'search-' : undefined}
|
||||
/>
|
||||
</EuiFormRow>
|
||||
<EuiText size="xs" color="subdued">
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.nameInputHelpText.lineTwo',
|
||||
{
|
||||
defaultMessage:
|
||||
'Names should be lowercase and cannot contain spaces or special characters.',
|
||||
}
|
||||
)}
|
||||
</EuiText>
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiFormRow
|
||||
isDisabled={disabled}
|
||||
label={i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.languageInputLabel',
|
||||
{
|
||||
defaultMessage: 'Language analyzer',
|
||||
}
|
||||
)}
|
||||
helpText={i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.languageInputHelpText',
|
||||
{
|
||||
defaultMessage: 'Language can be changed later, but may require a reindex',
|
||||
}
|
||||
)}
|
||||
>
|
||||
<EuiSelect
|
||||
data-test-subj="enterpriseSearchNewSearchIndexTemplateSelect"
|
||||
data-telemetry-id={`entSearchContent-${type}-newIndex-languageAnalyzer`}
|
||||
disabled={disabled}
|
||||
options={SUPPORTED_LANGUAGES}
|
||||
onChange={handleLanguageChange}
|
||||
value={languageSelectValue}
|
||||
/>
|
||||
</EuiFormRow>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
<EuiSpacer />
|
||||
<EuiFlexGroup direction="column" gutterSize="xs">
|
||||
<EuiFlexItem>
|
||||
<EuiLink
|
||||
data-test-subj="enterpriseSearchNewSearchIndexTemplateLearnMoreAboutIndicesLink"
|
||||
target="_blank"
|
||||
href={docLinks.elasticsearchGettingStarted}
|
||||
>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.learnMoreIndices.linkText',
|
||||
{
|
||||
defaultMessage: 'Learn more about indices',
|
||||
}
|
||||
)}
|
||||
</EuiLink>
|
||||
</EuiFlexItem>
|
||||
|
||||
{type === INGESTION_METHOD_IDS.CONNECTOR && (
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiLink
|
||||
data-test-subj="enterpriseSearchNewSearchIndexTemplateLearnMoreAboutConnectorsLink"
|
||||
target="_blank"
|
||||
href={docLinks.connectors}
|
||||
>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.learnMoreConnectors.linkText',
|
||||
{
|
||||
defaultMessage: 'Learn more about connectors',
|
||||
}
|
||||
)}
|
||||
</EuiLink>
|
||||
</EuiFlexItem>
|
||||
)}
|
||||
{type === INGESTION_METHOD_IDS.CRAWLER && (
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiLink
|
||||
data-test-subj="enterpriseSearchNewSearchIndexTemplateLearnMoreAboutTheElasticWebCrawlerLink"
|
||||
target="_blank"
|
||||
href={docLinks.crawlerOverview}
|
||||
>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.learnMoreCrawler.linkText',
|
||||
{
|
||||
defaultMessage: 'Learn more about the Elastic Web Crawler',
|
||||
}
|
||||
)}
|
||||
</EuiLink>
|
||||
</EuiFlexItem>
|
||||
)}
|
||||
{type === INGESTION_METHOD_IDS.API && (
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiLink
|
||||
data-test-subj="enterpriseSearchNewSearchIndexTemplateLearnMoreAboutIngestionApIsLink"
|
||||
target="_blank"
|
||||
href={docLinks.ingestionApis}
|
||||
>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.learnMoreApis.linkText',
|
||||
{
|
||||
defaultMessage: 'Learn more about ingestion APIs',
|
||||
}
|
||||
)}
|
||||
</EuiLink>
|
||||
</EuiFlexItem>
|
||||
)}
|
||||
</EuiFlexGroup>
|
||||
<EuiSpacer />
|
||||
<EuiFlexGroup direction="row" alignItems="center" justifyContent="spaceBetween">
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiButton
|
||||
data-test-subj="enterpriseSearchNewSearchIndexTemplateButton"
|
||||
data-telemetry-id={`entSearchContent-${type}-newIndex-goBack`}
|
||||
isDisabled={buttonLoading}
|
||||
onClick={() => history.back()}
|
||||
>
|
||||
{BACK_BUTTON_LABEL}
|
||||
</EuiButton>
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiButton
|
||||
data-test-subj={`entSearchContent-${type}-newIndex-createIndex`}
|
||||
data-telemetry-id={`entSearchContent-${type}-newIndex-createIndex`}
|
||||
fill
|
||||
isDisabled={!rawName || buttonLoading || formInvalid || disabled}
|
||||
isLoading={buttonLoading}
|
||||
type="submit"
|
||||
>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.content.newIndex.newSearchIndexTemplate.createIndex.buttonText',
|
||||
{
|
||||
defaultMessage: 'Create index',
|
||||
}
|
||||
)}
|
||||
</EuiButton>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</EuiForm>
|
||||
</>
|
||||
);
|
||||
};
|
|
@ -1,8 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
export type LanguageForOptimization = string | null;
|
|
@ -1,19 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { UNIVERSAL_LANGUAGE_VALUE } from './constants';
|
||||
import { getLanguageForOptimization } from './utils';
|
||||
|
||||
describe('getLanguageForOptimizatioin', () => {
|
||||
it('returns null for the universal language option', () => {
|
||||
expect(getLanguageForOptimization(UNIVERSAL_LANGUAGE_VALUE)).toEqual(null);
|
||||
});
|
||||
|
||||
it('returns the language code for non-Universal languageoptions', () => {
|
||||
expect(getLanguageForOptimization('zh')).toEqual('zh');
|
||||
});
|
||||
});
|
|
@ -1,44 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { INGESTION_METHOD_IDS } from '../../../../../common/constants';
|
||||
|
||||
import apiLogo from '../../../../assets/images/api_image.png';
|
||||
import connectorLogo from '../../../../assets/images/search_connector.svg';
|
||||
import crawlerLogo from '../../../../assets/images/search_crawler.svg';
|
||||
import { ConnectorIcon } from '../../../shared/icons/connector';
|
||||
import { CrawlerIcon } from '../../../shared/icons/crawler';
|
||||
|
||||
import { UNIVERSAL_LANGUAGE_VALUE } from './constants';
|
||||
import { LanguageForOptimization } from './types';
|
||||
|
||||
// Enterprise Search expects the Universal language option to be represented by null
|
||||
// but we can't use null as the value for an EuiSelectOption
|
||||
export const getLanguageForOptimization = (language: string): LanguageForOptimization =>
|
||||
language === UNIVERSAL_LANGUAGE_VALUE ? null : language;
|
||||
|
||||
export function getIngestionMethodIconType(type: string): string {
|
||||
switch (type) {
|
||||
case INGESTION_METHOD_IDS.CRAWLER:
|
||||
return crawlerLogo;
|
||||
case INGESTION_METHOD_IDS.CONNECTOR:
|
||||
return connectorLogo;
|
||||
default:
|
||||
return apiLogo;
|
||||
}
|
||||
}
|
||||
|
||||
export function getIngestionMethodButtonIcon(type: string): React.FC | string {
|
||||
switch (type) {
|
||||
case INGESTION_METHOD_IDS.CRAWLER:
|
||||
return CrawlerIcon;
|
||||
case INGESTION_METHOD_IDS.CONNECTOR:
|
||||
return ConnectorIcon;
|
||||
default:
|
||||
return 'console';
|
||||
}
|
||||
}
|
|
@ -20,7 +20,7 @@ import {
|
|||
CachedFetchIndexApiLogicActions,
|
||||
} from '../../../../api/index/cached_fetch_index_api_logic';
|
||||
import { FetchIndexApiResponse } from '../../../../api/index/fetch_index_api_logic';
|
||||
import { isConnectorIndex, isCrawlerIndex } from '../../../../utils/indices';
|
||||
import { isConnectorIndex } from '../../../../utils/indices';
|
||||
|
||||
type NameAndDescription = Partial<Pick<Connector, 'name' | 'description'>>;
|
||||
|
||||
|
@ -66,9 +66,7 @@ export const ConnectorNameAndDescriptionLogic = kea<
|
|||
},
|
||||
events: ({ actions, values }) => ({
|
||||
afterMount: () =>
|
||||
actions.setNameAndDescription(
|
||||
isConnectorIndex(values.index) || isCrawlerIndex(values.index) ? values.index.connector : {}
|
||||
),
|
||||
actions.setNameAndDescription(isConnectorIndex(values.index) ? values.index.connector : {}),
|
||||
}),
|
||||
listeners: ({ actions, values }) => ({
|
||||
fetchIndexApiSuccess: (index) => {
|
||||
|
@ -77,7 +75,7 @@ export const ConnectorNameAndDescriptionLogic = kea<
|
|||
}
|
||||
},
|
||||
saveNameAndDescription: () => {
|
||||
if (isConnectorIndex(values.index) || isCrawlerIndex(values.index)) {
|
||||
if (isConnectorIndex(values.index)) {
|
||||
actions.makeRequest({
|
||||
connectorId: values.index.connector.id,
|
||||
...values.localNameAndDescription,
|
||||
|
|
|
@ -1,252 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { useActions, useValues } from 'kea';
|
||||
|
||||
import {
|
||||
EuiCheckableCard,
|
||||
EuiFieldNumber,
|
||||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiFormRow,
|
||||
EuiHorizontalRule,
|
||||
EuiLink,
|
||||
EuiSelect,
|
||||
EuiSpacer,
|
||||
EuiSplitPanel,
|
||||
EuiSwitch,
|
||||
EuiText,
|
||||
EuiTitle,
|
||||
} from '@elastic/eui';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { EnterpriseSearchCronEditor } from '@kbn/search-connectors/components/cron_editor';
|
||||
|
||||
import {
|
||||
HOURS_UNIT_LABEL,
|
||||
DAYS_UNIT_LABEL,
|
||||
WEEKS_UNIT_LABEL,
|
||||
MONTHS_UNIT_LABEL,
|
||||
} from '../../../../../shared/constants';
|
||||
|
||||
import { docLinks } from '../../../../../shared/doc_links/doc_links';
|
||||
import { CrawlUnits } from '../../../../api/crawler/types';
|
||||
import { isCrawlerIndex } from '../../../../utils/indices';
|
||||
|
||||
import { AutomaticCrawlSchedulerLogic } from './automatic_crawl_scheduler_logic';
|
||||
|
||||
export const AutomaticCrawlScheduler: React.FC = () => {
|
||||
const {
|
||||
setCrawlAutomatically,
|
||||
setCrawlFrequency,
|
||||
setCrawlUnit,
|
||||
setUseConnectorSchedule,
|
||||
submitConnectorSchedule,
|
||||
} = useActions(AutomaticCrawlSchedulerLogic);
|
||||
|
||||
const { index, crawlAutomatically, crawlFrequency, crawlUnit, useConnectorSchedule } = useValues(
|
||||
AutomaticCrawlSchedulerLogic
|
||||
);
|
||||
|
||||
if (!isCrawlerIndex(index)) {
|
||||
return <></>;
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<EuiSpacer />
|
||||
<EuiTitle size="s">
|
||||
<h2>
|
||||
{i18n.translate('xpack.enterpriseSearch.automaticCrawlSchedule.title', {
|
||||
defaultMessage: 'Crawl frequency',
|
||||
})}
|
||||
</h2>
|
||||
</EuiTitle>
|
||||
<EuiSpacer />
|
||||
<EuiSplitPanel.Outer grow>
|
||||
<EuiSplitPanel.Inner grow={false} color="subdued">
|
||||
<EuiFormRow display="rowCompressed">
|
||||
<EuiSwitch
|
||||
data-telemetry-id="entSearchContent-crawler-scheduleCrawl-crawlAutomatically"
|
||||
autoFocus
|
||||
checked={crawlAutomatically}
|
||||
label={i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.automaticCrawlSchedule.crawlAutomaticallySwitchLabel',
|
||||
{
|
||||
defaultMessage: 'Enable recurring crawls with the following schedule',
|
||||
}
|
||||
)}
|
||||
onChange={(e) => setCrawlAutomatically(e.target.checked)}
|
||||
compressed
|
||||
/>
|
||||
</EuiFormRow>
|
||||
</EuiSplitPanel.Inner>
|
||||
<EuiSplitPanel.Inner>
|
||||
<EuiFlexGroup>
|
||||
<EuiFlexItem>
|
||||
<EuiCheckableCard
|
||||
id="specificTimeSchedulingCard"
|
||||
name="scheduling-card"
|
||||
label={
|
||||
<>
|
||||
<EuiTitle size="xxs">
|
||||
<h5>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.automaticCrawlSchedule.cronSchedulingTitle',
|
||||
{
|
||||
defaultMessage: 'Specific time scheduling',
|
||||
}
|
||||
)}
|
||||
</h5>
|
||||
</EuiTitle>
|
||||
<EuiSpacer size="s" />
|
||||
<EuiText size="xs" color="subdued">
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.automaticCrawlSchedule.cronSchedulingDescription',
|
||||
{
|
||||
defaultMessage:
|
||||
'Define the frequency and time for scheduled crawls. The crawler uses UTC as its timezone.',
|
||||
}
|
||||
)}
|
||||
</EuiText>
|
||||
<EuiHorizontalRule margin="s" />
|
||||
</>
|
||||
}
|
||||
checked={crawlAutomatically && useConnectorSchedule}
|
||||
disabled={!crawlAutomatically}
|
||||
onChange={() => setUseConnectorSchedule(true)}
|
||||
>
|
||||
<EnterpriseSearchCronEditor
|
||||
disabled={!crawlAutomatically || !useConnectorSchedule}
|
||||
scheduling={index.connector.scheduling.full}
|
||||
onChange={(newScheduling) =>
|
||||
submitConnectorSchedule({
|
||||
...newScheduling,
|
||||
enabled: true,
|
||||
})
|
||||
}
|
||||
/>
|
||||
</EuiCheckableCard>
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem>
|
||||
<EuiCheckableCard
|
||||
id="intervalSchedulingCard"
|
||||
name="scheduling-card"
|
||||
label={
|
||||
<>
|
||||
<EuiTitle size="xxs">
|
||||
<h5>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.automaticCrawlSchedule.intervalSchedulingTitle',
|
||||
{
|
||||
defaultMessage: 'Interval scheduling',
|
||||
}
|
||||
)}
|
||||
</h5>
|
||||
</EuiTitle>
|
||||
<EuiSpacer size="s" />
|
||||
<EuiText size="xs" color="subdued">
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.automaticCrawlSchedule.intervalSchedulingDescription',
|
||||
{
|
||||
defaultMessage: 'Define the frequency for scheduled crawls',
|
||||
}
|
||||
)}
|
||||
</EuiText>
|
||||
<EuiHorizontalRule margin="s" />
|
||||
</>
|
||||
}
|
||||
checked={crawlAutomatically && !useConnectorSchedule}
|
||||
disabled={!crawlAutomatically}
|
||||
onChange={() => setUseConnectorSchedule(false)}
|
||||
>
|
||||
<EuiFormRow display="rowCompressed" label="Frequency" fullWidth>
|
||||
<EuiFlexGroup direction="row" gutterSize="s" alignItems="center">
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiFieldNumber
|
||||
data-telemetry-id="entSearchContent-crawler-scheduleCrawl-crawlAutomatically-scheduleFrequency"
|
||||
aria-label={i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleFrequencyLabel',
|
||||
{
|
||||
defaultMessage: 'Schedule frequency',
|
||||
}
|
||||
)}
|
||||
disabled={!crawlAutomatically || useConnectorSchedule}
|
||||
min={0}
|
||||
max={99}
|
||||
compressed
|
||||
value={crawlFrequency}
|
||||
onChange={(e) => setCrawlFrequency(parseInt(e.target.value, 10))}
|
||||
prepend={'Every'}
|
||||
/>
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem>
|
||||
<EuiSelect
|
||||
data-telemetry-id="entSearchContent-crawler-scheduleCrawl-crawlAutomatically-scheduleUnits"
|
||||
aria-label={i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleUnitsLabel',
|
||||
{
|
||||
defaultMessage: 'Schedule units of time',
|
||||
}
|
||||
)}
|
||||
disabled={!crawlAutomatically || useConnectorSchedule}
|
||||
fullWidth
|
||||
compressed
|
||||
options={[
|
||||
{
|
||||
text: HOURS_UNIT_LABEL,
|
||||
value: CrawlUnits.hours,
|
||||
},
|
||||
{
|
||||
text: DAYS_UNIT_LABEL,
|
||||
value: CrawlUnits.days,
|
||||
},
|
||||
{
|
||||
text: WEEKS_UNIT_LABEL,
|
||||
value: CrawlUnits.weeks,
|
||||
},
|
||||
{
|
||||
text: MONTHS_UNIT_LABEL,
|
||||
value: CrawlUnits.months,
|
||||
},
|
||||
]}
|
||||
value={crawlUnit}
|
||||
onChange={(e) => setCrawlUnit(e.target.value as CrawlUnits)}
|
||||
/>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</EuiFormRow>
|
||||
</EuiCheckableCard>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
<EuiSpacer />
|
||||
<EuiText size="xs" color="subdued">
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleDescription',
|
||||
{
|
||||
defaultMessage:
|
||||
'The crawl schedule will perform a full crawl on every domain on this index.',
|
||||
}
|
||||
)}
|
||||
<EuiSpacer size="s" />
|
||||
<EuiLink href={docLinks.crawlerManaging} target="_blank" external>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.automaticCrawlSchedule.readMoreLink',
|
||||
{
|
||||
defaultMessage: 'Learn more about scheduling',
|
||||
}
|
||||
)}
|
||||
</EuiLink>
|
||||
</EuiText>
|
||||
</EuiSplitPanel.Inner>
|
||||
</EuiSplitPanel.Outer>
|
||||
</>
|
||||
);
|
||||
};
|
|
@ -1,245 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { kea, MakeLogicType } from 'kea';
|
||||
|
||||
import { ConnectorScheduling } from '@kbn/search-connectors';
|
||||
|
||||
import { CrawlerIndex } from '../../../../../../../common/types/indices';
|
||||
import { Actions } from '../../../../../shared/api_logic/create_api_logic';
|
||||
|
||||
import { flashAPIErrors } from '../../../../../shared/flash_messages';
|
||||
import { HttpLogic } from '../../../../../shared/http';
|
||||
import {
|
||||
UpdateConnectorSchedulingApiLogic,
|
||||
UpdateConnectorSchedulingArgs,
|
||||
} from '../../../../api/connector/update_connector_scheduling_api_logic';
|
||||
import { CrawlSchedule, CrawlScheduleFromServer, CrawlUnits } from '../../../../api/crawler/types';
|
||||
import { crawlScheduleServerToClient } from '../../../../api/crawler/utils';
|
||||
import { IndexNameLogic } from '../../index_name_logic';
|
||||
import { IndexViewLogic } from '../../index_view_logic';
|
||||
|
||||
export interface AutomaticCrawlSchedulerLogicValues {
|
||||
crawlAutomatically: boolean;
|
||||
crawlFrequency: CrawlSchedule['frequency'];
|
||||
crawlUnit: CrawlSchedule['unit'];
|
||||
index: CrawlerIndex;
|
||||
isSubmitting: boolean;
|
||||
useConnectorSchedule: CrawlSchedule['useConnectorSchedule'];
|
||||
}
|
||||
|
||||
export const DEFAULT_VALUES: Pick<
|
||||
AutomaticCrawlSchedulerLogicValues,
|
||||
'crawlFrequency' | 'crawlUnit'
|
||||
> = {
|
||||
crawlFrequency: 24,
|
||||
crawlUnit: CrawlUnits.hours,
|
||||
};
|
||||
|
||||
export interface AutomaticCrawlSchedulerLogicActions {
|
||||
clearCrawlSchedule(): void;
|
||||
deleteCrawlSchedule(): void;
|
||||
disableCrawlAutomatically(): void;
|
||||
onDoneSubmitting(): void;
|
||||
enableCrawlAutomatically(): void;
|
||||
fetchCrawlSchedule(): void;
|
||||
makeUpdateConnectorSchedulingRequest: Actions<{}, UpdateConnectorSchedulingArgs>['makeRequest'];
|
||||
saveChanges(): void;
|
||||
setCrawlAutomatically(crawlAutomatically: boolean): { crawlAutomatically: boolean };
|
||||
setCrawlFrequency(crawlFrequency: CrawlSchedule['frequency']): {
|
||||
crawlFrequency: CrawlSchedule['frequency'];
|
||||
};
|
||||
setCrawlSchedule(crawlSchedule: CrawlSchedule): { crawlSchedule: CrawlSchedule };
|
||||
setCrawlUnit(crawlUnit: CrawlSchedule['unit']): { crawlUnit: CrawlSchedule['unit'] };
|
||||
setUseConnectorSchedule(useConnectorSchedule: CrawlSchedule['useConnectorSchedule']): {
|
||||
useConnectorSchedule: CrawlSchedule['useConnectorSchedule'];
|
||||
};
|
||||
submitConnectorSchedule(scheduling: ConnectorScheduling): { scheduling: ConnectorScheduling };
|
||||
submitCrawlSchedule(): void;
|
||||
updateConnectorSchedulingApiError: Actions<{}, UpdateConnectorSchedulingArgs>['apiError'];
|
||||
}
|
||||
|
||||
export const AutomaticCrawlSchedulerLogic = kea<
|
||||
MakeLogicType<AutomaticCrawlSchedulerLogicValues, AutomaticCrawlSchedulerLogicActions>
|
||||
>({
|
||||
path: ['enterprise_search', 'crawler', 'automatic_crawl_scheduler_logic'],
|
||||
connect: {
|
||||
actions: [
|
||||
UpdateConnectorSchedulingApiLogic,
|
||||
[
|
||||
'makeRequest as makeUpdateConnectorSchedulingRequest',
|
||||
'apiError as updateConnectorSchedulingApiError',
|
||||
],
|
||||
],
|
||||
values: [IndexViewLogic, ['index']],
|
||||
},
|
||||
actions: () => ({
|
||||
clearCrawlSchedule: true,
|
||||
deleteCrawlSchedule: true,
|
||||
disableCrawlAutomatically: true,
|
||||
onDoneSubmitting: true,
|
||||
enableCrawlAutomatically: true,
|
||||
fetchCrawlSchedule: true,
|
||||
saveChanges: true,
|
||||
setCrawlSchedule: (crawlSchedule: CrawlSchedule) => ({ crawlSchedule }),
|
||||
submitConnectorSchedule: (scheduling) => ({ scheduling }),
|
||||
submitCrawlSchedule: true,
|
||||
setCrawlAutomatically: (crawlAutomatically) => ({ crawlAutomatically }),
|
||||
setCrawlFrequency: (crawlFrequency: string) => ({ crawlFrequency }),
|
||||
setCrawlUnit: (crawlUnit: CrawlUnits) => ({ crawlUnit }),
|
||||
setUseConnectorSchedule: (useConnectorSchedule) => ({ useConnectorSchedule }),
|
||||
}),
|
||||
reducers: () => ({
|
||||
crawlAutomatically: [
|
||||
false,
|
||||
{
|
||||
clearCrawlSchedule: () => false,
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
setCrawlAutomatically: (_, { crawlAutomatically }) => crawlAutomatically,
|
||||
setCrawlSchedule: () => true,
|
||||
},
|
||||
],
|
||||
crawlFrequency: [
|
||||
DEFAULT_VALUES.crawlFrequency,
|
||||
{
|
||||
clearCrawlSchedule: () => DEFAULT_VALUES.crawlFrequency,
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
setCrawlSchedule: (_, { crawlSchedule: { frequency } }) => frequency,
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
setCrawlFrequency: (_, { crawlFrequency }) => crawlFrequency,
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
setUseConnectorSchedule: (crawlFrequency) =>
|
||||
crawlFrequency || DEFAULT_VALUES.crawlFrequency,
|
||||
},
|
||||
],
|
||||
crawlUnit: [
|
||||
DEFAULT_VALUES.crawlUnit,
|
||||
{
|
||||
clearCrawlSchedule: () => DEFAULT_VALUES.crawlUnit,
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
setCrawlSchedule: (_, { crawlSchedule: { unit } }) => unit,
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
setCrawlUnit: (_, { crawlUnit }) => crawlUnit,
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
setUseConnectorSchedule: (crawlUnit) => crawlUnit || DEFAULT_VALUES.crawlUnit,
|
||||
},
|
||||
],
|
||||
isSubmitting: [
|
||||
false,
|
||||
{
|
||||
deleteCrawlSchedule: () => true,
|
||||
onDoneSubmitting: () => false,
|
||||
submitCrawlSchedule: () => true,
|
||||
},
|
||||
],
|
||||
useConnectorSchedule: [
|
||||
false,
|
||||
{
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
setCrawlAutomatically: (useConnectorSchedule, { crawlAutomatically }) =>
|
||||
crawlAutomatically || useConnectorSchedule,
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
setCrawlSchedule: (_, { crawlSchedule: { useConnectorSchedule = false } }) =>
|
||||
useConnectorSchedule,
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
setUseConnectorSchedule: (_, { useConnectorSchedule }) => useConnectorSchedule,
|
||||
},
|
||||
],
|
||||
}),
|
||||
listeners: ({ actions, values }) => ({
|
||||
deleteCrawlSchedule: async () => {
|
||||
const { http } = HttpLogic.values;
|
||||
const { indexName } = IndexNameLogic.values;
|
||||
|
||||
try {
|
||||
await http.delete(
|
||||
`/internal/enterprise_search/indices/${indexName}/crawler/crawl_schedule`
|
||||
);
|
||||
} catch (e) {
|
||||
// A 404 is expected and means the user has no crawl schedule to delete
|
||||
if (e.response?.status !== 404) {
|
||||
flashAPIErrors(e);
|
||||
}
|
||||
} finally {
|
||||
actions.onDoneSubmitting();
|
||||
}
|
||||
},
|
||||
fetchCrawlSchedule: async () => {
|
||||
const { http } = HttpLogic.values;
|
||||
const { indexName } = IndexNameLogic.values;
|
||||
|
||||
try {
|
||||
const crawlSchedule: CrawlScheduleFromServer = await http.get(
|
||||
`/internal/enterprise_search/indices/${indexName}/crawler/crawl_schedule`
|
||||
);
|
||||
actions.setCrawlSchedule(crawlScheduleServerToClient(crawlSchedule));
|
||||
} catch (e) {
|
||||
// A 404 is expected and means the user does not have crawl schedule
|
||||
// for this index. We continue to use the defaults.
|
||||
if (e.response?.status !== 404) {
|
||||
flashAPIErrors(e);
|
||||
}
|
||||
}
|
||||
},
|
||||
saveChanges: () => {
|
||||
if (values.crawlAutomatically) {
|
||||
actions.submitCrawlSchedule();
|
||||
} else {
|
||||
actions.deleteCrawlSchedule();
|
||||
}
|
||||
actions.submitConnectorSchedule({
|
||||
...values.index.connector.scheduling.full,
|
||||
enabled: values.crawlAutomatically && values.useConnectorSchedule,
|
||||
});
|
||||
},
|
||||
setCrawlAutomatically: actions.saveChanges,
|
||||
setCrawlFrequency: actions.saveChanges,
|
||||
setCrawlUnit: actions.saveChanges,
|
||||
setUseConnectorSchedule: actions.saveChanges,
|
||||
submitConnectorSchedule: ({ scheduling }) => {
|
||||
actions.makeUpdateConnectorSchedulingRequest({
|
||||
connectorId: values.index.connector.id,
|
||||
scheduling: {
|
||||
...values.index.connector.scheduling,
|
||||
full: scheduling,
|
||||
},
|
||||
});
|
||||
},
|
||||
submitCrawlSchedule: async () => {
|
||||
const { http } = HttpLogic.values;
|
||||
const { indexName } = IndexNameLogic.values;
|
||||
|
||||
if (!values.crawlUnit || !values.crawlFrequency) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const crawlSchedule: CrawlScheduleFromServer = await http.put(
|
||||
`/internal/enterprise_search/indices/${indexName}/crawler/crawl_schedule`,
|
||||
{
|
||||
body: JSON.stringify({
|
||||
frequency: values.crawlFrequency,
|
||||
unit: values.crawlUnit,
|
||||
use_connector_schedule: values.useConnectorSchedule,
|
||||
}),
|
||||
}
|
||||
);
|
||||
actions.setCrawlSchedule(crawlScheduleServerToClient(crawlSchedule));
|
||||
} catch (e) {
|
||||
flashAPIErrors(e);
|
||||
} finally {
|
||||
actions.onDoneSubmitting();
|
||||
}
|
||||
},
|
||||
updateConnectorSchedulingApiError: (e) => flashAPIErrors(e),
|
||||
}),
|
||||
events: ({ actions }) => ({
|
||||
afterMount: () => {
|
||||
actions.fetchCrawlSchedule();
|
||||
},
|
||||
}),
|
||||
});
|
|
@ -1,153 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { setMockActions, setMockValues } from '../../../../../__mocks__/kea_logic';
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { shallow, ShallowWrapper } from 'enzyme';
|
||||
|
||||
import { EuiButton, EuiButtonEmpty, EuiFlyout, EuiFlyoutFooter } from '@elastic/eui';
|
||||
|
||||
import { Loading } from '../../../../../shared/loading';
|
||||
import { rerender } from '../../../../../test_helpers';
|
||||
|
||||
import { CrawlCustomSettingsFlyout } from './crawl_custom_settings_flyout';
|
||||
import { CrawlCustomSettingsFlyoutCrawlDepthPanelWithLogicProps } from './crawl_custom_settings_flyout_crawl_depth_panel';
|
||||
import { CrawlCustomSettingsFlyoutDomainsPanelWithLogicProps } from './crawl_custom_settings_flyout_domains_panel';
|
||||
import { CrawlCustomSettingsFlyoutSeedUrlsPanelWithLogicProps } from './crawl_custom_settings_flyout_seed_urls_panel';
|
||||
|
||||
const MOCK_VALUES = {
|
||||
// CrawlCustomSettingsFlyoutLogic
|
||||
isDataLoading: false,
|
||||
isFormSubmitting: false,
|
||||
isFlyoutVisible: true,
|
||||
isSingleCrawlType: true,
|
||||
selectedDomainUrls: ['https://www.elastic.co'],
|
||||
};
|
||||
|
||||
const MOCK_ACTIONS = {
|
||||
// CrawlCustomSettingsFlyoutLogic
|
||||
hideFlyout: jest.fn(),
|
||||
onSelectDomainUrls: jest.fn(),
|
||||
startCustomCrawl: jest.fn(),
|
||||
};
|
||||
|
||||
describe('CrawlCustomSettingsFlyout', () => {
|
||||
let wrapper: ShallowWrapper;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
setMockValues(MOCK_VALUES);
|
||||
setMockActions(MOCK_ACTIONS);
|
||||
|
||||
wrapper = shallow(<CrawlCustomSettingsFlyout />);
|
||||
});
|
||||
|
||||
it('is empty when the flyout is hidden', () => {
|
||||
setMockValues({
|
||||
...MOCK_VALUES,
|
||||
isFlyoutVisible: false,
|
||||
});
|
||||
|
||||
rerender(wrapper);
|
||||
|
||||
expect(wrapper.isEmptyRender()).toBe(true);
|
||||
});
|
||||
|
||||
it('renders as a modal when visible', () => {
|
||||
expect(wrapper.is(EuiFlyout)).toBe(true);
|
||||
});
|
||||
|
||||
it('can be closed', () => {
|
||||
expect(wrapper.prop('onClose')).toEqual(MOCK_ACTIONS.hideFlyout);
|
||||
expect(wrapper.find(EuiFlyoutFooter).find(EuiButtonEmpty).prop('onClick')).toEqual(
|
||||
MOCK_ACTIONS.hideFlyout
|
||||
);
|
||||
});
|
||||
|
||||
it('lets the user customize their crawl', () => {
|
||||
expect(wrapper.find(Loading)).toHaveLength(0);
|
||||
for (const component of [
|
||||
CrawlCustomSettingsFlyoutCrawlDepthPanelWithLogicProps,
|
||||
CrawlCustomSettingsFlyoutDomainsPanelWithLogicProps,
|
||||
CrawlCustomSettingsFlyoutSeedUrlsPanelWithLogicProps,
|
||||
]) {
|
||||
expect(wrapper.find(component)).toHaveLength(1);
|
||||
}
|
||||
});
|
||||
|
||||
it('shows a loading state', () => {
|
||||
setMockValues({
|
||||
...MOCK_VALUES,
|
||||
isDataLoading: true,
|
||||
});
|
||||
|
||||
rerender(wrapper);
|
||||
|
||||
expect(wrapper.find(Loading)).toHaveLength(1);
|
||||
for (const component of [
|
||||
CrawlCustomSettingsFlyoutCrawlDepthPanelWithLogicProps,
|
||||
CrawlCustomSettingsFlyoutDomainsPanelWithLogicProps,
|
||||
CrawlCustomSettingsFlyoutSeedUrlsPanelWithLogicProps,
|
||||
]) {
|
||||
expect(wrapper.find(component)).toHaveLength(0);
|
||||
}
|
||||
});
|
||||
|
||||
describe('submit button', () => {
|
||||
it('is enabled by default', () => {
|
||||
setMockValues({
|
||||
...MOCK_VALUES,
|
||||
selectedDomainUrls: [],
|
||||
});
|
||||
|
||||
rerender(wrapper);
|
||||
|
||||
expect(wrapper.find(EuiFlyoutFooter).find(EuiButton).prop('disabled')).toEqual(true);
|
||||
});
|
||||
|
||||
it('is disabled when no domains are selected', () => {
|
||||
setMockValues({
|
||||
...MOCK_VALUES,
|
||||
selectedDomainUrls: [],
|
||||
});
|
||||
|
||||
rerender(wrapper);
|
||||
|
||||
expect(wrapper.find(EuiFlyoutFooter).find(EuiButton).prop('disabled')).toEqual(true);
|
||||
});
|
||||
|
||||
it('is disabled when data is loading', () => {
|
||||
setMockValues({
|
||||
...MOCK_VALUES,
|
||||
isDataLoading: true,
|
||||
});
|
||||
|
||||
rerender(wrapper);
|
||||
|
||||
expect(wrapper.find(EuiFlyoutFooter).find(EuiButton).prop('disabled')).toEqual(true);
|
||||
});
|
||||
|
||||
it('shows a loading state when the user makes a request', () => {
|
||||
setMockValues({
|
||||
...MOCK_VALUES,
|
||||
isFormSubmitting: true,
|
||||
});
|
||||
|
||||
rerender(wrapper);
|
||||
|
||||
expect(wrapper.find(EuiFlyoutFooter).find(EuiButton).prop('isLoading')).toEqual(true);
|
||||
});
|
||||
|
||||
it('starts a crawl and hides the modal', () => {
|
||||
wrapper.find(EuiFlyoutFooter).find(EuiButton).simulate('click');
|
||||
|
||||
expect(MOCK_ACTIONS.startCustomCrawl).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,156 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { useValues, useActions } from 'kea';
|
||||
|
||||
import {
|
||||
EuiButton,
|
||||
EuiButtonEmpty,
|
||||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiFlyout,
|
||||
EuiFlyoutBody,
|
||||
EuiFlyoutFooter,
|
||||
EuiFlyoutHeader,
|
||||
EuiSpacer,
|
||||
EuiText,
|
||||
EuiTitle,
|
||||
} from '@elastic/eui';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { CANCEL_BUTTON_LABEL } from '../../../../../shared/constants';
|
||||
import { Loading } from '../../../../../shared/loading';
|
||||
|
||||
import { CrawlCustomSettingsFlyoutCrawlDepthPanelWithLogicProps } from './crawl_custom_settings_flyout_crawl_depth_panel';
|
||||
import { CrawlCustomSettingsFlyoutCrawlTypeSelection } from './crawl_custom_settings_flyout_crawl_type_select';
|
||||
import { CrawlCustomSettingsFlyoutDomainsPanelWithLogicProps } from './crawl_custom_settings_flyout_domains_panel';
|
||||
import { CrawlCustomSettingsFlyoutLogic } from './crawl_custom_settings_flyout_logic';
|
||||
import { CrawlCustomSettingsFlyoutMultipleCrawlDelete } from './crawl_custom_settings_flyout_multi_crawl_delete';
|
||||
import { CrawlCustomSettingsFlyoutMultiCrawlLogic } from './crawl_custom_settings_flyout_multi_crawl_logic';
|
||||
import { CrawlCustomSettingsFlyoutMultipleCrawlTabs } from './crawl_custom_settings_flyout_multi_crawl_tabs';
|
||||
import { CrawlCustomSettingsFlyoutMultiCrawlScheduling } from './crawl_custom_settings_flyout_mutli_crawl';
|
||||
import { CrawlCustomSettingsFlyoutSeedUrlsPanelWithLogicProps } from './crawl_custom_settings_flyout_seed_urls_panel';
|
||||
|
||||
export const CrawlCustomSettingsFlyout: React.FC = () => {
|
||||
const {
|
||||
isDataLoading,
|
||||
isFormSubmitting,
|
||||
isFlyoutVisible,
|
||||
isSingleCrawlType,
|
||||
selectedDomainUrls,
|
||||
} = useValues(CrawlCustomSettingsFlyoutLogic);
|
||||
const { crawlerCustomSchedulingIsValid } = useValues(CrawlCustomSettingsFlyoutMultiCrawlLogic);
|
||||
const { hideFlyout, startCustomCrawl, saveCustomSchedulingConfiguration } = useActions(
|
||||
CrawlCustomSettingsFlyoutLogic
|
||||
);
|
||||
|
||||
if (!isFlyoutVisible) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const submitFunctionLogic = isSingleCrawlType
|
||||
? startCustomCrawl
|
||||
: saveCustomSchedulingConfiguration;
|
||||
|
||||
return (
|
||||
<EuiFlyout ownFocus onClose={hideFlyout} size="m">
|
||||
<EuiFlyoutHeader hasBorder>
|
||||
<EuiTitle size="m">
|
||||
<h2>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.flyoutHeadTitle',
|
||||
{
|
||||
defaultMessage: 'Custom crawl configuration',
|
||||
}
|
||||
)}
|
||||
</h2>
|
||||
</EuiTitle>
|
||||
<EuiSpacer size="s" />
|
||||
<EuiText size="s">
|
||||
<p>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.flyoutHeaderDescription',
|
||||
{
|
||||
defaultMessage: 'Set up a one-time crawl or multiple crawling custom settings.',
|
||||
}
|
||||
)}
|
||||
</p>
|
||||
</EuiText>
|
||||
</EuiFlyoutHeader>
|
||||
|
||||
<EuiFlyoutBody>
|
||||
{isDataLoading ? (
|
||||
<Loading />
|
||||
) : (
|
||||
<>
|
||||
<CrawlCustomSettingsFlyoutCrawlTypeSelection />
|
||||
<EuiSpacer />
|
||||
{isSingleCrawlType ? (
|
||||
<>
|
||||
<CrawlCustomSettingsFlyoutCrawlDepthPanelWithLogicProps />
|
||||
<EuiSpacer />
|
||||
<CrawlCustomSettingsFlyoutDomainsPanelWithLogicProps />
|
||||
<EuiSpacer />
|
||||
<CrawlCustomSettingsFlyoutSeedUrlsPanelWithLogicProps />
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<CrawlCustomSettingsFlyoutMultipleCrawlTabs />
|
||||
<EuiSpacer />
|
||||
<CrawlCustomSettingsFlyoutMultiCrawlScheduling />
|
||||
<EuiSpacer />
|
||||
<CrawlCustomSettingsFlyoutMultipleCrawlDelete />
|
||||
</>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</EuiFlyoutBody>
|
||||
<EuiFlyoutFooter>
|
||||
<EuiFlexGroup justifyContent="flexEnd">
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiButtonEmpty
|
||||
data-telemetry-id="entSearchContent-crawler-customCrawlSettings-cancelStartCrawl"
|
||||
onClick={hideFlyout}
|
||||
>
|
||||
{CANCEL_BUTTON_LABEL}
|
||||
</EuiButtonEmpty>
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiButton
|
||||
data-telemetry-id="entSearchContent-crawler-customCrawlSettings-startCrawl"
|
||||
fill
|
||||
onClick={submitFunctionLogic}
|
||||
disabled={
|
||||
isSingleCrawlType
|
||||
? isDataLoading || selectedDomainUrls.length === 0
|
||||
: !crawlerCustomSchedulingIsValid
|
||||
}
|
||||
isLoading={isFormSubmitting}
|
||||
>
|
||||
{isSingleCrawlType
|
||||
? i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.startCrawlButtonLabel',
|
||||
{
|
||||
defaultMessage: 'Apply and crawl now',
|
||||
}
|
||||
)
|
||||
: i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.saveMultipleCrawlersConfiguration',
|
||||
{
|
||||
defaultMessage: 'Save configuration',
|
||||
}
|
||||
)}
|
||||
</EuiButton>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</EuiFlyoutFooter>
|
||||
</EuiFlyout>
|
||||
);
|
||||
};
|
|
@ -1,50 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { setMockActions, setMockValues } from '../../../../../__mocks__/kea_logic';
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { shallow } from 'enzyme';
|
||||
|
||||
import { EuiFieldNumber } from '@elastic/eui';
|
||||
|
||||
import { CrawlCustomSettingsFlyoutCrawlDepthPanel } from './crawl_custom_settings_flyout_crawl_depth_panel';
|
||||
|
||||
const MOCK_VALUES = {
|
||||
// CrawlCustomSettingsFlyoutLogic
|
||||
maxCrawlDepth: 5,
|
||||
};
|
||||
|
||||
const MOCK_ACTIONS = {
|
||||
// CrawlCustomSettingsFlyoutLogic
|
||||
onSelectMaxCrawlDepth: jest.fn(),
|
||||
};
|
||||
|
||||
describe('CrawlCustomSettingsFlyoutCrawlDepthPanel', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
setMockValues(MOCK_VALUES);
|
||||
setMockActions(MOCK_ACTIONS);
|
||||
});
|
||||
|
||||
it('allows the user to set max crawl depth', () => {
|
||||
const wrapper = shallow(
|
||||
<CrawlCustomSettingsFlyoutCrawlDepthPanel
|
||||
maxCrawlDepth={MOCK_VALUES.maxCrawlDepth}
|
||||
onSelectMaxCrawlDepth={MOCK_ACTIONS.onSelectMaxCrawlDepth}
|
||||
/>
|
||||
);
|
||||
const crawlDepthField = wrapper.find(EuiFieldNumber);
|
||||
|
||||
expect(crawlDepthField.prop('value')).toEqual(5);
|
||||
|
||||
crawlDepthField.simulate('change', { target: { value: '10' } });
|
||||
|
||||
expect(MOCK_ACTIONS.onSelectMaxCrawlDepth).toHaveBeenCalledWith(10);
|
||||
});
|
||||
});
|
|
@ -1,81 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React, { ChangeEvent } from 'react';
|
||||
|
||||
import { useValues, useActions } from 'kea';
|
||||
|
||||
import {
|
||||
EuiFieldNumber,
|
||||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiFormRow,
|
||||
EuiPanel,
|
||||
EuiText,
|
||||
} from '@elastic/eui';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { CrawlCustomSettingsFlyoutLogic } from './crawl_custom_settings_flyout_logic';
|
||||
|
||||
interface CrawlCustomSettingsFlyoutCrawlDepthPanelProps {
|
||||
maxCrawlDepth: number;
|
||||
onSelectMaxCrawlDepth: (depth: number) => void;
|
||||
}
|
||||
|
||||
export const CrawlCustomSettingsFlyoutCrawlDepthPanelWithLogicProps: React.FC = () => {
|
||||
const { maxCrawlDepth } = useValues(CrawlCustomSettingsFlyoutLogic);
|
||||
const { onSelectMaxCrawlDepth } = useActions(CrawlCustomSettingsFlyoutLogic);
|
||||
|
||||
return (
|
||||
<CrawlCustomSettingsFlyoutCrawlDepthPanel
|
||||
maxCrawlDepth={maxCrawlDepth}
|
||||
onSelectMaxCrawlDepth={onSelectMaxCrawlDepth}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
export const CrawlCustomSettingsFlyoutCrawlDepthPanel: React.FC<
|
||||
CrawlCustomSettingsFlyoutCrawlDepthPanelProps
|
||||
> = ({ maxCrawlDepth, onSelectMaxCrawlDepth }) => {
|
||||
return (
|
||||
<EuiPanel hasBorder>
|
||||
<EuiFlexGroup>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiFormRow
|
||||
label={i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.maxCrawlDepthFieldLabel',
|
||||
{
|
||||
defaultMessage: 'Max crawl depth',
|
||||
}
|
||||
)}
|
||||
>
|
||||
<EuiFieldNumber
|
||||
data-telemetry-id="entSearchContent-crawler-customCrawlSettings-maxCrawlDepth"
|
||||
min={1}
|
||||
value={maxCrawlDepth}
|
||||
onChange={(e: ChangeEvent<HTMLInputElement>) =>
|
||||
onSelectMaxCrawlDepth(parseInt(e.target.value, 10))
|
||||
}
|
||||
/>
|
||||
</EuiFormRow>
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem>
|
||||
<EuiText size="xs" color="subdued">
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.maxCrawlDepthFieldDescription',
|
||||
{
|
||||
defaultMessage:
|
||||
'Set a max crawl depth to specify how many pages deep the crawler should traverse. Set the value to one (1) to limit the crawl to only the entry points.',
|
||||
}
|
||||
)}
|
||||
</EuiText>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</EuiPanel>
|
||||
);
|
||||
};
|
|
@ -1,141 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import {
|
||||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiFormRow,
|
||||
EuiHorizontalRule,
|
||||
EuiLink,
|
||||
EuiSpacer,
|
||||
EuiText,
|
||||
EuiTitle,
|
||||
EuiSplitPanel,
|
||||
EuiSwitch,
|
||||
} from '@elastic/eui';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { ConnectorScheduling } from '@kbn/search-connectors';
|
||||
|
||||
import { EnterpriseSearchCronEditor } from '@kbn/search-connectors/components/cron_editor';
|
||||
|
||||
import { CrawlerIndex } from '../../../../../../../common/types/indices';
|
||||
import { docLinks } from '../../../../../shared/doc_links/doc_links';
|
||||
import { isCrawlerIndex } from '../../../../utils/indices';
|
||||
|
||||
interface MultiCrawlSchedulerProps {
|
||||
index: CrawlerIndex;
|
||||
interval: string;
|
||||
schedulingEnabled: boolean;
|
||||
setConnectorSchedulingInterval: (interval: ConnectorScheduling) => void;
|
||||
onSetConnectorSchedulingEnabled: (enabled: boolean) => void;
|
||||
}
|
||||
|
||||
export const MultiCrawlScheduler: React.FC<MultiCrawlSchedulerProps> = ({
|
||||
index,
|
||||
interval,
|
||||
schedulingEnabled,
|
||||
setConnectorSchedulingInterval,
|
||||
onSetConnectorSchedulingEnabled,
|
||||
}) => {
|
||||
if (!isCrawlerIndex(index)) {
|
||||
return <></>;
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<EuiSplitPanel.Outer hasBorder hasShadow={false} grow>
|
||||
<EuiSplitPanel.Inner grow={false}>
|
||||
<EuiFormRow display="rowCompressed">
|
||||
<EuiTitle size="xs">
|
||||
<h3>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.multiCrawlSchedulingFrequency',
|
||||
{
|
||||
defaultMessage: 'Crawl frequency',
|
||||
}
|
||||
)}
|
||||
</h3>
|
||||
</EuiTitle>
|
||||
</EuiFormRow>
|
||||
</EuiSplitPanel.Inner>
|
||||
<EuiSplitPanel.Inner grow={false} color="subdued">
|
||||
<EuiFormRow display="rowCompressed">
|
||||
<EuiSwitch
|
||||
checked={schedulingEnabled}
|
||||
label={i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.multiCrawlSchedulingEnabled',
|
||||
{
|
||||
defaultMessage: 'Enable recurring crawls with the following schedule',
|
||||
}
|
||||
)}
|
||||
onChange={(e) => onSetConnectorSchedulingEnabled(e.target.checked)}
|
||||
compressed
|
||||
/>
|
||||
</EuiFormRow>
|
||||
</EuiSplitPanel.Inner>
|
||||
<EuiSplitPanel.Inner>
|
||||
<EuiFlexGroup>
|
||||
<EuiFlexItem>
|
||||
<EuiTitle size="xxs">
|
||||
<h5>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.cronSchedulingTitle',
|
||||
{
|
||||
defaultMessage: 'Specific time scheduling',
|
||||
}
|
||||
)}
|
||||
</h5>
|
||||
</EuiTitle>
|
||||
<EuiSpacer size="s" />
|
||||
<EuiText size="xs" color="subdued">
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.cronSchedulingDescription',
|
||||
{
|
||||
defaultMessage:
|
||||
'Define the frequency and time for scheduled crawls. The crawler uses UTC as its timezone.',
|
||||
}
|
||||
)}
|
||||
</EuiText>
|
||||
<EuiHorizontalRule margin="s" />
|
||||
<EnterpriseSearchCronEditor
|
||||
disabled={!schedulingEnabled}
|
||||
scheduling={{
|
||||
interval,
|
||||
enabled: schedulingEnabled,
|
||||
}}
|
||||
onChange={setConnectorSchedulingInterval}
|
||||
/>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
<EuiSpacer />
|
||||
<EuiText size="xs" color="subdued">
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.scheduleDescription',
|
||||
{
|
||||
defaultMessage:
|
||||
'The crawl schedule will perform a full crawl on every domain on this index.',
|
||||
}
|
||||
)}
|
||||
<EuiSpacer size="s" />
|
||||
<EuiLink href={docLinks.crawlerManaging} target="_blank" external>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.readMoreLink',
|
||||
{
|
||||
defaultMessage: 'Learn more about scheduling',
|
||||
}
|
||||
)}
|
||||
</EuiLink>
|
||||
</EuiText>
|
||||
</EuiSplitPanel.Inner>
|
||||
</EuiSplitPanel.Outer>
|
||||
</>
|
||||
);
|
||||
};
|
|
@ -1,65 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { useValues, useActions } from 'kea';
|
||||
|
||||
import { EuiFlexGroup, EuiFlexItem, EuiFormFieldset, EuiRadio } from '@elastic/eui';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { CustomCrawlType } from '../../../../api/crawler/types';
|
||||
|
||||
import { CrawlCustomSettingsFlyoutLogic } from './crawl_custom_settings_flyout_logic';
|
||||
|
||||
export const CrawlCustomSettingsFlyoutCrawlTypeSelection: React.FC = () => {
|
||||
const { crawlType } = useValues(CrawlCustomSettingsFlyoutLogic);
|
||||
const { onSelectCrawlType } = useActions(CrawlCustomSettingsFlyoutLogic);
|
||||
|
||||
return (
|
||||
<EuiFormFieldset
|
||||
legend={{
|
||||
children: i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.crawlTypeGroupLabel',
|
||||
{
|
||||
defaultMessage: 'Crawl type',
|
||||
}
|
||||
),
|
||||
}}
|
||||
>
|
||||
<EuiFlexGroup direction="row">
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiRadio
|
||||
id={CustomCrawlType.ONE_TIME}
|
||||
label={i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.oneTimeCrawlRadioLabel',
|
||||
{
|
||||
defaultMessage: 'One-time crawl',
|
||||
}
|
||||
)}
|
||||
checked={crawlType === CustomCrawlType.ONE_TIME}
|
||||
onChange={() => onSelectCrawlType(CustomCrawlType.ONE_TIME)}
|
||||
/>
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiRadio
|
||||
id={CustomCrawlType.MULTIPLE}
|
||||
label={i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.multipleCrawlsRadioLabel',
|
||||
{
|
||||
defaultMessage: 'Multiple crawls',
|
||||
}
|
||||
)}
|
||||
checked={crawlType === CustomCrawlType.MULTIPLE}
|
||||
onChange={() => onSelectCrawlType(CustomCrawlType.MULTIPLE)}
|
||||
/>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
</EuiFormFieldset>
|
||||
);
|
||||
};
|
|
@ -1,197 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import { LogicMounter, mockHttpValues } from '../../../../../__mocks__/kea_logic';
|
||||
import '../../_mocks_/index_name_logic.mock';
|
||||
|
||||
import { nextTick } from '@kbn/test-jest-helpers';
|
||||
|
||||
import { itShowsServerErrorAsFlashMessage } from '../../../../../test_helpers';
|
||||
import { DomainConfig } from '../../../../api/crawler/types';
|
||||
|
||||
import { CrawlCustomSettingsFlyoutDomainConfigLogic } from './crawl_custom_settings_flyout_domain_logic';
|
||||
|
||||
describe('CrawlCustomSettingsFlyoutDomainConfigLogic', () => {
|
||||
const { mount } = new LogicMounter(CrawlCustomSettingsFlyoutDomainConfigLogic);
|
||||
|
||||
const { http } = mockHttpValues;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mount();
|
||||
});
|
||||
|
||||
it('has expected default values', () => {
|
||||
expect(CrawlCustomSettingsFlyoutDomainConfigLogic.values).toEqual({
|
||||
domainConfigMap: {},
|
||||
domainConfigs: [],
|
||||
domainUrls: [],
|
||||
});
|
||||
});
|
||||
|
||||
describe('actions', () => {
|
||||
describe('fetchDomainConfigData', () => {
|
||||
it('updates logic with data that has been converted from server to client', async () => {
|
||||
jest.spyOn(CrawlCustomSettingsFlyoutDomainConfigLogic.actions, 'onRecieveDomainConfigData');
|
||||
|
||||
http.get.mockReturnValueOnce(
|
||||
Promise.resolve({
|
||||
meta: {
|
||||
page: {
|
||||
current: 1,
|
||||
size: 1,
|
||||
total_pages: 2,
|
||||
},
|
||||
},
|
||||
results: [
|
||||
{
|
||||
id: '1234',
|
||||
name: 'https://www.elastic.co',
|
||||
seed_urls: [],
|
||||
sitemap_urls: [],
|
||||
},
|
||||
],
|
||||
})
|
||||
);
|
||||
|
||||
http.get.mockReturnValueOnce(
|
||||
Promise.resolve({
|
||||
meta: {
|
||||
page: {
|
||||
current: 2,
|
||||
size: 1,
|
||||
total_pages: 2,
|
||||
},
|
||||
},
|
||||
results: [
|
||||
{
|
||||
id: '5678',
|
||||
name: 'https://www.swiftype.com',
|
||||
seed_urls: [],
|
||||
sitemap_urls: [],
|
||||
},
|
||||
],
|
||||
})
|
||||
);
|
||||
|
||||
CrawlCustomSettingsFlyoutDomainConfigLogic.actions.fetchDomainConfigData();
|
||||
await nextTick();
|
||||
|
||||
expect(http.get).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
'/internal/enterprise_search/indices/index-name/crawler/domain_configs',
|
||||
{
|
||||
query: {
|
||||
'page[current]': 1,
|
||||
'page[size]': 100,
|
||||
},
|
||||
}
|
||||
);
|
||||
expect(http.get).toHaveBeenNthCalledWith(
|
||||
2,
|
||||
'/internal/enterprise_search/indices/index-name/crawler/domain_configs',
|
||||
{
|
||||
query: {
|
||||
'page[current]': 2,
|
||||
'page[size]': 1,
|
||||
},
|
||||
}
|
||||
);
|
||||
expect(
|
||||
CrawlCustomSettingsFlyoutDomainConfigLogic.actions.onRecieveDomainConfigData
|
||||
).toHaveBeenCalledWith([
|
||||
{
|
||||
id: '1234',
|
||||
name: 'https://www.elastic.co',
|
||||
seedUrls: [],
|
||||
sitemapUrls: [],
|
||||
},
|
||||
{
|
||||
id: '5678',
|
||||
name: 'https://www.swiftype.com',
|
||||
seedUrls: [],
|
||||
sitemapUrls: [],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
itShowsServerErrorAsFlashMessage(http.get, () => {
|
||||
CrawlCustomSettingsFlyoutDomainConfigLogic.actions.fetchDomainConfigData();
|
||||
});
|
||||
});
|
||||
|
||||
describe('onRecieveDomainConfigData', () => {
|
||||
it('saves the data', () => {
|
||||
mount({
|
||||
domainConfigs: [],
|
||||
});
|
||||
|
||||
CrawlCustomSettingsFlyoutDomainConfigLogic.actions.onRecieveDomainConfigData([
|
||||
{
|
||||
name: 'https://www.elastic.co',
|
||||
},
|
||||
] as DomainConfig[]);
|
||||
|
||||
expect(CrawlCustomSettingsFlyoutDomainConfigLogic.values.domainConfigs).toEqual([
|
||||
{
|
||||
name: 'https://www.elastic.co',
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('selectors', () => {
|
||||
beforeEach(() => {
|
||||
mount({
|
||||
domainConfigs: [
|
||||
{
|
||||
name: 'https://www.elastic.co',
|
||||
sitemapUrls: [
|
||||
'https://www.elastic.co/sitemap1.xml',
|
||||
'https://www.elastic.co/sitemap2.xml',
|
||||
],
|
||||
seedUrls: ['https://www.elastic.co/', 'https://www.elastic.co/guide'],
|
||||
},
|
||||
{
|
||||
name: 'https://swiftype.com',
|
||||
sitemapUrls: ['https://swiftype.com/sitemap1.xml', 'https://swiftype.com/sitemap2.xml'],
|
||||
seedUrls: ['https://swiftype.com/', 'https://swiftype.com/documentation'],
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
describe('domainUrls', () => {
|
||||
it('contains all the domain urls from the domain config', () => {
|
||||
expect(CrawlCustomSettingsFlyoutDomainConfigLogic.values.domainUrls).toEqual([
|
||||
'https://www.elastic.co',
|
||||
'https://swiftype.com',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('domainConfigMap', () => {
|
||||
it('contains all the domain urls from the domain config', () => {
|
||||
expect(CrawlCustomSettingsFlyoutDomainConfigLogic.values.domainConfigMap).toEqual({
|
||||
'https://www.elastic.co': {
|
||||
name: 'https://www.elastic.co',
|
||||
sitemapUrls: [
|
||||
'https://www.elastic.co/sitemap1.xml',
|
||||
'https://www.elastic.co/sitemap2.xml',
|
||||
],
|
||||
seedUrls: ['https://www.elastic.co/', 'https://www.elastic.co/guide'],
|
||||
},
|
||||
'https://swiftype.com': {
|
||||
name: 'https://swiftype.com',
|
||||
sitemapUrls: ['https://swiftype.com/sitemap1.xml', 'https://swiftype.com/sitemap2.xml'],
|
||||
seedUrls: ['https://swiftype.com/', 'https://swiftype.com/documentation'],
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,104 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { kea, MakeLogicType } from 'kea';
|
||||
|
||||
import { Meta } from '../../../../../../../common/types';
|
||||
import { flashAPIErrors } from '../../../../../shared/flash_messages';
|
||||
import { HttpLogic } from '../../../../../shared/http';
|
||||
import { DomainConfig, DomainConfigFromServer } from '../../../../api/crawler/types';
|
||||
import { domainConfigServerToClient } from '../../../../api/crawler/utils';
|
||||
import { IndexNameLogic } from '../../index_name_logic';
|
||||
|
||||
export interface CrawlCustomSettingsFlyoutDomainConfigLogicValues {
|
||||
domainUrls: string[];
|
||||
domainConfigs: DomainConfig[];
|
||||
domainConfigMap: {
|
||||
[key: string]: DomainConfig;
|
||||
};
|
||||
}
|
||||
|
||||
export const domainConfigsToDomainUrls = (domainConfigs: DomainConfig[]) =>
|
||||
domainConfigs.map((domainConfig) => domainConfig.name);
|
||||
|
||||
export const domainConfigsToDomainConfigMap = (domainConfigs: DomainConfig[]) =>
|
||||
domainConfigs.reduce(
|
||||
(acc, domainConfig) => ({ ...acc, [domainConfig.name]: domainConfig }),
|
||||
{} as { [key: string]: DomainConfig }
|
||||
);
|
||||
|
||||
export interface CrawlCustomSettingsFlyoutDomainConfigLogicActions {
|
||||
fetchDomainConfigData(): void;
|
||||
onRecieveDomainConfigData(domainConfigs: DomainConfig[]): { domainConfigs: DomainConfig[] };
|
||||
}
|
||||
|
||||
export const CrawlCustomSettingsFlyoutDomainConfigLogic = kea<
|
||||
MakeLogicType<
|
||||
CrawlCustomSettingsFlyoutDomainConfigLogicValues,
|
||||
CrawlCustomSettingsFlyoutDomainConfigLogicActions
|
||||
>
|
||||
>({
|
||||
path: ['enterprise_search', 'crawler', 'crawl_custom_settings_flyout_domain_logic'],
|
||||
actions: () => ({
|
||||
fetchDomainConfigData: true,
|
||||
onRecieveDomainConfigData: (domainConfigs) => ({ domainConfigs }),
|
||||
}),
|
||||
reducers: () => ({
|
||||
domainConfigs: [
|
||||
[],
|
||||
{
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onRecieveDomainConfigData: (_, { domainConfigs }) => domainConfigs,
|
||||
},
|
||||
],
|
||||
}),
|
||||
selectors: () => ({
|
||||
domainUrls: [
|
||||
(selectors) => [selectors.domainConfigs],
|
||||
(domainConfigs: DomainConfig[]) => domainConfigsToDomainUrls(domainConfigs),
|
||||
],
|
||||
domainConfigMap: [
|
||||
(selectors) => [selectors.domainConfigs],
|
||||
(domainConfigs: DomainConfig[]) => domainConfigsToDomainConfigMap(domainConfigs),
|
||||
],
|
||||
}),
|
||||
listeners: ({ actions }) => ({
|
||||
fetchDomainConfigData: async () => {
|
||||
const { http } = HttpLogic.values;
|
||||
const { indexName } = IndexNameLogic.values;
|
||||
|
||||
let domainConfigs: DomainConfig[] = [];
|
||||
let totalPages: number = 1;
|
||||
let nextPage: number = 1;
|
||||
let pageSize: number = 100;
|
||||
|
||||
try {
|
||||
while (nextPage <= totalPages) {
|
||||
const {
|
||||
results,
|
||||
meta: { page },
|
||||
} = await http.get<{
|
||||
meta: Meta;
|
||||
results: DomainConfigFromServer[];
|
||||
}>(`/internal/enterprise_search/indices/${indexName}/crawler/domain_configs`, {
|
||||
query: { 'page[current]': nextPage, 'page[size]': pageSize },
|
||||
});
|
||||
|
||||
domainConfigs = [...domainConfigs, ...results.map(domainConfigServerToClient)];
|
||||
|
||||
nextPage = page.current + 1;
|
||||
totalPages = page.total_pages;
|
||||
pageSize = page.size;
|
||||
}
|
||||
|
||||
actions.onRecieveDomainConfigData(domainConfigs);
|
||||
} catch (e) {
|
||||
flashAPIErrors(e);
|
||||
}
|
||||
},
|
||||
}),
|
||||
});
|
|
@ -1,78 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { setMockActions, setMockValues } from '../../../../../__mocks__/kea_logic';
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { shallow, ShallowWrapper } from 'enzyme';
|
||||
|
||||
import { EuiAccordion, EuiNotificationBadge } from '@elastic/eui';
|
||||
|
||||
import { SimplifiedSelectable } from '../../../../../shared/simplified_selectable/simplified_selectable';
|
||||
|
||||
import { CrawlCustomSettingsFlyoutDomainsPanel } from './crawl_custom_settings_flyout_domains_panel';
|
||||
|
||||
const MOCK_VALUES = {
|
||||
// CrawlCustomSettingsFlyoutLogic
|
||||
domainUrls: ['https://www.elastic.co', 'https://www.swiftype.com'],
|
||||
selectedDomainUrls: ['https://www.elastic.co'],
|
||||
};
|
||||
|
||||
const MOCK_ACTIONS = {
|
||||
// CrawlCustomSettingsFlyoutLogic
|
||||
onSelectDomainUrls: jest.fn(),
|
||||
};
|
||||
|
||||
const getAccordionBadge = (wrapper: ShallowWrapper) => {
|
||||
const accordionWrapper = wrapper.find(EuiAccordion);
|
||||
const extraActionWrapper = shallow(<div>{accordionWrapper.prop('extraAction')}</div>);
|
||||
return extraActionWrapper.find(EuiNotificationBadge);
|
||||
};
|
||||
|
||||
describe('CrawlCustomSettingsFlyoutDomainsPanel', () => {
|
||||
let wrapper: ShallowWrapper;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
setMockValues(MOCK_VALUES);
|
||||
setMockActions(MOCK_ACTIONS);
|
||||
|
||||
wrapper = shallow(
|
||||
<CrawlCustomSettingsFlyoutDomainsPanel
|
||||
domainUrls={MOCK_VALUES.domainUrls}
|
||||
selectedDomainUrls={MOCK_VALUES.selectedDomainUrls}
|
||||
onSelectDomainUrls={MOCK_ACTIONS.onSelectDomainUrls}
|
||||
/>
|
||||
);
|
||||
});
|
||||
|
||||
it('allows the user to select domains', () => {
|
||||
const domainAccordionWrapper = wrapper.find(EuiAccordion);
|
||||
|
||||
expect(domainAccordionWrapper.find(SimplifiedSelectable).props()).toEqual(
|
||||
expect.objectContaining({
|
||||
options: ['https://www.elastic.co', 'https://www.swiftype.com'],
|
||||
selectedOptions: ['https://www.elastic.co'],
|
||||
onChange: MOCK_ACTIONS.onSelectDomainUrls,
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('indicates how many domains are selected', () => {
|
||||
let badge = getAccordionBadge(wrapper);
|
||||
|
||||
expect(badge.render().text()).toContain('1');
|
||||
expect(badge.prop('color')).toEqual('accent');
|
||||
|
||||
wrapper.setProps({ selectedDomainUrls: [] });
|
||||
badge = getAccordionBadge(wrapper);
|
||||
|
||||
expect(badge.render().text()).toContain('0');
|
||||
expect(badge.prop('color')).toEqual('subdued');
|
||||
});
|
||||
});
|
|
@ -1,105 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { useValues, useActions } from 'kea';
|
||||
|
||||
import {
|
||||
EuiAccordion,
|
||||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiIcon,
|
||||
EuiNotificationBadge,
|
||||
EuiPanel,
|
||||
EuiTitle,
|
||||
useGeneratedHtmlId,
|
||||
} from '@elastic/eui';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { SimplifiedSelectable } from '../../../../../shared/simplified_selectable/simplified_selectable';
|
||||
|
||||
import { CrawlCustomSettingsFlyoutDomainConfigLogic } from './crawl_custom_settings_flyout_domain_logic';
|
||||
import { CrawlCustomSettingsFlyoutLogic } from './crawl_custom_settings_flyout_logic';
|
||||
|
||||
interface CrawlCustomSettingsFlyoutDomainsPanelProps {
|
||||
domainUrls: string[];
|
||||
selectedDomainUrls: string[];
|
||||
onSelectDomainUrls: (selectedUrls: string[]) => void;
|
||||
}
|
||||
|
||||
export const CrawlCustomSettingsFlyoutDomainsPanelWithLogicProps: React.FC = () => {
|
||||
const { selectedDomainUrls } = useValues(CrawlCustomSettingsFlyoutLogic);
|
||||
const { domainUrls } = useValues(CrawlCustomSettingsFlyoutDomainConfigLogic);
|
||||
const { onSelectDomainUrls } = useActions(CrawlCustomSettingsFlyoutLogic);
|
||||
|
||||
return (
|
||||
<CrawlCustomSettingsFlyoutDomainsPanel
|
||||
domainUrls={domainUrls}
|
||||
selectedDomainUrls={selectedDomainUrls}
|
||||
onSelectDomainUrls={onSelectDomainUrls}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
export const CrawlCustomSettingsFlyoutDomainsPanel: React.FC<
|
||||
CrawlCustomSettingsFlyoutDomainsPanelProps
|
||||
> = ({ domainUrls, selectedDomainUrls, onSelectDomainUrls }) => {
|
||||
return (
|
||||
<EuiPanel hasBorder>
|
||||
<EuiAccordion
|
||||
id={useGeneratedHtmlId({ prefix: 'domainAccordion' })}
|
||||
initialIsOpen
|
||||
buttonContent={
|
||||
<EuiFlexGroup direction="row" responsive={false} gutterSize="s" alignItems="center">
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiIcon type="globe" />
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem>
|
||||
<EuiTitle size="xs">
|
||||
<h3>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.domainsAccordionButtonLabel',
|
||||
{
|
||||
defaultMessage: 'Add domains to your crawl',
|
||||
}
|
||||
)}
|
||||
</h3>
|
||||
</EuiTitle>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
}
|
||||
extraAction={
|
||||
<EuiFlexGroup alignItems="center" gutterSize="m">
|
||||
<EuiNotificationBadge
|
||||
size="m"
|
||||
color={selectedDomainUrls.length > 0 ? 'accent' : 'subdued'}
|
||||
>
|
||||
{selectedDomainUrls.length}
|
||||
</EuiNotificationBadge>
|
||||
<EuiFlexItem grow={false}>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.selectedDescriptor',
|
||||
{
|
||||
defaultMessage: 'selected',
|
||||
}
|
||||
)}
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
}
|
||||
>
|
||||
<SimplifiedSelectable
|
||||
data-telemetry-id="entSearchContent-crawler-customCrawlSettings-selectDomainUrls"
|
||||
options={domainUrls}
|
||||
selectedOptions={selectedDomainUrls}
|
||||
onChange={onSelectDomainUrls}
|
||||
/>
|
||||
</EuiAccordion>
|
||||
</EuiPanel>
|
||||
);
|
||||
};
|
|
@ -1,382 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import { LogicMounter } from '../../../../../__mocks__/kea_logic';
|
||||
|
||||
import { nextTick } from '@kbn/test-jest-helpers';
|
||||
|
||||
import { DomainConfig, CustomCrawlType } from '../../../../api/crawler/types';
|
||||
import { IndexNameLogic } from '../../index_name_logic';
|
||||
import { IndexViewLogic } from '../../index_view_logic';
|
||||
import { CrawlerLogic } from '../crawler_logic';
|
||||
|
||||
import { CrawlCustomSettingsFlyoutDomainConfigLogic } from './crawl_custom_settings_flyout_domain_logic';
|
||||
import { CrawlCustomSettingsFlyoutLogic } from './crawl_custom_settings_flyout_logic';
|
||||
|
||||
describe('CrawlCustomSettingsFlyoutLogic', () => {
|
||||
const { mount } = new LogicMounter(CrawlCustomSettingsFlyoutLogic);
|
||||
const { mount: indexViewLogicMount } = new LogicMounter(IndexViewLogic);
|
||||
const { mount: indexNameMount } = new LogicMounter(IndexNameLogic);
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
indexViewLogicMount();
|
||||
indexNameMount();
|
||||
mount();
|
||||
});
|
||||
|
||||
it('has expected default values', () => {
|
||||
expect(CrawlCustomSettingsFlyoutLogic.values).toEqual({
|
||||
crawlType: CustomCrawlType.ONE_TIME,
|
||||
customEntryPointUrls: [],
|
||||
customSitemapUrls: [],
|
||||
domainConfigMap: {},
|
||||
domainConfigs: [],
|
||||
entryPointUrls: [],
|
||||
includeSitemapsInRobotsTxt: true,
|
||||
isDataLoading: true,
|
||||
isFlyoutVisible: false,
|
||||
isFormSubmitting: false,
|
||||
isSingleCrawlType: true,
|
||||
maxCrawlDepth: 2,
|
||||
selectedDomainUrls: [],
|
||||
selectedEntryPointUrls: [],
|
||||
selectedSitemapUrls: [],
|
||||
sitemapUrls: [],
|
||||
});
|
||||
});
|
||||
|
||||
describe('actions', () => {
|
||||
describe('hideFlyout', () => {
|
||||
it('hides the modal', () => {
|
||||
CrawlCustomSettingsFlyoutLogic.actions.hideFlyout();
|
||||
|
||||
expect(CrawlCustomSettingsFlyoutLogic.values.isFlyoutVisible).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('onRecieveDomainConfigData', () => {
|
||||
it('saves the data', () => {
|
||||
mount({
|
||||
domainConfigs: [],
|
||||
});
|
||||
|
||||
CrawlCustomSettingsFlyoutLogic.actions.onRecieveDomainConfigData([
|
||||
{
|
||||
name: 'https://www.elastic.co',
|
||||
},
|
||||
] as DomainConfig[]);
|
||||
|
||||
expect(CrawlCustomSettingsFlyoutLogic.values.domainConfigs).toEqual([
|
||||
{
|
||||
name: 'https://www.elastic.co',
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('onSelectCustomSitemapUrls', () => {
|
||||
it('saves the urls', () => {
|
||||
mount({
|
||||
customSitemapUrls: [],
|
||||
});
|
||||
|
||||
CrawlCustomSettingsFlyoutLogic.actions.onSelectCustomSitemapUrls([
|
||||
'https://www.elastic.co/custom-sitemap1.xml',
|
||||
'https://swiftype.com/custom-sitemap2.xml',
|
||||
]);
|
||||
|
||||
expect(CrawlCustomSettingsFlyoutLogic.values.customSitemapUrls).toEqual([
|
||||
'https://www.elastic.co/custom-sitemap1.xml',
|
||||
'https://swiftype.com/custom-sitemap2.xml',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('onSelectCustomEntryPointUrls', () => {
|
||||
it('saves the urls', () => {
|
||||
mount({
|
||||
customEntryPointUrls: [],
|
||||
});
|
||||
|
||||
CrawlCustomSettingsFlyoutLogic.actions.onSelectCustomEntryPointUrls([
|
||||
'https://www.elastic.co/custom-entry-point',
|
||||
'https://swiftype.com/custom-entry-point',
|
||||
]);
|
||||
|
||||
expect(CrawlCustomSettingsFlyoutLogic.values.customEntryPointUrls).toEqual([
|
||||
'https://www.elastic.co/custom-entry-point',
|
||||
'https://swiftype.com/custom-entry-point',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('onSelectDomainUrls', () => {
|
||||
it('saves the urls', () => {
|
||||
mount({
|
||||
selectedDomainUrls: [],
|
||||
});
|
||||
|
||||
CrawlCustomSettingsFlyoutLogic.actions.onSelectDomainUrls([
|
||||
'https://www.elastic.co',
|
||||
'https://swiftype.com',
|
||||
]);
|
||||
|
||||
expect(CrawlCustomSettingsFlyoutLogic.values.selectedDomainUrls).toEqual([
|
||||
'https://www.elastic.co',
|
||||
'https://swiftype.com',
|
||||
]);
|
||||
});
|
||||
|
||||
it('filters selected sitemap urls by selected domains', () => {
|
||||
mount({
|
||||
selectedDomainUrls: ['https://www.elastic.co', 'https://swiftype.com'],
|
||||
selectedSitemapUrls: [
|
||||
'https://www.elastic.co/sitemap1.xml',
|
||||
'https://swiftype.com/sitemap2.xml',
|
||||
],
|
||||
});
|
||||
|
||||
CrawlCustomSettingsFlyoutLogic.actions.onSelectDomainUrls(['https://swiftype.com']);
|
||||
|
||||
expect(CrawlCustomSettingsFlyoutLogic.values.selectedSitemapUrls).toEqual([
|
||||
'https://swiftype.com/sitemap2.xml',
|
||||
]);
|
||||
});
|
||||
|
||||
it('filters selected entry point urls by selected domains', () => {
|
||||
mount({
|
||||
selectedDomainUrls: ['https://www.elastic.co', 'https://swiftype.com'],
|
||||
selectedEntryPointUrls: [
|
||||
'https://www.elastic.co/guide',
|
||||
'https://swiftype.com/documentation',
|
||||
],
|
||||
});
|
||||
|
||||
CrawlCustomSettingsFlyoutLogic.actions.onSelectDomainUrls(['https://swiftype.com']);
|
||||
|
||||
expect(CrawlCustomSettingsFlyoutLogic.values.selectedEntryPointUrls).toEqual([
|
||||
'https://swiftype.com/documentation',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('onSelectEntryPointUrls', () => {
|
||||
it('saves the urls', () => {
|
||||
mount({
|
||||
selectedEntryPointUrls: [],
|
||||
});
|
||||
|
||||
CrawlCustomSettingsFlyoutLogic.actions.onSelectEntryPointUrls([
|
||||
'https://www.elastic.co/guide',
|
||||
'https://swiftype.com/documentation',
|
||||
]);
|
||||
|
||||
expect(CrawlCustomSettingsFlyoutLogic.values.selectedEntryPointUrls).toEqual([
|
||||
'https://www.elastic.co/guide',
|
||||
'https://swiftype.com/documentation',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('onSelectMaxCrawlDepth', () => {
|
||||
it('saves the crawl depth', () => {
|
||||
mount({
|
||||
maxCrawlDepth: 5,
|
||||
});
|
||||
|
||||
CrawlCustomSettingsFlyoutLogic.actions.onSelectMaxCrawlDepth(10);
|
||||
|
||||
expect(CrawlCustomSettingsFlyoutLogic.values.maxCrawlDepth).toEqual(10);
|
||||
});
|
||||
});
|
||||
|
||||
describe('onSelectSitemapUrls', () => {
|
||||
it('saves the urls', () => {
|
||||
mount({
|
||||
selectedSitemapUrls: [],
|
||||
});
|
||||
|
||||
CrawlCustomSettingsFlyoutLogic.actions.onSelectSitemapUrls([
|
||||
'https://www.elastic.co/sitemap1.xml',
|
||||
'https://swiftype.com/sitemap2.xml',
|
||||
]);
|
||||
|
||||
expect(CrawlCustomSettingsFlyoutLogic.values.selectedSitemapUrls).toEqual([
|
||||
'https://www.elastic.co/sitemap1.xml',
|
||||
'https://swiftype.com/sitemap2.xml',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('showFlyout', () => {
|
||||
it('shows the modal and resets the form', () => {
|
||||
mount({
|
||||
customEntryPointUrls: [
|
||||
'https://www.elastic.co/custom-entry-point',
|
||||
'https://swiftype.com/custom-entry-point',
|
||||
],
|
||||
customSitemapUrls: [
|
||||
'https://www.elastic.co/custom-sitemap1.xml',
|
||||
'https://swiftype.com/custom-sitemap2.xml',
|
||||
],
|
||||
includeSitemapsInRobotsTxt: false,
|
||||
isDataLoading: false,
|
||||
isFlyoutVisible: false,
|
||||
selectedDomainUrls: ['https://www.elastic.co', 'https://swiftype.com'],
|
||||
selectedEntryPointUrls: [
|
||||
'https://www.elastic.co/guide',
|
||||
'https://swiftype.com/documentation',
|
||||
],
|
||||
selectedSitemapUrls: [
|
||||
'https://www.elastic.co/sitemap1.xml',
|
||||
'https://swiftype.com/sitemap2.xml',
|
||||
],
|
||||
});
|
||||
|
||||
CrawlCustomSettingsFlyoutLogic.actions.showFlyout();
|
||||
|
||||
expect(CrawlCustomSettingsFlyoutLogic.values).toEqual(
|
||||
expect.objectContaining({
|
||||
customEntryPointUrls: [],
|
||||
customSitemapUrls: [],
|
||||
includeSitemapsInRobotsTxt: true,
|
||||
isDataLoading: true,
|
||||
isFlyoutVisible: true,
|
||||
selectedDomainUrls: [],
|
||||
selectedEntryPointUrls: [],
|
||||
selectedSitemapUrls: [],
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('fetches the latest data', () => {
|
||||
jest.spyOn(CrawlCustomSettingsFlyoutLogic.actions, 'fetchDomainConfigData');
|
||||
|
||||
CrawlCustomSettingsFlyoutLogic.actions.showFlyout();
|
||||
|
||||
expect(CrawlCustomSettingsFlyoutLogic.actions.fetchDomainConfigData).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('startCustomCrawl', () => {
|
||||
it('can start a custom crawl for selected domains', async () => {
|
||||
mount({
|
||||
includeSitemapsInRobotsTxt: true,
|
||||
maxCrawlDepth: 5,
|
||||
selectedDomainUrls: ['https://www.elastic.co', 'https://swiftype.com'],
|
||||
});
|
||||
CrawlerLogic.mount();
|
||||
jest.spyOn(CrawlCustomSettingsFlyoutLogic.actions, 'startCrawl');
|
||||
|
||||
CrawlCustomSettingsFlyoutLogic.actions.startCustomCrawl();
|
||||
await nextTick();
|
||||
|
||||
expect(CrawlCustomSettingsFlyoutLogic.actions.startCrawl).toHaveBeenCalledWith({
|
||||
domain_allowlist: ['https://www.elastic.co', 'https://swiftype.com'],
|
||||
max_crawl_depth: 5,
|
||||
sitemap_discovery_disabled: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('can start a custom crawl selected domains, sitemaps, and seed urls', async () => {
|
||||
mount({
|
||||
includeSitemapsInRobotsTxt: true,
|
||||
maxCrawlDepth: 5,
|
||||
selectedDomainUrls: ['https://www.elastic.co', 'https://swiftype.com'],
|
||||
selectedEntryPointUrls: [
|
||||
'https://www.elastic.co/guide',
|
||||
'https://swiftype.com/documentation',
|
||||
],
|
||||
selectedSitemapUrls: [
|
||||
'https://www.elastic.co/sitemap1.xml',
|
||||
'https://swiftype.com/sitemap2.xml',
|
||||
],
|
||||
});
|
||||
CrawlerLogic.mount();
|
||||
jest.spyOn(CrawlCustomSettingsFlyoutLogic.actions, 'startCrawl');
|
||||
|
||||
CrawlCustomSettingsFlyoutLogic.actions.startCustomCrawl();
|
||||
await nextTick();
|
||||
|
||||
expect(CrawlCustomSettingsFlyoutLogic.actions.startCrawl).toHaveBeenCalledWith({
|
||||
domain_allowlist: ['https://www.elastic.co', 'https://swiftype.com'],
|
||||
max_crawl_depth: 5,
|
||||
seed_urls: ['https://www.elastic.co/guide', 'https://swiftype.com/documentation'],
|
||||
sitemap_urls: [
|
||||
'https://www.elastic.co/sitemap1.xml',
|
||||
'https://swiftype.com/sitemap2.xml',
|
||||
],
|
||||
sitemap_discovery_disabled: false,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('toggleIncludeSitemapsInRobotsTxt', () => {
|
||||
it('toggles the flag', () => {
|
||||
mount({
|
||||
includeSitemapsInRobotsTxt: false,
|
||||
});
|
||||
|
||||
CrawlCustomSettingsFlyoutLogic.actions.toggleIncludeSitemapsInRobotsTxt();
|
||||
|
||||
expect(CrawlCustomSettingsFlyoutLogic.values.includeSitemapsInRobotsTxt).toEqual(true);
|
||||
|
||||
mount({
|
||||
includeSitemapsInRobotsTxt: true,
|
||||
});
|
||||
|
||||
CrawlCustomSettingsFlyoutLogic.actions.toggleIncludeSitemapsInRobotsTxt();
|
||||
|
||||
expect(CrawlCustomSettingsFlyoutLogic.values.includeSitemapsInRobotsTxt).toEqual(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('selectors', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mount({
|
||||
selectedDomainUrls: ['https://swiftype.com'],
|
||||
});
|
||||
CrawlCustomSettingsFlyoutDomainConfigLogic.actions.onRecieveDomainConfigData([
|
||||
{
|
||||
id: '1',
|
||||
name: 'https://www.elastic.co',
|
||||
sitemapUrls: [
|
||||
'https://www.elastic.co/sitemap1.xml',
|
||||
'https://www.elastic.co/sitemap2.xml',
|
||||
],
|
||||
seedUrls: ['https://www.elastic.co/', 'https://www.elastic.co/guide'],
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
name: 'https://swiftype.com',
|
||||
sitemapUrls: ['https://swiftype.com/sitemap1.xml', 'https://swiftype.com/sitemap2.xml'],
|
||||
seedUrls: ['https://swiftype.com/', 'https://swiftype.com/documentation'],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
describe('entryPointUrls', () => {
|
||||
it('contains all the sitemap urls from selected domains', () => {
|
||||
expect(CrawlCustomSettingsFlyoutLogic.values.entryPointUrls).toEqual([
|
||||
'https://swiftype.com/',
|
||||
'https://swiftype.com/documentation',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('sitemapUrls', () => {
|
||||
it('contains all the sitemap urls from selected domains', () => {
|
||||
expect(CrawlCustomSettingsFlyoutLogic.values.sitemapUrls).toEqual([
|
||||
'https://swiftype.com/sitemap1.xml',
|
||||
'https://swiftype.com/sitemap2.xml',
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,251 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { kea, MakeLogicType } from 'kea';
|
||||
|
||||
import { CustomCrawlType, DomainConfig } from '../../../../api/crawler/types';
|
||||
|
||||
import { CrawlerActions, CrawlerLogic, CrawlRequestOverrides } from '../crawler_logic';
|
||||
import { extractDomainAndEntryPointFromUrl } from '../domain_management/add_domain/utils';
|
||||
|
||||
import { CrawlCustomSettingsFlyoutDomainConfigLogic } from './crawl_custom_settings_flyout_domain_logic';
|
||||
|
||||
import { CrawlCustomSettingsFlyoutMultiCrawlLogic } from './crawl_custom_settings_flyout_multi_crawl_logic';
|
||||
|
||||
export interface CrawlCustomSettingsFlyoutLogicValues {
|
||||
crawlType: string;
|
||||
customEntryPointUrls: string[];
|
||||
customSitemapUrls: string[];
|
||||
domainUrls: string[];
|
||||
domainConfigs: DomainConfig[];
|
||||
domainConfigMap: {
|
||||
[key: string]: DomainConfig;
|
||||
};
|
||||
entryPointUrls: string[];
|
||||
includeSitemapsInRobotsTxt: boolean;
|
||||
isDataLoading: boolean;
|
||||
isFormSubmitting: boolean;
|
||||
isFlyoutVisible: boolean;
|
||||
isSingleCrawlType: boolean;
|
||||
maxCrawlDepth: number;
|
||||
selectedDomainUrls: string[];
|
||||
selectedEntryPointUrls: string[];
|
||||
selectedSitemapUrls: string[];
|
||||
sitemapUrls: string[];
|
||||
}
|
||||
|
||||
export interface CrawlCustomSettingsFlyoutLogicActions {
|
||||
fetchDomainConfigData(): void;
|
||||
fetchCustomScheduling(): void;
|
||||
postCustomScheduling(): void;
|
||||
hideFlyout(): void;
|
||||
saveCustomSchedulingConfiguration(): void;
|
||||
onRecieveDomainConfigData(domainConfigs: DomainConfig[]): { domainConfigs: DomainConfig[] };
|
||||
onSelectCrawlType(crawlType: string): { crawlType: string };
|
||||
onSelectCustomEntryPointUrls(entryPointUrls: string[]): { entryPointUrls: string[] };
|
||||
onSelectCustomSitemapUrls(sitemapUrls: string[]): { sitemapUrls: string[] };
|
||||
onSelectDomainUrls(domainUrls: string[]): { domainUrls: string[] };
|
||||
onSelectEntryPointUrls(entryPointUrls: string[]): { entryPointUrls: string[] };
|
||||
onSelectMaxCrawlDepth(maxCrawlDepth: number): { maxCrawlDepth: number };
|
||||
onSelectSitemapUrls(sitemapUrls: string[]): { sitemapUrls: string[] };
|
||||
showFlyout(): void;
|
||||
startCustomCrawl(): void;
|
||||
startCrawl: CrawlerActions['startCrawl'];
|
||||
toggleIncludeSitemapsInRobotsTxt(): void;
|
||||
}
|
||||
|
||||
export const filterSeedUrlsByDomainUrls = (seedUrls: string[], domainUrls: string[]): string[] => {
|
||||
const domainUrlMap = domainUrls.reduce(
|
||||
(acc, domainUrl) => ({ ...acc, [domainUrl]: true }),
|
||||
{} as { [key: string]: boolean }
|
||||
);
|
||||
|
||||
return seedUrls.filter((seedUrl) => {
|
||||
const { domain } = extractDomainAndEntryPointFromUrl(seedUrl);
|
||||
return !!domainUrlMap[domain];
|
||||
});
|
||||
};
|
||||
|
||||
export const CrawlCustomSettingsFlyoutLogic = kea<
|
||||
MakeLogicType<CrawlCustomSettingsFlyoutLogicValues, CrawlCustomSettingsFlyoutLogicActions>
|
||||
>({
|
||||
path: ['enterprise_search', 'crawler', 'crawl_custom_settings_flyout_logic'],
|
||||
connect: {
|
||||
actions: [
|
||||
CrawlerLogic,
|
||||
['startCrawl'],
|
||||
CrawlCustomSettingsFlyoutDomainConfigLogic,
|
||||
['fetchDomainConfigData', 'onRecieveDomainConfigData'],
|
||||
CrawlCustomSettingsFlyoutMultiCrawlLogic,
|
||||
['fetchCustomScheduling', 'postCustomScheduling'],
|
||||
],
|
||||
values: [CrawlCustomSettingsFlyoutDomainConfigLogic, ['domainConfigs', 'domainConfigMap']],
|
||||
},
|
||||
actions: () => ({
|
||||
saveCustomSchedulingConfiguration: true,
|
||||
hideFlyout: true,
|
||||
onSelectCrawlType: (crawlType) => ({ crawlType }),
|
||||
onSelectCustomEntryPointUrls: (entryPointUrls) => ({ entryPointUrls }),
|
||||
onSelectCustomSitemapUrls: (sitemapUrls) => ({ sitemapUrls }),
|
||||
onSelectDomainUrls: (domainUrls) => ({ domainUrls }),
|
||||
onSelectEntryPointUrls: (entryPointUrls) => ({ entryPointUrls }),
|
||||
onSelectMaxCrawlDepth: (maxCrawlDepth) => ({ maxCrawlDepth }),
|
||||
onSelectSitemapUrls: (sitemapUrls) => ({ sitemapUrls }),
|
||||
startCustomCrawl: true,
|
||||
toggleIncludeSitemapsInRobotsTxt: true,
|
||||
showFlyout: true,
|
||||
}),
|
||||
reducers: () => ({
|
||||
crawlType: [
|
||||
CustomCrawlType.ONE_TIME,
|
||||
{
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onSelectCrawlType: (_, { crawlType }) => crawlType,
|
||||
},
|
||||
],
|
||||
customEntryPointUrls: [
|
||||
[],
|
||||
{
|
||||
showFlyout: () => [],
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onSelectCustomEntryPointUrls: (_, { entryPointUrls }) => entryPointUrls,
|
||||
},
|
||||
],
|
||||
customSitemapUrls: [
|
||||
[],
|
||||
{
|
||||
showFlyout: () => [],
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onSelectCustomSitemapUrls: (_, { sitemapUrls }) => sitemapUrls,
|
||||
},
|
||||
],
|
||||
includeSitemapsInRobotsTxt: [
|
||||
true,
|
||||
{
|
||||
showFlyout: () => true,
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
toggleIncludeSitemapsInRobotsTxt: (includeSitemapsInRobotsTxt) =>
|
||||
!includeSitemapsInRobotsTxt,
|
||||
},
|
||||
],
|
||||
isDataLoading: [
|
||||
true,
|
||||
{
|
||||
showFlyout: () => true,
|
||||
onRecieveDomainConfigData: () => false,
|
||||
},
|
||||
],
|
||||
isFormSubmitting: [
|
||||
false,
|
||||
{
|
||||
startCustomCrawl: () => true,
|
||||
startCrawl: () => false,
|
||||
},
|
||||
],
|
||||
isFlyoutVisible: [
|
||||
false,
|
||||
{
|
||||
showFlyout: () => true,
|
||||
hideFlyout: () => false,
|
||||
startCrawl: () => false,
|
||||
saveCustomSchedulingConfiguration: () => false,
|
||||
},
|
||||
],
|
||||
maxCrawlDepth: [
|
||||
2,
|
||||
{
|
||||
showFlyout: () => 2,
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onSelectMaxCrawlDepth: (_, { maxCrawlDepth }) => maxCrawlDepth,
|
||||
},
|
||||
],
|
||||
selectedDomainUrls: [
|
||||
[],
|
||||
{
|
||||
showFlyout: () => [],
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onSelectDomainUrls: (_, { domainUrls }) => domainUrls,
|
||||
},
|
||||
],
|
||||
selectedEntryPointUrls: [
|
||||
[],
|
||||
{
|
||||
showFlyout: () => [],
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onSelectEntryPointUrls: (_, { entryPointUrls }) => entryPointUrls,
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onSelectDomainUrls: (entryPointUrls, { domainUrls }) =>
|
||||
filterSeedUrlsByDomainUrls(entryPointUrls, domainUrls),
|
||||
},
|
||||
],
|
||||
selectedSitemapUrls: [
|
||||
[],
|
||||
{
|
||||
showFlyout: () => [],
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onSelectSitemapUrls: (_, { sitemapUrls }) => sitemapUrls,
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onSelectDomainUrls: (selectedSitemapUrls, { domainUrls }) =>
|
||||
filterSeedUrlsByDomainUrls(selectedSitemapUrls, domainUrls),
|
||||
},
|
||||
],
|
||||
}),
|
||||
selectors: () => ({
|
||||
entryPointUrls: [
|
||||
(selectors) => [
|
||||
CrawlCustomSettingsFlyoutDomainConfigLogic.selectors.domainConfigMap,
|
||||
selectors.selectedDomainUrls,
|
||||
],
|
||||
(domainConfigMap: { [key: string]: DomainConfig }, selectedDomainUrls: string[]): string[] =>
|
||||
selectedDomainUrls.flatMap(
|
||||
(selectedDomainUrl) => domainConfigMap[selectedDomainUrl].seedUrls
|
||||
),
|
||||
],
|
||||
isSingleCrawlType: [
|
||||
(selectors) => [selectors.crawlType],
|
||||
(crawlType: string): boolean => crawlType === CustomCrawlType.ONE_TIME,
|
||||
],
|
||||
sitemapUrls: [
|
||||
(selectors) => [
|
||||
CrawlCustomSettingsFlyoutDomainConfigLogic.selectors.domainConfigMap,
|
||||
selectors.selectedDomainUrls,
|
||||
],
|
||||
(domainConfigMap: { [key: string]: DomainConfig }, selectedDomainUrls: string[]): string[] =>
|
||||
selectedDomainUrls.flatMap(
|
||||
(selectedDomainUrl) => domainConfigMap[selectedDomainUrl].sitemapUrls
|
||||
),
|
||||
],
|
||||
}),
|
||||
listeners: ({ actions, values }) => ({
|
||||
showFlyout: async () => {
|
||||
actions.fetchDomainConfigData();
|
||||
actions.fetchCustomScheduling();
|
||||
},
|
||||
saveCustomSchedulingConfiguration: () => {
|
||||
actions.postCustomScheduling();
|
||||
},
|
||||
startCustomCrawl: () => {
|
||||
const overrides: CrawlRequestOverrides = {
|
||||
sitemap_discovery_disabled: !values.includeSitemapsInRobotsTxt,
|
||||
max_crawl_depth: values.maxCrawlDepth,
|
||||
domain_allowlist: values.selectedDomainUrls,
|
||||
};
|
||||
|
||||
const seedUrls = [...values.selectedEntryPointUrls, ...values.customEntryPointUrls];
|
||||
if (seedUrls.length > 0) {
|
||||
overrides.seed_urls = seedUrls;
|
||||
}
|
||||
|
||||
const sitemapUrls = [...values.selectedSitemapUrls, ...values.customSitemapUrls];
|
||||
if (sitemapUrls.length > 0) {
|
||||
overrides.sitemap_urls = sitemapUrls;
|
||||
}
|
||||
|
||||
actions.startCrawl(overrides);
|
||||
},
|
||||
}),
|
||||
});
|
|
@ -1,34 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { useValues, useActions } from 'kea';
|
||||
|
||||
import { EuiButton } from '@elastic/eui';
|
||||
|
||||
import { CrawlCustomSettingsFlyoutMultiCrawlLogic } from './crawl_custom_settings_flyout_multi_crawl_logic';
|
||||
|
||||
export const CrawlCustomSettingsFlyoutMultipleCrawlDelete: React.FC = () => {
|
||||
const { crawlerConfigActiveTab, crawlerConfigurations } = useValues(
|
||||
CrawlCustomSettingsFlyoutMultiCrawlLogic
|
||||
);
|
||||
const { onDeleteCustomCrawler } = useActions(CrawlCustomSettingsFlyoutMultiCrawlLogic);
|
||||
|
||||
return (
|
||||
<>
|
||||
<EuiButton
|
||||
iconType="trash"
|
||||
color="danger"
|
||||
disabled={crawlerConfigurations.length < 2}
|
||||
onClick={() => onDeleteCustomCrawler(crawlerConfigActiveTab)}
|
||||
>
|
||||
{`Delete Crawl ${crawlerConfigActiveTab + 1}`}
|
||||
</EuiButton>
|
||||
</>
|
||||
);
|
||||
};
|
|
@ -1,379 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { kea, MakeLogicType } from 'kea';
|
||||
|
||||
import { ConnectorScheduling } from '@kbn/search-connectors';
|
||||
|
||||
import { CrawlerCustomSchedulesServer } from '../../../../../../../common/types/crawler';
|
||||
|
||||
import { CrawlerIndex } from '../../../../../../../common/types/indices';
|
||||
import { Actions } from '../../../../../shared/api_logic/create_api_logic';
|
||||
import { flashAPIErrors } from '../../../../../shared/flash_messages';
|
||||
import { HttpLogic } from '../../../../../shared/http';
|
||||
import { DomainConfig, CrawlerCustomSchedule } from '../../../../api/crawler/types';
|
||||
import {
|
||||
crawlerCustomSchedulingServerToClient,
|
||||
crawlerCustomSchedulingClientToServer,
|
||||
} from '../../../../api/crawler/utils';
|
||||
import { IndexNameLogic } from '../../index_name_logic';
|
||||
|
||||
import { IndexViewLogic } from '../../index_view_logic';
|
||||
|
||||
import {
|
||||
CrawlCustomSettingsFlyoutDomainConfigLogic,
|
||||
domainConfigsToDomainConfigMap,
|
||||
} from './crawl_custom_settings_flyout_domain_logic';
|
||||
|
||||
import { filterSeedUrlsByDomainUrls } from './crawl_custom_settings_flyout_logic';
|
||||
import {
|
||||
PostCustomSchedulingApiLogic,
|
||||
PostCustomSchedulingArgs,
|
||||
} from './crawl_custom_settings_flyout_schedule_api_logic';
|
||||
|
||||
export interface CrawlCustomSettingsFlyoutMultiCrawlLogicValues {
|
||||
crawlerConfigActiveTab: number;
|
||||
crawlerConfigurations: CrawlerCustomSchedule[];
|
||||
crawlerConfigurationsWithDomainData: CrawlerCustomSchedule[];
|
||||
index: CrawlerIndex;
|
||||
domainUrls: string[];
|
||||
domainConfigs: DomainConfig[];
|
||||
domainConfigMap: {
|
||||
[key: string]: DomainConfig;
|
||||
};
|
||||
crawlerCustomSchedulingIsValid: boolean;
|
||||
}
|
||||
|
||||
type PostCustomSchedulingApiValues = Actions<PostCustomSchedulingArgs, {}>;
|
||||
|
||||
export interface CrawlCustomSettingsFlyoutMultiCrawlLogicActions {
|
||||
fetchCustomScheduling(): void;
|
||||
postCustomScheduling(): void;
|
||||
onReceiveCrawlerCustomScheduling(crawlerConfigurations: CrawlerCustomSchedule[]): {
|
||||
crawlerConfigurations: CrawlerCustomSchedule[];
|
||||
};
|
||||
onAddCustomCrawler(index: number): { index: number };
|
||||
onDeleteCustomCrawler(index: number): { index: number };
|
||||
onSelectCrawlerConfigActiveTab(crawlerConfigActiveTab: number): {
|
||||
crawlerConfigActiveTab: number;
|
||||
};
|
||||
onSelectCustomEntryPointUrls(
|
||||
index: number,
|
||||
entryPointUrls: string[]
|
||||
): { index: number; entryPointUrls: string[] };
|
||||
onSelectCustomSitemapUrls(
|
||||
index: number,
|
||||
sitemapUrls: string[]
|
||||
): { index: number; sitemapUrls: string[] };
|
||||
onSelectDomainUrls(index: number, domainUrls: string[]): { index: number; domainUrls: string[] };
|
||||
onSelectEntryPointUrls(
|
||||
index: number,
|
||||
entryPointUrls: string[]
|
||||
): { index: number; entryPointUrls: string[] };
|
||||
onSelectMaxCrawlDepth(
|
||||
index: number,
|
||||
maxCrawlDepth: number
|
||||
): { index: number; maxCrawlDepth: number };
|
||||
onSelectSitemapUrls(
|
||||
index: number,
|
||||
sitemapUrls: string[]
|
||||
): { index: number; sitemapUrls: string[] };
|
||||
setConnectorSchedulingInterval(
|
||||
index: number,
|
||||
newSchedule: ConnectorScheduling
|
||||
): {
|
||||
index: number;
|
||||
newSchedule: ConnectorScheduling;
|
||||
};
|
||||
onSetConnectorSchedulingEnabled(
|
||||
index: number,
|
||||
enabled: boolean
|
||||
): {
|
||||
index: number;
|
||||
enabled: boolean;
|
||||
};
|
||||
toggleIncludeSitemapsInRobotsTxt(index: number): { index: number };
|
||||
makePostCustomSchedulingRequest: PostCustomSchedulingApiValues['makeRequest'];
|
||||
}
|
||||
|
||||
const defaulCrawlerConfiguration: CrawlerCustomSchedule = {
|
||||
scheduleKey: 'crawler_0',
|
||||
name: 'Crawler 0',
|
||||
maxCrawlDepth: 2,
|
||||
customEntryPointUrls: [],
|
||||
customSitemapUrls: [],
|
||||
includeSitemapsInRobotsTxt: true,
|
||||
selectedDomainUrls: [],
|
||||
selectedEntryPointUrls: [],
|
||||
selectedSitemapUrls: [],
|
||||
interval: '0 0 0 * * ?',
|
||||
enabled: false,
|
||||
sitemapUrls: [],
|
||||
entryPointUrls: [],
|
||||
};
|
||||
|
||||
export const CrawlCustomSettingsFlyoutMultiCrawlLogic = kea<
|
||||
MakeLogicType<
|
||||
CrawlCustomSettingsFlyoutMultiCrawlLogicValues,
|
||||
CrawlCustomSettingsFlyoutMultiCrawlLogicActions
|
||||
>
|
||||
>({
|
||||
path: ['enterprise_search', 'crawler', 'crawl_custom_settings_flyout_multi_crawl_logic'],
|
||||
connect: {
|
||||
actions: [
|
||||
PostCustomSchedulingApiLogic,
|
||||
['makeRequest as makePostCustomSchedulingRequest'],
|
||||
CrawlCustomSettingsFlyoutDomainConfigLogic,
|
||||
['onRecieveDomainConfigData'],
|
||||
],
|
||||
values: [
|
||||
IndexViewLogic,
|
||||
['index'],
|
||||
CrawlCustomSettingsFlyoutDomainConfigLogic,
|
||||
['domainConfigs', 'domainConfigMap'],
|
||||
],
|
||||
},
|
||||
actions: () => ({
|
||||
fetchCustomScheduling: true,
|
||||
postCustomScheduling: true,
|
||||
onAddCustomCrawler: (index) => ({ index }),
|
||||
onDeleteCustomCrawler: (index) => ({ index }),
|
||||
onReceiveCrawlerCustomScheduling: (crawlerConfigurations) => ({ crawlerConfigurations }),
|
||||
onSelectCrawlerConfigActiveTab: (crawlerConfigActiveTab) => ({ crawlerConfigActiveTab }),
|
||||
onSelectCustomEntryPointUrls: (index, entryPointUrls) => ({ index, entryPointUrls }),
|
||||
onSelectCustomSitemapUrls: (index, sitemapUrls) => ({ index, sitemapUrls }),
|
||||
onSelectDomainUrls: (index, domainUrls) => ({ index, domainUrls }),
|
||||
onSelectEntryPointUrls: (index, entryPointUrls) => ({ index, entryPointUrls }),
|
||||
onSelectMaxCrawlDepth: (index, maxCrawlDepth) => ({ index, maxCrawlDepth }),
|
||||
onSelectSitemapUrls: (index, sitemapUrls) => ({ index, sitemapUrls }),
|
||||
onSetConnectorSchedulingEnabled: (index, enabled) => ({ index, enabled }),
|
||||
setConnectorSchedulingInterval: (index, newSchedule) => ({ index, newSchedule }),
|
||||
toggleIncludeSitemapsInRobotsTxt: (index) => ({ index }),
|
||||
}),
|
||||
reducers: () => ({
|
||||
crawlerConfigActiveTab: [
|
||||
0,
|
||||
{
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onSelectCrawlerConfigActiveTab: (_, { crawlerConfigActiveTab }) => crawlerConfigActiveTab,
|
||||
onDeleteCustomCrawler: () => 0,
|
||||
},
|
||||
],
|
||||
crawlerConfigurations: [
|
||||
[defaulCrawlerConfiguration],
|
||||
{
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onReceiveCrawlerCustomScheduling: (_, { crawlerConfigurations }) => {
|
||||
// Handle case with no custom scheduling returned from server
|
||||
return crawlerConfigurations.length > 0 // @ts-expect-error upgrade typescript v5.1.6
|
||||
? crawlerConfigurations.map((configuration) => ({
|
||||
...defaulCrawlerConfiguration,
|
||||
...configuration,
|
||||
}))
|
||||
: [defaulCrawlerConfiguration];
|
||||
},
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onAddCustomCrawler: (state, { index }) => {
|
||||
let newScheduleKey = `crawler_${index}`;
|
||||
let suffix = index;
|
||||
|
||||
// Check if the newScheduleKey already exists in the array
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
const existingKeys = state.map((crawler) => crawler.scheduleKey);
|
||||
if (existingKeys.includes(newScheduleKey)) {
|
||||
// Handle the case where a duplicate scheduleKey is found
|
||||
while (existingKeys.includes(`${newScheduleKey}_${suffix}`)) {
|
||||
suffix++;
|
||||
}
|
||||
newScheduleKey = `${newScheduleKey}_${suffix}`;
|
||||
}
|
||||
return [
|
||||
...state,
|
||||
{
|
||||
...defaulCrawlerConfiguration,
|
||||
name: `Crawler ${suffix}`,
|
||||
scheduleKey: newScheduleKey,
|
||||
},
|
||||
];
|
||||
},
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onDeleteCustomCrawler: (state, { index }) => {
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
return state.filter((_, i) => i !== index);
|
||||
},
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onSelectMaxCrawlDepth: (state, { index, maxCrawlDepth }) => {
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
return state.map((crawler, i) => (i === index ? { ...crawler, maxCrawlDepth } : crawler));
|
||||
},
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onSelectCustomEntryPointUrls: (state, { index, entryPointUrls }) => {
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
return state.map((crawler, i) =>
|
||||
i === index ? { ...crawler, customEntryPointUrls: entryPointUrls } : crawler
|
||||
);
|
||||
},
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onSelectCustomSitemapUrls: (state, { index, sitemapUrls }) => {
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
return state.map((crawler, i) =>
|
||||
i === index ? { ...crawler, customSitemapUrls: sitemapUrls } : crawler
|
||||
);
|
||||
},
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
toggleIncludeSitemapsInRobotsTxt: (state, { index }) => {
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
return state.map((crawler, i) =>
|
||||
i === index
|
||||
? { ...crawler, includeSitemapsInRobotsTxt: !crawler.includeSitemapsInRobotsTxt }
|
||||
: crawler
|
||||
);
|
||||
},
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onSelectDomainUrls: (state, { index, domainUrls }) => {
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
return state.map((crawler, i) =>
|
||||
i === index
|
||||
? {
|
||||
...crawler,
|
||||
selectedDomainUrls: domainUrls,
|
||||
selectedEntryPointUrls: filterSeedUrlsByDomainUrls(
|
||||
crawler.selectedEntryPointUrls,
|
||||
domainUrls
|
||||
),
|
||||
selectedSitemapUrls: filterSeedUrlsByDomainUrls(
|
||||
crawler.selectedSitemapUrls,
|
||||
domainUrls
|
||||
),
|
||||
}
|
||||
: crawler
|
||||
);
|
||||
},
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onSelectEntryPointUrls: (state, { index, entryPointUrls }) => {
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
return state.map((crawler, i) =>
|
||||
i === index ? { ...crawler, selectedEntryPointUrls: entryPointUrls } : crawler
|
||||
);
|
||||
},
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onSelectSitemapUrls: (state, { index, sitemapUrls }) => {
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
return state.map((crawler, i) =>
|
||||
i === index ? { ...crawler, selectedSitemapUrls: sitemapUrls } : crawler
|
||||
);
|
||||
},
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onSetConnectorSchedulingEnabled: (state, { index, enabled }) => {
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
return state.map((crawler, i) => (i === index ? { ...crawler, enabled } : crawler));
|
||||
},
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
setConnectorSchedulingInterval: (state, { index, newSchedule }) => {
|
||||
const { interval } = newSchedule;
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
return state.map((crawler, i) => (i === index ? { ...crawler, interval } : crawler));
|
||||
},
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onRecieveDomainConfigData: (state, { domainConfigs }) => {
|
||||
const domainConfigsMap = domainConfigsToDomainConfigMap(domainConfigs);
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
return state.map((crawler) => {
|
||||
const entryPointUrls = crawler.selectedDomainUrls.flatMap(
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
(selectedDomainUrl) => domainConfigsMap[selectedDomainUrl].seedUrls
|
||||
);
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
const selectedEntryPointUrls = crawler.customEntryPointUrls.filter((entryPointUrl) =>
|
||||
entryPointUrls.includes(entryPointUrl)
|
||||
);
|
||||
const customEntryPointUrls = crawler.customEntryPointUrls.filter(
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
(entryPointUrl) => !entryPointUrls.includes(entryPointUrl)
|
||||
);
|
||||
const sitemapUrls = crawler.selectedDomainUrls.flatMap(
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
(selectedDomainUrl) => domainConfigsMap[selectedDomainUrl].sitemapUrls
|
||||
);
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
const selectedSitemapUrls = crawler.customSitemapUrls.filter((sitemapUrl) =>
|
||||
sitemapUrls.includes(sitemapUrl)
|
||||
);
|
||||
const customSitemapUrls = crawler.customSitemapUrls.filter(
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
(sitemapUrl) => !sitemapUrls.includes(sitemapUrl)
|
||||
);
|
||||
|
||||
return {
|
||||
...crawler,
|
||||
entryPointUrls,
|
||||
selectedEntryPointUrls,
|
||||
customEntryPointUrls,
|
||||
sitemapUrls,
|
||||
selectedSitemapUrls,
|
||||
customSitemapUrls,
|
||||
};
|
||||
});
|
||||
},
|
||||
},
|
||||
],
|
||||
}),
|
||||
selectors: () => ({
|
||||
crawlerConfigurationsWithDomainData: [
|
||||
(selectors) => [selectors.domainConfigMap, selectors.crawlerConfigurations],
|
||||
(
|
||||
domainConfigMap: { [key: string]: DomainConfig },
|
||||
crawlerConfigs: CrawlerCustomSchedule[]
|
||||
): CrawlerCustomSchedule[] =>
|
||||
crawlerConfigs.map((crawlerConfig) => {
|
||||
const entryPointUrls = crawlerConfig.selectedDomainUrls.flatMap(
|
||||
(selectedDomainUrl) => domainConfigMap[selectedDomainUrl].seedUrls
|
||||
);
|
||||
const sitemapUrls = crawlerConfig.selectedDomainUrls.flatMap(
|
||||
(selectedDomainUrl) => domainConfigMap[selectedDomainUrl].sitemapUrls
|
||||
);
|
||||
|
||||
return {
|
||||
...crawlerConfig,
|
||||
entryPointUrls,
|
||||
sitemapUrls,
|
||||
};
|
||||
}),
|
||||
],
|
||||
crawlerCustomSchedulingIsValid: [
|
||||
(selectors) => [selectors.crawlerConfigurations],
|
||||
(crawlerConfigs: CrawlerCustomSchedule[]): boolean =>
|
||||
crawlerConfigs.every((config) => config.selectedDomainUrls.length > 0),
|
||||
],
|
||||
}),
|
||||
listeners: ({ actions, values }) => ({
|
||||
fetchCustomScheduling: async () => {
|
||||
const { http } = HttpLogic.values;
|
||||
const { indexName } = IndexNameLogic.values;
|
||||
|
||||
try {
|
||||
const customSchedulingResponse = await http.get<CrawlerCustomSchedulesServer>(
|
||||
`/internal/enterprise_search/indices/${indexName}/crawler/custom_scheduling`
|
||||
);
|
||||
const customScheduling = crawlerCustomSchedulingServerToClient(customSchedulingResponse);
|
||||
actions.onReceiveCrawlerCustomScheduling(customScheduling);
|
||||
} catch (e) {
|
||||
flashAPIErrors(e);
|
||||
}
|
||||
},
|
||||
postCustomScheduling: async () => {
|
||||
const { indexName } = IndexNameLogic.values;
|
||||
const { crawlerConfigurations } = values;
|
||||
const customScheduling = crawlerCustomSchedulingClientToServer(crawlerConfigurations);
|
||||
try {
|
||||
actions.makePostCustomSchedulingRequest({ indexName, customScheduling });
|
||||
} catch (e) {
|
||||
flashAPIErrors(e);
|
||||
}
|
||||
},
|
||||
}),
|
||||
});
|
|
@ -1,58 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { useValues, useActions } from 'kea';
|
||||
|
||||
import { EuiTab, EuiTabs, EuiSpacer, EuiIcon } from '@elastic/eui';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { CrawlCustomSettingsFlyoutMultiCrawlLogic } from './crawl_custom_settings_flyout_multi_crawl_logic';
|
||||
|
||||
const CRAWLER_TAB_PREFIX = i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.multipleCrawlTabPrefix',
|
||||
{
|
||||
defaultMessage: 'Crawl',
|
||||
}
|
||||
);
|
||||
|
||||
export const CrawlCustomSettingsFlyoutMultipleCrawlTabs: React.FC = () => {
|
||||
const { crawlerConfigActiveTab, crawlerConfigurations } = useValues(
|
||||
CrawlCustomSettingsFlyoutMultiCrawlLogic
|
||||
);
|
||||
const { onAddCustomCrawler, onSelectCrawlerConfigActiveTab } = useActions(
|
||||
CrawlCustomSettingsFlyoutMultiCrawlLogic
|
||||
);
|
||||
|
||||
const crawlerTabData = crawlerConfigurations.map((_, index) => ({
|
||||
key: `crawler_${index}`,
|
||||
index,
|
||||
label: `${CRAWLER_TAB_PREFIX} ${index + 1}`,
|
||||
}));
|
||||
|
||||
return (
|
||||
<>
|
||||
<EuiTabs>
|
||||
{crawlerTabData.map((tab) => (
|
||||
<EuiTab
|
||||
key={tab.key}
|
||||
isSelected={crawlerConfigActiveTab === tab.index}
|
||||
onClick={() => onSelectCrawlerConfigActiveTab(tab.index)}
|
||||
>
|
||||
{tab.label}
|
||||
</EuiTab>
|
||||
))}
|
||||
<EuiTab onClick={() => onAddCustomCrawler(crawlerConfigurations.length)}>
|
||||
<EuiIcon type="plus" />
|
||||
</EuiTab>
|
||||
</EuiTabs>
|
||||
<EuiSpacer />
|
||||
</>
|
||||
);
|
||||
};
|
|
@ -1,81 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { useValues, useActions } from 'kea';
|
||||
|
||||
import { EuiSpacer } from '@elastic/eui';
|
||||
|
||||
import { CrawlCustomSettingsFlyoutCrawlDepthPanel } from './crawl_custom_settings_flyout_crawl_depth_panel';
|
||||
import { MultiCrawlScheduler } from './crawl_custom_settings_flyout_crawl_scheduler';
|
||||
import { CrawlCustomSettingsFlyoutDomainConfigLogic } from './crawl_custom_settings_flyout_domain_logic';
|
||||
import { CrawlCustomSettingsFlyoutDomainsPanel } from './crawl_custom_settings_flyout_domains_panel';
|
||||
import { CrawlCustomSettingsFlyoutMultiCrawlLogic } from './crawl_custom_settings_flyout_multi_crawl_logic';
|
||||
import { CrawlCustomSettingsFlyoutSeedUrlsPanel } from './crawl_custom_settings_flyout_seed_urls_panel';
|
||||
|
||||
export const CrawlCustomSettingsFlyoutMultiCrawlScheduling: React.FC = () => {
|
||||
const { domainUrls } = useValues(CrawlCustomSettingsFlyoutDomainConfigLogic);
|
||||
|
||||
const {
|
||||
crawlerConfigurationsWithDomainData,
|
||||
crawlerConfigActiveTab,
|
||||
index: crawlerIndex,
|
||||
} = useValues(CrawlCustomSettingsFlyoutMultiCrawlLogic);
|
||||
|
||||
const {
|
||||
onSelectMaxCrawlDepth,
|
||||
onSelectDomainUrls,
|
||||
onSelectCustomEntryPointUrls,
|
||||
onSelectCustomSitemapUrls,
|
||||
onSelectEntryPointUrls,
|
||||
onSelectSitemapUrls,
|
||||
toggleIncludeSitemapsInRobotsTxt,
|
||||
setConnectorSchedulingInterval,
|
||||
onSetConnectorSchedulingEnabled,
|
||||
} = useActions(CrawlCustomSettingsFlyoutMultiCrawlLogic);
|
||||
|
||||
return (
|
||||
<>
|
||||
{crawlerConfigurationsWithDomainData.map((config, index) => {
|
||||
if (index === crawlerConfigActiveTab) {
|
||||
return (
|
||||
<React.Fragment key={index}>
|
||||
<CrawlCustomSettingsFlyoutCrawlDepthPanel
|
||||
maxCrawlDepth={config.maxCrawlDepth}
|
||||
onSelectMaxCrawlDepth={(e) => onSelectMaxCrawlDepth(index, e)}
|
||||
/>
|
||||
<EuiSpacer />
|
||||
<CrawlCustomSettingsFlyoutDomainsPanel
|
||||
selectedDomainUrls={config.selectedDomainUrls}
|
||||
domainUrls={domainUrls}
|
||||
onSelectDomainUrls={(e) => onSelectDomainUrls(index, e)}
|
||||
/>
|
||||
<EuiSpacer />
|
||||
<CrawlCustomSettingsFlyoutSeedUrlsPanel
|
||||
scheduleConfig={config}
|
||||
onSelectCustomEntryPointUrls={(e) => onSelectCustomEntryPointUrls(index, e)}
|
||||
onSelectCustomSitemapUrls={(e) => onSelectCustomSitemapUrls(index, e)}
|
||||
onSelectEntryPointUrls={(e) => onSelectEntryPointUrls(index, e)}
|
||||
onSelectSitemapUrls={(e) => onSelectSitemapUrls(index, e)}
|
||||
toggleIncludeSitemapsInRobotsTxt={() => toggleIncludeSitemapsInRobotsTxt(index)}
|
||||
/>
|
||||
<EuiSpacer />
|
||||
<MultiCrawlScheduler
|
||||
index={crawlerIndex}
|
||||
interval={config.interval}
|
||||
schedulingEnabled={config.enabled}
|
||||
setConnectorSchedulingInterval={(e) => setConnectorSchedulingInterval(index, e)}
|
||||
onSetConnectorSchedulingEnabled={(e) => onSetConnectorSchedulingEnabled(index, e)}
|
||||
/>
|
||||
</React.Fragment>
|
||||
);
|
||||
}
|
||||
})}
|
||||
</>
|
||||
);
|
||||
};
|
|
@ -1,44 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import {
|
||||
CrawlerCustomScheduleClient,
|
||||
CrawlerCustomScheduleMappingClient,
|
||||
} from '../../../../../../../common/types/crawler';
|
||||
import { createApiLogic } from '../../../../../shared/api_logic/create_api_logic';
|
||||
import { HttpLogic } from '../../../../../shared/http';
|
||||
|
||||
export interface PostCustomSchedulingArgs {
|
||||
indexName: string;
|
||||
customScheduling: CrawlerCustomScheduleMappingClient;
|
||||
}
|
||||
|
||||
export const postCrawlerCustomScheduling = async ({
|
||||
indexName,
|
||||
customScheduling,
|
||||
}: PostCustomSchedulingArgs) => {
|
||||
const route = `/internal/enterprise_search/indices/${indexName}/crawler/custom_scheduling`;
|
||||
await HttpLogic.values.http.post<CrawlerCustomScheduleClient>(route, {
|
||||
body: JSON.stringify(Object.fromEntries(customScheduling)),
|
||||
});
|
||||
};
|
||||
|
||||
export const PostCustomSchedulingApiLogic = createApiLogic(
|
||||
['post_crawler_custom_scheduling_api_logic'],
|
||||
postCrawlerCustomScheduling,
|
||||
{
|
||||
showSuccessFlashFn: () =>
|
||||
i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.postCrawlerCustomSchedulingSuccess.message',
|
||||
{
|
||||
defaultMessage: 'Successfully saved crawler custom scheduling.',
|
||||
}
|
||||
),
|
||||
}
|
||||
);
|
|
@ -1,190 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { setMockActions, setMockValues } from '../../../../../__mocks__/kea_logic';
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { shallow, ShallowWrapper } from 'enzyme';
|
||||
|
||||
import { EuiAccordion, EuiTabbedContent, EuiNotificationBadge, EuiCheckbox } from '@elastic/eui';
|
||||
|
||||
import { SimplifiedSelectable } from '../../../../../shared/simplified_selectable/simplified_selectable';
|
||||
|
||||
import { UrlComboBox } from '../../../../../shared/url_combo_box/url_combo_box';
|
||||
|
||||
import { CrawlCustomSettingsFlyoutSeedUrlsPanel } from './crawl_custom_settings_flyout_seed_urls_panel';
|
||||
|
||||
const MOCK_VALUES = {
|
||||
// CrawlCustomSettingsFlyoutLogic
|
||||
customEntryPointUrls: ['https://www.elastic.co/custom-entry-point'],
|
||||
customSitemapUrls: [
|
||||
'https://www.elastic.co/custom-sitemap1.xml',
|
||||
'https://swiftype.com/custom-sitemap2.xml',
|
||||
],
|
||||
entryPointUrls: ['https://www.elastic.co/guide', 'https://swiftype.com/documentation'],
|
||||
selectedDomainUrls: ['https://www.elastic.co', 'https://swiftype.com'],
|
||||
selectedEntryPointUrls: ['https://swiftype.com/documentation'],
|
||||
selectedSitemapUrls: ['https://www.elastic.co/sitemap1.xml', 'https://swiftype.com/sitemap2.xml'],
|
||||
sitemapUrls: [
|
||||
'https://www.elastic.co/sitemap1.xml',
|
||||
'https://www.elastic.co/sitemap2.xml',
|
||||
'https://swiftype.com/sitemap1.xml',
|
||||
'https://swiftype.com/sitemap2.xml',
|
||||
],
|
||||
includeSitemapsInRobotsTxt: true,
|
||||
};
|
||||
|
||||
const MOCK_ACTIONS = {
|
||||
// CrawlCustomSettingsFlyoutLogic
|
||||
onSelectCustomEntryPointUrls: jest.fn(),
|
||||
onSelectCustomSitemapUrls: jest.fn(),
|
||||
onSelectEntryPointUrls: jest.fn(),
|
||||
onSelectSitemapUrls: jest.fn(),
|
||||
toggleIncludeSitemapsInRobotsTxt: jest.fn(),
|
||||
};
|
||||
|
||||
const getAccordionBadge = (wrapper: ShallowWrapper) => {
|
||||
const accordionWrapper = wrapper.find(EuiAccordion);
|
||||
const extraActionWrapper = shallow(<div>{accordionWrapper.prop('extraAction')}</div>);
|
||||
return extraActionWrapper.find(EuiNotificationBadge);
|
||||
};
|
||||
|
||||
describe('CrawlCustomSettingsFlyoutSeedUrlsPanel', () => {
|
||||
let wrapper: ShallowWrapper;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
setMockValues(MOCK_VALUES);
|
||||
setMockActions(MOCK_ACTIONS);
|
||||
|
||||
wrapper = shallow(
|
||||
<CrawlCustomSettingsFlyoutSeedUrlsPanel
|
||||
scheduleConfig={{
|
||||
customEntryPointUrls: MOCK_VALUES.customEntryPointUrls,
|
||||
customSitemapUrls: MOCK_VALUES.customSitemapUrls,
|
||||
includeSitemapsInRobotsTxt: MOCK_VALUES.includeSitemapsInRobotsTxt,
|
||||
selectedDomainUrls: MOCK_VALUES.selectedDomainUrls,
|
||||
selectedEntryPointUrls: MOCK_VALUES.selectedEntryPointUrls,
|
||||
selectedSitemapUrls: MOCK_VALUES.selectedSitemapUrls,
|
||||
entryPointUrls: MOCK_VALUES.entryPointUrls,
|
||||
sitemapUrls: MOCK_VALUES.sitemapUrls,
|
||||
}}
|
||||
onSelectCustomEntryPointUrls={MOCK_ACTIONS.onSelectCustomEntryPointUrls}
|
||||
onSelectCustomSitemapUrls={MOCK_ACTIONS.onSelectCustomSitemapUrls}
|
||||
onSelectEntryPointUrls={MOCK_ACTIONS.onSelectEntryPointUrls}
|
||||
onSelectSitemapUrls={MOCK_ACTIONS.onSelectSitemapUrls}
|
||||
toggleIncludeSitemapsInRobotsTxt={MOCK_ACTIONS.toggleIncludeSitemapsInRobotsTxt}
|
||||
/>
|
||||
);
|
||||
});
|
||||
|
||||
describe('sitemaps tab', () => {
|
||||
let sitemapTab: ShallowWrapper;
|
||||
|
||||
beforeEach(() => {
|
||||
const tabs = wrapper.find(EuiTabbedContent).prop('tabs');
|
||||
sitemapTab = shallow(<div>{tabs[0].content}</div>);
|
||||
});
|
||||
|
||||
it('allows the user to select sitemap urls', () => {
|
||||
expect(sitemapTab.find(SimplifiedSelectable).props()).toEqual(
|
||||
expect.objectContaining({
|
||||
options: MOCK_VALUES.sitemapUrls,
|
||||
selectedOptions: MOCK_VALUES.selectedSitemapUrls,
|
||||
onChange: MOCK_ACTIONS.onSelectSitemapUrls,
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('allows the user to toggle whether to include robots.txt sitemaps', () => {
|
||||
expect(sitemapTab.find(EuiCheckbox).props()).toEqual(
|
||||
expect.objectContaining({
|
||||
onChange: MOCK_ACTIONS.toggleIncludeSitemapsInRobotsTxt,
|
||||
checked: true,
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('allows the user to add custom sitemap urls', () => {
|
||||
expect(sitemapTab.find(UrlComboBox).props()).toEqual(
|
||||
expect.objectContaining({
|
||||
selectedUrls: MOCK_VALUES.customSitemapUrls,
|
||||
onChange: MOCK_ACTIONS.onSelectCustomSitemapUrls,
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('entry points tab', () => {
|
||||
let entryPointsTab: ShallowWrapper;
|
||||
|
||||
beforeEach(() => {
|
||||
const tabs = wrapper.find(EuiTabbedContent).prop('tabs');
|
||||
entryPointsTab = shallow(<div>{tabs[1].content}</div>);
|
||||
});
|
||||
|
||||
it('allows the user to select entry point urls', () => {
|
||||
expect(entryPointsTab.find(SimplifiedSelectable).props()).toEqual(
|
||||
expect.objectContaining({
|
||||
options: MOCK_VALUES.entryPointUrls,
|
||||
selectedOptions: MOCK_VALUES.selectedEntryPointUrls,
|
||||
onChange: MOCK_ACTIONS.onSelectEntryPointUrls,
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('allows the user to add custom entry point urls', () => {
|
||||
expect(entryPointsTab.find(UrlComboBox).props()).toEqual(
|
||||
expect.objectContaining({
|
||||
selectedUrls: MOCK_VALUES.customEntryPointUrls,
|
||||
onChange: MOCK_ACTIONS.onSelectCustomEntryPointUrls,
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('indicates how many seed urls are selected', () => {
|
||||
let badge = getAccordionBadge(wrapper);
|
||||
|
||||
expect(badge.render().text()).toContain('6');
|
||||
expect(badge.prop('color')).toEqual('accent');
|
||||
|
||||
wrapper.setProps({
|
||||
scheduleConfig: {
|
||||
...MOCK_VALUES,
|
||||
customEntryPointUrls: [],
|
||||
customSitemapUrls: [],
|
||||
selectedEntryPointUrls: [],
|
||||
selectedSitemapUrls: [],
|
||||
},
|
||||
});
|
||||
|
||||
badge = getAccordionBadge(wrapper);
|
||||
|
||||
expect(badge.render().text()).toContain('0');
|
||||
expect(badge.prop('color')).toEqual('subdued');
|
||||
});
|
||||
|
||||
it('shows empty messages when the user has not selected any domains', () => {
|
||||
wrapper.setProps({
|
||||
scheduleConfig: {
|
||||
...MOCK_VALUES,
|
||||
selectedDomainUrls: [],
|
||||
},
|
||||
});
|
||||
|
||||
// rerender(wrapper);
|
||||
|
||||
const tabs = wrapper.find(EuiTabbedContent).prop('tabs');
|
||||
const sitemapsTab = shallow(<div>{tabs[0].content}</div>);
|
||||
const entryPointsTab = shallow(<div>{tabs[1].content}</div>);
|
||||
|
||||
expect(sitemapsTab.find(SimplifiedSelectable).prop('emptyMessage')).toBeDefined();
|
||||
expect(entryPointsTab.find(SimplifiedSelectable).prop('emptyMessage')).toBeDefined();
|
||||
});
|
||||
});
|
|
@ -1,265 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { useValues, useActions } from 'kea';
|
||||
|
||||
import {
|
||||
EuiAccordion,
|
||||
EuiCheckbox,
|
||||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiHorizontalRule,
|
||||
EuiIcon,
|
||||
EuiNotificationBadge,
|
||||
EuiPanel,
|
||||
EuiSpacer,
|
||||
EuiTabbedContent,
|
||||
EuiTitle,
|
||||
useGeneratedHtmlId,
|
||||
} from '@elastic/eui';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import { FormattedMessage } from '@kbn/i18n-react';
|
||||
|
||||
import { SimplifiedSelectable } from '../../../../../shared/simplified_selectable/simplified_selectable';
|
||||
import { UrlComboBox } from '../../../../../shared/url_combo_box/url_combo_box';
|
||||
import { CrawlerCustomSchedule } from '../../../../api/crawler/types';
|
||||
|
||||
import { CrawlCustomSettingsFlyoutLogic } from './crawl_custom_settings_flyout_logic';
|
||||
|
||||
type CrawlerCustomScheduleConfig = Pick<
|
||||
CrawlerCustomSchedule,
|
||||
| 'customEntryPointUrls'
|
||||
| 'customSitemapUrls'
|
||||
| 'includeSitemapsInRobotsTxt'
|
||||
| 'selectedDomainUrls'
|
||||
| 'selectedEntryPointUrls'
|
||||
| 'selectedSitemapUrls'
|
||||
| 'entryPointUrls'
|
||||
| 'sitemapUrls'
|
||||
>;
|
||||
|
||||
interface CrawlCustomSettingsFlyoutSeedUrlsPanelProps {
|
||||
scheduleConfig: CrawlerCustomScheduleConfig;
|
||||
onSelectCustomEntryPointUrls: (urls: string[]) => void;
|
||||
onSelectCustomSitemapUrls: (urls: string[]) => void;
|
||||
onSelectEntryPointUrls: (urls: string[]) => void;
|
||||
onSelectSitemapUrls: (urls: string[]) => void;
|
||||
toggleIncludeSitemapsInRobotsTxt: () => void;
|
||||
}
|
||||
|
||||
export const CrawlCustomSettingsFlyoutSeedUrlsPanelWithLogicProps: React.FC = () => {
|
||||
const {
|
||||
customEntryPointUrls,
|
||||
customSitemapUrls,
|
||||
entryPointUrls,
|
||||
includeSitemapsInRobotsTxt,
|
||||
selectedDomainUrls,
|
||||
selectedEntryPointUrls,
|
||||
selectedSitemapUrls,
|
||||
sitemapUrls,
|
||||
} = useValues(CrawlCustomSettingsFlyoutLogic);
|
||||
const {
|
||||
onSelectCustomEntryPointUrls,
|
||||
onSelectCustomSitemapUrls,
|
||||
onSelectEntryPointUrls,
|
||||
onSelectSitemapUrls,
|
||||
toggleIncludeSitemapsInRobotsTxt,
|
||||
} = useActions(CrawlCustomSettingsFlyoutLogic);
|
||||
|
||||
const scheduleConfig = {
|
||||
customEntryPointUrls,
|
||||
customSitemapUrls,
|
||||
includeSitemapsInRobotsTxt,
|
||||
selectedDomainUrls,
|
||||
selectedEntryPointUrls,
|
||||
selectedSitemapUrls,
|
||||
entryPointUrls,
|
||||
sitemapUrls,
|
||||
};
|
||||
|
||||
return (
|
||||
<CrawlCustomSettingsFlyoutSeedUrlsPanel
|
||||
scheduleConfig={scheduleConfig}
|
||||
onSelectCustomEntryPointUrls={onSelectCustomEntryPointUrls}
|
||||
onSelectCustomSitemapUrls={onSelectCustomSitemapUrls}
|
||||
onSelectEntryPointUrls={onSelectEntryPointUrls}
|
||||
onSelectSitemapUrls={onSelectSitemapUrls}
|
||||
toggleIncludeSitemapsInRobotsTxt={toggleIncludeSitemapsInRobotsTxt}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
export const CrawlCustomSettingsFlyoutSeedUrlsPanel: React.FC<
|
||||
CrawlCustomSettingsFlyoutSeedUrlsPanelProps
|
||||
> = ({
|
||||
scheduleConfig,
|
||||
onSelectCustomEntryPointUrls,
|
||||
onSelectCustomSitemapUrls,
|
||||
onSelectEntryPointUrls,
|
||||
onSelectSitemapUrls,
|
||||
toggleIncludeSitemapsInRobotsTxt,
|
||||
}) => {
|
||||
const totalSeedUrls =
|
||||
scheduleConfig.customEntryPointUrls.length +
|
||||
scheduleConfig.customSitemapUrls.length +
|
||||
scheduleConfig.selectedEntryPointUrls.length +
|
||||
scheduleConfig.selectedSitemapUrls.length;
|
||||
|
||||
return (
|
||||
<EuiPanel hasBorder>
|
||||
<EuiAccordion
|
||||
id={useGeneratedHtmlId({ prefix: 'seedUrlAccordion' })}
|
||||
initialIsOpen
|
||||
buttonContent={
|
||||
<EuiFlexGroup direction="row" responsive={false} gutterSize="s" alignItems="center">
|
||||
<EuiFlexItem grow={false}>
|
||||
<EuiIcon type="globe" />
|
||||
</EuiFlexItem>
|
||||
<EuiFlexItem>
|
||||
<EuiTitle size="xs">
|
||||
<h3>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.seedUrlsAccordionButtonLabel',
|
||||
{
|
||||
defaultMessage: 'Seed URLs',
|
||||
}
|
||||
)}
|
||||
</h3>
|
||||
</EuiTitle>
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
}
|
||||
extraAction={
|
||||
<EuiFlexGroup alignItems="center" gutterSize="m">
|
||||
<EuiNotificationBadge size="m" color={totalSeedUrls > 0 ? 'accent' : 'subdued'}>
|
||||
{totalSeedUrls}
|
||||
</EuiNotificationBadge>
|
||||
<EuiFlexItem grow={false}>
|
||||
{i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.selectedDescriptor',
|
||||
{
|
||||
defaultMessage: 'selected',
|
||||
}
|
||||
)}
|
||||
</EuiFlexItem>
|
||||
</EuiFlexGroup>
|
||||
}
|
||||
>
|
||||
<EuiTabbedContent
|
||||
expand
|
||||
tabs={[
|
||||
{
|
||||
id: useGeneratedHtmlId({ prefix: 'sitemapsTab' }),
|
||||
name: i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.sitemapsTabLabel',
|
||||
{
|
||||
defaultMessage: 'Sitemaps',
|
||||
}
|
||||
),
|
||||
content: (
|
||||
<>
|
||||
<EuiSpacer size="s" />
|
||||
<EuiPanel color="subdued" borderRadius="none" hasShadow={false} paddingSize="s">
|
||||
<EuiCheckbox
|
||||
data-telemetry-id="entSearchContent-crawler-customCrawlSettings-includeRobotsSitemaps"
|
||||
id={useGeneratedHtmlId({ prefix: 'includeRobotsCheckbox' })}
|
||||
label={
|
||||
<FormattedMessage
|
||||
id="xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.includeSitemapsCheckboxLabel"
|
||||
defaultMessage="Include sitemaps discovered in {robotsDotTxt}"
|
||||
values={{
|
||||
robotsDotTxt: <strong>robots.txt</strong>, // this is a technical term and shouldn't be translated
|
||||
}}
|
||||
/>
|
||||
}
|
||||
checked={scheduleConfig.includeSitemapsInRobotsTxt}
|
||||
onChange={toggleIncludeSitemapsInRobotsTxt}
|
||||
/>
|
||||
</EuiPanel>
|
||||
<SimplifiedSelectable
|
||||
data-telemetry-id="entSearchContent-crawler-customCrawlSettings-selectDomain"
|
||||
options={scheduleConfig.sitemapUrls}
|
||||
selectedOptions={scheduleConfig.selectedSitemapUrls}
|
||||
onChange={onSelectSitemapUrls}
|
||||
emptyMessage={
|
||||
scheduleConfig.selectedDomainUrls.length === 0
|
||||
? i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.emptyDomainsMessage',
|
||||
{
|
||||
defaultMessage: 'Please select a domain.',
|
||||
}
|
||||
)
|
||||
: undefined
|
||||
}
|
||||
/>
|
||||
<EuiHorizontalRule />
|
||||
<UrlComboBox
|
||||
data-telemetry-id="entSearchContent-crawler-customCrawlSettings-customSitemapUrls"
|
||||
label={i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.customSitemapUrlsTextboxLabel',
|
||||
{
|
||||
defaultMessage: 'Custom sitemap URLs',
|
||||
}
|
||||
)}
|
||||
onChange={onSelectCustomSitemapUrls}
|
||||
selectedUrls={scheduleConfig.customSitemapUrls}
|
||||
/>
|
||||
</>
|
||||
),
|
||||
},
|
||||
{
|
||||
id: useGeneratedHtmlId({ prefix: 'entryPointsTab' }),
|
||||
name: i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.entryPointsTabLabel',
|
||||
{
|
||||
defaultMessage: 'Entry points',
|
||||
}
|
||||
),
|
||||
content: (
|
||||
<>
|
||||
<EuiSpacer size="s" />
|
||||
<SimplifiedSelectable
|
||||
data-telemetry-id="entSearchContent-crawler-customCrawlSettings-selectDomain"
|
||||
options={scheduleConfig.entryPointUrls}
|
||||
selectedOptions={scheduleConfig.selectedEntryPointUrls}
|
||||
onChange={onSelectEntryPointUrls}
|
||||
emptyMessage={
|
||||
scheduleConfig.selectedDomainUrls.length === 0
|
||||
? i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.emptyDomainsMessage',
|
||||
{
|
||||
defaultMessage: 'Please select a domain.',
|
||||
}
|
||||
)
|
||||
: undefined
|
||||
}
|
||||
/>
|
||||
<EuiHorizontalRule />
|
||||
<UrlComboBox
|
||||
data-telemetry-id="entSearchContent-crawler-customCrawlSettings-customEntryPointUrls"
|
||||
label={i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.customEntryPointUrlsTextboxLabel',
|
||||
{
|
||||
defaultMessage: 'Custom entry point URLs',
|
||||
}
|
||||
)}
|
||||
onChange={onSelectCustomEntryPointUrls}
|
||||
selectedUrls={scheduleConfig.customEntryPointUrls}
|
||||
/>
|
||||
</>
|
||||
),
|
||||
},
|
||||
]}
|
||||
autoFocus="selected"
|
||||
/>
|
||||
</EuiAccordion>
|
||||
</EuiPanel>
|
||||
);
|
||||
};
|
|
@ -1,158 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { LogicMounter, mockHttpValues } from '../../../../../__mocks__/kea_logic';
|
||||
import '../../_mocks_/index_name_logic.mock';
|
||||
|
||||
import { nextTick } from '@kbn/test-jest-helpers';
|
||||
|
||||
import { itShowsServerErrorAsFlashMessage } from '../../../../../test_helpers';
|
||||
import {
|
||||
CrawlRequestWithDetailsFromServer,
|
||||
CrawlerStatus,
|
||||
CrawlType,
|
||||
} from '../../../../api/crawler/types';
|
||||
import { crawlRequestWithDetailsServerToClient } from '../../../../api/crawler/utils';
|
||||
|
||||
import { CrawlDetailLogic, CrawlDetailValues } from './crawl_detail_logic';
|
||||
|
||||
const DEFAULT_VALUES: CrawlDetailValues = {
|
||||
dataLoading: true,
|
||||
flyoutClosed: true,
|
||||
crawlRequest: null,
|
||||
crawlRequestFromServer: null,
|
||||
selectedTab: 'preview',
|
||||
};
|
||||
|
||||
const crawlRequestResponse: CrawlRequestWithDetailsFromServer = {
|
||||
id: '12345',
|
||||
status: CrawlerStatus.Pending,
|
||||
created_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
|
||||
began_at: null,
|
||||
completed_at: null,
|
||||
type: CrawlType.Full,
|
||||
crawl_config: {
|
||||
domain_allowlist: [],
|
||||
seed_urls: [],
|
||||
sitemap_urls: [],
|
||||
max_crawl_depth: 10,
|
||||
},
|
||||
stats: {
|
||||
status: {
|
||||
urls_allowed: 4,
|
||||
pages_visited: 4,
|
||||
crawl_duration_msec: 100,
|
||||
avg_response_time_msec: 10,
|
||||
status_codes: {
|
||||
200: 4,
|
||||
404: 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const clientCrawlRequest = crawlRequestWithDetailsServerToClient(crawlRequestResponse);
|
||||
|
||||
describe('CrawlDetailLogic', () => {
|
||||
const { mount } = new LogicMounter(CrawlDetailLogic);
|
||||
const { http } = mockHttpValues;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('has expected default values', () => {
|
||||
mount();
|
||||
expect(CrawlDetailLogic.values).toEqual(DEFAULT_VALUES);
|
||||
});
|
||||
|
||||
describe('actions', () => {
|
||||
describe('closeFlyout', () => {
|
||||
it('closes the flyout', () => {
|
||||
mount({ flyoutClosed: false });
|
||||
|
||||
CrawlDetailLogic.actions.closeFlyout();
|
||||
|
||||
expect(CrawlDetailLogic.values).toEqual({
|
||||
...DEFAULT_VALUES,
|
||||
flyoutClosed: true,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('onRecieveCrawlRequest', () => {
|
||||
it('saves the crawl request and sets data loading to false', () => {
|
||||
mount({
|
||||
dataLoading: true,
|
||||
request: null,
|
||||
});
|
||||
|
||||
CrawlDetailLogic.actions.onRecieveCrawlRequest(crawlRequestResponse);
|
||||
|
||||
expect(CrawlDetailLogic.values).toEqual({
|
||||
...DEFAULT_VALUES,
|
||||
dataLoading: false,
|
||||
crawlRequestFromServer: crawlRequestResponse,
|
||||
crawlRequest: clientCrawlRequest,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('setSelectedTab', () => {
|
||||
it('sets the select tab', () => {
|
||||
mount({
|
||||
selectedTab: 'preview',
|
||||
});
|
||||
|
||||
CrawlDetailLogic.actions.setSelectedTab('json');
|
||||
|
||||
expect(CrawlDetailLogic.values).toEqual({
|
||||
...DEFAULT_VALUES,
|
||||
selectedTab: 'json',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('fetchCrawlRequest', () => {
|
||||
it('sets loading to true and opens the flyout', () => {
|
||||
mount({
|
||||
dataLoading: false,
|
||||
});
|
||||
|
||||
CrawlDetailLogic.actions.fetchCrawlRequest('12345');
|
||||
|
||||
expect(CrawlDetailLogic.values).toEqual({
|
||||
...DEFAULT_VALUES,
|
||||
dataLoading: true,
|
||||
flyoutClosed: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('updates logic with data that has been converted from server to client', async () => {
|
||||
mount();
|
||||
jest.spyOn(CrawlDetailLogic.actions, 'onRecieveCrawlRequest');
|
||||
|
||||
http.get.mockReturnValueOnce(Promise.resolve(crawlRequestResponse));
|
||||
|
||||
CrawlDetailLogic.actions.fetchCrawlRequest('12345');
|
||||
await nextTick();
|
||||
|
||||
expect(http.get).toHaveBeenCalledWith(
|
||||
'/internal/enterprise_search/indices/index-name/crawler/crawl_requests/12345'
|
||||
);
|
||||
expect(CrawlDetailLogic.actions.onRecieveCrawlRequest).toHaveBeenCalledWith(
|
||||
crawlRequestResponse
|
||||
);
|
||||
});
|
||||
|
||||
itShowsServerErrorAsFlashMessage(http.get, () => {
|
||||
mount();
|
||||
CrawlDetailLogic.actions.fetchCrawlRequest('12345');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,103 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { kea, MakeLogicType } from 'kea';
|
||||
|
||||
import { flashAPIErrors } from '../../../../../shared/flash_messages';
|
||||
import { HttpLogic } from '../../../../../shared/http';
|
||||
|
||||
import {
|
||||
CrawlRequestWithDetails,
|
||||
CrawlRequestWithDetailsFromServer,
|
||||
} from '../../../../api/crawler/types';
|
||||
import { crawlRequestWithDetailsServerToClient } from '../../../../api/crawler/utils';
|
||||
import { IndexNameLogic } from '../../index_name_logic';
|
||||
|
||||
type CrawlDetailFlyoutTabs = 'preview' | 'json';
|
||||
|
||||
export interface CrawlDetailValues {
|
||||
crawlRequest: CrawlRequestWithDetails | null;
|
||||
crawlRequestFromServer: CrawlRequestWithDetailsFromServer | null;
|
||||
dataLoading: boolean;
|
||||
flyoutClosed: boolean;
|
||||
selectedTab: CrawlDetailFlyoutTabs;
|
||||
}
|
||||
|
||||
export interface CrawlDetailActions {
|
||||
closeFlyout(): void;
|
||||
fetchCrawlRequest(requestId: string): { requestId: string };
|
||||
onRecieveCrawlRequest(crawlRequestFromServer: CrawlRequestWithDetailsFromServer): {
|
||||
crawlRequestFromServer: CrawlRequestWithDetailsFromServer;
|
||||
};
|
||||
setSelectedTab(selectedTab: CrawlDetailFlyoutTabs): { selectedTab: CrawlDetailFlyoutTabs };
|
||||
}
|
||||
|
||||
export const CrawlDetailLogic = kea<MakeLogicType<CrawlDetailValues, CrawlDetailActions>>({
|
||||
path: ['enterprise_search', 'crawler', 'crawl_detail_logic'],
|
||||
actions: {
|
||||
closeFlyout: true,
|
||||
fetchCrawlRequest: (requestId) => ({ requestId }),
|
||||
onRecieveCrawlRequest: (crawlRequestFromServer) => ({ crawlRequestFromServer }),
|
||||
setSelectedTab: (selectedTab) => ({ selectedTab }),
|
||||
},
|
||||
reducers: {
|
||||
crawlRequest: [
|
||||
null,
|
||||
{
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onRecieveCrawlRequest: (_, { crawlRequestFromServer }) =>
|
||||
crawlRequestWithDetailsServerToClient(crawlRequestFromServer),
|
||||
},
|
||||
],
|
||||
crawlRequestFromServer: [
|
||||
null,
|
||||
{
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
onRecieveCrawlRequest: (_, { crawlRequestFromServer }) => crawlRequestFromServer,
|
||||
},
|
||||
],
|
||||
dataLoading: [
|
||||
true,
|
||||
{
|
||||
fetchCrawlRequest: () => true,
|
||||
onRecieveCrawlRequest: () => false,
|
||||
},
|
||||
],
|
||||
flyoutClosed: [
|
||||
true,
|
||||
{
|
||||
fetchCrawlRequest: () => false,
|
||||
closeFlyout: () => true,
|
||||
},
|
||||
],
|
||||
selectedTab: [
|
||||
'preview',
|
||||
{
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
fetchCrawlRequest: () => 'preview',
|
||||
// @ts-expect-error upgrade typescript v5.1.6
|
||||
setSelectedTab: (_, { selectedTab }) => selectedTab,
|
||||
},
|
||||
],
|
||||
},
|
||||
listeners: ({ actions }) => ({
|
||||
fetchCrawlRequest: async ({ requestId }) => {
|
||||
const { http } = HttpLogic.values;
|
||||
const { indexName } = IndexNameLogic.values;
|
||||
|
||||
try {
|
||||
const response = await http.get<CrawlRequestWithDetailsFromServer>(
|
||||
`/internal/enterprise_search/indices/${indexName}/crawler/crawl_requests/${requestId}`
|
||||
);
|
||||
|
||||
actions.onRecieveCrawlRequest(response);
|
||||
} catch (e) {
|
||||
flashAPIErrors(e);
|
||||
}
|
||||
},
|
||||
}),
|
||||
});
|
|
@ -1,131 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import { setMockActions, setMockValues } from '../../../../../__mocks__/kea_logic';
|
||||
import '../../../../../__mocks__/shallow_useeffect.mock';
|
||||
import '../../_mocks_/index_name_logic.mock';
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { shallow } from 'enzyme';
|
||||
|
||||
import { EuiCodeBlock, EuiFlyout, EuiTab, EuiTabs } from '@elastic/eui';
|
||||
|
||||
import { Loading } from '../../../../../shared/loading';
|
||||
import { CrawlRequestWithDetailsFromServer } from '../../../../api/crawler/types';
|
||||
|
||||
import { CrawlDetailsFlyout } from './crawl_details_flyout';
|
||||
import { CrawlDetailsPreview } from './crawl_details_preview';
|
||||
|
||||
const MOCK_VALUES = {
|
||||
dataLoading: false,
|
||||
flyoutClosed: false,
|
||||
crawlRequestFromServer: {} as CrawlRequestWithDetailsFromServer,
|
||||
};
|
||||
|
||||
const MOCK_ACTIONS = {
|
||||
setSelectedTab: jest.fn(),
|
||||
fetchLogRetention: jest.fn(),
|
||||
};
|
||||
|
||||
describe('CrawlDetailsFlyout', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('renders a flyout ', () => {
|
||||
setMockActions(MOCK_ACTIONS);
|
||||
setMockValues(MOCK_VALUES);
|
||||
|
||||
const wrapper = shallow(<CrawlDetailsFlyout />);
|
||||
|
||||
expect(wrapper.is(EuiFlyout)).toBe(true);
|
||||
});
|
||||
|
||||
it('contains a tab group to control displayed content inside the flyout', () => {
|
||||
setMockActions(MOCK_ACTIONS);
|
||||
setMockValues(MOCK_VALUES);
|
||||
|
||||
const wrapper = shallow(<CrawlDetailsFlyout />);
|
||||
const tabs = wrapper.find(EuiTabs).find(EuiTab);
|
||||
|
||||
expect(tabs).toHaveLength(2);
|
||||
|
||||
tabs.at(0).simulate('click');
|
||||
|
||||
expect(MOCK_ACTIONS.setSelectedTab).toHaveBeenCalledWith('preview');
|
||||
|
||||
tabs.at(1).simulate('click');
|
||||
|
||||
expect(MOCK_ACTIONS.setSelectedTab).toHaveBeenCalledWith('json');
|
||||
});
|
||||
|
||||
describe('when the preview tab is selected', () => {
|
||||
beforeEach(() => {
|
||||
setMockValues({
|
||||
...MOCK_VALUES,
|
||||
selectedTab: 'preview',
|
||||
});
|
||||
});
|
||||
|
||||
it('shows the correct tab is selected in the UX', () => {
|
||||
const wrapper = shallow(<CrawlDetailsFlyout />);
|
||||
const tabs = wrapper.find(EuiTabs).find(EuiTab);
|
||||
|
||||
expect(tabs.at(0).prop('isSelected')).toBe(true);
|
||||
expect(tabs.at(1).prop('isSelected')).toBe(false);
|
||||
});
|
||||
|
||||
it('shows the human readable version of the crawl details', () => {
|
||||
const wrapper = shallow(<CrawlDetailsFlyout />);
|
||||
|
||||
const crawlDetailsPreview = wrapper.find(CrawlDetailsPreview);
|
||||
expect(crawlDetailsPreview).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when the json tab is selected', () => {
|
||||
beforeEach(() => {
|
||||
setMockValues({
|
||||
...MOCK_VALUES,
|
||||
selectedTab: 'json',
|
||||
});
|
||||
});
|
||||
|
||||
it('shows the correct tab is selected in the UX', () => {
|
||||
const wrapper = shallow(<CrawlDetailsFlyout />);
|
||||
const tabs = wrapper.find(EuiTabs).find(EuiTab);
|
||||
|
||||
expect(tabs.at(0).prop('isSelected')).toBe(false);
|
||||
expect(tabs.at(1).prop('isSelected')).toBe(true);
|
||||
});
|
||||
|
||||
it('shows the raw json of the crawl details', () => {
|
||||
const wrapper = shallow(<CrawlDetailsFlyout />);
|
||||
|
||||
expect(wrapper.find(EuiCodeBlock)).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
it('renders a loading screen when loading', () => {
|
||||
setMockValues({ ...MOCK_VALUES, dataLoading: true });
|
||||
|
||||
const wrapper = shallow(<CrawlDetailsFlyout />);
|
||||
|
||||
expect(wrapper.is(EuiFlyout)).toBe(true);
|
||||
expect(wrapper.find(Loading)).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('is empty when the flyout is hidden', () => {
|
||||
setMockValues({
|
||||
flyoutClosed: true,
|
||||
});
|
||||
|
||||
const wrapper = shallow(<CrawlDetailsFlyout />);
|
||||
|
||||
expect(wrapper.isEmptyRender()).toBe(true);
|
||||
});
|
||||
});
|
|
@ -1,82 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import React from 'react';
|
||||
|
||||
import { useActions, useValues } from 'kea';
|
||||
|
||||
import {
|
||||
EuiFlyout,
|
||||
EuiFlyoutHeader,
|
||||
EuiTitle,
|
||||
EuiFlyoutBody,
|
||||
EuiCodeBlock,
|
||||
EuiTab,
|
||||
EuiTabs,
|
||||
} from '@elastic/eui';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { Loading } from '../../../../../shared/loading';
|
||||
|
||||
import { CrawlDetailLogic } from './crawl_detail_logic';
|
||||
|
||||
import { CrawlDetailsPreview } from './crawl_details_preview';
|
||||
|
||||
export const CrawlDetailsFlyout: React.FC = () => {
|
||||
const { closeFlyout, setSelectedTab } = useActions(CrawlDetailLogic);
|
||||
const { crawlRequestFromServer, dataLoading, flyoutClosed, selectedTab } =
|
||||
useValues(CrawlDetailLogic);
|
||||
|
||||
if (flyoutClosed) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<EuiFlyout
|
||||
maxWidth="45rem"
|
||||
ownFocus
|
||||
onClose={closeFlyout}
|
||||
aria-labelledby="CrawlDetailsFlyoutTitle"
|
||||
>
|
||||
<EuiFlyoutHeader hasBorder>
|
||||
<EuiTitle size="m">
|
||||
<h2 id="CrawlDetailsFlyoutTitle">
|
||||
{i18n.translate('xpack.enterpriseSearch.crawler.crawlDetailsFlyout.title', {
|
||||
defaultMessage: 'Crawl request details',
|
||||
})}
|
||||
</h2>
|
||||
</EuiTitle>
|
||||
<EuiTabs style={{ marginBottom: '-25px' }}>
|
||||
<EuiTab isSelected={selectedTab === 'preview'} onClick={() => setSelectedTab('preview')}>
|
||||
{i18n.translate('xpack.enterpriseSearch.crawler.crawlDetailsFlyout.previewTabLabel', {
|
||||
defaultMessage: 'Preview',
|
||||
})}
|
||||
</EuiTab>
|
||||
<EuiTab isSelected={selectedTab === 'json'} onClick={() => setSelectedTab('json')}>
|
||||
{i18n.translate('xpack.enterpriseSearch.crawler.crawlDetailsFlyout.rawJSONTabLabel', {
|
||||
defaultMessage: 'Raw JSON',
|
||||
})}
|
||||
</EuiTab>
|
||||
</EuiTabs>
|
||||
</EuiFlyoutHeader>
|
||||
<EuiFlyoutBody>
|
||||
{dataLoading ? (
|
||||
<Loading />
|
||||
) : (
|
||||
<>
|
||||
{selectedTab === 'preview' && <CrawlDetailsPreview />}
|
||||
{selectedTab === 'json' && (
|
||||
<EuiCodeBlock language="json" isCopyable>
|
||||
{JSON.stringify(crawlRequestFromServer, null, 2)}
|
||||
</EuiCodeBlock>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</EuiFlyoutBody>
|
||||
</EuiFlyout>
|
||||
);
|
||||
};
|
|
@ -1,119 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import { setMockValues } from '../../../../../__mocks__/kea_logic';
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { shallow, ShallowWrapper } from 'enzyme';
|
||||
|
||||
import { set } from '@kbn/safer-lodash-set/fp';
|
||||
|
||||
import { AccordionList } from '../../../../../shared/accordion_list/accordion_list';
|
||||
|
||||
import { CrawlerStatus, CrawlType } from '../../../../api/crawler/types';
|
||||
|
||||
import { CrawlDetailValues } from './crawl_detail_logic';
|
||||
import { CrawlDetailsPreview } from './crawl_details_preview';
|
||||
import { CrawlDetailsSummary } from './crawl_details_summary';
|
||||
|
||||
const MOCK_VALUES: Partial<CrawlDetailValues> = {
|
||||
crawlRequest: {
|
||||
id: '507f1f77bcf86cd799439011',
|
||||
status: CrawlerStatus.Pending,
|
||||
createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
|
||||
beganAt: null,
|
||||
completedAt: null,
|
||||
type: CrawlType.Full,
|
||||
crawlConfig: {
|
||||
domainAllowlist: ['https://www.elastic.co', 'https://www.swiftype.com'],
|
||||
seedUrls: ['https://www.elastic.co/docs', 'https://www.swiftype.com/documentation'],
|
||||
sitemapUrls: ['https://www.elastic.co/sitemap.xml', 'https://www.swiftype.com/sitemap.xml'],
|
||||
maxCrawlDepth: 10,
|
||||
},
|
||||
stats: {
|
||||
status: {
|
||||
urlsAllowed: 10,
|
||||
pagesVisited: 10,
|
||||
crawlDurationMSec: 36000,
|
||||
avgResponseTimeMSec: 100,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
describe('CrawlDetailsPreview', () => {
|
||||
it('is empty when a crawl request has not been loaded', () => {
|
||||
setMockValues({
|
||||
crawlRequest: null,
|
||||
});
|
||||
|
||||
const wrapper = shallow(<CrawlDetailsPreview />);
|
||||
expect(wrapper.isEmptyRender()).toBe(true);
|
||||
});
|
||||
|
||||
describe('when a crawl request has been loaded', () => {
|
||||
let wrapper: ShallowWrapper;
|
||||
|
||||
beforeEach(() => {
|
||||
setMockValues(MOCK_VALUES);
|
||||
wrapper = shallow(<CrawlDetailsPreview />);
|
||||
});
|
||||
|
||||
it('contains a summary', () => {
|
||||
const summary = wrapper.find(CrawlDetailsSummary);
|
||||
expect(summary.props()).toEqual({
|
||||
crawlDepth: 10,
|
||||
crawlType: 'full',
|
||||
domainCount: 2,
|
||||
stats: {
|
||||
status: {
|
||||
avgResponseTimeMSec: 100,
|
||||
crawlDurationMSec: 36000,
|
||||
pagesVisited: 10,
|
||||
urlsAllowed: 10,
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('will default values on summary if missing', () => {
|
||||
const values = set('crawlRequest.stats', undefined, MOCK_VALUES);
|
||||
setMockValues(values);
|
||||
wrapper = shallow(<CrawlDetailsPreview />);
|
||||
|
||||
const summary = wrapper.find(CrawlDetailsSummary);
|
||||
expect(summary.prop('stats')).toEqual(null);
|
||||
});
|
||||
|
||||
it('contains a list of domains', () => {
|
||||
const domainList = wrapper.find(AccordionList).at(0);
|
||||
|
||||
expect(domainList.prop('items')).toEqual([
|
||||
'https://www.elastic.co',
|
||||
'https://www.swiftype.com',
|
||||
]);
|
||||
});
|
||||
|
||||
it('contains a list of seed urls', () => {
|
||||
const seedUrlList = wrapper.find(AccordionList).at(1);
|
||||
|
||||
expect(seedUrlList.prop('items')).toEqual([
|
||||
'https://www.elastic.co/docs',
|
||||
'https://www.swiftype.com/documentation',
|
||||
]);
|
||||
});
|
||||
|
||||
it('contains a list of sitemap urls', () => {
|
||||
const sitemapUrlList = wrapper.find(AccordionList).at(2);
|
||||
|
||||
expect(sitemapUrlList.prop('items')).toEqual([
|
||||
'https://www.elastic.co/sitemap.xml',
|
||||
'https://www.swiftype.com/sitemap.xml',
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,70 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { useValues } from 'kea';
|
||||
|
||||
import { EuiSpacer } from '@elastic/eui';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { AccordionList } from '../../../../../shared/accordion_list/accordion_list';
|
||||
|
||||
import { CrawlDetailLogic } from './crawl_detail_logic';
|
||||
import { CrawlDetailsSummary } from './crawl_details_summary';
|
||||
|
||||
export const CrawlDetailsPreview: React.FC = () => {
|
||||
const { crawlRequest } = useValues(CrawlDetailLogic);
|
||||
|
||||
if (crawlRequest === null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<CrawlDetailsSummary
|
||||
crawlType={crawlRequest.type}
|
||||
domainCount={crawlRequest.crawlConfig.domainAllowlist.length}
|
||||
crawlDepth={crawlRequest.crawlConfig.maxCrawlDepth}
|
||||
stats={crawlRequest.stats || null}
|
||||
/>
|
||||
<EuiSpacer />
|
||||
<AccordionList
|
||||
hasBorder
|
||||
initialIsOpen={crawlRequest.crawlConfig.domainAllowlist.length > 0}
|
||||
title={i18n.translate('xpack.enterpriseSearch.crawler.crawlDetailsPreview.domainsTitle', {
|
||||
defaultMessage: 'Domains',
|
||||
})}
|
||||
iconType="globe"
|
||||
items={crawlRequest.crawlConfig.domainAllowlist}
|
||||
/>
|
||||
<EuiSpacer size="s" />
|
||||
<AccordionList
|
||||
hasBorder
|
||||
initialIsOpen={crawlRequest.crawlConfig.seedUrls.length > 0}
|
||||
title={i18n.translate('xpack.enterpriseSearch.crawler.crawlDetailsPreview.seedUrlsTitle', {
|
||||
defaultMessage: 'Seed URLs',
|
||||
})}
|
||||
iconType="crosshairs"
|
||||
items={crawlRequest.crawlConfig.seedUrls}
|
||||
/>
|
||||
<EuiSpacer size="s" />
|
||||
<AccordionList
|
||||
hasBorder
|
||||
initialIsOpen={crawlRequest.crawlConfig.sitemapUrls.length > 0}
|
||||
title={i18n.translate(
|
||||
'xpack.enterpriseSearch.crawler.crawlDetailsPreview.sitemapUrlsTitle',
|
||||
{
|
||||
defaultMessage: 'Sitemap URLs',
|
||||
}
|
||||
)}
|
||||
iconType="visMapRegion"
|
||||
items={crawlRequest.crawlConfig.sitemapUrls}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
};
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue