diff --git a/.github/workflows/docs-ci.yml b/.github/workflows/docs-ci.yml new file mode 100644 index 00000000..dd2546f1 --- /dev/null +++ b/.github/workflows/docs-ci.yml @@ -0,0 +1,29 @@ +name: Build documentation +run-name: Building static documentation website after push by ${{ github.actor }} +on: + push: + branches: + - master +permissions: + contents: write +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Configure Git Credentials + run: | + git config user.name github-actions[bot] + git config user.email 41898282+github-actions[bot]@users.noreply.github.com + - uses: actions/setup-python@v5 + with: + python-version: 3.x + - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV + - uses: actions/cache@v4 + with: + key: mkdocs-material-${{ env.cache_id }} + path: .cache + restore-keys: | + mkdocs-material- + - run: pip install mkdocs-material mkdocs-exclude-search + - run: mkdocs gh-deploy --force \ No newline at end of file diff --git a/.gitignore b/.gitignore index 03e39155..b671453e 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,5 @@ extract-task-validation/target/ CLAUDE.md INTEGRATION_TESTS_PLAN.md .claude/ +.venv/ +.smarttomcat/ diff --git a/README.md b/README.md index 948afa35..dd0757b8 100644 --- a/README.md +++ b/README.md @@ -19,6 +19,10 @@ En automatisant le processus d'extraction et de livraison de vos géodonnées, v Extract est modulable et extensible, à installer dans l’environnement informatique de chaque fournisseur de données, et accessible via un navigateur. +## 📋Documentation + +Toute la documentation pour installer, paramétrer et utiliser Extract est en ligne (en anglais) : https://asit-asso.github.io/extract/ + ## Groupe utilisateurs L'ASIT, des administrations et gestionnaires de réseaux forment un groupe utilisateur qui pilote et finance le projet @@ -41,62 +45,12 @@ Actuellement, 37 fournisseurs diffusent tout ou partie de leurs géodonnées ave * Holdigaz Prestations SA * ASIT - Association pour le système d'information du territoire -## Pour installer - -Suivez les guides d'installation et d'exploitation ici : https://github.com/asit-asso/extract/wiki - -### Prérequis: -* Windows or Linux, 64bit -* Java 17 (Oracle ou OpenJDK) -* Tomcat 9 (Extract n’est actuellement pas compatible avec Tomcat 10 en raison de l’utilisation de Spring Boot 2) -* PostgreSQL >= 12 - -## Développement - -### Packaging - -Pour générer un WAR de l'application, il faut lancer la commande suivante -```bash -mvn package -``` - -Outre le fichier WAR, l'archive de livraison d'une nouvelle version contient : - -* La dernière version du script de mise à jour de la base de données (`sql/update_db.sql`) -* La documentation : - * guide d'installation (`doc/MAN_ASIT_Extract_ManuelInstallation.pdf`) - * guide d'exploitation (`doc/MAN_ASIT_Extract_ManuelExploitation.pdf`) - * guide d'utilisation (`doc/MAN_ASIT_Extract_ManuelUtilisation.pdf`) - * documentation de création d'un connecteur (`doc/extract-connector-sample/`) - * documentation de création d'un plugin de tâche (`doc/extract-task-sample/`) -* Le script d'exemple FME (`fme/`) - -### Tests - -Les tests unitaires peuvent se lancer indépendamment du packaging par la commande -```bash -mvn -q test -Punit-tests --batch-mode --fail-at-end -``` - -Pour exécuter les tests d'intégration -```bash -mvn -q verify -Pintegration-tests --batch-mode -``` - -Pour les tests fonctionnels (nécessite que l'application tourne sur le port 8080 du localhost) -```bash -mvn -q verify -Pfunctional-tests --batch-mode 
-``` - -## Documentation et liens: - -Aide et documentation : https://github.com/asit-asso/extract/wiki +## Liens: Présentations sur le projet aux Rencontres ASIT : https://asit-asso.ch/toutes-les-rencontres#2018 L'ASIT, Association pour le Système d'Information du Territoire, à l'origine du projet : https://asit-asso.ch -Forked from easySDI (www.easysdi.org) : https://svn.easysdi.org/svn/easysdi/branches/4.5.x/java ## Screenshots: diff --git a/doc/DC_ASITVD_Extract_DossierArchitectureTechnique_V1.12.pdf b/doc/DC_ASITVD_Extract_DossierArchitectureTechnique_V1.12.pdf deleted file mode 100644 index 909f4e69..00000000 Binary files a/doc/DC_ASITVD_Extract_DossierArchitectureTechnique_V1.12.pdf and /dev/null differ diff --git a/doc/MAN_ASIT_Extract_ManuelExploitation.pdf b/doc/MAN_ASIT_Extract_ManuelExploitation.pdf deleted file mode 100644 index fa59d7e0..00000000 Binary files a/doc/MAN_ASIT_Extract_ManuelExploitation.pdf and /dev/null differ diff --git a/doc/MAN_ASIT_Extract_ManuelExploitation_V1.13.docx b/doc/MAN_ASIT_Extract_ManuelExploitation_V1.13.docx deleted file mode 100644 index 3c3f2c1b..00000000 Binary files a/doc/MAN_ASIT_Extract_ManuelExploitation_V1.13.docx and /dev/null differ diff --git a/doc/MAN_ASIT_Extract_ManuelInstallation.pdf b/doc/MAN_ASIT_Extract_ManuelInstallation.pdf deleted file mode 100644 index 208695ed..00000000 Binary files a/doc/MAN_ASIT_Extract_ManuelInstallation.pdf and /dev/null differ diff --git a/doc/MAN_ASIT_Extract_ManuelInstallation_V1.13.docx b/doc/MAN_ASIT_Extract_ManuelInstallation_V1.13.docx deleted file mode 100644 index 53e83def..00000000 Binary files a/doc/MAN_ASIT_Extract_ManuelInstallation_V1.13.docx and /dev/null differ diff --git a/doc/MAN_ASIT_Extract_ManuelUtilisation.pdf b/doc/MAN_ASIT_Extract_ManuelUtilisation.pdf deleted file mode 100644 index ffdc8b89..00000000 Binary files a/doc/MAN_ASIT_Extract_ManuelUtilisation.pdf and /dev/null differ diff --git a/doc/MAN_ASIT_Extract_ManuelUtilisation_V1.9.docx b/doc/MAN_ASIT_Extract_ManuelUtilisation_V1.9.docx deleted file mode 100644 index b1abb2db..00000000 Binary files a/doc/MAN_ASIT_Extract_ManuelUtilisation_V1.9.docx and /dev/null differ diff --git a/doc/extract-connector-sample/getting-started.md b/doc/extract-connector-sample/getting-started.md deleted file mode 100644 index 0726103a..00000000 --- a/doc/extract-connector-sample/getting-started.md +++ /dev/null @@ -1,112 +0,0 @@ -# Extract - Initialiser un nouveau plugin de connecteur - -## Introduction - -Ce module java est un exemple de plugin de connecteur qui peut être utilisé pour initialiser un nouveau -plugin pour le projet Extract. -Le code source est documenté et permet d'avoir les indications nécessaires pour développer un nouveau plugin, -Il peut être importé dans un nouveau environnement Java, des adaptations sont cependant -nécessaires selon le fonctionnement attendu. - -## Pré-requis pour l'utilisation -* OS 64 bits -* Java 17 -* Tomcat 9 - -## Pré-requis pour le développement et la compilation -* Java 17 -* [Yarn][Yarn_Site] -* Projet extract-interface (Interface commune pour l'utilisation des plugins connecteurs et tâches) - -## Initialisation du nouveau plugin de connecteur. -Le projet doit être un module Java. \ -Le projet du nouveau plugin doit définir une dépendance vers le projet extract-interface.\ -Les dépendances requises sont définies dans le fichier pom.xml. - -Pour initialiser un nouveau plugin, suivre les étapes dans l'ordre : -1. Copier le code du plugin extract-connector-sample vers un workspace java. 
Le système propose demande de définir un - nouveau nom. Si ce n'est pas le cas, utiliser le menu contextuel (clic droit) `Refactor > Rename` - - -2. Editer le fichier **pom.xml** du module, remplacer les occurences de `extrat-connector-sample` par le nouveau nom du plugin. - Après un clic droit sur le fichier, choisir l'option `Add as Maven Project` - - -3. Après un clic droit sur l'espace de nom `ch.asit_asso.extract.connectors.sample`, choisir le menu `Refactor > Rename`. - Saisir le nom de la nouvelle classe permettant d'identifier le plugin. Si l'interface le demande, cliquer sur le bouton - `Add in current Module` afin d'appliquer les changements sur le mode uniquement. - Cela aura pour effet de modifier automatiquement le nom du package dans tous les fichiers du module. - - -4. Après un clic droit sur le fichier **SampleConnector.java**, choisir le menu `Refactor > Rename` puis saisir le nom - de la nouvelle classe principale du plugin (e.g EasySDIv4). Cela aura pour effet de renommer le fichier et de modifer - toutes les références à cette classe partout où elle est utilisée. - - -5. Editer le fichier **LocalizedMessages.java** : ajuster la valeur du paramètre LOCALIZED_FILE_PATH_FORMAT qui - correspond au chemin vers le répertoire `lang` - - -6. vérifier le fichier **module-info.java** en particulier la référence à `SampleConnector`. Vérifier également la ligne 4 - de ce fichier (référence à l'espace de nom `ch.asit_asso.extract.connectors.sample`) - - -7. Vérifier le fichier **resources\META-INF\services\ch.asit_asso.extract.connectors.common.IConnector** en particulier -la cohérence de la classe `ch.asit_asso.extract.connectors.sample.SampleConnector` - - -8. Après un clic droit sur le dossier **resources\connectors\sample**, choisir le menu `Refactor > Rename`. Saisir - le nouveau nom - - -9. Editer le fichier **resources\connectors\\lang\fr\messages.properties**. Modifier ou - ajouter les libellés qui seront utilisés par le code source du plugin. Cette étape peut se faire - de manière progressive pendant le développement - - -10. Editer le fichier **resources\connectors\\properties\config.properties**. Ce ficher contient les - paramètres de configuration du plugn utilisés par le code source. Cette étape peut se faire - de manière progressive pendant le développement - - -11. Le dossier **resources\connectors\\templates** contient des modèles XML utilisés par le connecteur. -Néanmoins, ils sont spécifiques au connecteur EasySDIv4, le format XML est en effet utilisés comme format d'échange -pour les commandes. Ces fichiers ne serviront sans doute pas pour d'autres types de connecteurs. - - -## Points important à prendre en compte pendant le développement - -Le code source est suffisamment commenté afin d'aider le développeur à développer le -nouveau plugin. Les commentaires en **MAJUSCULE** permettent d'identifer les parties de code ou les fonctions -importantes à mettre à jour. - -Il est notamment recommandé d'apporter les modifications suivantes dans la class Connector : -* Ajuster si besoin la variable `CONFIG_FILE_PATH` -* Modifier la valeur du paramètre `code` par une valeur permettant d'identifier le plugin (e.g `EasySDIv4`) - -Ensuite, les fonctions à adapter sont celles qui surchargent les fonctions de l'interface -IConnector : -* `getParams` pour définir les paramètres du connecteur. Cette méthode retourne les paramètres - du plugin sous forme de tableau au format JSON. 
Si le plugin n’accepte pas de paramètres, renvoyer un tableau vide -* `importCommands` qui permet de gérer l'import des commandes effectués par le serveur distant -* `exportResult` qui permet de gérer l'envoi du traitement d'une commande vers le serveur qui l'a produite - -Les fonctions privées dans le code permettent de réaliser des traitements utilitaires ou de traiter -les données reçues. Ces fonctions peuvent être addaptées ou supprimées selon le besoin. -Ces fonctions sont bien identifiées dans le code de la classe Connector. - -## Installation ou mise à jour du plugin dans EXTRACT - -Avant de compiler le connecteur, supprimer le dossier **target**.\ -Dès que le connecteur est compilé et que le fichier jar est généré, il suffit de placer le JAR -dans le répertoire **WEB-INF/classes/connectors** de l’application -(contenant tous les plugins de connecteurs).\ -En cas de mise à jour, il convient de supprimer le WAR de l’ancienne version afin d’éviter des conflits. - -``` -Le redémarrage de l’application Tomcat EXTRACT est ensuite requis afin que la -modification des connecteurs soit prise en compte. -``` - - -[Yarn_Site]: https://yarnpkg.com/ "Site du gestionnaire de package Yarn" diff --git a/doc/extract-task-sample/getting-started.md b/doc/extract-task-sample/getting-started.md deleted file mode 100644 index 8ce189d9..00000000 --- a/doc/extract-task-sample/getting-started.md +++ /dev/null @@ -1,113 +0,0 @@ -# Extract - Initialiser un nouveau plugin de tâche - -## Introduction - -Ce module java est un exemple de plugin de tâche qui peut être utilisé pour initialiser un nouveau -plugin pour le projet Extract. -Le code source est documenté et permet d'avoir les indications nécessaires pour développer un nouveau plugin, -Il peut être importé dans un nouveau environnement Java, des adaptations sont cependant -nécessaires selon le fonctionnement attendu. - -## Pré-requis pour l'utilisation -* OS 64 bits -* Java 17 -* Tomcat 9 - -## Pré-requis pour le développement et la compilation -* Java 17 -* [Yarn][Yarn_Site] -* Projet extract-interface (Interface commune pour l'utilisation des plugins connecteurs et tâches) - -## Initialisation du nouveau plugin de tâche. -Le projet doit être un module Java. \ -Le projet du nouveau plugin doit définir une dépendance vers le projet extract-interface.\ -Les dépendances requises sont définies dans le fichier pom.xml. - -Pour initialiser un nouveau plugin, suivre les étapes dans l'ordre : -1. Copier le cde du plugin extract-task-sample vers un workspace java. Le système propose demande de définir un -nouveau nom. Si ce n'est pas le cas, utiliser le menu contextuel (clic droit) `Refactor > Rename` - - -2. Editer le fichier **pom.xml** du module, remplacer les occurences de `extrat-task-sample` par le nouveau nom du plugin. -Après un clic droit sur le fichier, choisir l'option `Add as Maven Project` - - -3. Après un clic droit sur l'espace de nom `ch.asit_asso.extract.plugins.sample`, choisir le menu `Refactor > Rename`. -Saisir le nom de la nouvelle classe permettant d'identifier le plugin. Si l'interface le demande, cliquer sur le bouton -`Add in current Module` afin d'appliquer les changements sur le mode uniquement. -Cela aura pour effet de modifier automatiquement le nom du package dans tous les fichiers du module. - - -4. Après un clic droit sur le fichier **SamplePlugin.java**, choisir le menu `Refactor > Rename` puis saisir le nom -de la nouvelle classe principale du plugin. 
Cela aura pour effet de renommer le fichier et de modifer -toutes les références à cette classe partout où elle est utilisée. - - -5. Refaire l'opération 4 pour les fichiers **SampleRequest.java** et **SampleResult.java** - - -6. Editer le fichier **LocalizedMessages.java** : ajuster la valeur du paramètre LOCALIZED_FILE_PATH_FORMAT qui -correspond au chemin vers le répertoire `lang` - - -7. Vérifier le fichier **module-info.java** en particulier la référence à `SamplePlugin`. Vérifier également la ligne 4 -de ce fichier (référence à l'espace de nom `ch.asit_asso.extract.plugins.sample`) - - -8. Vérifier le fichier **resources\META-INF\services\ch.asit_asso.extract.plugins.common.ITaskProcessor** : en particulier -la cohérence de la classe `ch.asit_asso.extract.plugins.sample.SamplePlugin` - - -9. Après un clic droit sur le dossier **resources\plugins\sample**, choisir le menu `Refactor > Rename`. Saisir -le nouveau nom - - -10. Editer le fichier **resources\plugins\\lang\fr\messages.properties**. Modifier ou -ajouter les libellés qui seront utilisés par le code source du plugin. Cette étape peut se faire -de manière progressive pendant le développement - - -11. Editer le fichier **resources\plugins\\lang\fr\help.html** afin d'y décrire le focntionnement -du plugin. *Le contenu de fichier est au format HTML* - - -12. Editer le fichier **resources\plugins\\properties\config.properties**. Ce ficher contient les -paramètres de configuration du plugn utilisés par le code source. Cette étape peut se faire -de manière progressive pendant le développement - - -## Points important à prendre en compte pendant le développement - -Le code source est suffisamment commenté afin d'aider le développeur à développer le -nouveau plugin. Les commentaires en **MAJUSCULE** permettent d'identifer les parties de code ou les fonctions -importantes à mettre à jour. - -Il est notamment recommandé d'apporter les modifications suivantes dans la class Plugin: -* Ajuster si besoin la variable `CONFIG_FILE_PATH` -* Modifier la valeur du paramètre `code` par une valeur permettant d'identifier le plugin (e.g `remark` ou `fmeserver`) -* Changer la valeur du paramètre pictoClass par un nom de classe CSS approprié (image du logo du plugin). Chercher -une icône sur le site [Font Awesome][Fontawesome_Site] - - -Ensuite, les fonctions à adapter sont celles qui surchargent les fonctions de l'interface -ITaskProcessor : -* `getParams` pour définir les paramètres du connecteur. Cette méthode retourne les paramètres -du plugin sous forme de tableau au format JSON. Si le plugin n’accepte pas de paramètres, renvoyer un tableau vide -* `execute` qui permet de gérer l'exécution du plugin - -## Installation ou mise à jour du plugin dans EXTRACT - -Avant de compiler le plugin, supprimer le dossier **target**.\ -Dès que le plugin est compilé et que le fichier jar est généré, il suffit de placer le JAR -dans le répertoire **WEB-INF/classes/task_processors** de l’application -(contenant tous les plugins de tâches).\ -En cas de mise à jour, il convient de supprimer le WAR de l’ancienne version afin d’éviter des conflits. - -``` -Le redémarrage de l’application Tomcat EXTRACT est ensuite requis afin que la -modification des plugins soit prise en compte. 
-``` - - -[Yarn_Site]: https://yarnpkg.com/ "Site du gestionnaire de package Yarn" -[Fontawesome_Site]: https://fontawesome.com/icons "Site web FontAwesome" \ No newline at end of file diff --git a/docs/assets/admin-guide/connectors-list.png b/docs/assets/admin-guide/connectors-list.png new file mode 100644 index 00000000..bbf8c5fb Binary files /dev/null and b/docs/assets/admin-guide/connectors-list.png differ diff --git a/docs/assets/admin-guide/connectors-parameters.png b/docs/assets/admin-guide/connectors-parameters.png new file mode 100644 index 00000000..09a667b9 Binary files /dev/null and b/docs/assets/admin-guide/connectors-parameters.png differ diff --git a/docs/assets/admin-guide/connectors-rules.png b/docs/assets/admin-guide/connectors-rules.png new file mode 100644 index 00000000..7a9449a0 Binary files /dev/null and b/docs/assets/admin-guide/connectors-rules.png differ diff --git a/docs/assets/admin-guide/ldap-authentication.png b/docs/assets/admin-guide/ldap-authentication.png new file mode 100644 index 00000000..a3592617 Binary files /dev/null and b/docs/assets/admin-guide/ldap-authentication.png differ diff --git a/docs/assets/admin-guide/misc.png b/docs/assets/admin-guide/misc.png new file mode 100644 index 00000000..37edc8d7 Binary files /dev/null and b/docs/assets/admin-guide/misc.png differ diff --git a/docs/assets/admin-guide/operating-hours.png b/docs/assets/admin-guide/operating-hours.png new file mode 100644 index 00000000..935250fd Binary files /dev/null and b/docs/assets/admin-guide/operating-hours.png differ diff --git a/docs/assets/admin-guide/orchestration.png b/docs/assets/admin-guide/orchestration.png new file mode 100644 index 00000000..c7a377cc Binary files /dev/null and b/docs/assets/admin-guide/orchestration.png differ diff --git a/docs/assets/admin-guide/processes-list.png b/docs/assets/admin-guide/processes-list.png new file mode 100644 index 00000000..96901430 Binary files /dev/null and b/docs/assets/admin-guide/processes-list.png differ diff --git a/docs/assets/admin-guide/processes-parameters.png b/docs/assets/admin-guide/processes-parameters.png new file mode 100644 index 00000000..28007a9f Binary files /dev/null and b/docs/assets/admin-guide/processes-parameters.png differ diff --git a/docs/assets/admin-guide/processes-tasks.png b/docs/assets/admin-guide/processes-tasks.png new file mode 100644 index 00000000..c0cc9296 Binary files /dev/null and b/docs/assets/admin-guide/processes-tasks.png differ diff --git a/docs/assets/admin-guide/smtp-server.png b/docs/assets/admin-guide/smtp-server.png new file mode 100644 index 00000000..0944c99a Binary files /dev/null and b/docs/assets/admin-guide/smtp-server.png differ diff --git a/docs/assets/admin-guide/tasks-list.png b/docs/assets/admin-guide/tasks-list.png new file mode 100644 index 00000000..15b886e5 Binary files /dev/null and b/docs/assets/admin-guide/tasks-list.png differ diff --git a/docs/assets/admin-guide/users-groups-list.png b/docs/assets/admin-guide/users-groups-list.png new file mode 100644 index 00000000..25d3d708 Binary files /dev/null and b/docs/assets/admin-guide/users-groups-list.png differ diff --git a/docs/assets/admin-guide/users-groups-settings.png b/docs/assets/admin-guide/users-groups-settings.png new file mode 100644 index 00000000..c6f59abb Binary files /dev/null and b/docs/assets/admin-guide/users-groups-settings.png differ diff --git a/docs/assets/admin-guide/users-list.png b/docs/assets/admin-guide/users-list.png new file mode 100644 index 00000000..c0b8cb8e Binary files 
/dev/null and b/docs/assets/admin-guide/users-list.png differ diff --git a/docs/assets/admin-guide/users-settings.png b/docs/assets/admin-guide/users-settings.png new file mode 100644 index 00000000..14d1ee78 Binary files /dev/null and b/docs/assets/admin-guide/users-settings.png differ diff --git a/docs/assets/admin-guide/validation-template-details.png b/docs/assets/admin-guide/validation-template-details.png new file mode 100644 index 00000000..7dee3ce2 Binary files /dev/null and b/docs/assets/admin-guide/validation-template-details.png differ diff --git a/docs/assets/admin-guide/validation-templates-list.png b/docs/assets/admin-guide/validation-templates-list.png new file mode 100644 index 00000000..3d5a08d3 Binary files /dev/null and b/docs/assets/admin-guide/validation-templates-list.png differ diff --git a/docs/assets/admin-guide/validation.png b/docs/assets/admin-guide/validation.png new file mode 100644 index 00000000..0a25e2d8 Binary files /dev/null and b/docs/assets/admin-guide/validation.png differ diff --git a/docs/assets/asit_extract_blanc.png b/docs/assets/asit_extract_blanc.png new file mode 100644 index 00000000..97914d7d Binary files /dev/null and b/docs/assets/asit_extract_blanc.png differ diff --git a/docs/assets/dev-guide/2fa-connection-flow.png b/docs/assets/dev-guide/2fa-connection-flow.png new file mode 100644 index 00000000..13a6c3ed Binary files /dev/null and b/docs/assets/dev-guide/2fa-connection-flow.png differ diff --git a/docs/assets/dev-guide/2fa-parameters-admin.png b/docs/assets/dev-guide/2fa-parameters-admin.png new file mode 100644 index 00000000..22134c89 Binary files /dev/null and b/docs/assets/dev-guide/2fa-parameters-admin.png differ diff --git a/docs/assets/dev-guide/2fa-parameters-own.png b/docs/assets/dev-guide/2fa-parameters-own.png new file mode 100644 index 00000000..7a955f17 Binary files /dev/null and b/docs/assets/dev-guide/2fa-parameters-own.png differ diff --git a/docs/assets/dev-guide/basic-logic-diagram.png b/docs/assets/dev-guide/basic-logic-diagram.png new file mode 100644 index 00000000..3d042365 Binary files /dev/null and b/docs/assets/dev-guide/basic-logic-diagram.png differ diff --git a/docs/assets/dev-guide/data-model.png b/docs/assets/dev-guide/data-model.png new file mode 100644 index 00000000..859e3485 Binary files /dev/null and b/docs/assets/dev-guide/data-model.png differ diff --git a/docs/assets/dev-guide/ldap-synchro.jpg b/docs/assets/dev-guide/ldap-synchro.jpg new file mode 100644 index 00000000..7fb2612b Binary files /dev/null and b/docs/assets/dev-guide/ldap-synchro.jpg differ diff --git a/docs/assets/dev-guide/login-diagram.jpg b/docs/assets/dev-guide/login-diagram.jpg new file mode 100644 index 00000000..bba725ff Binary files /dev/null and b/docs/assets/dev-guide/login-diagram.jpg differ diff --git a/docs/assets/dev-guide/request-lifecycle.png b/docs/assets/dev-guide/request-lifecycle.png new file mode 100644 index 00000000..979749e0 Binary files /dev/null and b/docs/assets/dev-guide/request-lifecycle.png differ diff --git a/docs/assets/dev-guide/software-architecture-diagram.png b/docs/assets/dev-guide/software-architecture-diagram.png new file mode 100644 index 00000000..d4ba1d2e Binary files /dev/null and b/docs/assets/dev-guide/software-architecture-diagram.png differ diff --git a/docs/assets/extract_favicon64.png b/docs/assets/extract_favicon64.png new file mode 100644 index 00000000..6a56a081 Binary files /dev/null and b/docs/assets/extract_favicon64.png differ diff --git 
a/docs/assets/how-to/fme-flow/connection.png b/docs/assets/how-to/fme-flow/connection.png new file mode 100644 index 00000000..63b5924a Binary files /dev/null and b/docs/assets/how-to/fme-flow/connection.png differ diff --git a/docs/assets/how-to/fme-flow/create-webhook.png b/docs/assets/how-to/fme-flow/create-webhook.png new file mode 100644 index 00000000..1f85ebef Binary files /dev/null and b/docs/assets/how-to/fme-flow/create-webhook.png differ diff --git a/docs/assets/how-to/fme-flow/data-download.png b/docs/assets/how-to/fme-flow/data-download.png new file mode 100644 index 00000000..ec89a2c8 Binary files /dev/null and b/docs/assets/how-to/fme-flow/data-download.png differ diff --git a/docs/assets/how-to/fme-flow/download-webhook.png b/docs/assets/how-to/fme-flow/download-webhook.png new file mode 100644 index 00000000..c72a5ae3 Binary files /dev/null and b/docs/assets/how-to/fme-flow/download-webhook.png differ diff --git a/docs/assets/how-to/fme-flow/manage-tokens.png b/docs/assets/how-to/fme-flow/manage-tokens.png new file mode 100644 index 00000000..da82d585 Binary files /dev/null and b/docs/assets/how-to/fme-flow/manage-tokens.png differ diff --git a/docs/assets/how-to/fme-flow/plugin.png b/docs/assets/how-to/fme-flow/plugin.png new file mode 100644 index 00000000..39d8afd5 Binary files /dev/null and b/docs/assets/how-to/fme-flow/plugin.png differ diff --git a/docs/assets/how-to/fme-flow/publish-files.png b/docs/assets/how-to/fme-flow/publish-files.png new file mode 100644 index 00000000..90e3d711 Binary files /dev/null and b/docs/assets/how-to/fme-flow/publish-files.png differ diff --git a/docs/assets/how-to/fme-flow/publish-repo.png b/docs/assets/how-to/fme-flow/publish-repo.png new file mode 100644 index 00000000..f5bab6d3 Binary files /dev/null and b/docs/assets/how-to/fme-flow/publish-repo.png differ diff --git a/docs/assets/how-to/fme-flow/publish.png b/docs/assets/how-to/fme-flow/publish.png new file mode 100644 index 00000000..9f5fc614 Binary files /dev/null and b/docs/assets/how-to/fme-flow/publish.png differ diff --git a/docs/assets/how-to/fme-flow/repo-permissions.png b/docs/assets/how-to/fme-flow/repo-permissions.png new file mode 100644 index 00000000..bf2bb354 Binary files /dev/null and b/docs/assets/how-to/fme-flow/repo-permissions.png differ diff --git a/docs/assets/how-to/fme-flow/token.png b/docs/assets/how-to/fme-flow/token.png new file mode 100644 index 00000000..3435aac9 Binary files /dev/null and b/docs/assets/how-to/fme-flow/token.png differ diff --git a/docs/assets/how-to/fme-flow/user-parameter.png b/docs/assets/how-to/fme-flow/user-parameter.png new file mode 100644 index 00000000..82cb7a06 Binary files /dev/null and b/docs/assets/how-to/fme-flow/user-parameter.png differ diff --git a/docs/assets/how-to/fme-form/plugin.png b/docs/assets/how-to/fme-form/plugin.png new file mode 100644 index 00000000..e6902191 Binary files /dev/null and b/docs/assets/how-to/fme-form/plugin.png differ diff --git a/docs/assets/how-to/fme-form/read-data.png b/docs/assets/how-to/fme-form/read-data.png new file mode 100644 index 00000000..152a475f Binary files /dev/null and b/docs/assets/how-to/fme-form/read-data.png differ diff --git a/docs/assets/how-to/fme-form/read-parameters.png b/docs/assets/how-to/fme-form/read-parameters.png new file mode 100644 index 00000000..f990fe4b Binary files /dev/null and b/docs/assets/how-to/fme-form/read-parameters.png differ diff --git a/docs/assets/how-to/fme-form/retrieve-variable.png b/docs/assets/how-to/fme-form/retrieve-variable.png new 
file mode 100644 index 00000000..91ba61bd Binary files /dev/null and b/docs/assets/how-to/fme-form/retrieve-variable.png differ diff --git a/docs/assets/how-to/fme-form/save-data.png b/docs/assets/how-to/fme-form/save-data.png new file mode 100644 index 00000000..20748f04 Binary files /dev/null and b/docs/assets/how-to/fme-form/save-data.png differ diff --git a/docs/assets/how-to/fme-form/set-variable.png b/docs/assets/how-to/fme-form/set-variable.png new file mode 100644 index 00000000..6272f775 Binary files /dev/null and b/docs/assets/how-to/fme-form/set-variable.png differ diff --git a/docs/assets/how-to/fme-form/user-parameter.png b/docs/assets/how-to/fme-form/user-parameter.png new file mode 100644 index 00000000..d3e55521 Binary files /dev/null and b/docs/assets/how-to/fme-form/user-parameter.png differ diff --git a/docs/assets/how-to/python/plugin.png b/docs/assets/how-to/python/plugin.png new file mode 100644 index 00000000..a3529927 Binary files /dev/null and b/docs/assets/how-to/python/plugin.png differ diff --git a/docs/assets/how-to/qgis/atlas.png b/docs/assets/how-to/qgis/atlas.png new file mode 100644 index 00000000..56956acf Binary files /dev/null and b/docs/assets/how-to/qgis/atlas.png differ diff --git a/docs/assets/how-to/qgis/control.png b/docs/assets/how-to/qgis/control.png new file mode 100644 index 00000000..2a67cfcb Binary files /dev/null and b/docs/assets/how-to/qgis/control.png differ diff --git a/docs/assets/how-to/qgis/max-features.png b/docs/assets/how-to/qgis/max-features.png new file mode 100644 index 00000000..3ec13409 Binary files /dev/null and b/docs/assets/how-to/qgis/max-features.png differ diff --git a/docs/assets/how-to/qgis/plugin.png b/docs/assets/how-to/qgis/plugin.png new file mode 100644 index 00000000..926fb420 Binary files /dev/null and b/docs/assets/how-to/qgis/plugin.png differ diff --git a/docs/assets/how-to/qgis/preview.png b/docs/assets/how-to/qgis/preview.png new file mode 100644 index 00000000..06bc8adf Binary files /dev/null and b/docs/assets/how-to/qgis/preview.png differ diff --git a/docs/assets/how-to/viageo/metadata.png b/docs/assets/how-to/viageo/metadata.png new file mode 100644 index 00000000..0ee3d9c2 Binary files /dev/null and b/docs/assets/how-to/viageo/metadata.png differ diff --git a/docs/assets/how-to/viageo/resources-id.png b/docs/assets/how-to/viageo/resources-id.png new file mode 100644 index 00000000..9c557b1f Binary files /dev/null and b/docs/assets/how-to/viageo/resources-id.png differ diff --git a/docs/assets/install/create_admin.png b/docs/assets/install/create_admin.png new file mode 100644 index 00000000..6d9e9f21 Binary files /dev/null and b/docs/assets/install/create_admin.png differ diff --git a/docs/assets/introduction/extract_robot.png b/docs/assets/introduction/extract_robot.png new file mode 100644 index 00000000..b2b4bcca Binary files /dev/null and b/docs/assets/introduction/extract_robot.png differ diff --git a/docs/assets/introduction/shema_global_transparent2.png b/docs/assets/introduction/shema_global_transparent2.png new file mode 100644 index 00000000..6b6f18a9 Binary files /dev/null and b/docs/assets/introduction/shema_global_transparent2.png differ diff --git a/docs/assets/misc/data-replication.png b/docs/assets/misc/data-replication.png new file mode 100644 index 00000000..495eebbb Binary files /dev/null and b/docs/assets/misc/data-replication.png differ diff --git a/docs/assets/user-guide/2FA-setup.png b/docs/assets/user-guide/2FA-setup.png new file mode 100644 index 00000000..c4677269 Binary files 
/dev/null and b/docs/assets/user-guide/2FA-setup.png differ diff --git a/docs/assets/user-guide/2FA.png b/docs/assets/user-guide/2FA.png new file mode 100644 index 00000000..de3fcb1b Binary files /dev/null and b/docs/assets/user-guide/2FA.png differ diff --git a/docs/assets/user-guide/active-connectors.png b/docs/assets/user-guide/active-connectors.png new file mode 100644 index 00000000..396d3b59 Binary files /dev/null and b/docs/assets/user-guide/active-connectors.png differ diff --git a/docs/assets/user-guide/login.png b/docs/assets/user-guide/login.png new file mode 100644 index 00000000..802ce96d Binary files /dev/null and b/docs/assets/user-guide/login.png differ diff --git a/docs/assets/user-guide/request-details.png b/docs/assets/user-guide/request-details.png new file mode 100644 index 00000000..aadc3dff Binary files /dev/null and b/docs/assets/user-guide/request-details.png differ diff --git a/docs/assets/user-guide/request-reset-code.png b/docs/assets/user-guide/request-reset-code.png new file mode 100644 index 00000000..282f5b9d Binary files /dev/null and b/docs/assets/user-guide/request-reset-code.png differ diff --git a/docs/assets/user-guide/requests-admin-panel.png b/docs/assets/user-guide/requests-admin-panel.png new file mode 100644 index 00000000..e9069dfd Binary files /dev/null and b/docs/assets/user-guide/requests-admin-panel.png differ diff --git a/docs/assets/user-guide/requests-completed.png b/docs/assets/user-guide/requests-completed.png new file mode 100644 index 00000000..edb4c896 Binary files /dev/null and b/docs/assets/user-guide/requests-completed.png differ diff --git a/docs/assets/user-guide/requests-customer-request.png b/docs/assets/user-guide/requests-customer-request.png new file mode 100644 index 00000000..09350ef6 Binary files /dev/null and b/docs/assets/user-guide/requests-customer-request.png differ diff --git a/docs/assets/user-guide/requests-customer-response.png b/docs/assets/user-guide/requests-customer-response.png new file mode 100644 index 00000000..220c7b16 Binary files /dev/null and b/docs/assets/user-guide/requests-customer-response.png differ diff --git a/docs/assets/user-guide/requests-error.png b/docs/assets/user-guide/requests-error.png new file mode 100644 index 00000000..9cf92d0f Binary files /dev/null and b/docs/assets/user-guide/requests-error.png differ diff --git a/docs/assets/user-guide/requests-export-error.png b/docs/assets/user-guide/requests-export-error.png new file mode 100644 index 00000000..dd460acc Binary files /dev/null and b/docs/assets/user-guide/requests-export-error.png differ diff --git a/docs/assets/user-guide/requests-import-error.png b/docs/assets/user-guide/requests-import-error.png new file mode 100644 index 00000000..6b68332b Binary files /dev/null and b/docs/assets/user-guide/requests-import-error.png differ diff --git a/docs/assets/user-guide/requests-in-progress.png b/docs/assets/user-guide/requests-in-progress.png new file mode 100644 index 00000000..ac7d5bee Binary files /dev/null and b/docs/assets/user-guide/requests-in-progress.png differ diff --git a/docs/assets/user-guide/requests-no-processing.png b/docs/assets/user-guide/requests-no-processing.png new file mode 100644 index 00000000..26912cbb Binary files /dev/null and b/docs/assets/user-guide/requests-no-processing.png differ diff --git a/docs/assets/user-guide/requests-processing-history.png b/docs/assets/user-guide/requests-processing-history.png new file mode 100644 index 00000000..6fe2fe0b Binary files /dev/null and 
b/docs/assets/user-guide/requests-processing-history.png differ diff --git a/docs/assets/user-guide/requests-validation.png b/docs/assets/user-guide/requests-validation.png new file mode 100644 index 00000000..4ce05643 Binary files /dev/null and b/docs/assets/user-guide/requests-validation.png differ diff --git a/docs/assets/user-guide/reset-password.png b/docs/assets/user-guide/reset-password.png new file mode 100644 index 00000000..5db60b9e Binary files /dev/null and b/docs/assets/user-guide/reset-password.png differ diff --git a/docs/assets/user-guide/user-admin-menu.png b/docs/assets/user-guide/user-admin-menu.png new file mode 100644 index 00000000..f64bbc76 Binary files /dev/null and b/docs/assets/user-guide/user-admin-menu.png differ diff --git a/CHANGES.md b/docs/changelog/v2.3/CHANGES.md similarity index 100% rename from CHANGES.md rename to docs/changelog/v2.3/CHANGES.md diff --git a/doc/PATCH_ISSUE_308.md b/docs/changelog/v2.3/PATCH_ISSUE_308.md similarity index 100% rename from doc/PATCH_ISSUE_308.md rename to docs/changelog/v2.3/PATCH_ISSUE_308.md diff --git a/doc/PATCH_ISSUE_321.md b/docs/changelog/v2.3/PATCH_ISSUE_321.md similarity index 100% rename from doc/PATCH_ISSUE_321.md rename to docs/changelog/v2.3/PATCH_ISSUE_321.md diff --git a/doc/PATCH_ISSUE_323.md b/docs/changelog/v2.3/PATCH_ISSUE_323.md similarity index 100% rename from doc/PATCH_ISSUE_323.md rename to docs/changelog/v2.3/PATCH_ISSUE_323.md diff --git a/doc/PATCH_ISSUE_333.md b/docs/changelog/v2.3/PATCH_ISSUE_333.md similarity index 100% rename from doc/PATCH_ISSUE_333.md rename to docs/changelog/v2.3/PATCH_ISSUE_333.md diff --git a/doc/PATCH_ISSUE_337.md b/docs/changelog/v2.3/PATCH_ISSUE_337.md similarity index 100% rename from doc/PATCH_ISSUE_337.md rename to docs/changelog/v2.3/PATCH_ISSUE_337.md diff --git a/doc/PATCH_ISSUE_344.md b/docs/changelog/v2.3/PATCH_ISSUE_344.md similarity index 100% rename from doc/PATCH_ISSUE_344.md rename to docs/changelog/v2.3/PATCH_ISSUE_344.md diff --git a/doc/PATCH_ISSUE_346.md b/docs/changelog/v2.3/PATCH_ISSUE_346.md similarity index 100% rename from doc/PATCH_ISSUE_346.md rename to docs/changelog/v2.3/PATCH_ISSUE_346.md diff --git a/doc/PATCH_ISSUE_347.md b/docs/changelog/v2.3/PATCH_ISSUE_347.md similarity index 100% rename from doc/PATCH_ISSUE_347.md rename to docs/changelog/v2.3/PATCH_ISSUE_347.md diff --git a/doc/PATCH_ISSUE_351.md b/docs/changelog/v2.3/PATCH_ISSUE_351.md similarity index 100% rename from doc/PATCH_ISSUE_351.md rename to docs/changelog/v2.3/PATCH_ISSUE_351.md diff --git a/doc/PATCH_ISSUE_353.md b/docs/changelog/v2.3/PATCH_ISSUE_353.md similarity index 100% rename from doc/PATCH_ISSUE_353.md rename to docs/changelog/v2.3/PATCH_ISSUE_353.md diff --git a/doc/extract-connector-sample/.gitignore b/docs/extract-connector-sample/.gitignore similarity index 100% rename from doc/extract-connector-sample/.gitignore rename to docs/extract-connector-sample/.gitignore diff --git a/docs/extract-connector-sample/README.md b/docs/extract-connector-sample/README.md new file mode 100644 index 00000000..8fd914fa --- /dev/null +++ b/docs/extract-connector-sample/README.md @@ -0,0 +1,76 @@ +# Extract - Initialize a New Connector Plugin + +## Introduction + +This Java module is an example of a connector plugin that can be used to initialize a new plugin for the Extract project. +The source code is documented and provides the necessary guidance to develop a new plugin. 
+It can be imported into a new Java environment, but adaptations may be required depending on the expected behavior. + +## Prerequisites for Use +* 64-bit OS +* Java 17 +* Tomcat 9 + +## Prerequisites for Development and Compilation +* Java 17 +* [Yarn][Yarn_Site] +* Extract-interface project (Common interface for using connector and task plugins) + +## Initializing the New Connector Plugin +The project must be a Java module. +The new plugin project must define a dependency on the extract-interface project. +Required dependencies are defined in the pom.xml file. + +To initialize a new plugin, follow the steps in order: +1. Copy the extract-connector-sample plugin code into a Java workspace. The system will prompt you to define a new name. If it does not, use the context menu (right-click) `Refactor > Rename`. + +2. Edit the **pom.xml** file of the module, replace occurrences of `extrat-connector-sample` with the new plugin name. + After right-clicking the file, choose `Add as Maven Project`. + +3. Right-click the namespace `ch.asit_asso.extract.connectors.sample`, choose the menu `Refactor > Rename`. + Enter the new class name to identify the plugin. If prompted, click the `Add in current Module` button to apply the changes only to the module. + This will automatically update the package name in all files of the module. + +4. Right-click the **SampleConnector.java** file, choose `Refactor > Rename`, then enter the new main class name of the plugin (e.g., EasySDIv4). This will rename the file and update all references to this class wherever it is used. + +5. Edit the **LocalizedMessages.java** file: adjust the value of the LOCALIZED_FILE_PATH_FORMAT parameter, which corresponds to the path to the `lang` directory. + +6. Check the **module-info.java** file, especially the reference to `SampleConnector`. Also verify line 4 of this file (reference to the namespace `ch.asit_asso.extract.connectors.sample`). + +7. Check the file **resources\META-INF\services\ch.asit_asso.extract.connectors.common.IConnector**, especially the reference to the class `ch.asit_asso.extract.connectors.sample.SampleConnector`. + +8. Right-click the **resources\connectors\sample** folder, choose `Refactor > Rename`, and enter the new name. + +9. Edit the file **resources\connectors\\lang\fr\messages.properties**. Modify or add the labels used by the plugin source code. This step can be done progressively during development. + +10. Edit the file **resources\connectors\\properties\config.properties**. This file contains the plugin configuration parameters used by the source code. This step can be done progressively during development. + +11. The folder **resources\connectors\\templates** contains XML templates used by the connector. + However, they are specific to the EasySDIv4 connector, since XML is used as the exchange format for commands. These files will likely not be useful for other types of connectors. + +## Important Points to Consider During Development + +The source code is sufficiently commented to help the developer build the new plugin. Comments in **UPPERCASE** identify important code sections or functions that must be updated. + +It is recommended to make the following changes in the Connector class: +* Adjust the `CONFIG_FILE_PATH` variable if needed +* Change the value of the `code` parameter to one identifying the plugin (e.g., `EasySDIv4`) + +Next, adapt the functions that override the IConnector interface methods: +* `getParams` to define the connector parameters. 
This method returns plugin parameters as a JSON array. If the plugin does not accept parameters, return an empty array. +* `importCommands` to manage importing commands executed by the remote server. +* `exportResult` to manage sending the processing result of a command back to the originating server. + +Private functions in the code handle utility operations or process received data. These can be adapted or removed as needed. These functions are clearly identified in the Connector class code. + +## Installing or Updating the Plugin in EXTRACT + +Before compiling the connector, delete the **target** folder. +Once the connector is compiled and the JAR file is generated, place the JAR into the **WEB-INF/classes/connectors** directory of the application (containing all connector plugins). +For updates, delete the WAR of the previous version to avoid conflicts. + +``` +Restarting the Tomcat EXTRACT application is then required so that the connector changes are taken into account. +``` + +[Yarn_Site]: https://yarnpkg.com/ "Yarn package manager site" diff --git a/doc/extract-connector-sample/nb-configuration.xml b/docs/extract-connector-sample/nb-configuration.xml similarity index 100% rename from doc/extract-connector-sample/nb-configuration.xml rename to docs/extract-connector-sample/nb-configuration.xml diff --git a/doc/extract-connector-sample/pom.xml b/docs/extract-connector-sample/pom.xml similarity index 100% rename from doc/extract-connector-sample/pom.xml rename to docs/extract-connector-sample/pom.xml diff --git a/doc/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/ConnectorConfig.java b/docs/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/ConnectorConfig.java similarity index 100% rename from doc/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/ConnectorConfig.java rename to docs/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/ConnectorConfig.java diff --git a/doc/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/ConnectorImportResult.java b/docs/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/ConnectorImportResult.java similarity index 100% rename from doc/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/ConnectorImportResult.java rename to docs/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/ConnectorImportResult.java diff --git a/doc/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/ExportRequest.java b/docs/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/ExportRequest.java similarity index 100% rename from doc/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/ExportRequest.java rename to docs/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/ExportRequest.java diff --git a/doc/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/ExportResult.java b/docs/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/ExportResult.java similarity index 100% rename from doc/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/ExportResult.java rename to docs/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/ExportResult.java diff --git a/doc/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/LocalizedMessages.java 
b/docs/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/LocalizedMessages.java similarity index 100% rename from doc/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/LocalizedMessages.java rename to docs/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/LocalizedMessages.java diff --git a/doc/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/Product.java b/docs/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/Product.java similarity index 100% rename from doc/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/Product.java rename to docs/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/Product.java diff --git a/doc/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/SampleConnector.java b/docs/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/SampleConnector.java similarity index 100% rename from doc/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/SampleConnector.java rename to docs/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/SampleConnector.java diff --git a/doc/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/utils/RequestUtils.java b/docs/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/utils/RequestUtils.java similarity index 100% rename from doc/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/utils/RequestUtils.java rename to docs/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/utils/RequestUtils.java diff --git a/doc/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/utils/ZipUtils.java b/docs/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/utils/ZipUtils.java similarity index 100% rename from doc/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/utils/ZipUtils.java rename to docs/extract-connector-sample/src/main/java/ch/asit_asso/extract/connectors/sample/utils/ZipUtils.java diff --git a/doc/extract-connector-sample/src/main/java/module-info.java b/docs/extract-connector-sample/src/main/java/module-info.java similarity index 100% rename from doc/extract-connector-sample/src/main/java/module-info.java rename to docs/extract-connector-sample/src/main/java/module-info.java diff --git a/doc/extract-connector-sample/src/main/resources/META-INF/services/ch.asit_asso.extract.connectors.common.IConnector b/docs/extract-connector-sample/src/main/resources/META-INF/services/ch.asit_asso.extract.connectors.common.IConnector similarity index 100% rename from doc/extract-connector-sample/src/main/resources/META-INF/services/ch.asit_asso.extract.connectors.common.IConnector rename to docs/extract-connector-sample/src/main/resources/META-INF/services/ch.asit_asso.extract.connectors.common.IConnector diff --git a/doc/extract-connector-sample/src/main/resources/connectors/sample/lang/fr/messages.properties b/docs/extract-connector-sample/src/main/resources/connectors/sample/lang/fr/messages.properties similarity index 100% rename from doc/extract-connector-sample/src/main/resources/connectors/sample/lang/fr/messages.properties rename to docs/extract-connector-sample/src/main/resources/connectors/sample/lang/fr/messages.properties diff --git 
a/doc/extract-connector-sample/src/main/resources/connectors/sample/properties/config.properties b/docs/extract-connector-sample/src/main/resources/connectors/sample/properties/config.properties similarity index 100% rename from doc/extract-connector-sample/src/main/resources/connectors/sample/properties/config.properties rename to docs/extract-connector-sample/src/main/resources/connectors/sample/properties/config.properties diff --git a/doc/extract-connector-sample/src/main/resources/connectors/sample/templates/export_localandpredefined_case.xml b/docs/extract-connector-sample/src/main/resources/connectors/sample/templates/export_localandpredefined_case.xml similarity index 100% rename from doc/extract-connector-sample/src/main/resources/connectors/sample/templates/export_localandpredefined_case.xml rename to docs/extract-connector-sample/src/main/resources/connectors/sample/templates/export_localandpredefined_case.xml diff --git a/doc/extract-connector-sample/src/main/resources/connectors/sample/templates/export_rejection_case.xml b/docs/extract-connector-sample/src/main/resources/connectors/sample/templates/export_rejection_case.xml similarity index 100% rename from doc/extract-connector-sample/src/main/resources/connectors/sample/templates/export_rejection_case.xml rename to docs/extract-connector-sample/src/main/resources/connectors/sample/templates/export_rejection_case.xml diff --git a/doc/extract-connector-sample/src/test/java/ch/asit_asso/extract/connectors/sample/SampleConnectorTest.java b/docs/extract-connector-sample/src/test/java/ch/asit_asso/extract/connectors/sample/SampleConnectorTest.java similarity index 100% rename from doc/extract-connector-sample/src/test/java/ch/asit_asso/extract/connectors/sample/SampleConnectorTest.java rename to docs/extract-connector-sample/src/test/java/ch/asit_asso/extract/connectors/sample/SampleConnectorTest.java diff --git a/doc/extract-task-sample/.gitignore b/docs/extract-task-sample/.gitignore similarity index 100% rename from doc/extract-task-sample/.gitignore rename to docs/extract-task-sample/.gitignore diff --git a/docs/extract-task-sample/README.md b/docs/extract-task-sample/README.md new file mode 100644 index 00000000..4afa0475 --- /dev/null +++ b/docs/extract-task-sample/README.md @@ -0,0 +1,76 @@ +# Extract - Initialize a New Task Plugin + +## Introduction + +This Java module is an example of a task plugin that can be used to initialize a new plugin for the Extract project. +The source code is documented and provides the necessary guidance to develop a new plugin. +It can be imported into a new Java environment, but adaptations may be required depending on the expected behavior. + +## Prerequisites for Use +* 64-bit OS +* Java 17 +* Tomcat 9 + +## Prerequisites for Development and Compilation +* Java 17 +* [Yarn][Yarn_Site] +* Extract-interface project (Common interface for using connector and task plugins) + +## Initializing the New Task Plugin +The project must be a Java module. +The new plugin project must define a dependency on the extract-interface project. +Required dependencies are defined in the pom.xml file. + +To initialize a new plugin, follow the steps in order: +1. Copy the extract-task-sample plugin code into a Java workspace. The system will prompt you to define a new name. If it does not, use the context menu (right-click) `Refactor > Rename`. + +2. Edit the **pom.xml** file of the module, replace occurrences of `extrat-task-sample` with the new plugin name. 
+ After right-clicking the file, choose `Add as Maven Project`. + +3. Right-click the namespace `ch.asit_asso.extract.plugins.sample`, choose the menu `Refactor > Rename`. + Enter the new class name to identify the plugin. If prompted, click the `Add in current Module` button to apply the changes only to the module. + This will automatically update the package name in all files of the module. + +4. Right-click the **SamplePlugin.java** file, choose `Refactor > Rename`, then enter the new main class name of the plugin. This will rename the file and update all references to this class wherever it is used. + +5. Repeat step 4 for the files **SampleRequest.java** and **SampleResult.java**. + +6. Edit the **LocalizedMessages.java** file: adjust the value of the LOCALIZED_FILE_PATH_FORMAT parameter, which corresponds to the path to the `lang` directory. + +7. Check the **module-info.java** file, especially the reference to `SamplePlugin`. Also verify line 4 of this file (reference to the namespace `ch.asit_asso.extract.plugins.sample`). + +8. Check the file **resources\META-INF\services\ch.asit_asso.extract.plugins.common.ITaskProcessor**, especially the reference to the class `ch.asit_asso.extract.plugins.sample.SamplePlugin`. + +9. Right-click the **resources\plugins\sample** folder, choose `Refactor > Rename`, and enter the new name. + +10. Edit the file **resources\plugins\\lang\fr\messages.properties**. Modify or add the labels used by the plugin source code. This step can be done progressively during development. + +11. Edit the file **resources\plugins\\lang\fr\help.html** to describe the plugin’s functionality. *The file content is in HTML format*. + +12. Edit the file **resources\plugins\\properties\config.properties**. This file contains the plugin configuration parameters used by the source code. This step can be done progressively during development. + +## Important Points to Consider During Development + +The source code is sufficiently commented to help the developer build the new plugin. Comments in **UPPERCASE** identify important code sections or functions that must be updated. + +It is recommended to make the following changes in the Plugin class: +* Adjust the `CONFIG_FILE_PATH` variable if needed +* Change the value of the `code` parameter to one identifying the plugin (e.g., `remark` or `fmeserver`) +* Change the value of the `pictoClass` parameter to an appropriate CSS class name (plugin logo image). Search for an icon on the [Font Awesome][Fontawesome_Site] website. + +Next, adapt the functions that override the ITaskProcessor interface methods: +* `getParams` to define the task plugin parameters. This method returns plugin parameters as a JSON array. If the plugin does not accept parameters, return an empty array. +* `execute` to handle the execution of the plugin. + +## Installing or Updating the Plugin in EXTRACT + +Before compiling the plugin, delete the **target** folder. +Once the plugin is compiled and the JAR file is generated, place the JAR into the **WEB-INF/classes/task_processors** directory of the application (containing all task plugins). +For updates, delete the WAR of the previous version to avoid conflicts. + +``` +Restarting the Tomcat EXTRACT application is then required so that the plugin changes are taken into account. 
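+For example, on a systemd-based Linux installation (the exact service
+name varies by setup):
+
+    sudo systemctl restart tomcat9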
+``` + +[Yarn_Site]: https://yarnpkg.com/ "Yarn package manager site" +[Fontawesome_Site]: https://fontawesome.com/icons "FontAwesome website" diff --git a/doc/extract-task-sample/nb-configuration.xml b/docs/extract-task-sample/nb-configuration.xml similarity index 100% rename from doc/extract-task-sample/nb-configuration.xml rename to docs/extract-task-sample/nb-configuration.xml diff --git a/doc/extract-task-sample/pom.xml b/docs/extract-task-sample/pom.xml similarity index 100% rename from doc/extract-task-sample/pom.xml rename to docs/extract-task-sample/pom.xml diff --git a/doc/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/LocalizedMessages.java b/docs/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/LocalizedMessages.java similarity index 100% rename from doc/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/LocalizedMessages.java rename to docs/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/LocalizedMessages.java diff --git a/doc/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/PluginConfiguration.java b/docs/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/PluginConfiguration.java similarity index 100% rename from doc/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/PluginConfiguration.java rename to docs/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/PluginConfiguration.java diff --git a/doc/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/SamplePlugin.java b/docs/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/SamplePlugin.java similarity index 100% rename from doc/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/SamplePlugin.java rename to docs/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/SamplePlugin.java diff --git a/doc/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/SampleRequest.java b/docs/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/SampleRequest.java similarity index 100% rename from doc/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/SampleRequest.java rename to docs/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/SampleRequest.java diff --git a/doc/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/SampleResult.java b/docs/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/SampleResult.java similarity index 100% rename from doc/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/SampleResult.java rename to docs/extract-task-sample/src/main/java/ch/asit_asso/extract/plugins/sample/SampleResult.java diff --git a/doc/extract-task-sample/src/main/java/module-info.java b/docs/extract-task-sample/src/main/java/module-info.java similarity index 100% rename from doc/extract-task-sample/src/main/java/module-info.java rename to docs/extract-task-sample/src/main/java/module-info.java diff --git a/doc/extract-task-sample/src/main/resources/META-INF/services/ch.asit_asso.extract.plugins.common.ITaskProcessor b/docs/extract-task-sample/src/main/resources/META-INF/services/ch.asit_asso.extract.plugins.common.ITaskProcessor similarity index 100% rename from doc/extract-task-sample/src/main/resources/META-INF/services/ch.asit_asso.extract.plugins.common.ITaskProcessor rename to 
docs/extract-task-sample/src/main/resources/META-INF/services/ch.asit_asso.extract.plugins.common.ITaskProcessor diff --git a/doc/extract-task-sample/src/main/resources/plugins/sample/lang/fr/help.html b/docs/extract-task-sample/src/main/resources/plugins/sample/lang/fr/help.html similarity index 100% rename from doc/extract-task-sample/src/main/resources/plugins/sample/lang/fr/help.html rename to docs/extract-task-sample/src/main/resources/plugins/sample/lang/fr/help.html diff --git a/doc/extract-task-sample/src/main/resources/plugins/sample/lang/fr/messages.properties b/docs/extract-task-sample/src/main/resources/plugins/sample/lang/fr/messages.properties similarity index 100% rename from doc/extract-task-sample/src/main/resources/plugins/sample/lang/fr/messages.properties rename to docs/extract-task-sample/src/main/resources/plugins/sample/lang/fr/messages.properties diff --git a/doc/extract-task-sample/src/main/resources/plugins/sample/properties/config.properties b/docs/extract-task-sample/src/main/resources/plugins/sample/properties/config.properties similarity index 100% rename from doc/extract-task-sample/src/main/resources/plugins/sample/properties/config.properties rename to docs/extract-task-sample/src/main/resources/plugins/sample/properties/config.properties diff --git a/docs/features/admin-guide.md b/docs/features/admin-guide.md new file mode 100644 index 00000000..b56d3e9c --- /dev/null +++ b/docs/features/admin-guide.md @@ -0,0 +1,286 @@ +## User administration + +### Users list +![Users list](../assets/admin-guide/users-list.png){width="1000"} + +This page lists all users, whether active or inactive. You can: + +* **create a new user** by clicking on the ``Nouvel utilisateur`` button +* **manage user groups** by clicking on the ``Groupes`` button +* **edit a user** by clicking on their login +* **delete a user** by clicking on the red button with a cross. It is not possible to delete (grayed out button): + * Your own account + * A user directly associated with a process + * The last active user of a group associated with a process + +It includes a filtering feature based on the following criteria: + +* Text search (partial and case insensitive) in the following fields: + * Login + * Name + * Email address + +* User's role +* User's state +* User's notifications status +* User's 2FA status + +The search is executed only when you click the green **search button** (magnifying glass icon). Alternatively, you can start the search by pressing **Enter** while focused on a search field. + +### User settings +![Users settings](../assets/admin-guide/users-settings.png){width="1000"} + +User settings are: + +* User's full name (for display purposes) +* User's role in the application (operator or administrator). It is not possible to change the role of your own account +* The username they use to log in (must be unique within the application) +* Whether the user is active. An inactive user cannot log in to the application. It is not possible to deactivate the account of a user associated with a process. +* The email address to which notifications concerning them will be sent +* Whether email notifications are enabled. Only operator-type notifications are affected. Even if notifications are deactivated, the user will continue to receive emails allowing them to reset their password. Notifications for administrators are not affected by this setting. +* The password they use to log in. If the password is changed, it must follow the password policy. 
+* Whether two-factor authentication should be required for the user. In this case, the user will have to register with Google Authenticator (or compatible) the next time they log in, without the option to cancel the registration. If you choose not to require two-factor authentication, the user can still choose to enable it. +* User language preference (if Extract is in multilingual mode) + +
+Using the buttons at the top of the page, administrators can: + +* Manage the status of two-factor authentication for the user, i.e. enable it, disable it, or ask the user to register again with a new code. This is a drop-down button that displays a menu when clicked, showing the actions available depending on the user's status +* In the case of a local user and if LDAP is enabled, the administrator can choose to migrate the user to an LDAP user. The password to be used then becomes the one defined in the company directory. **Warning**: this operation cannot be reversed. + +
+These operations will discard any other unsaved changes currently in progress. + +If the user is an LDAP user, the only changes available in Extract are: + +* Email notifications status +* Two-factor authentication status (including the option to enforce it or not) +* User language preference (if Extract is in multilingual mode) + +### Users groups +User groups allow operators to be grouped together to be assigned to a process as a group. Group settings are accessed from the user list. + +#### Groups list +![Users groups list](../assets/admin-guide/users-groups-list.png){width="1000"} + +This page lists all user groups with their number of users (active or inactive). The following actions are possible: + +* **Create a new group** by clicking on the `Nouveau groupe` button +* **Edit a group** by clicking on its name +* **Delete a group** by clicking on the red button with a cross. It is not possible to delete a group associated with a process (grayed out button) + +#### Groups settings +![Users groups settings](../assets/admin-guide/users-groups-settings.png){width="1000"} + +Group settings are: + +* Group name (mandatory and unique) +* The list of users it contains. A user can belong to several groups. For groups associated with a process, users can be added or removed, but there must always be at least one active user remaining. + +## Processes +A process is a sequence of tasks that generates the data requested by a client. + +### Processes list +![Processes list](../assets/admin-guide/processes-list.png){width="1000"} + +This page lists all processes, whether or not they are associated with a rule. The following actions are available: + +* **Create a process** by clicking on the `Nouveau traitement` button +* **Edit a process** by clicking on its name +* **Duplicate a process** by clicking on the green button with a copy icon. An identical copy of the process will be created, including its tasks and settings. +* **Delete a process** by clicking on the red button with a cross. Processes associated with a request that is not completed or associated with a connector's rule cannot be deleted (grayed out button). + +It includes a filtering feature based on the name of the process. The search is executed only when you click the green **search button** (magnifying glass icon). Alternatively, you can start the search by pressing **Enter** while focused on the search field. + +### Process details + +#### Process parameters +![Processes parameters](../assets/admin-guide/processes-parameters.png){width="1000"} + +At the top of the page, the following are defined: + +* The name to be given to the process in the application +* The users or user groups authorized to interact with requests associated with this process. At least one operator or user group must be specified. Administrators can interact with all requests. + +#### Process tasks +![Processes tasks](../assets/admin-guide/processes-tasks.png){width="600"} + +The tasks currently configured for the process are displayed in the order in which they will be executed. You can change the order of the tasks by dragging and dropping them. Each task can be deleted by clicking on the red button with a cross. + +Depending on the task type, the following items may be displayed: + +* Configuration parameters +* A button to display help text + +!!! Warning + It is strongly recommended not to delete or reorder tasks in a process that has running requests. Results may be unpredictable. However, it is always possible to modify the parameters.
+ +#### Available tasks +![Tasks list](../assets/admin-guide/tasks-list.png){width="350"} + +On the right side of the screen is a list of available tasks with a brief description. To add a task to the process, drag and drop one of these items to the area on the left. If necessary, you can add the same task several times. + +## Connectors +Connectors allow you to retrieve requests from a server and send the results back to it. + +### Connectors list +![Connectors list](../assets/admin-guide/connectors-list.png){width="1000"} + +This page lists all defined connectors, whether active or not. The following actions are available: + +* **Create** a new connector by clicking the ``Nouveau connecteur`` button, then choosing a server type from the drop-down list +* **Edit** a connector by clicking on its name +* **Delete** a connector by clicking the red button with the cross. It is not possible to delete a connector with running requests (grayed out button). + +It includes a filtering feature based on the connector's name and type. The search is executed only when you click the green **search button** (magnifying glass icon). Alternatively, you can start the search by pressing **Enter** while focused on the search field. + +### Connector details + +#### Connector parameters +![Connectors parameters](../assets/admin-guide/connectors-parameters.png){width="1000"} + +Generic parameters for all server types are as follows: + +* The name to give to the connector in the application +* The number of seconds between two attempts to retrieve requests +* The number of retries to perform in case of an import problem before setting the connector to error +* Whether or not to activate the connector. This setting only applies to imports. The result can be exported even when the connector is deactivated. +* The type of connector to use (non-editable) + +In addition, each type of connector can define specific parameters. For the easySDI v4 connector, these are: + +* The service address used to retrieve requests and return their results +* The username and password to connect to the service +* The maximum file size to be exported as a result of the request. Leave this parameter blank for no size limit +* The URL template for viewing the details of a request on the origin server. This parameter can be left blank, in which case the view link will not be displayed on the request details page. + + The following variables can be specified between curly brackets. They will be replaced with the properties of the relevant request. + + | Variable | Value | + | ----------- | ------------------------------------ | + | `client` | Client name who placed the order | + | `orderGuid` | The GUID that identifies the order | + | `orderLabel` | The order number | + | `organism` | The organization of the client who placed the order | + | `productGuid` | The GUID that identifies the product ordered | + | `productLabel` | The name of the product ordered | + +#### Connector rules +![Connectors rules](../assets/admin-guide/connectors-rules.png){width="1000"} + +Rules allow you to assign a process to a request. Rules are executed from top to bottom: the first active rule whose conditions are met triggers the assigned process, and the following rules are ignored. Therefore, it's best to place the most specific rules first. If no rule matches the request, administrators are notified by email. + +You can create a new rule by clicking on the ``Ajouter une règle`` button and delete one by clicking on the red button with a cross. You can also change the order of the rules by dragging and dropping them.
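+For illustration, a rule expression might look like this (hypothetical values; click the help button for the full syntax, which is also described in the [architecture documentation](architecture.md#rules-management)):
+
+```
+productguid == "708e932b-81c3-2ce4-b907-ed07e61ac5f9" AND surface <= 100000
+```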
+ +The following rule parameters are available: + +* The code defining the rules that link a request to its processing. For details on the required syntax, click the help button in the column header. +* The process to be associated with the request if the rule matches +* Whether the rule is active (inactive rules are ignored) + +!!! Info + The code defining the rules is limited to 65'000 characters by default. It's possible to remove this limitation by changing the type of the rule field in the database (see [Rule field size limitation](../../getting-started/configure#rule-field-size-limitation-matching-processes)) + +!!! Info + Rule management is not available when creating a connector. You must first save the connector, then edit it. + +## Application settings + +### Orchestration +![Orchestration](../assets/admin-guide/orchestration.png){width="500"} + +* **Storage path**: Defines the directory where files generated during request processing are stored. The Tomcat user must have both read and write permissions for this directory. Data related to a request are automatically deleted once the request is successfully exported. +* **Show temporary file location to operators**: When enabled, the system displays the path of the temporary processing directory on the request details page. +* **Orchestrator update frequency**: Specifies the interval (in seconds) between executions of background tasks, such as request processing, rule matching and data export. Notes: + * A lower value makes tasks more responsive but increases server resource usage. + * It also represents the maximum delay before connector setting changes take effect. + * This setting does not affect the request import frequency, which is controlled in each connector's properties (see [Connector parameters](#connector-parameters)). +* **Request list update frequency**: Defines how often (in seconds) the home page data is refreshed (see [Requests list](../user-guide#requests-list)) +* **Interval between validation reminders**: Specifies the number of days between two email reminders sent to operators when a request is still pending validation. Set this value to 0 to disable reminders. + +### Operating hours +![Operating hours](../assets/admin-guide/operating-hours.png){width="500"} + +This section lets you define the time windows during which Extract automatically runs its tasks (import, processing, export, etc.). Regardless of the selected operating mode, you can always log in to the application to perform manual actions such as validating requests, modifying processed files, updating application settings, resetting passwords, etc. + +You can define operating time slots by clicking on the ``Ajouter une plage`` button. The days and times indicated are included in the time slot. If several time slots are defined, automatic tasks will run whenever at least one slot covers the current day and time. You can also delete a time slot by clicking the red Delete button next to it. + +Time slots can be defined at any time, but they are only applied if the operating mode ``Seulement durant les heures ci-dessous`` is selected. If this mode is chosen but no slot is defined, the system behaves the same as in ``Arrêt complet`` mode. + +### SMTP server +![SMTP servers](../assets/admin-guide/smtp-server.png){width="500"} + +This section covers the configuration of server settings for email notifications. + +* **SMTP User**: Used to authenticate with the SMTP server. 
+* **Sender Details**: You can specify both the sender’s name and email address. Depending on the server’s policy, these may differ from the account used for authentication. + +### LDAP authentication +![LDAP authentication](../assets/admin-guide/ldap-authentication.png){width="500"} + +This section lets you enable and configure LDAP login using your corporate directory. You can specify: + +* **LDAP servers**: Enter the server name(s) and port(s) (optional). If multiple servers are used, separate them with semicolons. +* **Encryption type**: Choose the security protocol for connections. +* **Domain or base DNs**: For Active Directory, enter the domain. For other LDAP servers, provide one or more base DNs (separated by semicolons). +* **Group DN**: Define the DN of the group whose members will be assigned roles in Extract. + * Members of the operator group will be assigned the Operator role. + * Members of the administrator group will be assigned the Administrator role. + * If a user belongs to both groups, they will be considered an Administrator. + +#### Synchronization with LDAP + +Extract can automatically run a task to keep LDAP users and their attributes up to date. To enable synchronization, you’ll need to provide the username and password of an LDAP account with read permissions and the synchronization interval (in hours). + +During each synchronization, Extract will: + +* **Deactivate** users who are no longer present in the directory or who have been disabled (in the case of Active Directory) +* **Update** existing LDAP users’ email addresses, full names, and roles if they have changed +* **Create** new Extract accounts for users who belong to an LDAP administrator or operator group but do not yet exist in Extract + +You can start synchronization immediately (in the background) by clicking the ``Synchroniser maintenant`` button. Otherwise, synchronization is performed using the settings saved in the database. Please save your settings beforehand if you have modified them. + +!!! Info + If a user is disabled or deleted in the LDAP directory, they will no longer be able to connect to Extract, even if synchronization has not been performed. + +#### LDAP connection test + +The ``Tester la connexion`` button lets you verify whether the current settings can successfully establish a connection. The test runs with the values you’ve entered, even if they haven’t been saved yet. + +* If synchronization is enabled, the test uses the provided username and password. +* If synchronization is disabled, the test runs anonymously. + +### Request validation +![Validation](../assets/admin-guide/validation.png){width="500"} + +This section lets you specify which request properties should be highlighted for operators in the request validation area (see [Validation in the user guide](../user-guide#validation)). No syntax check is applied to the properties entered here — only properties that actually exist in the request will be considered. + +Additionally, this section provides access (by clicking on the `Gérer les modèles` button) to the management of predefined comment templates used during request validation steps. + +#### Validation comment templates list +![Validation templates list](../assets/admin-guide/validation-templates-list.png){width="1000"} + +This page lists all existing message templates. The following actions are available: + +* **Create a new validation message** by clicking on the `Nouveau message` button +* **Edit a message** by clicking on its title +* **Delete a message** by clicking on the red button with a cross. It is not possible to delete a message associated with a validation task of a process (grayed out button). + +#### Validation comment template details +![Validation template details](../assets/admin-guide/validation-template-details.png){width="1000"} + +Message parameters are: + +* Its title (for display in the validation plugin settings and in the list of comments available during validation) +* Message text. The strings ``[operatorName]`` and ``[operatorEmail]`` will be replaced by the name and email address of the operator who validated the process.
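+As an illustration, the substitution performed when such a template is applied is roughly equivalent to the following sketch (a simplified assumption, not Extract's actual implementation):
+
+```java
+// Simplified illustration of validation template placeholder substitution.
+public final class TemplateExample {
+
+    static String apply(String template, String operatorName, String operatorEmail) {
+        return template.replace("[operatorName]", operatorName)
+                       .replace("[operatorEmail]", operatorEmail);
+    }
+
+    public static void main(String[] args) {
+        // Hypothetical template and operator details
+        System.out.println(apply("Validé par [operatorName] ([operatorEmail])",
+                                 "Jane Doe", "jane.doe@example.com"));
+    }
+}
+```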
+ +### Miscellaneous information +![Misc](../assets/admin-guide/misc.png){width="500"} + +This section provides general information about Extract, including the application version installed.
\ No newline at end of file diff --git a/docs/features/architecture.md b/docs/features/architecture.md new file mode 100644 index 00000000..b2ca8987 --- /dev/null +++ b/docs/features/architecture.md @@ -0,0 +1,627 @@ +# Application architecture + +## Foreword + +This document is the technical architecture document for the Extract project. It contains the structural elements of the project, such as the general architecture, data flows, interfaces, and data model. It is primarily intended for: + +* The IT project manager +* The project architects +* The production team +* The maintenance team + +## General architecture + +### Basic logic diagram + +The following logical diagram illustrates the principle of a configurable process. + +![basic-logic-diagram](../assets/dev-guide/basic-logic-diagram.png){width="1000"} + +1. The import is performed by the application for each declared and active connector. +2. For each request element, the application applies the dispatch rules that define which process should be launched. +3. Within each process, the pre-configured tasks are executed. Each process runs independently. The tasks in a process may include the following: adding a comment, validation, extraction, archiving. +4. The export is performed for each request element by the connector that imported it. + +### Software architecture diagram + +The proposed solution is based on a modular and scalable architecture. The various functional components are illustrated in the diagram below. + +![software-architecture-diagram](../assets/dev-guide/software-architecture-diagram.png){width="800"} + +### Flows description + +The solution consists of a single server, so no specific flow needs to be set up other than HTTP(S) access to this server (port 80 or 443). + +## Data description + +### Database schema + +A single database and a single schema are created, and the user who owns the schema is used for all connections from the implemented solution (read/write). + +### Conceptual data model + +![data-model](../assets/dev-guide/data-model.png){width="1000"} + +### Data tables description + +#### CONNECTORS + +Table listing the connectors configured in the system. It is not possible to modify/delete a connector if a related request is incomplete. Deleting a connector involves deleting the associated rules and setting the id_connector of the related requests in the REQUESTS table to null. + +| Attribute | Type | Description | Example | +| --- | --- | --- | --- | +| ``id_connector`` | int | **Primary key** | *1* +| ``connector_code`` | varchar 50 | Code identifying the connector type. Retrieved from the connector itself via a specific function. The code must be unique in the connector catalog. | *easysdiv4* +| ``connector_label`` | varchar 255 | Connector label. Retrieved from the connector itself via a specific function. | *EasySdi V4* +| ``connector_params`` | varchar 4000 | Connector-specific parameters in JSON format. The list of parameters and their types are retrieved from the connector itself via a specific function. 
| *see [Connector catalog](#connector-catalog)* +| ``name`` | varchar 50 | Name given to the connector instance | *asitvd_dev* +| ``import_freq`` | int | Frequency of connector queries when active | *60* +| ``active`` | boolean | Defines whether the connector is active or not | *true* +| ``last_import_msg`` | varchar 4000 | Last message returned by the connector import | *Success* +| ``last_import_date`` | datetime | Date and time of the last import | *16/11/2016 21:15* + +#### RULES + +Table listing the rules applied to a given connector and associating a process with each of them. + +| Attribute | Type | Description | Example | +| --- | --- | --- | --- | +| ``id_rule`` | int | **Primary key** | *1* +| ``id_process`` | int | **Foreign key** linking to the PROCESSES table | *1* +| ``id_connector`` | int | **Foreign key** linking to the CONNECTORS table | *1* +| ``rule`` | varchar 4000 | Definition of the rule according to the system's specific syntax | *prod_code == '11'* +| ``active`` | boolean | Defines whether the rule is active or not | *true* +| ``position`` | int | Scheduling of rules for the same connector | *5* + +#### PROCESSES + +Table listing the processes configured in the system. It is not possible to modify a process (i.e., the tasks that comprise it) if a related request is ONGOING. It is not possible to delete a process if a related request is incomplete or if it is associated with a connector rule. + +| Attribute | Type | Description | Example | +| --- | --- | --- | --- | +| ``id_process`` | int | **Primary key** | *1* +| ``name`` | varchar 255 | Process title | *Réseau de Gaz* + +#### TASKS + +Table listing the tasks (plugins) configured for a given process. + +| Attribute | Type | Description | Example | +| --- | --- | --- | --- | +| ``id_task`` | int | **Primary key** | *1* +| ``id_process`` | int | **Foreign key** linking to the PROCESSES table | *1* +| ``task_code`` | varchar 50 | Code identifying the task plugin type. Retrieved from the plugin itself via a specific function. The code must be unique in the task catalog | *FME2016* +| ``task_label`` | varchar 255 | Task plugin label. Retrieved from the plugin itself via a specific function | *Extraction FME 2016* +| ``task_params`` | varchar 4000 | Parameters specific to the task plugin in JSON format. The list of parameters and their types are retrieved from the plugin itself via a specific function | *see [Task Plugin Catalog](#task-plugin-catalog)* +| ``position`` | int | Scheduling of tasks between each other for the same process | *3* + +#### PROCESSES_USERS + +Table ensuring the assignment of one or more users to a given process. + +| Attribute | Type | Description | Example | +| --- | --- | --- | --- | +| ``id_process`` | int | **Foreign key** linking to the PROCESSES table | *1* +| ``id_user`` | int | **Foreign key** linking to the USERS table | *1* + +#### PROCESSES_USERGROUPS + +Table ensuring the assignment of one or more groups to a given process. + +| Attribute | Type | Description | Example | +| --- | --- | --- | --- | +| ``id_process`` | int | **Foreign key** linking to the PROCESSES table | *1* +| ``id_usergroup`` | int | **Foreign key** linking to the USERGROUPS table | *1* + +#### USERS + +User table. It is not possible to delete a user if they are associated with a process. When a user is deleted, the items linked to them in the REQUEST_HISTORY table are assigned an ``id_user`` value of ``null``, indicating that the user is now unknown.
+ +| Attribute | Type | Description | Example | +| --- | --- | --- | --- | +| ``id_user`` | int | **Primary key** | *1* +| ``user_type`` | string | User type: LOCAL: user whose Extract database is the authentication source. LDAP: user whose LDAP server is the authentication source | *LOCAL* +| ``two_factor_status`` | string | 2FA status: ACTIVE / INACTIVE / STANDBY | *ACTIVE* +| ``two_factor_forced`` | varchar 100 | 2FA imposed by the administrator? | *true* +| ``two_factor_token`` | varchar 100 | String used to generate two-factor authentication codes (encrypted) | *Xxxxx* +| ``two_factor_standby_token`` | string | String used to generate two-factor authentication codes (encrypted) awaiting validation by the user | *Xxxxx* +| ``profile`` | string | Attribute that can take two values. ADMIN: Administrator. OPERATOR: Operator. Note: the texts corresponding to the values are managed in the application itself to ensure multilingual support. | *ADMIN* +| ``name`` | varchar 50 | Full name of the user | *Xxxx* +| ``login`` | varchar 50 | User login | *Xxxx* +| ``pass`` | varchar 60 | User password | ******* +| ``email`` | varchar 50 | User email | *example@myentreprise.ch* +| ``active`` | boolean | Defines whether the user is active or not | *true* +| ``mailactive`` | boolean | Defines whether notifications are active for the user | *true* +| ``tokenpass`` | varchar 50 | Token used for password recovery (single use). Attribute reset to null when the user next logs in (regardless of the password used) | *so37dd9sxwxdx3449ckl* +| ``tokenexpire`` | datetime | Token expiration date/time | *2016.01.01 12:00* + +#### RECOVERY_CODES + +Table of recovery codes per user in case of loss of access to the device enabling two-factor authentication. When a code is successfully used, it is removed from this table. + +| Attribute | Type | Description | Example | +| --- | --- | --- | --- | +| ``id_code`` | int | **Primary key** | *1* +| ``Id_user`` | int | **Foreign key** linking to the USERS table | *1* +| ``token`` | varchar 100 | Recovery code (hashed) | *Xxxxx* + +#### REMEMBER_ME_TOKENS + +Table tracking tokens issued so that two-factor authentication is not required for a user on a machine for a certain period of time. + +| Attribute | Type | Description | Example | +| --- | --- | --- | --- | +| ``id_code`` | int | **Primary key** | *1* +| ``Id_user`` | int | **Foreign key** linking to the USERS table | *1* +| ``token`` | varchar 100 | Random string contained in the authorization cookie | *Xxxxxx* +| ``tokenexpire`` | timestamp | Date and time of authorization expiration | *154564566* + +#### REQUESTS + +Table listing current and completed requests. + +In order to manage connectors with multiple interfaces, the connector itself must provide the request elements in a standardized format for database entry. This format consists of **p_xxx** attributes. + +| Attribute | Type | Description | Example | +| --- | --- | --- | --- | +| ``id_request`` | int | **Primary key** | *1* +| ``id_process`` | int | **Foreign key** linking to the PROCESSES table.
This parameter remains null if no rule matches the request element. | *1* +| ``id_connector`` | int | **Foreign key** linking to the CONNECTORS table.
This parameter is null if the associated connector has been deleted | *1* +| ``p_orderlabel`` | varchar 255 | Standardized query parameter
Order name| *123456 / 6* +| ``p_orderguid`` | varchar 255 | Standardized query parameter
Order ID, also to be provided when sending the result | *677587e2-057f-0144-ddc1-9feca766448f* +| ``p_productguid`` | varchar 255 | Standardized query parameter
ID of the product ordered, also to be provided when sending the result | *708e932b-81c3-2ce4-b907-ed07e61ac5f9* +| ``p_productlabel`` | varchar 255 | Standardized query parameter
Name of the product ordered | *Plan du réseau d’eau commune de Bex* +| ``p_organism`` | varchar 255 | Standardized query parameter
Organization receiving the order | *Terrassements SA* +| ``p_client`` | varchar 255 | Standardized query parameter
Customer receiving the order | *Terrassements SA* +| ``p_clientguid`` | varchar 255 | Standardized query parameter
Customer ID | *708e932b-81c3-2ce4-b907-ed07e61ac5f9* +| ``p_organismguid`` | varchar 255 | Standardized query parameter
Organization ID | *708e932b-81c3-2ce4-b907-ed07e61ac5f9* +| ``p_tiers`` | varchar 255 | Standardized query parameter
Third party linked to the order | *Commune de Bex* +| ``p_tiersguid`` | varchar 255 | Standardized query parameter
GUID of the third party linked to the order | *708e932b-81c3-2ce4-b907-ed07e61ac5f9* +| ``p_clientdetails`` | varchar 4000 | Standardized query parameter
Details of the customer receiving the order | *Nom de rue 37
1880 Bex
Tel :00.00.00.00.00
Mail : xxx@yyy.com* +| ``p_tiersdetails`` | varchar 4000 | Standardized query parameter
Details of the third party linked to the order | *Nom de rue 37
1880 Bex
Tel :00.00.00.00.00
Mail : xxx@yyy.com* +| ``p_perimeter`` | varchar 4000 | Standardized query parameter
Coordinates of the extraction polygon | *POLYGON((6.9378 46.1056,6.1245 …* +| ``p_surface`` | float | Standardized query parameter
Extraction polygon area | *123.4 (m2)* +| ``p_parameters`` | varchar 4000 | Standardized query parameter
List of non-standardized parameters passed as is to task plugins | *{‘format’ :’pdf’, … }* +| ``p_external_url`` | varchar 255 | URL for accessing order details on the origin server | *https://viageo.ch/commandes/123456* +| ``remark`` | varchar 4000 | Any comments related to the query element. Modifiable by task plugins | *Donnée fournie à titre confidentielle* +| ``folder_in`` | varchar 255 | Request element input directory. Defined by the application based on a base path (configurable by the administrator). It is sent to task plugins along with the other parameters of the request element | */677587e2-057f-0144-ddc1-9feca766448f/20161212180000/in* +| ``folder_out`` | varchar 255 | Output directory of the request element. Defined by the application based on a base path (configurable by the administrator). It is sent to the task plugins along with the other request element parameters. | */677587e2-057f-0144-ddc1-9feca766448f/20161212180000/out* +| ``start_date`` | datetime | Date and time the query was created in the system. | *22.11.2016 09:00* +| ``end_date`` | datetime | Date and time the query processing ended. | *22.11.2016 09:15* +| ``tasknum`` | int | Number of the task currently in progress in the process. | +| ``rejected`` | boolean | False by default.
Flag indicating that the operator or administrator wanted to stop the associated processing and thus reject the request. | *False* +| ``status`` | string | Status of the request in its lifecycle.

IMPORTED: Imported – The request is initialized in the system following a connector import (or when the status is forced by the administrator)
IMPORTFAIL: Import failure – The request cannot be processed because its import via the connector failed
ONGOING: In progress – A rule has allowed the request to be associated with a process. The orchestrator can move it forward.
STANDBY: On hold – Manual action is required on the current task (e.g., validation plugin).
ERROR: Error – The current task has encountered an error (e.g., FME script not found).
UNMATCHED: Unmatched – No rule has allowed a process to be associated with the request
TOEXPORT: To be exported – The last task in a process has been completed successfully. The request is ready for export
EXPORTFAIL: Export failure – The request has been processed but its export via its original connector has failed
FINISHED: Finished – The request has been processed and the export was successful

Note: the texts corresponding to the values are managed in the application itself in order to ensure multilingual support. | *IMPORTED* +| ``last_reminder`` | datetime | Date and time of the last reminder in the case of a request awaiting validation | *22.11.2016 09:00* + +#### REQUEST_HISTORY + +Table listing the processing history for each of the requests initiated. For a given request, there may be more items in this table than tasks, since the user can go back in the processing. This table must remain completely independent of the process (and therefore the tasks) attached to the request. This is to avoid corrupting the history if the process is modified retrospectively. All history items must therefore be duplicated in this table (e.g., task_label). + +| Attribute | Type | Description | Example | +| --- | --- | --- | --- | +| ``Id_record`` | int | **Primary key** | *1* +| ``id_request`` | int | **Foreign key** linking to the REQUESTS table | *1* +| ``id_user`` | int | **Foreign key** linking to the USERS table.

0 = system
null = unknown (user deleted)

Note: the texts corresponding to the values are managed in the application itself in order to ensure multilingual support. | *1* +| ``task_label`` | varchar 255 | Label of the corresponding task plugin or Export for the export step or Import for the import step | *Extraction FME 2016* +| ``status`` | string | Task status

ONGOING: In progress - The task is being processed
STANDBY: Waiting - Manual action is required (e.g. validation plugin)
ERROR: Error - The task has encountered an error (e.g. FME script not found) or the import/export has failed
FINISHED: Finished - The task/import/export has been successfully completed

Note: the texts corresponding to the values are managed in the application itself in order to ensure multilingual support. | *FINISHED* +| ``last_msg`` | varchar 4000 | Last message returned by the plugin (error message or success message) or by the connector (import/export step) | +| ``step`` | int | Position of the entry in the request history. Incremented by 1 with each new entry in this table (for the same id_request) | *3* +| ``start_date`` | datetime | Date and time when task processing began | *22.11.2016 09:00* +| ``end_date`` | datetime | Date and time when task processing ended
Null if the task plugin is currently running. | *22.11.2016 09:02* +| ``process_step`` | int | Task number in the process | *1* + +#### SYSTEM + +Table listing the system parameters required for the solution to function properly. + +| Attribute | Type | Description | Example | +| --- | --- | --- | --- | +| ``key`` | varchar 50 | **Primary key** | *smtp_port* +| ``value`` | varchar 65000 | System attribute value | *25* + +Below is a list of the keys contained in this table. These keys cannot be modified. + +| Key | Value (example or domains) | +| --- | --- | +| ``freq_scheduler_sec`` | *1* | +| ``dashboard_interval`` | *10* | +| ``smtp_server`` | *mail.laboite.ch* | +| ``smtp_port`` | *25* | +| ``smtp_from_name`` | *EX3K* | +| ``smtp_from_mail`` | *ex3k.noreply@laboite.ch* | +| ``smtp_pass`` | ****** | +| ``smtp_user`` | *admin* | +| ``smtp_ssl`` | *0* | +| ``base_path`` | *D:\extract\orders* | +| ``display_temp_folder`` | *false* | +| ``mails_enable`` | *true* | +| ``op_mode`` | *ON / RANGES / OFF* | +| ``op_timeranges`` | *[
 {dayfrom: 1, dayto: 4, timefrom: 7, timeto: 18},
 {dayfrom: 5, dayto: 5, timefrom: 7, timeto: 16},
 {…}
]* | +| ``ldap_on`` | *true* | +| ``ldap_servers`` | *ldap.example.com;ldap2.example.com* | +| ``ldap_encryption_type`` | *LDAPS / STARTTLS* | +| ``ldap_base_dn`` | *cn=admin,dc=example,dc=com* | +| ``ldap_admins_group`` | *CN=YourGroup, OU=Users,DC=YourDomain,DC=COM* | +| ``ldap_operators_group`` | *CN=YourGroup, OU=Users,DC=YourDomain,DC=COM* | +| ``ldap_synchro_on`` | *true* | +| ``ldap_user`` | *Service_account_extract* | +| ``ldap_password`` | *xxxxx* | +| ``ldap_synchro_freq`` | *24* | +| ``ldap_last_synchro`` | *22.11.2016 09:00* | +| ``standby_reminder_days`` | *3* | +| ``validation_focus_properties`` | *REMARK, PROJECTION, FORMAT* | + +#### REMARKS + +Table listing predefined messages to be used as comments during the validation stage of a process. + +| Attribute | Type | Description | Example | +| --- | --- | --- | --- | +| ``Id_remark`` | int | **Primary key** | *1* +| ``content`` | varchar 65000 | Comment text | *Madame, Monsieur, Votre commande...* +| ``title`` | varchar 255 | Comment title (for display in the list) | *Périmètre non valide* + +#### USERGROUPS + +Table listing user groups that can be assigned to a process. + +| Attribute | Type | Description | Example | +| --- | --- | --- | --- | +| ``Id_usergroup`` | int | **Primary key** | *1* +| ``name`` | varchar 50 | User group name | *Responsables cadastre* + +#### USERS_USERGROUPS + +Table ensuring that users belong to user groups. + +| Attribute | Type | Description | Example | +| --- | --- | --- | --- | +| ``Id_usergroup`` | int | Foreign key linking to the USERGROUPS table | *1* +| ``Id_user`` | int | Foreign key linking to the USERS table | *1* + +### Deletion of database elements + +The table below shows the constraints and triggers established directly in the database for managing cascading deletions. + +Note: these actions/deletions do not need to be managed in the web interface. + +| Source element | Target element | Deletion/cascading action | +| --- | --- | --- | +| ``CONNECTORS`` | ``RULES`` | Deleting items in the RULES linked table
Note: however, this scenario is only possible for connectors without unfinished linked requests. The interface must not allow a connector to be deleted if it has an ongoing or pending request associated with it. | +| ``CONNECTORS`` | ``REQUESTS`` | Setting the id_connector in the REQUESTS table to null
Note: however, this scenario is only possible for connectors without unfinished linked requests. The interface must not allow a connector to be deleted if it has a request in progress or pending. | +| ``REQUESTS`` | ``REQUEST_HISTORY`` | Deleting items in the linked REQUEST_HISTORY table. | +| ``PROCESSES`` | ``REQUESTS`` | Setting the id_process in the REQUESTS table to null
Note: however, this scenario is only possible for completed requests. The interface must not allow a process to be deleted if it has a pending or ongoing request associated with it. | +| ``PROCESSES`` | ``RULES`` | Delete items in the linked RULES table
Note: however, this scenario should never occur, as the interface must not allow a process to be deleted if it has a rule associated with it | +| ``PROCESSES`` | ``TASKS`` | Deleting items in the TASKS related table | +| ``PROCESSES`` | ``PROCESSES_USERS`` | Deleting items in the PROCESSES_USERS related table | +| ``PROCESSES`` | ``PROCESSES_USERGROUPS`` | Deleting items in the PROCESSES_USERGROUPS related table | +| ``USERS`` | ``REQUEST_HISTORY`` | Setting the id_user in the REQUEST_HISTORY table to null | +| ``USERS`` | ``PROCESSES_USERS`` | Deleting items in the PROCESSES_USERS related table | +| ``USERS`` | ``USERS_USERGROUPS`` | Deleting items in the USERS_USERGROUPS related table | +| ``USERS`` | ``RECOVERY_CODES`` | Deleting items in the RECOVERY_CODES related table | +| ``USERS`` | ``REMEMBER_ME_TOKENS`` | Deleting items in the REMEMBER_ME_TOKENS related table | +| ``USERGROUPS`` | ``PROCESSES_USERGROUPS`` | Deleting items in the PROCESSES_USERGROUPS related table | +| ``USERGROUPS`` | ``USERS_USERGROUPS`` | Deleting items in the USERS_USERGROUPS related table | + +### Request lifecycle + +Requests follow the lifecycle illustrated in the diagram below. Transitions show the system or user event that triggered them. + +![request-lifecycle](../assets/dev-guide/request-lifecycle.png){width="900"} + +## Description of interfaces and third-party systems + +### Connector catalog + +#### Introduction + +The connector catalog contains all connectors available for the system. They must all implement the following interface functions: + +* ``String getCode``: returns the connector code, which must be unique in the catalog +* ``String getLabel``: returns the connector name +* ``String getDescription``: returns formatted text describing the connector +* ``String getHelp``: returns formatted text to guide the user in entering parameters +* ``String getParams``: returns the parameters specific to the connector, in JSON format. Each parameter is defined by a unique code, a label, a type, whether it is mandatory or not, and any other tags specific to the type. The types available for connectors are as follows: + * **text**: character string to be entered via a text box. The maximum length of the string is specified in an additional attribute. + * **pass**: character string to be entered via a hidden text field. The maximum length of the string is specified in an additional attribute. +* ``Object new(params)``: creates an instance of the connector by passing the user-defined parameters. Returns the created object. +* ``Object importCommands()``: launches the command import. Returns an object consisting of an error code, the list of request elements in standardized format, and, if applicable, an associated message. +* ``Object exportResults(request)``: launches the export of a command result. The request element (containing in particular the output directory, the remark attribute, the status, etc.) is passed as a parameter (complete object from the REQUESTS table). Returns an error code and, if applicable, an associated message.
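+As an illustration of this contract, a minimal, hypothetical connector skeleton might look as follows (names and types are simplified assumptions; the authoritative interface is defined in the extract-interface module):
+
+```java
+// Hedged sketch of the connector contract described above; not the real API.
+import java.util.List;
+
+class ConnectorImportResult {
+    int errorCode;           // 0 assumed to mean success
+    List<Object> requests;   // request elements in the standardized p_xxx format
+    String message;          // associated message, if any
+}
+
+class MyConnector {
+
+    private final String url; // connector-specific parameter declared via getParams
+
+    MyConnector(String url) {
+        this.url = url;
+    }
+
+    public String getCode() { return "myconnector"; } // unique in the catalog
+
+    public String getLabel() { return "My connector"; }
+
+    // Parameters are declared as JSON; "text" and "pass" are the available types
+    public String getParams() {
+        return "[{\"code\": \"url\", \"label\": \"Service URL\","
+             + " \"type\": \"text\", \"req\": \"true\", \"maxlength\": 255}]";
+    }
+
+    // importCommands() would poll this.url for pending orders, and
+    // exportResults(request) would send a processed result back to it.
+}
+```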
+ +#### Easy SDI v4 connector + +| Function | Return | +| --- | --- | +| ``getCode`` | easysdiv4 | +| ``getLabel`` | EasySdi V4 | +| ``getDescription`` | *Connecteur pour la solution EasySdi V4* | +| ``getHelp`` | *Se référer directement au connecteur* | +| ``getParams`` | [
 {"code": "url", "label": "URL du service", "type": "text", "req": "true", "maxlength": 255},
 {"code": "login", "label": "Login distant", "type": "text", "req": "true", "maxlength": 50},
 {"code": "pass", "label": "Mot de passe", "type": "pass", "req": "true", "maxlength": 50}
]

Below is an example of the corresponding settings (connector_params attribute in the CONNECTORS table):

{
  "url": "http://www.asitvd.ch/extractpoint/rest",
  "login": "fournisseur12445",
  "pass": "motdepasse"
}| + +### Task Plugin Catalog + +#### Introduction + +The task catalog contains all of the task plugins available for the system. They must all implement the following interface functions: + +* ``String getCode``: returns the plugin code, which must be unique in the catalog. +* ``String getLabel``: returns the task plugin name. +* ``String getDescription``: returns formatted text describing the plugin +* ``String getHelp``: returns formatted text to guide the user in entering parameters +* ``String getPictoClass``: returns the class to be used for the plugin pictogram +* ``String getParams``: returns the parameters specific to the task plugin, in JSON format. Each parameter is defined by a unique code, a label, a type, whether it is mandatory or not, and any other tags specific to the type. The types available for task plugins are as follows: + - **text**: character string to be entered via a text box. The maximum length of the string is specified in an additional attribute. + - **multitext**: character string to be entered via a multi-line text box. The maximum length of the string is specified in an additional attribute. Parameters of this type support dynamic strings using the same keywords as the text type. + - **pass**: character string to be entered via an obfuscated text field. The maximum length of the string is specified in an additional attribute. + - **list**: list of choices to be entered via a drop-down list. The available options are specified in an additional attribute (values separated by |) + - **boolean**: boolean to be entered via a checkbox + - **email**: email address or list of email addresses separated by a semicolon and/or a comma. + - **list_msgs** (for the new validation plugin): multiple choice list filled in by the Extract core with the validation messages defined at the Extract instance level. + - **numeric** (for the FME Desktop plugin): numeric value. Min, max, and step are specified in three additional attributes. +* ``Object new(params)``: creates an instance of the plugin by passing the user-defined parameters. Returns the created object. +* ``Object execute(request)``: executes the task plugin according to the request element passed as parameters. Returns an object of the type: + * **State**: status among success, error, standby + * **Code**: if applicable, return code + * **Message**: message associated with the state (stored in last_msg for info). This can be a raw message from a tool underlying the plugin (e.g. FME), an informative message explaining why the plugin is on standby, etc. + * **Request**: request element passed as input and possibly modified by the plugin + +The following chapters list the return values of these functions for each of the plugins available in the first version of the solution. Note that plugins must implement multilingual support in the return values of functions (except, of course, for error/success messages from underlying tools). The descriptions below correspond to the French version.
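+To make this contract concrete, here is a hedged sketch of how the orchestrator might drive a task plugin through this interface. All type and method names are illustrative assumptions; the real contract lives in the extract-interface module:
+
+```java
+// Illustrative model of the plugin contract described above (assumed names).
+enum State { SUCCESS, ERROR, STANDBY }
+
+record Request(int id, String folderIn, String folderOut) { }
+
+record Result(State state, String code, String message, Request request) { }
+
+interface TaskPlugin {
+    String getCode();
+    TaskPlugin newInstance(String paramsJson); // "Object new(params)" above
+    Result execute(Request request);
+}
+
+class Orchestrator {
+    void runCurrentTask(TaskPlugin catalogEntry, String taskParams, Request request) {
+        TaskPlugin instance = catalogEntry.newInstance(taskParams);
+        Result result = instance.execute(request);
+        switch (result.state()) {
+            case SUCCESS -> { /* advance to the next task of the process */ }
+            case STANDBY -> { /* keep the request on hold until manual action */ }
+            case ERROR   -> { /* set the request to ERROR and store the message */ }
+        }
+    }
+}
+```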
#### File Archiving Plugin + +| Function | Return | +| --- | --- | +| ``getCode`` | ARCHIVE | +| ``getLabel`` | Archivage fichiers | +| ``getDescription`` | Copie des fichiers dans un répertoire local ou réseau | +| ``getHelp`` | *Refer directly to the plugin* | +| ``getPictoClass`` | *Refer directly to the plugin* | +| ``getParams`` | [
 {"code": "path", "label": "Chemin d'archivage", "type": "text", "req": "true", "maxlength": 255},
 {"code": "login", "label": "Login", "type": "text", "req": "false", "maxlength": 255},
 {"code": "pass", "label": "Mot de passe", "type": "pass", "req": "false", "maxlength": 255}
]

Below is an example of the corresponding settings (task_params attribute in the TASKS table):

{
  "path": "/var/extraction/{orderLabel}-{startDate}/{productGuid}/"
}| + +Note: This plugin supports dynamic strings in its settings according to the following keywords: + +* {orderLabel} +* {orderGuid} +* {productGuid} +* {productLabel} +* {startDate} +* {organism} +* {client}
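+For example, an archive path template using these keywords might expand roughly as follows (hypothetical values; the exact formatting of {startDate} is an assumption):
+
+```
+Template: /var/extraction/{orderLabel}-{startDate}/{productGuid}/
+Result:   /var/extraction/123456-20161212/708e932b-81c3-2ce4-b907-ed07e61ac5f9/
+```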
+ +#### FME 2016 Extraction Plugin + +| Function | Return | +| --- | --- | +| ``getCode`` | FME2016 | +| ``getLabel`` | Extraction FME 2016 | +| ``getDescription`` | Workbench ou script FME Desktop 2016 | +| ``getHelp`` | *Refer directly to the plugin* | +| ``getPictoClass`` | *Refer directly to the plugin* | +| ``getParams`` | [
 {"code": "path", "label": "Chemin du fichier workbench", "type": "text", "req": "true", "maxlength": 255},
 {"code": "pathFME", "label": "Chemin du programme FME", "type": "text", "req": "true", "maxlength": 255},
 {"code": "instances", "label": "Nombre de fme.exe lancés par ce workspace", "type": "numeric", "req": "true", "min": 1, "step": 1, "max": 8}
]

Below is an example of the corresponding settings (task_params attribute in the TASKS table):

{
 "path": "/var/FME/exports_scripts/gaz_prod.fmw",
 "pathFME": "/usr/share/fme-desktop-2016/fme",
 "instances": 3
}| + +Please note that for the FME 2016 plugin to work properly, the scripts used must expose the following parameters: + +* **opt_responseformat**: FME parameter, managed internally by the plugin because it expects a return in JSON (HTML by default) +* **Perimeter**: perimeter in WKT format, in WGS84, provided by the import connector. For example: +POLYGON((6.295363612819898 46.32659377501397,6.288481575413728 46.65130021757865,7.055293317619941 46.656470518789625,7.057591178865101 46.33173342736276,6.295363612819898 46.32659377501397)) + +* **Parameters**: dynamic parameters provided in JSON format by the import connector. For example: +{"FORMAT": "GDB", "SELECTION": "PASS_THROUGH", "PROJECTION": "SWITZERLAND"} +* **Product**: GUID of the product to be exported, provided by the import connector. For example: +a049fecb-30d9-9124-ed41-068b566a0855 + +#### FME Server Extraction Plugin + +| Function | Return | +| --- | --- | +| ``getCode`` | FMESERVER | +| ``getLabel`` | Extraction FME Server | +| ``getDescription`` | Job FME Server API 1.2 | +| ``getHelp`` | *Refer directly to the plugin* | +| ``getPictoClass`` | *Refer directly to the plugin* | +| ``getParams`` | [
 {"code": "url", "label": "Url du service", "type": "text", "req": "true", "maxlength": 255},
 {"code": "login", "label": "Login distant", "type": "text", "req": "false", "maxlength": 50},
 {"code": "pass", "label": "Mot de passe", "type": "pass", "req": "false", "maxlength": 50}
]

Below is an example of the corresponding settings (task_params attribute in the TASKS table):

{
 "url": "http://localhost/FME/exports_scripts/gaz_prod",
 "login": "admin",
 "pass": "password01"
}| + +Please note that for the FME Server plugin to work properly, the scripts used must expose the following parameters: + +* **opt_responseformat**: FME parameter, managed internally by the plugin because it expects a return in JSON (HTML by default) +* **Perimeter**: perimeter in WKT format, in WGS84, provided by the import connector. For example: +POLYGON((6.295363612819898 46.32659377501397,6.288481575413728 46.65130021757865,7.055293317619941 46.656470518789625,7.057591178865101 46.33173342736276,6.295363612819898 46.32659377501397)) + +* **Parameters**: dynamic parameters provided in JSON format by the import connector. For example: +{"FORMAT": "GDB", "SELECTION": "PASS_THROUGH", "PROJECTION": "SWITZERLAND"} +* **Product**: GUID of the product to be exported, provided by the import connector. For example: +a049fecb-30d9-9124-ed41-068b566a0855 + +#### Fixed remark plugin + +| Function | Return | +| --- | --- | +| ``getCode`` | REMARK | +| ``getLabel`` | Remarque fixe | +| ``getDescription`` | Ajoute une remarque pré-définie à l’élément de requête | +| ``getHelp`` | *Refer directly to the plugin* | +| ``getPictoClass`` | *Refer directly to the plugin* | +| ``getParams`` | [
 {"code": "remark", "label": "Remarque", "type": "multitext", "req": "true", "maxlength": 5000},
 {"code": "overwrite", "label": "Ecraser la remarque existante", "type": "boolean"}
]

Below is an example of the corresponding settings (task_params attribute in the TASKS table):

{
 "remark": "Les positions exactes des conduits doivent être vérifiées par sondage.\nVoir nos conditions sur http://laboite.ch/exploitation",
 "overwrite": true
}| + +#### Operator validation plugin + +| Function | Return | +| --- | --- | +| ``getCode`` | VALIDATION | +| ``getLabel`` | Validation opérateur | +| ``getDescription`` | Demande à un opérateur de valider l’élément | +| ``getHelp`` | *Refer directly to the plugin* | +| ``getPictoClass`` | *Refer directly to the plugin* | +| ``getParams`` | [
 {"code": "valid_msgs", "label": "Modèles de remarque pour la validation", "type": "list_msgs", "req": false},
 {"code": "reject_msgs", "label": "Modèles de remarque pour l'annulation", "type": "list_msgs", "req": false}
]

Below is an example of the corresponding settings (task_params attribute in the TASKS table):

{
 "valid_msgs": "312,313,314",
 "reject_msgs": "313,315"
}

*Note: validation must be performed by one of the operators assigned to the process. It is therefore not necessary to specify an operator at the plugin level itself.*| + +#### Cancelation Plugin + +| Function | Return | +| --- | --- | +| ``getCode`` | CANCELATION | +| ``getLabel`` | Annulation | +| ``getDescription`` | Annule le traitement et définit la remarque pour le client | +| ``getHelp`` | *Refer directly to the plugin* | +| ``getPictoClass`` | *Refer directly to the plugin* | +| ``getParams`` | [
 {"code": "reason", "label": "Raison de l'annulation", "type": "multitext", "req": "true", "maxlength": 5000}
]

Below is an example of the corresponding settings (task_params attribute in the TASKS table):

{
 "reason": "Désolé, nous n'avons pas de donnée sur cette zone"
}| + +#### Mail notification plugin + +| Function | Return | +| --- | --- | +| ``getCode`` | NOTIFICATION | +| ``getLabel`` | Notification email | +| ``getDescription`` | Envoie une notification email personnalisable | +| ``getHelp`` | *Refer directly to the plugin* | +| ``getPictoClass`` | *Refer directly to the plugin* | +| ``getParams`` | [
 {"code": "emails", "label": "Emails de destinataires, séparés par des points-virgules", "type": "email", "req": "true", "maxlength": 255},
 {"code": "subject", "label": "Objet du message", "type": "text", "req": "true", "maxlength": 255},
 {"code": "body", "label": "Contenu du message", "type": "multitext", "req": "true", "maxlength": 5000}
]| + +Note: This plugin supports dynamic strings in its settings according to the following keywords: + +* {orderLabel} +* {orderGuid} +* {productGuid} +* {productLabel} +* {startDate} +* {organism} +* {client}
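+For illustration, a corresponding task_params value might look like this (hypothetical wording; the keywords are expanded for each request):
+
+```
+{
+  "emails": "exploitation@example.com",
+  "subject": "Commande {orderLabel} traitée",
+  "body": "La commande {orderLabel} du client {client} a été traitée le {startDate}."
+}
+```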
+ +#### QGIS Server Atlas Plugin + +| Function | Return | +| --- | --- | +| ``getCode`` | QGISSERVERATLAS | +| ``getLabel`` | Impression Atlas QGIS Server | +| ``getDescription`` | Imprime les pages d’un Atlas QGIS Server touchées par le périmètre, en PDF | +| ``getHelp`` | *Refer directly to the plugin* | +| ``getPictoClass`` | *Refer directly to the plugin* | +| ``getParams`` | [
 {"code": "url", "label": "Url de base QGIS Server", "type": "text", "req": "true", "maxlength": 255},
 {"code": "template", "label": "Template à utiliser pour le layout", "type": "text", "req": "true", "maxlength": 255},
 {"code": "path", "label": "Chemin du projet QGIS", "type": "text", "req": "false", "maxlength": 255},
 {"code": "login", "label": "Login distant", "type": "text", "req": "false", "maxlength": 50},
 {"code": "pass", "label": "Mot de passe", "type": "pass", "req": "false", "maxlength": 50},
 {"code": "layers", "label": "Layers à utiliser (séparation par des virgules)", "type": "text", "req": "false", "maxlength": 255},
 {"code": "crs", "label": "CRS (Code EPSG)", "type": "text", "req": "false", "maxlength": 50}
]

Below is an example of the corresponding settings (task_params attribute in the TASKS table):

{
 "url": "https://monserveur.lan/qgis-server",
 "template": "myplan",
 "path": "/etc/qgis-server/maps/world/world.qgs",
 "login": "admin",
 "pass": "password01",
 "layers": "places,airports,water",
 "crs": "EPSG:2056"
}| + +## Technical details + +### Rules Management + +The solution implements a syntactic language designed to interpret incoming requests and associate them with specific processes. Users can define rules consisting of one or more criteria, separated by a Boolean operator: + +``criterion1 and/or criterion2 and/or criterion3 and/or … and/or criterionN`` + +Each criterion uses the following syntax: + +``property operator value`` + +For example: ``productguid == "708e932b-81c3-2ce4-b907-ed07e61ac5f9"`` + +#### Properties + +The usable properties are listed in the table below. For each property, the type of operator that can be used (attribute/geographic) and the type of value to be entered (text/numeric/geometry) are indicated. + +| Property | DB Field | Description/Remark | Operator Type | Value Type | +| --- | --- | --- | --- | --- | +| ``orderlabel`` | p_orderlabel | p_orderlabel of the request element | Attribute | Text | +| ``orderguid`` | p_orderguid | p_orderguid of the request element | Attribute | Text | +| ``productguid`` | p_productguid | p_productguid of the request element | Attribute | Text | +| ``productlabel`` | p_productlabel | p_productlabel of the request element | Attribute | Text | +| ``organism`` | p_organism | p_organism of the request element | Attribute | Text | +| ``client`` | p_client | p_client of the request element | Attribute | Text | +| ``tiers`` | p_tiers | p_tiers of the request element | Attribute | Text | +| ``tiersguid`` | p_tiersguid | p_tiersguid of the request element | Attribute | Text | +| ``perimeter`` | p_perimeter | Perimeter of the request element, i.e., the export polygon in WKT format | Geographic | Geometry | +| ``surface`` | p_surface | p_surface of the request element | Attribute | Numeric | +| ``parameters.xxx`` | p_parameters.xxx | Dynamic property of the request element. Replace xxx with the desired property (e.g. format, projection, etc.) depending on the import connector. | Attribute | Depending on the type of dynamic property: text or numeric | + +#### Operators + +Two types of operators are possible: attribute-based and geographic, depending on the property used. + +The table below contains a list of the attribute-based operators that can be used. + +| Operator | Description/Remark | +| --- | --- | +| ``==`` | The property is equal to the value | +| ``!=`` | The property is different from the value | +| ``>`` | The property is greater than the value | +| ``<`` | The property is less than the value | +| ``>=`` | The property is greater than or equal to the value | +| ``<=`` | The property is less than or equal to the value | +| ``IN`` | The property is present in a list of values (specified in parentheses, separated by commas) | +| ``NOT IN`` | The property is not present in a list of values (specified in parentheses, separated by commas) | + +The table below contains a list of the geographic-based operators that can be used. + +| Operator | Description/Remark | +| --- | --- | +| ``intersects`` | The geometry contained in the property intersects the geometry passed as a value | +| ``contains`` | The geometry contains the geometry passed as a value | +| ``disjoint`` | The geometry is completely disjoint from the geometry passed as a value | +| ``equals`` | The geometry is equal to the geometry passed as a value | +| ``within`` | The geometry is included in the geometry passed as a value | + +#### Values + +Depending on the property and operator, values can be of three types.
+
+| Value Type | Description | Example |
+| --- | --- | --- |
+| ``Text`` | Text to be enclosed in quotation marks | "PDF" |
+| ``Numeric`` | Number (integer or decimal) | 1234 |
+| ``Geometry`` | Geometry in WKT format, not enclosed in quotation marks | POLYGON((6.82 46.39,6.92 46.39,6.92 46.19,6.92 46.19,6.82 46.39))<br>LINESTRING(6.82 46.39, 6.92 46.39, 6.92 46.19)<br>POINT(6.82 46.39) |
+
+#### Examples
+
+Some typical examples of rules:
+```
+orderlabel == "92445" AND perimeter intersects POLYGON((6.82 46.39,6.92 46.39,6.92 46.19,6.92 46.19,6.82 46.39))
+```
+
+```
+orderlabel == "92445" AND parameters.format == "PDF"
+```
+
+### Authentication
+
+There are two types of users:
+
+* LOCAL: users for whom the Extract database is the authentication source.
+* LDAP: users for whom an LDAP server is the authentication source.
+
+To log in, LDAP users must be part of an Extract Admin or Extract Operator group in the LDAP directory.
+
+If a user logs in to Extract with LDAP and is not present in the local database, they are created on the fly.
+
+Each time an LDAP user logs in, their information is updated (name, email, role).
+
+![login-diagram](../assets/dev-guide/login-diagram.jpg){width="900"}
+
+### Google Authenticator 2FA operation
+
+#### 2FA-specific connection flow
+
+![2fa-connection-flow](../assets/dev-guide/2fa-connection-flow.png){width="700"}
+
+#### 2FA parameters modification (for your own account)
+
+![2fa-parameters-own](../assets/dev-guide/2fa-parameters-own.png){width="500"}
+
+#### 2FA parameters modification (of a user by an administrator)
+
+![2fa-parameters-admin](../assets/dev-guide/2fa-parameters-admin.png){width="400"}
+
+### LDAP synchronization
+
+It is possible to launch an LDAP synchronization (if LDAP authentication is enabled). This synchronization performs the following operations:
+
+* **Creation** of LDAP users (accounts existing in the LDAP directory, not yet in the Extract database)
+* **Updating** the properties of existing LDAP users already in the Extract database (full name, email, role)
+* **Deactivating** LDAP users from the Extract database if they are deactivated or deleted from the LDAP directory
+
+![ldap-synchro](../assets/dev-guide/ldap-synchro.jpg){width="800"}
+
+
+
+
\ No newline at end of file
diff --git a/docs/features/development.md b/docs/features/development.md
new file mode 100644
index 00000000..7e1c4bdb
--- /dev/null
+++ b/docs/features/development.md
@@ -0,0 +1,90 @@
+---
+title: Development
+---
+
+## Packaging
+Before packaging, you need to install the JS dependencies (i.e. node modules). The easiest approach is to use Docker:
+```bash
+docker compose up -d
+```
+Install the dependencies:
+```bash
+docker compose exec node bash # open a bash session inside the node docker container
+
+cd extract
+./install_dependencies.sh # run the script to install dependencies
+```
+
+Stop Docker Compose, then generate a WAR of the application by running the following command:
+
+```bash
+mvn clean package
+```
+
+To package while skipping the tests (both their compilation and their execution):
+```powershell
+mvn clean package "-Dmaven.test.skip" # the double quotes are required in Windows PowerShell
+```
+
+In addition to the WAR file, the delivery archive of a new version contains:
+
+- The latest version of the database update script (`sql/update_db.sql`)
+- The FME sample script (`fme/`)
+
+## Tests
+
+Unit tests can be run independently of packaging with the command:
+
+```bash
+mvn -q test -Punit-tests --batch-mode --fail-at-end
+```
+
+To run integration tests:
+
+```bash
+mvn -q verify -Pintegration-tests --batch-mode
+```
+
+For functional tests (requires the application to be running on localhost port 8080):
+
+```bash
+mvn -q verify -Pfunctional-tests --batch-mode
+```
+
+## Documentation (edit and build)
+
+If you would like to set up a local development environment to edit the current documentation, perform the following steps. All you need is a Python 3 installation on your machine.
+
+1. Create and activate a Python virtual environment (not mandatory but recommended)
+    ```powershell
+    python -m venv .venv
+
+    Set-ExecutionPolicy Unrestricted -Scope Process # only needed on Windows
+    .venv\Scripts\Activate.ps1
+    ```
+
+2. Install the necessary packages
+    ```powershell
+    pip install mkdocs-material mkdocs-exclude-search
+    ```
+
+3. Build and serve the documentation on localhost
+    ```
+    mkdocs serve
+    ```
+4. Visit http://localhost:8000
+
+You can then edit the markdown files under ``./docs`` following the syntax from [mkdocs-material](https://squidfunk.github.io/mkdocs-material/reference/){target="_blank"}. The browser will automatically reload when changes are made to the ``./docs`` folder.
+
+To publish your modifications, submit them (only the markdown files) through a pull request on GitHub. When your pull request is merged to the main branch, the documentation will automatically be built and published to the documentation website.
+
+
+
+ +
+
diff --git a/docs/features/image.png b/docs/features/image.png
new file mode 100644
index 00000000..96901430
Binary files /dev/null and b/docs/features/image.png differ
diff --git a/docs/features/user-guide.md b/docs/features/user-guide.md
new file mode 100644
index 00000000..f313aa04
--- /dev/null
+++ b/docs/features/user-guide.md
@@ -0,0 +1,288 @@
+
+
+## Login
+
+![Login](../assets/user-guide/login.png){width="500"}
+
+Access to the application is secured and requires authentication via either:
+
+* A user account created by the administrator (see [User Administration](./admin-guide.md/#user-administration)), or
+* An account linked to the corporate LDAP directory (see [LDAP authentication](./admin-guide.md/#ldap-authentication)).
+
+Usernames are **not** case-sensitive, but passwords **are** case-sensitive.
+
+## Reset password
+
+If you've forgotten your password, click the ``Mot de passe oublié ?`` link on the login screen. This will guide you through a two-step password reset process.
+
+!!! Note
+    This option is only available for local user accounts. If you're using LDAP authentication, please reset your password through your corporate directory.
+
+### 1. Request reset code
+
+![Reset password](../assets/user-guide/request-reset-code.png){width="500"}
+
+Enter the email address associated with your user profile (the same one used to receive Extract notifications) and click ``Demander la réinitialisation``. A message containing a verification code will be sent to that address. Enter the code on the next page to proceed.
+
+### 2. Reset
+
+![Reset password](../assets/user-guide/reset-password.png){width="500"}
+
+Enter the code received by e-mail, then enter the desired password and confirm it. The password must contain at least 8 characters. Then click on ``Réinitialiser le mot de passe``. If successful, you will be redirected to the login page with a message telling you that your password has been reset. You can now log in with your new password.
+
+**Reset code validity**
+
+The reset code remains valid only under the following conditions:
+
+* **Same session usage:** The code must be used within the same browser session in which it was requested. Do not close or refresh the browser window after requesting the code.
+* **No login with old password:** If you log in using your current (old) password after requesting the reset code, the code becomes invalid.
+* **Time limit:** The code will expire automatically if not used within 20 minutes of being issued.
+
+If you're unable to reset your password or don't remember the email address associated with your account, please contact your administrator for assistance.
+
+## Two-factor authentication
+
+![Two-factor authentication](../assets/user-guide/2FA.png){width="500"}
+
+Once 2FA is successfully activated and set up, users will be prompted to enter the verification code from their Google Authenticator app after logging in with their password. When the option ``Faire confiance à cet appareil pendant 30 jours`` is checked, the 2FA code is no longer requested on the current device for 30 days. However, this does not enable automatic login: username and password are still required.
+
+**Recovery code**
+
+If a user has lost access to Google Authenticator or accidentally deleted the associated Extract account entry, they can still access the application by clicking on the ``Utiliser un code de récupération`` link.
They will then need to enter one of the recovery codes provided during the initial setup (see [Two-Factor Authentication Setup](#two-factor-authentication-setup)).
+
+Important:
+
+* Each recovery code can only be used once.
+* After logging in, it is recommended to reset two-factor authentication via the user administration section.
+
+## Current user administration
+![User Administration Menu](../assets/user-guide/user-admin-menu.png){width="150"}
+
+On the right side of the menu bar, you’ll find options related to the currently logged-in user.
+
+By clicking on the user’s name, the following settings can be managed:
+
+**For all users:**
+
+* Enable or disable two-factor authentication
+* Choose whether to receive email notifications
+* Change the language preference (if Extract is in multilingual mode)
+
+**For local (non-LDAP) users only:**
+
+* Update full name
+* Change username/login (must be unique within the application)
+* Change email address (must be unique within the application)
+* Change password
+
+Additionally, all users can log out by clicking the logout icon.
+
+### Two-Factor Authentication Setup
+
+When the user selects the drop-down button at the top of the page to enable or reset two-factor authentication, they are redirected to a registration page:
+
+![2FA setup](../assets/user-guide/2FA-setup.png){width="500"}
+
+1. **Scan the QR Code**
+
+    Use Google Authenticator or any compatible app to scan the QR code displayed on screen. Alternatively, you can enter the code manually into the app.
+
+2. **Enter the Verification Code**
+
+    Once the code is generated by your app, enter it in the field provided and click ``Continuer``.
+    If the code is correct, two-factor authentication (2FA) will be successfully enabled for your account.
+
+3. **Save Recovery Codes**
+
+    A screen will then display single-use recovery codes. These can be used to access Extract in case you lose access to Google Authenticator.
+
+    * You can download or print these codes.
+    * **Important**: These codes will not be shown again, so store them in a secure location.
+    * **Warning**: Recovery codes allow access to your account without the second factor. Treat them as confidential.
+
+If two-factor authentication is not mandatory for this user, you may choose to cancel the registration process. In this case, the account's 2FA status will remain unchanged:
+
+* If 2FA was previously disabled, it stays disabled.
+* If it was already enabled, it remains active with the previously configured setup.
+
+## Requests list
+
+The home page provides a real-time overview of the application's activity, including:
+
+* Status of active connectors
+* Requests in progress
+* History of completed requests
+
+**Administrators** can view all requests. **Regular users** only see requests associated with processes where they are assigned as operators.
+
+The page updates automatically based on the refresh interval defined in the application settings (see [Orchestration Section](./admin-guide.md/#orchestration)). By default, this interval is set to 20 seconds.
+
+### Status of active connectors
+
+![Active Connectors](../assets/user-guide/active-connectors.png){width="250"}
+
+In the top-right corner of the page, chips show active connectors grouped by their current state:
+
+* Flashing red chip: Indicates connectors that have encountered errors and exceeded the maximum number of retry attempts.
+* Green chip: Represents connectors whose last import request was successful (regardless of whether any request was actually imported), or those still within the allowed retry limit after an error.
+
+If no connectors fall under a particular status, the corresponding chip is not shown. For operators, only error messages may be visible.
+
+Clicking on a chip opens a dropdown listing the relevant connectors. Hovering over a connector displays a tooltip with the time of the last import and any associated error message. Administrators can click a connector to view and edit its settings (see [Connectors detail](./admin-guide.md/#connectors)).
+
+### Requests in progress
+
+![Requests in progress](../assets/user-guide/requests-in-progress.png){width="1000"}
+
+The first table contains requests that have been imported but not yet processed. The background color of the row indicates the status:
+
+* Green: Processing is running as expected
+* Orange: Processing is paused pending operator validation
+* Red: Processing stopped due to an error
+
+Clicking anywhere on a row opens the request details (refer to [Request details](#request-details)). To open the request in a new tab or window, click directly on the request number.
+
+### Completed requests
+
+![Requests completed](../assets/user-guide/requests-completed.png){width="1000"}
+
+The second table lists requests that have been exported to their origin server. Due to its potentially large volume, it includes a filtering feature based on the following criteria:
+
+* Text search (partial and case insensitive) in the following fields:
+    * Request label
+    * Product label
+    * Customer name
+    * Third party name
+
+* Connector through which the request was imported
+* Associated processing
+* Request reception date using two date fields to define a range
+
+All search criteria are optional. When multiple criteria are specified, they are combined using the **AND** operator.
+
+The search is executed only when you click the green **search button** (magnifying glass icon). Alternatively, you can start the search by pressing **Enter** while focused on a text or date input field.
+
+
+## Request details
+
+### Processing status
+
+![Requests details](../assets/user-guide/request-details.png){width="1000"}
+
+The stages in the processing of the request are displayed at the top of the page, along with their status. The color code is as follows:
+
+| Color | Status |
+| ----------- | ------------------------------------ |
+| **Light green** | The task has been successfully completed |
+| **Dark green** | The task is running |
+| **Orange** | The task awaits validation by an operator |
+| **Red** | An error has occurred while running the task |
+| **Grey** | Task has not yet been executed or has been skipped |
+
+
+### Actions
+If processing is interrupted by an error or a validation request, an action box is displayed so that you can decide how to proceed.
+
+#### Validation
+
+![Requests validation](../assets/user-guide/requests-validation.png){width="1000"}
+
+When a task requires the processing status to be validated by an operator in order to continue, the following actions are available:
+
+* **Validate**: The operator determines that the processing is in the expected state and confirms that processing can continue normally. For this action, entering a comment is optional. If a comment has already been defined at a previous stage of processing, it is displayed in the text box. It is then possible to modify, replace, or even delete it.
If at least one comment template has been defined for the validation of this processing, a drop-down list appears below the comment field. If an item from this list is selected, the corresponding text replaces the current validation comment. This text can be modified before validation.
+
+* **Cancel**: The operator determines that the process cannot be successfully completed. The request will then be rejected and exported without result. No other tasks will be performed. In this case, a comment **must** be entered to explain to the customer why their request was rejected. If a comment has already been defined at a previous stage of the process, it will be displayed in the text box. It is then possible to modify, replace, or even delete it. If at least one comment template has been defined for canceling this processing, a drop-down list appears below the comment field. If an item from this list is selected, the corresponding text replaces the current cancellation comment. It is possible to modify this text before canceling.
+
+* **Restart**: Deletes all files generated so far for this request and runs the process from the beginning.
+
+#### Task error
+
+![Request error](../assets/user-guide/requests-error.png){width="1000"}
+
+When the last task ended with an error, the following actions are available:
+
+* **Retry**: Runs the task that caused the error again.
+* **Restart**: Deletes all files generated so far for this request and runs the process from the beginning.
+* **Continue**: Ignores the current task and continues processing with the next task. If processing is complete, the result of the request is exported.
+* **Cancel**: The operator determines that the processing cannot be successfully completed. The request will then be rejected and exported without a result. No other tasks will be executed. A comment must be entered to explain to the customer why their request was rejected.
+
+#### No processing associated
+
+!!! Info
+    Only administrators can view this error status
+
+![No processing associated](../assets/user-guide/requests-no-processing.png){width="1000"}
+
+If the request could not be associated with any processing based on the rules defined for the connector with which it was imported, the following actions are available:
+
+* **Retry**: Restarts the attempt to associate the request with a processing. This function is particularly useful when the connector rules have been modified to handle this specific case.
+* **Cancel**: The administrator determines that the request cannot be processed successfully. It will then be rejected and exported without result. A comment must be entered to explain to the customer why their request was rejected.
+
+#### Import error
+
+!!! Info
+    Only administrators can view this error status
+
+![Import error](../assets/user-guide/requests-import-error.png){width="1000"}
+
+If an error preventing the request from being processed is detected during import, only one action is possible:
+
+* **Cancel**: The request will be rejected and exported without result. A comment must be entered to explain to the customer why their request was rejected.
+
+#### Export error
+
+![Export error](../assets/user-guide/requests-export-error.png){width="1000"}
+
+If the result of the request could not be sent to the original server, the following actions are possible:
+
+* **Retry export**: Attempts to send the result of the request again as is. This function is particularly useful in the event of a temporary error, such as server unavailability.
+* **Restart**: Deletes all files generated so far for this request and runs the process from the beginning.
+* **Cancel**: The administrator determines that the request cannot be processed successfully. It will then be rejected and exported without result. A comment must be entered to explain to the customer why their request was rejected.
+
+### Request information
+
+#### Customer response
+
+![Request Customer Response](../assets/user-guide/requests-customer-response.png){width="1000"}
+
+This section displays the information that will be returned to the customer in the current processing state.
+
+You can download the generated files by clicking on them. If more than one file is available, there is a button that allows you to download them all in a ZIP file.
+
+If the request has an error or is awaiting validation, files can be added to those that will be returned to the user using the ``Ajouter des fichiers`` button. Files can also be deleted by clicking on the red icon next to the file. **Warning**: Once the deletion confirmation has been accepted, this action cannot be undone.
+
+This section may not be displayed if no comments are defined, if the request's output directory does not contain any files, and if the request is not in a state that allows the files in the output directory to be modified.
+
+#### Customer request
+
+![Request Customer Requests](../assets/user-guide/requests-customer-request.png){width="1000"}
+
+These are the parameters of the request as submitted by the customer.
+
+If the connector setting ``URL de détail de commande`` is set, a link in the panel header opens the original order details in a new window.
+
+On the map, two buttons allow you to download the order's footprint polygon in KML format (WGS84 coordinates) or DXF format (in the map's coordinate system).
+
+#### Processing history
+
+![Request Processing History](../assets/user-guide/requests-processing-history.png){width="1000"}
+
+This table tracks all stages of request processing, including any new task executions, processing restarts, or ignored tasks.
+
+#### Administration panel
+!!! Info
+    Only administrators can view this panel
+
+![Request Admin Panel](../assets/user-guide/requests-admin-panel.png){width="1000"}
+
+This panel is collapsed by default and can be expanded by clicking on its title.
+
+It contains a button that allows you to delete a request. Processing will be immediately interrupted and no results will be returned to the client (not even a rejected request). This tool is intended as a last resort and should not be used during normal operation.
+
+In addition, it contains useful information for resolving potential problems, e.g. with FME scripts.
+
+
+
+
\ No newline at end of file
diff --git a/docs/getting-started/configure.md b/docs/getting-started/configure.md
new file mode 100644
index 00000000..11eb6d76
--- /dev/null
+++ b/docs/getting-started/configure.md
@@ -0,0 +1,221 @@
+---
+title: Configure
+---
+
+## Application settings
+!!! Info
+    The global application settings are found in the ``application.properties`` file at the root of the ``WEB-INF\classes`` folder. Changes are taken into account when the Tomcat Extract application is restarted.
+
+**application.external.url**
+URL to access the application. This value is used to generate links in notification e-mails. + +**database.encryption.secret**
+Random 32-character ISO-8859-1 string used as key to encrypt certain database values. + +**database.encryption.salt**
+Random 32-character ISO-8859-1 string used as "salt" to encrypt certain database values. + +**email.templates.cache**
+Boolean value defining whether email notification templates should be cached. + +**email.templates.encoding**
+Encoding used by email notification templates.
+
+**email.templates.path**
+Path to the directory containing email notification templates. If the path is relative to the application's ``WEB-INF/classes`` directory, it must be prefixed with ``classpath:/``.
+
+**extract.i18n.language**
+ISO language code used by the application. A single code means the application is in monolingual mode. Several codes separated by commas mean the application is in multilingual mode (the language is then set in each user's preferences). In multilingual mode, the first code acts as the default language (e.g. for fallback, creation of the first user, etc.). Here are a couple of examples:
+
+* `extract.i18n.language=fr` Extract is in French
+* `extract.i18n.language=de` Extract is in German
+* `extract.i18n.language=fr,de` Extract is in French and German, French being the default language
+
+All codes must have corresponding application language files (usually `messages.properties`, `messages.js` and `*Help.html`) under ``WEB-INF/classes``, `WEB-INF/classes/static/lang` and in all plugin `lang` directories. If this is not the case, you can add a new language (see [Add a new language section](../customize/#add-a-new-language)).
+
+By default, Extract supports French (`fr`) and German (`de`).
+
+**check.authenticity**
+Boolean value defining whether the signatures of the executable binaries run by Extract are checked. By default, the value is `check.authenticity=true`, but you can set it to `false` (the check is known to cause antivirus alerts when set to `true`).
+
+**http.proxyHost**
+Server name to be used as proxy for external connections. This property can be omitted if no proxy is used. + +**http.proxyPassword**
+Password used to log in to the proxy server. This property can be omitted if no proxy is used or if it does not require authentication. + +**http.proxyPort**
+HTTP port of proxy server. This property can be omitted if no proxy is used. + +**http.proxyUser**
+Username used to log in to the proxy server. This property can be omitted if no proxy is used or if it does not require authentication. + +**ldap.attributes.fullname**
+LDAP attribute name containing the user's full name (e.g. John Doe). + +**ldap.attributes.login**
+LDAP attribute name containing the user's identifier, to be typed into the login form (e.g. jdoe).
+
+**ldap.attributes.mail**
+LDAP attribute name containing the user's e-mail address. + +**ldap.user.objectclass**
+LDAP object class to which users must belong. + +**logging.config**
+Path to file containing application log configuration. If the path is relative to the application's ``WEB-INF/classes`` directory, it must be prefixed with ``classpath:/``. + +**spring.batch.jdbc.initialize-schema**
+String defining whether the Spring Batch initializer should be started. This value must be set to ``never`` in order to avoid creating unused tables in the database. + +**spring.batch.job.enabled**
+Boolean value defining whether scheduled tasks should be started automatically by Spring Batch. Must be set to ``false``. (These tasks are started by the application.) + +**spring.datasource.driver-class-name**
+Qualified name of the database driver class to be used. + +**spring.datasource.password**
+Database login password. + +**spring.datasource.url**
+JDBC database connection string (e.g. ``jdbc:postgresql://localhost:5432/extract``, where the database name is only an example).
+
+**spring.datasource.username**
+Database login username.
+
+**spring.servlet.multipart.max-file-size**
+Maximum size of each file that can be uploaded to the application. If you receive a 500 error when adding a large file to a request folder, it may be useful to increase this value, remembering to also increase ``spring.servlet.multipart.max-request-size`` if necessary. + +**spring.servlet.multipart.max-request-size**
+Maximum size of an upload request. This value must be greater than or equal to that of ``spring.servlet.multipart.max-file-size``. + +**spring.jpa.database-platform**
+Qualified name of the dialect class to be used to communicate with the database. + +**spring.jpa.hibernate.ddl-auto**
+Value defining the application's behaviour with regard to database architecture. By default, this property is set to ``update``, which allows tables to be generated automatically when the application is first started. Subsequently, you can speed up application start-up by setting this value to ``none``. In this case, if a table or column has been accidentally deleted, it will not be automatically recreated, which may lead to operating problems. It is also recommended to set this value back to ``update`` when installing a new version of Extract, so that any database modifications can be applied. + +**spring.jpa.properties.hibernate.id.new_generator_mappings**
+Boolean value defining whether to use the latest ID generator version for database entries. Must be set to ``true``. + +**spring.jpa.show-sql**
+Boolean value defining whether to display SQL generated by the application (e.g. in logs). We strongly advise you to leave this value at ``false``. + +**spring.thymeleaf.cache**
+Boolean value defining whether the application's pages templates should be cached. + +**spring.thymeleaf.enabled**
+Boolean value defining whether to enable the templating engine. Must be set to ``true``. + +**spring.thymeleaf.encoding**
+Encoding used by the application's pages templates. + +**spring.thymeleaf.mode**
+Template type used for application pages. Must be set to ``HTML``. + +**spring.thymeleaf.prefix**
+Path to directory containing application page templates. If the path is relative to the application's ``WEB-INF/classes`` directory, it must be prefixed with ``classpath:/``.
+
+**spring.thymeleaf.suffix**
+Application page templates extension. + +**spring.thymeleaf.template-resolver-order**
+Templating engine priority for resolving application page templates. Must be set to ``1``. + +**table.page.size**
+Number of lines to display in tables using pagination.
+
+## Logging
+!!! Info
+    The logging application settings are found in the ``logback-spring.xml`` file at the root of the ``WEB-INF\classes`` folder. They define what information should be written to the application logs. Changes are taken into account when the Tomcat Extract application is restarted.
+
+The setup takes place in two steps:
+
+1. The setting of an ``appender``, i.e. a location to which to send the information to be logged, and the format of this information. This can be a file, a database, a console, etc.
+
+2. The setting of a ``logger``. A ``logger`` serves as the connection between a specific part of an application and one or more predefined ``appenders``. A ``logger`` can simultaneously use multiple ``appenders``, and a single ``appender`` can be shared by multiple ``loggers``. The ``logger`` is identified by a name, corresponding to the package from which the class messages originate, encompassing any of its sub-packages. Additionally, the ``logger`` specifies a minimum level of messages to be logged. The root ``logger`` provides default logging behavior for all application classes, including dependencies.
+
+For more detailed information, please refer to the [Logback help](https://logback.qos.ch/manual/configuration.html){target="_blank"}.
+
+## Tomcat user access rights
+Since Extract is a Tomcat application, it relies on the user running the Tomcat service to access various resources (network shares, local folders, etc.). It is therefore recommended to run the Tomcat service with an account having access to the necessary resources.
+
+### Changing the Tomcat user (Windows)
+
+1. Open the Tomcat monitor (``tomcat9w.exe``, or ``Start Menu > All Programs > Apache Tomcat > Configure Tomcat``)
+2. Go to the ``Log On`` tab
+3. Check the ``This account`` option
+4. Edit the desired account information
+5. Click on OK
+6. Restart the Tomcat service
+
+### Changing the Tomcat user (Linux)
+
+1. Edit the ``/etc/systemd/system/tomcat9.service`` file
+2. In the ``[Service]`` section, change the values of the ``User`` and ``Group`` keys to the user's name and group respectively.
+3. Save the file
+4. Restart the Tomcat service
+
+!!! Warning
+    The name of the file, its location and even the entire procedure may differ depending on the distribution used, the version of Tomcat and its installation mode.
+
+## Tomcat default locale
+In emails sent by Tomcat, dates are formatted according to Tomcat's locale. To set it, use the ``-Duser.language`` and ``-Duser.region`` JVM parameters. For example:
+```
+-Duser.language=fr -Duser.region=CH
+```
+
+These parameters are defined in the same way as for heap size (see [Heap Size](#heap-size)).
+
+## Optimizing
+
+### Heap size
+The FME Desktop scripts run by Extract can lead to heavy heap usage. If you are using this plugin, it is recommended that you set the starting size of the heap (parameter ``Xms``) to 1024 MB and the maximum size (parameter ``Xmx``) to at least 2048 MB.
+
+These parameters are set in the ``setenv.sh`` (Linux) or ``setenv.bat`` (Windows) file in Tomcat's bin directory.
+
+If Tomcat is running as a Windows service, these parameters are defined in the ``Java`` tab of the service properties. This tab can only be accessed via the Tomcat monitor (``tomcat9w.exe``, or ``Start Menu > All Programs > Apache Tomcat > Configure Tomcat``). It is not visible via the Windows services management console (services.msc).
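+
+As an illustration, a minimal ``setenv.sh`` (Linux) implementing this recommendation could look like the sketch below; the sizes are the ones suggested above and should be adjusted to your workload:
+
+```shell
+#!/bin/sh
+# Example heap sizing for Extract: 1024 MB initial, 2048 MB maximum
+export CATALINA_OPTS="$CATALINA_OPTS -Xms1024m -Xmx2048m"
+```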
+
+In all cases, Tomcat must be restarted for the changes to take effect.
+
+### Cache size
+During startup, the logs may report that an item failed to load due to insufficient cache size, for example:
+
+```
+24-May-2017 10:52:57.029 WARNING [https-jsse-nio-8443-exec-10] org.apache.catalina.webresources.Cache.getResource Unable to add the resource at [/WEB-INF/classes/static/bower_components/select2/docs/_sass/vendor/font-awesome/_variables.scss] to the cache for web application [/extract] because there was insufficient free space available after evicting expired cache entries - consider increasing the maximum size of the cache
+```
+
+In this case, you can increase the maximum cache size by modifying the ``context.xml`` file in Tomcat's ``conf`` directory:
+
+```xml
+<?xml version="1.0" encoding="UTF-8"?>
+<Context>
+    <!-- Reconstructed example: raise the static resources cache limit (value in kilobytes) -->
+    <Resources cachingAllowed="true" cacheMaxSize="100000" />
+</Context>
+```
+
+## Rule field size limitation (matching processes)
+Since the creation of text fields is problematic when handled solely with JPA (so that the application is not tied to a particular database system), it was decided to define the field that stores the rules linking requests to their corresponding processing as a ``VARCHAR`` data type. Unfortunately, this means limiting the number of characters in each rule to 65,000.
+
+Using PostgreSQL, however, you can remove this limitation by manually changing the type of the rule field in the rules table. The SQL command to make this change is:
+
+```sql
+ALTER TABLE rules ALTER COLUMN rule TYPE TEXT;
+```
+
+Field values of existing records are preserved.
+
+!!! Warning
+    There is no guarantee that a similar modification will work with another DBMS.
+
+
+
+
\ No newline at end of file
diff --git a/docs/getting-started/customize.md b/docs/getting-started/customize.md
new file mode 100644
index 00000000..914d2eea
--- /dev/null
+++ b/docs/getting-started/customize.md
@@ -0,0 +1,172 @@
+---
+title: Customize
+---
+
+## Add a new language
+
+### For the main module
+
+You can set a new language for the Extract application user interface. If you want to translate the main module of Extract, do the following:
+
+1. Create a new `messages_{lang-code}.properties` file under `WEB-INF\classes`, named after the two-character ISO code of the desired language (e.g. ``messages_it.properties``)
+2. Create a new directory in ``WEB-INF\classes\static\lang``, named after the two-character ISO code of the desired language (e.g. ``it``)
+3. Copy the contents of a previously defined language directory (``messages.js`` and any `html` files)
+4. Translate all messages stored in these files
+
+The date selector used on the home page can also be translated. Its language files are located in the ``WEB-INF\classes\static\lib\bootstrap-datepicker\dist\locales`` directory. If the desired language is not yet defined:
+
+1. Copy an existing language file by changing the language code in its name
+2. Translate all the strings it contains
+
+To apply the new language, update the ``extract.i18n.language`` property in the ``WEB-INF\classes\application.properties`` file, then restart the Tomcat application (see [Application settings](../configure/#application-settings)).
+
+### For plugins
+
+If you want to translate the connector and task plugins as well, the easiest approach is to work on the code base of Extract and re-package the application:
+
+1. In all plugins, create a new directory under `{plugin-name}/src/main/resources/plugins/lang`, named after the two-character ISO code of the desired language (e.g. ``it``)
+2. Copy the contents of a previously defined language directory (`messages.properties`, ``messages.js`` and any `html` files)
+3. Translate all messages stored in these files
+4. Re-package the application with `mvn clean package "-Dmaven.test.skip"` and re-deploy it on Tomcat
+
+### Fallback language
+
+* For all `messages.properties` and `messages.js` files, there is a fallback mechanism: if a key is missing in a localization file, it is first looked up in the default language (the first one specified in the `extract.i18n.language` property), then in French.
+* For all `*Help.html` files, there is no fallback mechanism. If the file is missing, the tooltip help will be empty.
+
+## Plugins
+!!! Info
+    The types of connector providing the requests and the tasks that can be included in a process are managed by plugins that can be added, removed or updated independently of the application itself.
+
+### Installing or updating a plugin
+
+If a new connector type or a new task is available, simply place its JAR in the ``WEB-INF/classes/connectors`` (for connectors) or ``WEB-INF/classes/task_processors`` (for tasks) directory. When performing an upgrade, it is recommended to delete the JAR file of the previous version to prevent any potential conflicts.
+
+After making changes to the plugins, the Tomcat Extract application must be restarted.
+
+### Development of a new connector plugin
+!!! Info
+    Documented sample code is available to help you create a new connector.
You'll find it in the docs folder of the application repository: [https://github.com/asit-asso/extract/tree/master/docs/extract-connector-sample](https://github.com/asit-asso/extract/tree/master/docs/extract-connector-sample){target="_blank"}.
+
+* The project must be structured as a Java module, requiring the inclusion of a ``module-info.java`` file that declares its dependencies
+* The new connector project must define a dependency on the ``extract-interface`` project
+* The connector's main class must implement the ``IConnector`` interface
+* The connector must declare a constructor without parameters
+* The value returned by the ``getCode`` method must be unique across all Extract connectors
+* The ``getParams`` method must return the connector's parameters as a JSON-formatted array. If the connector doesn't accept parameters, return an empty array (``[]``).
+* Static files used by the connector (such as property files, language files, etc.) should be placed in the ``resources/connectors//`` subdirectory.
+* A file named ``ch.asit_asso.extract.connectors.common.IConnector`` must be created in the ``resources/META-INF/services`` subdirectory. The file should contain the fully qualified name of the main class.
+
+### Development of a new task plugin
+!!! Info
+    Documented sample code is available to help you create a new task plugin. You'll find it in the docs folder of the application repository: [https://github.com/asit-asso/extract/tree/master/docs/extract-task-sample](https://github.com/asit-asso/extract/tree/master/docs/extract-task-sample){target="_blank"}.
+
+* The project must be structured as a Java module, requiring the inclusion of a ``module-info.java`` file that declares its dependencies
+* The new task project must define a dependency on the ``extract-interface`` project
+* The task's main class must implement the ``ITaskProcessor`` interface
+* The task plugin must declare a constructor without parameters
+* The value returned by the ``getCode`` method must be unique across all Extract tasks
+* The ``getParams`` method must return the plugin parameters as a JSON-formatted array. If the plugin doesn't accept parameters, return an empty array (``[]``).
+* The ``getPicto`` method must return the name of the [FontAwesome](https://fontawesome.com/icons){target="_blank"} icon to be displayed in the plugin's title bar
+* Static files used by the plugin (such as property files, language files, etc.) should be placed in the ``resources/plugin//`` subdirectory.
+* A file named ``ch.asit_asso.extract.plugins.common.ITaskProcessor`` must be created in the ``resources/META-INF/services`` subdirectory. The file should contain the fully qualified name of the plugin main class. If this requirement is not met, the plugin will not be detected by Extract.
+* The task plugin is allowed to modify specific attributes (see below) of the original request when returning a result:
+
+    | Attribute | Purpose |
+    | ----------- | ------------------------------------ |
+    | `rejected` | Indicates whether Extract should process the request. If set to `true`, the processing is interrupted and the request is immediately exported back to the origin server without any result. In this case, the ``remark`` attribute must be provided to explain the reason for the interruption. |
+    | `remark` | An explanatory note included with the request to inform the requester about how their data is being processed. It's recommended to append this note to the original attribute content to preserve all existing information. |
+
+Changes to any other request attributes will be ignored.
+
+## Basemap
+
+To customize the map layers that display the order perimeter within the request details view, create a configuration file named ``map.custom.js`` and place it under the ``WEB-INF/classes/static/js/requestMap`` directory.
+
+This directory includes a ``map.custom.js.dist`` file that provides an example configuration, which you can use as a template to create your own setup.
+
+The following constraints apply:
+
+* The ``map.custom.js`` file defines an ``initializeMap`` function, which returns a ``Promise`` that resolves with the initialized map instance.
+* The map's ``target`` property must be set to the string ``"orderMap"``.
+* Each map layer must include a ``title`` attribute in its ``options``. This title will be used as a label in the layer list widget.
+* Any layer representing a base map must include a ``type`` attribute in its ``options`` with the value ``"base"``.
+
+## System emails
+
+Emails are generated using HTML templates with the Thymeleaf 3.1 engine (learn more at [Thymeleaf Documentation](https://www.thymeleaf.org/doc/tutorials/3.1/usingthymeleaf.html){target="_blank"}). These templates can be found in the following directory: ``WEB-INF\classes\templates\email\html``. The relevant files are listed below:
+
+| Path | Usage |
+| ----------- | ------------------------------------ |
+| `layout\master` | General layout used by all system emails |
+| `exportFailed.html` | Request's export failed |
+| `importFailed.html` | Request's import failed |
+| `invalidProductImported.html` | An imported request includes a product that cannot be processed |
+| `passwordReset.html` | Send password reset code |
+| `taskFailed.html` | Request processing interrupted due to an error |
+| `taskStandby.html` | Request processing awaits an operator validation |
+| `taskStandbyNotification.html` | Remind that a request is still awaiting validation |
+| `unmatchedRequest.html` | An imported request does not match any rule defined for the connector |
+
+The message delivery follows the Model-View-Controller (MVC) architecture. The content displayed in the message depends on the data transmitted by Extract.
+
+To support internationalization of the application, all displayed strings, including emails, are stored in language-specific files. These strings can be found in the ``WEB-INF\classes\messages_{language}.properties`` files. The strings are defined using the following format:
+
+```
+string.id=Value text
+```
+
+Parameters within strings are specified by their index (starting from 0) enclosed in curly braces. For example, ``{0}`` refers to the first parameter.
+
+**Warning**: Ensure that the number of parameters expected by the string matches the number of parameters provided. Otherwise, message delivery will fail.
+
+In email templates, strings are specified using the ``th:text`` attribute. When the string does not require any parameters, use the following syntax:
+
+```
+th:text="#{string.id}"
+```
+
+When the string does require parameters, use the following syntax:
+
+```
+th:text="${#messages.msg('string.id', param0, param1,…)}"
+```
+
+The plain text in email templates serves only as a placeholder for previewing the template in an HTML viewer. When the email is generated, these placeholders are automatically replaced with the corresponding values from the language file.
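+
+For example, a hypothetical string ``email.request.summary`` expecting two parameters could be defined as:
+
+```
+# Hypothetical key, shown for illustration only
+email.request.summary=Request {0} for product {1} has been processed.
+```
+
+and referenced from a template as follows, using the parameter names described below:
+
+```
+th:text="${#messages.msg('email.request.summary', orderLable, productLabel)}"
+```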
+
+The following parameters can be used (provided that this information is available when the email is sent):
+
+* `orderLable` : Request's label
+* `productLabel` : Product name
+* `startDate` : Submission date (e.g., "20 nov. 2025, 10:24:49")
+* `startDateISO` : Submission date in ISO format (e.g., "2025-11-20T09:24:49.221Z")
+* `endDate` : End date (e.g., "20 nov. 2025, 10:24:49")
+* `endDateISO` : End date in ISO format (e.g., "2025-11-20T10:24:49.221Z")
+* `status` : Request status
+* `rejected` : Boolean, true if the request was rejected
+* `client` or `clientName` : Client name
+* `clientGuid` : Unique client identifier
+* `organism` or `organisationName` : Client organization name
+* `organismGuid` : Unique organization identifier
+* `tiers` : Third party organization name
+* `perimeter` : Geographic perimeter (WKT format)
+* `surface` : Area of the order
+
+Dynamic parameters can be accessed as follows: `parameter.KEY_NAME`, e.g. `parameter.FORMAT` for the format, if it exists.
+
+Parameters can also be used outside of a localized string, using the following syntax (the original snippet was lost, so the markup below is a reconstructed illustration):
+
+```html
+<!-- Inserts the value of the orderLable parameter into the element's text -->
+<span th:text="${orderLable}">Order label placeholder</span>
+```
+
+If a parameter does not exist, the value is replaced by an empty string. If a dynamic parameter does not exist, the value is replaced by `null`.
+
+
+
+
\ No newline at end of file diff --git a/doc/mitigations/Security recommendations for Tomcat 9 deployments (Linux).md b/docs/getting-started/cybersecurity-linux.md similarity index 73% rename from doc/mitigations/Security recommendations for Tomcat 9 deployments (Linux).md rename to docs/getting-started/cybersecurity-linux.md index 3784a7cc..4900b75f 100644 --- a/doc/mitigations/Security recommendations for Tomcat 9 deployments (Linux).md +++ b/docs/getting-started/cybersecurity-linux.md @@ -1,412 +1,440 @@ -# Security recommendations for Tomcat 9 deployments (Linux) -## **Priority Summary**: - -1. **Run Tomcat as a non-root user** and enforce **strict file permissions**. -2. **Add TLS 1.2/1.3 with FIPS-validated ciphers** and **HSTS**. -3. **Harden cookie security** (HttpOnly and Secure flags). -4. **Enable AccessLogValve** and **log client IPs** behind proxies. -5. **Patch Tomcat regularly** and **enable LockOutRealm**. -6. **Remove unnecessary applications and connectors** and **harden the shutdown port**. -7. **Disable auto-deployment** and **stack tracing**. -8. **Audit file changes** and **protect the keystore**. - -## 1. **Run Tomcat as a non-root user** - -Running Tomcat as a non-root user is a critical security practice. If Tomcat were to be compromised while running as root, an attacker would gain full control of the server. By running it under a limited, non-root user, the potential damage is restricted to the Tomcat environment, reducing the risk to the overall system. - -- **Why it matters**: Minimizes privileges and limits the damage an attacker can inflict if they gain access to Tomcat. - -### How-to -**Create a user and group for Tomcat**: -```shell -sudo useradd -s /sbin/nologin -g tomcat -d $CATALINA_BASE tomcat -``` - -**Set the ownership**: -```shell -sudo chown -R tomcat:tomcat $CATALINA_BASE -``` - -**Run Tomcat under the `tomcat` user**: -* Open `tomcat.service` (assuming Tomcat is managed by `systemd`): -```shell -sudo nano /etc/systemd/system/tomcat.service -``` - -* Ensure the following lines are set to run Tomcat as the `tomcat` user: -``` -[Unit] -Description=Apache Tomcat Web Application Container -After=syslog.target network.target - -[Service] -Type=forking -User=tomcat -Group=tomcat - -ReadWritePaths=/var/log/extract/ - -Environment=JAVA_HOME=/usr/lib/jvm/jdk-17.0.12-oracle-x64 -Environment=CATALINA_PID=/opt/apache-tomcat-9.0.95/temp/tomcat.pid -Environment=CATALINA_HOME=/opt/apache-tomcat-9.0.95 -Environment=CATALINA_BASE=/opt/apache-tomcat-9.0.95 -Environment=CATALINA_OPTS= -Environment="JAVA_OPTS=-Dfile.encoding=UTF-8 -Dnet.sf.ehcache.skipUpdateCheck=true -Xms2g -Xmx4g" - -ExecStart=/opt/apache-tomcat-9.0.95/bin/startup.sh -ExecStop=/opt/apache-tomcat-9.0.95/bin/shutdown.sh - -[Install] -WantedBy=multi-user.target -``` - -The line `ReadWritePaths=/var/log/extract` is mandatory because of the new sandboxing imposed by the `systemd` service manager. - -Reload `systemd` in order to discover and load the new Tomcat service file: - -```shell -systemctl daemon-reload -``` - -Enable the service to start at boot: - -```shell -systemctl enable tomcat.service -``` - -To control the service: - -```shell -service tomcat [start | stop | restart | status] -``` - -Or with Systemd directly: - -```shell -systemctl [start | stop | restart | status] tomcat -``` - -Reload systemd and restart Tomcat: -```shell -sudo systemctl daemon-reload -sudo systemctl restart tomcat -``` -## 2. 
**Set proper file permissions** - -- **$CATALINA_HOME folder** must be owned by the root user, group `tomcat`. -- **$CATALINA_HOME/conf folder** must be owned by the root user, group `tomcat`. -- **$CATALINA_BASE/logs folder** must be owned by the tomcat user, group `tomcat`. -- **$CATALINA_BASE/work folder** must be owned by the tomcat user, group `tomcat`. -- Permissions: - - **$CATALINA_BASE/logs**: `750` - - **$CATALINA_HOME/bin**: `750` - - Files in **$CATALINA_BASE/logs**: `640` - - JAR files in **$CATALINA_HOME/bin**: `640` - -Proper file permissions ensure that only authorized users can access or modify critical Tomcat files and directories. Limiting access to configuration files (e.g., `server.xml`) to root, while allowing Tomcat-specific logs and runtime files to be owned by the `tomcat` user, strengthens the security by preventing unauthorized changes. - -- **Why it matters**: Prevents unauthorized access or modification of sensitive configuration files and logs. - -### How-to -**Change the ownership**: -```shell -sudo chown -R root:tomcat $CATALINA_HOME -sudo chown -R tomcat:tomcat $CATALINA_HOME/{logs,temp,webapps,work} -``` - -**Change file permissions**: -```shell -sudo chmod -R 750 $CATALINA_HOME/bin -sudo chmod -R 750 $CATALINA_HOME/logs -sudo find $CATALINA_HOME/logs -type f -exec chmod 640 {} \; -sudo find $CATALINA_HOME/bin -type f -name "*.jar" -exec chmod 640 {} \; -``` - -## 3. **Add TLS v1.2 or v1.3 with FIPS-validated ciphers** - -TLS (Transport Layer Security) is essential for encrypting data transmitted between clients and servers. By enforcing TLS 1.2 or 1.3 with strong, FIPS-validated ciphers, you ensure secure communication that meets strict compliance standards. - -- **Why it matters**: Encrypts sensitive data in transit and complies with modern security standards like FIPS 140-2. -### How-to -**Edit the `server.xml` file **: -```shell -sudo nano /opt/tomcat/conf/server.xml -``` - -**Change file permissions**: -```xml - - - - - - - TLS_AES_128_GCM_SHA256, TLS_AES_256_GCM_SHA384, TLS_CHACHA20_POLY1305_SHA256 - - - -``` - -**Restart Tomcat**: -```shell -sudo nano /opt/tomcat/conf/server.xml -``` - -## 4. **Add HTTP Strict Transport Security (HSTS)** - -HSTS instructs browsers to only interact with your server using HTTPS. It prevents downgrade attacks where an attacker could force a user to connect via HTTP. Once a browser receives the HSTS header, it will automatically convert all future requests to HTTPS, ensuring secure communication. - -- **Why it matters**: Protects against protocol downgrade attacks and ensures that all communications use HTTPS. -### How-to -**Modify the `web.xml` file in the default web app**: -```shell -sudo nano /opt/tomcat/conf/web.xml -``` - -**Change the filter values**: -```xml - - HSTSFilter - org.apache.catalina.filters.HttpHeaderSecurityFilter - - hstsEnabled - true - - - hstsMaxAgeSeconds - 31536000 - - - - HSTSFilter - /* - -``` - -**Restart Tomcat**: -```shell -sudo nano /opt/tomcat/conf/server.xml -``` - -## 5. **Disable X-Powered-By Header** - -By default, Tomcat may expose the `X-Powered-By` header, which discloses information about the server and its version. Disabling this prevents attackers from easily identifying the version of Tomcat being used, reducing the risk of targeted attacks based on known vulnerabilities. - -- **Why it matters**: Hides unnecessary information from potential attackers and reduces the risk of exploit-based attacks. 
-### How-to -**Edit the `server.xml` file **: -```shell -sudo nano /opt/tomcat/conf/server.xml -``` - -**Change file permissions**: -```xml - -``` - -**Restart Tomcat**: -```shell -sudo nano /opt/tomcat/conf/server.xml -``` - -## 6. **Harden Cookie Security (HttpOnly & Secure flags)** - -- **HttpOnly flag**: Ensures that cookies cannot be accessed via JavaScript, protecting them from cross-site scripting (XSS) attacks. -- **Secure flag**: Ensures that cookies are only transmitted over HTTPS, preventing them from being exposed during plaintext transmission. -- **Why it matters**: Protects session cookies from being stolen or accessed by malicious scripts. -### How-to -**Edit the `web.xml` file **: -```shell -sudo nano /opt/tomcat/conf/web.xml -``` - -**Set the cookie properties**: -```xml - - - true - true - - -``` - -## 7. **Enable AccessLogValve for each virtual host** - -Configuring **AccessLogValve** for each virtual host in Tomcat allows you to log incoming requests for auditing and troubleshooting purposes. These logs provide valuable insights into who is accessing the server and can help detect potential malicious activity. - -- **Why it matters**: Provides detailed logs for auditing, forensics, and detecting suspicious activity. - -### How-to - -**Edit the `server.xml` file **: -```shell -sudo nano /opt/tomcat/conf/server.xml -``` - -**Change file permissions**: -```xml - - - -``` - -## 8. **Regularly patch Tomcat for security vulnerabilities** - -Tomcat, like any other software, can have security vulnerabilities. Regularly applying patches and updates ensures that your installation is protected against known threats. It’s crucial to stay up to date with the latest security patches to mitigate risks. - -- **Why it matters**: Fixes known vulnerabilities and prevents attackers from exploiting outdated software. - -## 9. **Use LockOutRealm to prevent brute-force attacks** - -The **LockOutRealm** helps prevent brute-force login attacks by locking out users after a set number of failed login attempts. Configuring this for admin accounts is particularly important. - -- **Why it matters**: Prevents attackers from brute-forcing admin credentials. -### How-to -**Edit the `server.xml` file **: -```shell -sudo nano /opt/tomcat/conf/server.xml -``` - -**Add `failureCount` et `lockOutTime`**: -```xml - - - -``` - -**Restart Tomcat**: -```shell -sudo nano /opt/tomcat/conf/server.xml -``` - -## 10. **Secure the keystore file** - -The keystore contains sensitive information, such as private keys used for SSL/TLS. Ensuring that the keystore file is properly protected by strict file permissions and password protection prevents unauthorized access to critical security credentials. - -- **Why it matters**: Protects sensitive private keys from unauthorized access or tampering. -### How-to -**Apply the modifications**: -```shell -sudo chmod 600 /path/to/keystore -sudo chown tomcat:tomcat /path/to/keystore -``` - -## 11. **Audit file changes to critical directories** - -- Changes to **$CATALINA_HOME/lib**, **$CATALINA_HOME/bin**, and **$CATALINA_BASE/conf** should be logged. Auditing these changes provides insight into potential tampering or unauthorized modifications to critical Tomcat files. -- **Why it matters**: Helps in detecting suspicious or unauthorized modifications that could compromise the server. 
- -### How-to -**Add the auditing rules**: -```shell -sudo apt install auditd -``` - -**Add the auditing rules**: -```shell -sudo auditctl -w /opt/tomcat/lib -p wa -k tomcat_lib_changes -sudo auditctl -w /opt/tomcat/bin -p wa -k tomcat_bin_changes -sudo auditctl -w /opt/tomcat/conf -p wa -k tomcat_conf_changes -sudo auditctl -l -``` - -**View logs**: -```shell -sudo ausearch -k tomcat_lib_changes -``` - -## 12. **Remove unnecessary applications and connectors** - -- **Remove folders**: Unused folders such as `documentation`, `examples`, `manager`, and `host-manager` should be deleted from the production environment to minimize the attack surface. -- **Remove unused connectors**: Disable and remove any unused connectors (e.g., HTTP connector) to reduce exposure to attacks. -- **Why it matters**: Minimizes the attack surface and reduces potential entry points for attackers. -### How-to -**Remove example apps and manager apps**: -```shell -sudo rm -rf /opt/tomcat/webapps/examples -sudo rm -rf /opt/tomcat/webapps/manager -sudo rm -rf /opt/tomcat/webapps/host-manager -``` - -**Disable unused connectors** in `server.xml`: -```shell -sudo nano /opt/tomcat/conf/server.xml -``` - -Comment out or remove unused connectors. -### 13. **Log client IP behind proxies or load balancers** - -When Tomcat is behind a proxy or load balancer, it’s important to log the client’s actual IP address, not the proxy’s IP. This helps identify the true origin of requests. - -- **Why it matters**: Enables accurate identification of the source of traffic, which is crucial for auditing and security monitoring. -### How-to -**Edit `server.xml` to add `RemoteIpValve` within the `` section:** -```shell -sudo nano /opt/tomcat/conf/server.xml -``` - -**Add the valve**: -```xml - -``` - -## 14. **Disable auto-deployment and stack tracing** - -- **AutoDeploy** allows dynamic deployment of new applications while Tomcat is running, which can lead to unauthorized changes or the introduction of vulnerabilities. Disabling it ensures that deployments are controlled and reviewed. -- **Disable stack tracing**: Disabling stack traces prevents the disclosure of internal application details during errors, protecting sensitive information. -- **Why it matters**: Ensures controlled deployments and prevents information leakage. -### How-to -**Edit `server`:** -```shell -sudo nano /opt/tomcat/conf/server.xml -``` - -**Modify the `` block to disable `autoDeploy` and `deployOnStartup`:** -```xml - -``` - -**Disable stack traces in error pages (`web.xml`):** -```xml - - java.lang.Throwable - /error.jsp - -``` - -## 15. **Harden the shutdown port** - -The shutdown port allows Tomcat to be stopped remotely. Disabling this port or restricting access ensures that attackers cannot shut down your server unexpectedly. - -- **Why it matters**: Prevents unauthorised shutdown of Tomcat, ensuring continuous availability. - -### How-to -**Edit `server.xml`:** -```shell -sudo nano /opt/tomcat/conf/server.xml -``` - -**Set the shutdown port to `-1` to disable it:** -```xml - -``` - ---- - -## **Additional Recommendations**: - -1. **Replace the ROOT web application**: It is a best practice to replace the default Tomcat ROOT web application with a custom one or remove it if not needed. The default ROOT application can expose unnecessary information. -2. **Use a Java Security Manager**: The Java Security Manager enforces restrictions on the permissions of Java code running within Tomcat. 
Enabling it adds an additional layer of security by limiting what code can do.
-3. **Set `failureCount` to 5 failed login attempts** for admin accounts: This limits the number of attempts to brute-force credentials, protecting admin accounts from attacks.
-## Sources
-https://www.stigviewer.com/stig/apache_tomcat_application_server_9/
-https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-131Ar2.pdf
-https://tomcat.apache.org/tomcat-9.0-doc/security-howto.html
-https://www.openlogic.com/blog/apache-tomcat-security-best-practices
-https://knowledge.broadcom.com/external/article/226769/enable-http-strict-transport-security-hs.html
\ No newline at end of file
+---
+title: Cybersecurity risk mitigations (Linux)
+---
+
+# Security recommendations for Tomcat 9 deployments (Linux)
+
+## Priority Summary
+
+1. Run Tomcat as a non-root user and enforce strict file permissions.
+2. Add TLS 1.2/1.3 with FIPS-validated ciphers and HSTS.
+3. Harden cookie security (HttpOnly and Secure flags).
+4. Enable AccessLogValve and log client IPs behind proxies.
+5. Patch Tomcat regularly and enable LockOutRealm.
+6. Remove unnecessary applications and connectors and harden the shutdown port.
+7. Disable auto-deployment and stack tracing.
+8. Audit file changes and protect the keystore.
+
+## 1. Run Tomcat as a non-root user
+
+Running Tomcat as a non-root user is a critical security practice. If Tomcat were compromised while running as root, an attacker would gain full control of the server. By running it under a limited, non-root user, the potential damage is restricted to the Tomcat environment, reducing the risk to the overall system.
+
+- **Why it matters**: Minimizes privileges and limits the damage an attacker can inflict if they gain access to Tomcat.
+
+**How-to**
+
+1. Create a group and a user for Tomcat (the group must exist before `useradd -g` can reference it):
+```shell
+sudo groupadd tomcat
+sudo useradd -s /sbin/nologin -g tomcat -d $CATALINA_BASE tomcat
+```
+
+2. Set the ownership:
+```shell
+sudo chown -R tomcat:tomcat $CATALINA_BASE
+```
+
+3. Run Tomcat under the `tomcat` user:
+
+ * Open `tomcat.service` (assuming Tomcat is managed by `systemd`):
+```shell
+sudo nano /etc/systemd/system/tomcat.service
+```
+
+ * Ensure the following lines are set so that Tomcat runs as the `tomcat` user:
+```
+[Unit]
+Description=Apache Tomcat Web Application Container
+After=syslog.target network.target
+
+[Service]
+Type=forking
+User=tomcat
+Group=tomcat
+
+ReadWritePaths=/var/log/extract/
+
+Environment=JAVA_HOME=/usr/lib/jvm/jdk-17.0.12-oracle-x64
+Environment=CATALINA_PID=/opt/apache-tomcat-9.0.95/temp/tomcat.pid
+Environment=CATALINA_HOME=/opt/apache-tomcat-9.0.95
+Environment=CATALINA_BASE=/opt/apache-tomcat-9.0.95
+Environment=CATALINA_OPTS=
+Environment="JAVA_OPTS=-Dfile.encoding=UTF-8 -Dnet.sf.ehcache.skipUpdateCheck=true -Xms2g -Xmx4g"
+
+ExecStart=/opt/apache-tomcat-9.0.95/bin/startup.sh
+ExecStop=/opt/apache-tomcat-9.0.95/bin/shutdown.sh
+
+[Install]
+WantedBy=multi-user.target
+```
+
+ The `ReadWritePaths=/var/log/extract/` line is mandatory because of the sandboxing imposed by the `systemd` service manager: without it, the service cannot write the Extract log files.
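+
+ If Extract writes to other directories outside the Tomcat tree (for instance the storage path configured in the application settings; the path below is only an example to adapt), each of them needs its own `ReadWritePaths` entry:
+```
+ReadWritePaths=/var/log/extract/
+ReadWritePaths=/srv/extract/storage/
+```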
+
+ * Reload `systemd` so that it discovers and loads the new Tomcat service file:
+```shell
+sudo systemctl daemon-reload
+```
+
+ * Enable the service to start at boot:
+```shell
+sudo systemctl enable tomcat.service
+```
+
+ To control the service:
+```shell
+sudo service tomcat [start | stop | restart | status]
+```
+
+ Or with `systemd` directly:
+```shell
+sudo systemctl [start | stop | restart | status] tomcat
+```
+
+ * Finally, restart Tomcat so that it now runs under the `tomcat` account:
+```shell
+sudo systemctl restart tomcat
+```
+
+## 2. Set proper file permissions
+
+- **$CATALINA_HOME folder** must be owned by the root user, group `tomcat`.
+- **$CATALINA_HOME/conf folder** must be owned by the root user, group `tomcat`.
+- **$CATALINA_BASE/logs folder** must be owned by the `tomcat` user, group `tomcat`.
+- **$CATALINA_BASE/work folder** must be owned by the `tomcat` user, group `tomcat`.
+- Permissions:
+    - **$CATALINA_BASE/logs**: `750`
+    - **$CATALINA_HOME/bin**: `750`
+    - Files in **$CATALINA_BASE/logs**: `640`
+    - JAR files in **$CATALINA_HOME/bin**: `640`
+
+Proper file permissions ensure that only authorized users can access or modify critical Tomcat files and directories. Limiting access to configuration files (e.g., `server.xml`) to root, while allowing Tomcat-specific logs and runtime files to be owned by the `tomcat` user, strengthens security by preventing unauthorized changes.
+
+- **Why it matters**: Prevents unauthorized access or modification of sensitive configuration files and logs.
+
+**How-to**
+
+* Change the ownership:
+```shell
+sudo chown -R root:tomcat $CATALINA_HOME
+sudo chown -R tomcat:tomcat $CATALINA_HOME/{logs,temp,webapps,work}
+```
+
+* Change the file permissions:
+```shell
+sudo chmod -R 750 $CATALINA_HOME/bin
+sudo chmod -R 750 $CATALINA_HOME/logs
+sudo find $CATALINA_HOME/logs -type f -exec chmod 640 {} \;
+sudo find $CATALINA_HOME/bin -type f -name "*.jar" -exec chmod 640 {} \;
+```
+
+## 3. Add TLS v1.2 or v1.3 with FIPS-validated ciphers
+
+TLS (Transport Layer Security) is essential for encrypting data transmitted between clients and servers. By enforcing TLS 1.2 or 1.3 with strong, FIPS-validated ciphers, you ensure secure communication that meets strict compliance standards.
+
+- **Why it matters**: Encrypts sensitive data in transit and complies with modern security standards like FIPS 140-2.
+
+**How-to**
+
+* Edit the `server.xml` file:
+```shell
+sudo nano /opt/tomcat/conf/server.xml
+```
+
+* Add or modify the HTTPS connector (the keystore path and password are examples to adapt):
+```xml
+<Connector port="8443" protocol="org.apache.coyote.http11.Http11NioProtocol"
+           maxThreads="150" SSLEnabled="true">
+    <SSLHostConfig protocols="TLSv1.2,TLSv1.3"
+                   ciphers="TLS_AES_128_GCM_SHA256, TLS_AES_256_GCM_SHA384, TLS_CHACHA20_POLY1305_SHA256">
+        <Certificate certificateKeystoreFile="conf/keystore.jks"
+                     certificateKeystorePassword="changeit"
+                     type="RSA" />
+    </SSLHostConfig>
+</Connector>
+```
+
+* Restart Tomcat:
+```shell
+sudo systemctl restart tomcat
+```
+
+## 4. Add HTTP Strict Transport Security (HSTS)
+
+HSTS instructs browsers to only interact with your server using HTTPS. It prevents downgrade attacks where an attacker could force a user to connect via HTTP. Once a browser receives the HSTS header, it will automatically convert all future requests to HTTPS, ensuring secure communication.
+
+- **Why it matters**: Protects against protocol downgrade attacks and ensures that all communications use HTTPS.
+
+**How-to**
+
+* Modify the `web.xml` file in the default web app:
+```shell
+sudo nano /opt/tomcat/conf/web.xml
+```
+
+* Change the filter values:
+```xml
+<filter>
+    <filter-name>HSTSFilter</filter-name>
+    <filter-class>org.apache.catalina.filters.HttpHeaderSecurityFilter</filter-class>
+    <init-param>
+        <param-name>hstsEnabled</param-name>
+        <param-value>true</param-value>
+    </init-param>
+    <init-param>
+        <param-name>hstsMaxAgeSeconds</param-name>
+        <param-value>31536000</param-value>
+    </init-param>
+</filter>
+<filter-mapping>
+    <filter-name>HSTSFilter</filter-name>
+    <url-pattern>/*</url-pattern>
+</filter-mapping>
+```
+
+* Restart Tomcat:
+```shell
+sudo systemctl restart tomcat
+```
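+
+Once Tomcat is back up, a quick way to confirm that the header is actually served (the hostname is a placeholder):
+```shell
+curl -sI https://extract.example.org/ | grep -i strict-transport-security
+```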
+
+## 5. Disable X-Powered-By Header
+
+By default, Tomcat may expose the `X-Powered-By` header, which discloses information about the server and its version. Disabling this prevents attackers from easily identifying the version of Tomcat being used, reducing the risk of targeted attacks based on known vulnerabilities.
+
+- **Why it matters**: Hides unnecessary information from potential attackers and reduces the risk of exploit-based attacks.
+
+**How-to**
+
+* Edit the `server.xml` file:
+```shell
+sudo nano /opt/tomcat/conf/server.xml
+```
+
+* Make sure `xpoweredBy` is absent or set to `false` on every connector (keep your other connector attributes as they are):
+```xml
+<Connector port="8443" protocol="org.apache.coyote.http11.Http11NioProtocol"
+           SSLEnabled="true" xpoweredBy="false" />
+```
+
+* Restart Tomcat:
+```shell
+sudo systemctl restart tomcat
+```
+
+## 6. Harden Cookie Security (HttpOnly & Secure flags)
+
+- **HttpOnly flag**: Ensures that cookies cannot be accessed via JavaScript, protecting them from cross-site scripting (XSS) attacks.
+- **Secure flag**: Ensures that cookies are only transmitted over HTTPS, preventing them from being exposed during plaintext transmission.
+- **Why it matters**: Protects session cookies from being stolen or accessed by malicious scripts.
+
+**How-to**
+
+* Edit the `web.xml` file:
+```shell
+sudo nano /opt/tomcat/conf/web.xml
+```
+
+* Set the cookie properties:
+```xml
+<session-config>
+    <cookie-config>
+        <http-only>true</http-only>
+        <secure>true</secure>
+    </cookie-config>
+</session-config>
+```
+
+## 7. Enable AccessLogValve for each virtual host
+
+Configuring **AccessLogValve** for each virtual host in Tomcat allows you to log incoming requests for auditing and troubleshooting purposes. These logs provide valuable insights into who is accessing the server and can help detect potential malicious activity.
+
+- **Why it matters**: Provides detailed logs for auditing, forensics, and detecting suspicious activity.
+
+**How-to**
+
+* Edit the `server.xml` file:
+```shell
+sudo nano /opt/tomcat/conf/server.xml
+```
+
+* Add an `AccessLogValve` inside each `<Host>` element:
+```xml
+<Valve className="org.apache.catalina.valves.AccessLogValve" directory="logs"
+       prefix="localhost_access_log" suffix=".txt"
+       pattern="%h %l %u %t &quot;%r&quot; %s %b" />
+```
+
+## 8. Regularly patch Tomcat for security vulnerabilities
+
+Tomcat, like any other software, can have security vulnerabilities. Regularly applying patches and updates ensures that your installation is protected against known threats. It’s crucial to stay up to date with the latest security patches to mitigate risks.
+
+- **Why it matters**: Fixes known vulnerabilities and prevents attackers from exploiting outdated software.
+
+## 9. Use LockOutRealm to prevent brute-force attacks
+
+The **LockOutRealm** helps prevent brute-force login attacks by locking out users after a set number of failed login attempts. Configuring this for admin accounts is particularly important.
+
+- **Why it matters**: Prevents attackers from brute-forcing admin credentials.
+
+**How-to**
+
+* Edit the `server.xml` file:
+```shell
+sudo nano /opt/tomcat/conf/server.xml
+```
+
+* Add `failureCount` and `lockOutTime` to the `LockOutRealm`:
+```xml
+<Realm className="org.apache.catalina.realm.LockOutRealm"
+       failureCount="5" lockOutTime="300">
+    <Realm className="org.apache.catalina.realm.UserDatabaseRealm"
+           resourceName="UserDatabase" />
+</Realm>
+```
+
+* Restart Tomcat:
+```shell
+sudo systemctl restart tomcat
+```
+
+## 10. Secure the keystore file
+
+The keystore contains sensitive information, such as private keys used for SSL/TLS. Ensuring that the keystore file is properly protected by strict file permissions and password protection prevents unauthorized access to critical security credentials.
+
+- **Why it matters**: Protects sensitive private keys from unauthorized access or tampering.
+
+**How-to**
+
+* Apply the modifications:
+```shell
+sudo chmod 600 /path/to/keystore
+sudo chown tomcat:tomcat /path/to/keystore
+```
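+
+To confirm that the tightened permissions still let the service account read the keystore, and that its password is still required, a quick check (the path is a placeholder):
+```shell
+sudo -u tomcat keytool -list -keystore /path/to/keystore
+```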
+
+## 11. Audit file changes to critical directories
+
+Changes to **$CATALINA_HOME/lib**, **$CATALINA_HOME/bin**, and **$CATALINA_BASE/conf** should be logged. Auditing these changes provides insight into potential tampering or unauthorized modifications to critical Tomcat files.
+
+- **Why it matters**: Helps in detecting suspicious or unauthorized modifications that could compromise the server.
+
+**How-to**
+
+* Install `auditd`:
+```shell
+sudo apt install auditd
+```
+
+* Add the auditing rules:
+```shell
+sudo auditctl -w /opt/tomcat/lib -p wa -k tomcat_lib_changes
+sudo auditctl -w /opt/tomcat/bin -p wa -k tomcat_bin_changes
+sudo auditctl -w /opt/tomcat/conf -p wa -k tomcat_conf_changes
+sudo auditctl -l
+```
+
+* View the logs:
+```shell
+sudo ausearch -k tomcat_lib_changes
+```
+
+## 12. Remove unnecessary applications and connectors
+
+- **Remove folders**: Unused folders such as `documentation`, `examples`, `manager`, and `host-manager` should be deleted from the production environment to minimize the attack surface.
+- **Remove unused connectors**: Disable and remove any unused connectors (e.g., the plain HTTP connector) to reduce exposure to attacks.
+- **Why it matters**: Minimizes the attack surface and reduces potential entry points for attackers.
+
+**How-to**
+
+* Remove the example and manager apps:
+```shell
+sudo rm -rf /opt/tomcat/webapps/examples
+sudo rm -rf /opt/tomcat/webapps/manager
+sudo rm -rf /opt/tomcat/webapps/host-manager
+```
+
+* Disable unused connectors in `server.xml`:
+```shell
+sudo nano /opt/tomcat/conf/server.xml
+```
+
+* Comment out or remove unused connectors.
+
+## 13. Log client IP behind proxies or load balancers
+
+When Tomcat is behind a proxy or load balancer, it’s important to log the client’s actual IP address, not the proxy’s IP. This helps identify the true origin of requests.
+
+- **Why it matters**: Enables accurate identification of the source of traffic, which is crucial for auditing and security monitoring.
+
+**How-to**
+
+* Edit `server.xml` to add a `RemoteIpValve` within the `<Host>` section:
+```shell
+sudo nano /opt/tomcat/conf/server.xml
+```
+
+* Add the valve:
+```xml
+<Valve className="org.apache.catalina.valves.RemoteIpValve"
+       remoteIpHeader="X-Forwarded-For"
+       protocolHeader="X-Forwarded-Proto" />
+```
+
+## 14. Disable auto-deployment and stack tracing
+
+- **AutoDeploy** allows dynamic deployment of new applications while Tomcat is running, which can lead to unauthorized changes or the introduction of vulnerabilities. Disabling it ensures that deployments are controlled and reviewed.
+- **Disable stack tracing**: Disabling stack traces prevents the disclosure of internal application details during errors, protecting sensitive information.
+- **Why it matters**: Ensures controlled deployments and prevents information leakage.
+
+**How-to**
+
+* Edit `server.xml`:
+```shell
+sudo nano /opt/tomcat/conf/server.xml
+```
+
+* Modify the `<Host>` block to disable `autoDeploy` and `deployOnStartup`:
+```xml
+<Host name="localhost" appBase="webapps"
+      unpackWARs="true" autoDeploy="false" deployOnStartup="false">
+```
+
+* Disable stack traces in error pages (`web.xml`):
+```xml
+<error-page>
+    <exception-type>java.lang.Throwable</exception-type>
+    <location>/error.jsp</location>
+</error-page>
+```
+
+## 15. Harden the shutdown port
+
+The shutdown port allows Tomcat to be stopped remotely. Disabling this port or restricting access ensures that attackers cannot shut down your server unexpectedly.
+
+- **Why it matters**: Prevents unauthorised shutdown of Tomcat, ensuring continuous availability.
+
+**How-to**
+
+* Edit `server.xml`:
+```shell
+sudo nano /opt/tomcat/conf/server.xml
+```
+
+* Set the shutdown port to `-1` to disable it:
+```xml
+<Server port="-1" shutdown="SHUTDOWN">
+```
+
+---
+
+## Additional Recommendations
+
+1. **Replace the ROOT web application**: It is a best practice to replace the default Tomcat ROOT web application with a custom one or remove it if not needed. The default ROOT application can expose unnecessary information.
+2. 
**Use a Java Security Manager**: The Java Security Manager enforces restrictions on the permissions of Java code running within Tomcat. Enabling it adds an additional layer of security by limiting what code can do. +3. **Set `failureCount` to 5 failed login attempts** for admin accounts: This limits the number of attempts to brute-force credentials, protecting admin accounts from attacks. + +## Sources +* https://www.stigviewer.com/stig/apache_tomcat_application_server_9/ +* https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-131Ar2.pdf +* https://tomcat.apache.org/tomcat-9.0-doc/security-howto.html +* https://www.openlogic.com/blog/apache-tomcat-security-best-practices +* https://knowledge.broadcom.com/external/article/226769/enable-http-strict-transport-security-hs.html* \ No newline at end of file diff --git a/doc/mitigations/Security recommendations for Tomcat 9 deployments (Win).md b/docs/getting-started/cybersecurity-win.md similarity index 81% rename from doc/mitigations/Security recommendations for Tomcat 9 deployments (Win).md rename to docs/getting-started/cybersecurity-win.md index 1c37f166..c12de38b 100644 --- a/doc/mitigations/Security recommendations for Tomcat 9 deployments (Win).md +++ b/docs/getting-started/cybersecurity-win.md @@ -1,242 +1,270 @@ -# Security recommendations for Tomcat 9 deployments (Windows 10/11) -## **Priority Summary**: - -1. Run Tomcat as a non-administrative user and enforce strict file permissions. -2. Add TLS 1.2/1.3 with FIPS-validated ciphers and HSTS. -3. Harden cookie security (HttpOnly and Secure flags). -4. Enable AccessLogValve and log client IPs behind proxies. -5. Patch Tomcat regularly and enable LockOutRealm. -6. Remove unnecessary applications and connectors and harden the shutdown port. -7. Disable auto-deployment and stack tracing. -8. Audit file changes and protect the keystore. - -The rest of this guide assumes that you installed Tomcat9 as a service (https://tomcat.apache.org/download-90.cgi). - -## 1. **Run Tomcat as a Non-Administrative User** - -Running Tomcat as a non-administrative user on Windows limits the damage that can occur if the Tomcat instance is compromised. By default, Tomcat runs as a system service on Windows. You should configure it to run under a less privileged user account. - -- **Why it matters**: Minimizes privileges and limits the damage an attacker can inflict if they gain access to Tomcat. - -### How-to - -1. Create a new local user called `Tomcat` with limited privileges. -2. Ensure that the user can `Log as a Service` (Local Security Policy > Logal Policies > User Rights Assignment > Log on as a service) -2. Open the **Services** management console (`services.msc`). -3. Locate the `Apache Tomcat` service. -4. Right-click on the service and select **Properties**. -5. Go to the **Log On** tab. -6. Select **This account**, then enter the `Tomcat` account credentials. -7. Restart the service. - -## 2. **Set proper file permissions** - -Proper file permissions ensure only authorized users can access or modify critical Tomcat files. On Windows, this involves setting file permissions using the Security tab in the file properties. - -- **Why it matters**: Prevents unauthorized access or modification of Tomcat’s critical configuration files. - -### How-to - -1. Navigate to the Tomcat installation directory (`%CATALINA_HOME%`), typically C:\Program Files\Apache Software Foundation\Tomcat 9.0. -2. Right-click on the conf, bin, logs, webapps, and temp folders and select Properties. -3. 
Go to the Security tab and assign the following permissions: - - TomcatUser: Full Control for logs, temp, work, and webapps. - - Administrators: Full Control for conf and bin. - - Others: Read and execute only. - -- **Why it matters**: Prevents unauthorized access or modification of sensitive configuration files and logs. - -## 3. **Add TLS v1.2 or v1.3 with FIPS-validated ciphers** - -TLS (Transport Layer Security) is essential for encrypting data transmitted between clients and servers. By enforcing TLS 1.2 or 1.3 with strong, FIPS-validated ciphers, you ensure secure communication that meets strict compliance standards. - -1. Open the server.xml file located in %CATALINA_HOME%\conf\server.xml. -2. Add or modify the SSL connector: -```xml - - - - - - - TLS_AES_128_GCM_SHA256, TLS_AES_256_GCM_SHA384, TLS_CHACHA20_POLY1305_SHA256 - - - -``` -3. Restart the service - -## 4. **Add HTTP Strict Transport Security (HSTS)** - -HSTS instructs browsers to only interact with your server using HTTPS. It prevents downgrade attacks where an attacker could force a user to connect via HTTP. Once a browser receives the HSTS header, it will automatically convert all future requests to HTTPS, ensuring secure communication. - -- **Why it matters**: Protects against protocol downgrade attacks and ensures that all communications use HTTPS. -### How-to -**Open the `web.xml` file in `%CATALINA_HOME%\conf\web.xml`**: - -**Change the filter values**: -```xml - - HSTSFilter - org.apache.catalina.filters.HttpHeaderSecurityFilter - - hstsEnabled - true - - - hstsMaxAgeSeconds - 31536000 - - - - HSTSFilter - /* - -``` - -Restart the `Tomcat` service - -## 5. **Disable X-Powered-By Header** - -By default, Tomcat may expose the `X-Powered-By` header, which discloses information about the server and its version. Disabling this prevents attackers from easily identifying the version of Tomcat being used, reducing the risk of targeted attacks based on known vulnerabilities. - -- **Why it matters**: Hides unnecessary information from potential attackers and reduces the risk of exploit-based attacks. -### How-to -**Open the `server.xml` file in `%CATALINA_HOME%\conf\server.xml`**: - - -**Change file permissions**: -```xml - -``` - -And restart the `Tomcat` service. - -## 6. **Harden Cookie Security (HttpOnly & Secure flags)** - -- **HttpOnly flag**: Ensures that cookies cannot be accessed via JavaScript, protecting them from cross-site scripting (XSS) attacks. -- **Secure flag**: Ensures that cookies are only transmitted over HTTPS, preventing them from being exposed during plaintext transmission. -- **Why it matters**: Protects session cookies from being stolen or accessed by malicious scripts. -### How-to -1. Open `web.xml` in `%CATALINA_HOME%\conf\web.xml`. - -2. **Set the cookie properties**: -```xml - - - true - true - - -``` - -## 7. **Enable AccessLogValve for each virtual host** - -Configuring **AccessLogValve** for each virtual host in Tomcat allows you to log incoming requests for auditing and troubleshooting purposes. These logs provide valuable insights into who is accessing the server and can help detect potential malicious activity. - -- **Why it matters**: Provides detailed logs for auditing, forensics, and detecting suspicious activity. - -### How-to - -1. Open `server.xml` in `%CATALINA_HOME%\conf\server.xml`. - -2. **Change file permissions**: -```xml - - - -``` - -## 8. **Regularly patch Tomcat for security vulnerabilities** - -Tomcat, like any other software, can have security vulnerabilities. 
Regularly applying patches and updates ensures that your installation is protected against known threats. It’s crucial to stay up to date with the latest security patches to mitigate risks. - -- **Why it matters**: Fixes known vulnerabilities and prevents attackers from exploiting outdated software. - -## 9. **Use LockOutRealm to prevent brute-force attacks** - -The **LockOutRealm** helps prevent brute-force login attacks by locking out users after a set number of failed login attempts. Configuring this for admin accounts is particularly important. - -- **Why it matters**: Prevents attackers from brute-forcing admin credentials. -### How-to -1. Open `server.xml` in `%CATALINA_HOME%\conf\server.xml`. -2. **Add `failureCount` et `lockOutTime`**: -```xml - - - -``` -3. Restart the `Tomcat` service - -## 10. **Secure the keystore file** - -The keystore contains sensitive information, such as private keys used for SSL/TLS. Ensuring that the keystore file is properly protected by strict file permissions and password protection prevents unauthorized access to critical security credentials. - -- **Why it matters**: Protects sensitive private keys from unauthorized access or tampering. -### How-to -1. Right-click on the keystore file and select **Properties**. -2. Go to the **Security** tab. -3. Ensure only the `Tomcat` user has access, and set the permissions to **Read** and **Execute**. - -## 11. **Disable Auto-Deployment and Stack Tracing** - -Auto-deployment introduces security risks by allowing new applications to be deployed without manual intervention. Disabling stack traces prevents attackers from seeing sensitive system information. -- **Why it matters**: Prevents unauthorized deployments and hides sensitive details during errors. - -### How-to -1. Open `server.xml` in `%CATALINA_HOME%\conf\server.xml`. -2. Modify the `` block: -**Add the auditing rules**: -```xml - -``` -3. To disable stack traces, open `web.xml` in `%CATALINA_HOME%\conf\web.xml` and add: -```xml - - java.lang.Throwable - /error.jsp - -``` - -## 12. **Remove unnecessary applications and connectors** - -- **Remove folders**: Unused folders such as `documentation`, `examples`, `manager`, and `host-manager` should be deleted from the production environment to minimize the attack surface. -- **Remove unused connectors**: Disable and remove any unused connectors (e.g., HTTP connector) to reduce exposure to attacks. -- **Why it matters**: Minimizes the attack surface and reduces potential entry points for attackers. -- **Disable unused connectors** in `server.xml` - -## 13. **Harden the shutdown port** - -The shutdown port allows Tomcat to be stopped remotely. Disabling this port or restricting access ensures that attackers cannot shut down your server unexpectedly. - -- **Why it matters**: Prevents unauthorised shutdown of Tomcat, ensuring continuous availability. - -### How-to -1. Open `server.xml` in `%CATALINA_HOME%\conf\server.xml`. -2. **Set the shutdown port to `-1` to disable it:** -```xml - - -``` -You can also set the port to `-1` during the installation of the service. - ---- - -## **Additional Recommendations**: - -1. **Replace the ROOT web application**: It is a best practice to replace the default Tomcat ROOT web application with a custom one or remove it if not needed. The default ROOT application can expose unnecessary information. -2. **Use a Java Security Manager**: The Java Security Manager enforces restrictions on the permissions of Java code running within Tomcat. 
Enabling it adds an additional layer of security by limiting what code can do.
-3. **Set `failureCount` to 5 failed login attempts** for admin accounts: This limits the number of attempts to brute-force credentials, protecting admin accounts from attacks.
-## Sources
-https://www.stigviewer.com/stig/apache_tomcat_application_server_9/
-https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-131Ar2.pdf
-https://tomcat.apache.org/tomcat-9.0-doc/security-howto.html
-https://www.openlogic.com/blog/apache-tomcat-security-best-practices
-https://knowledge.broadcom.com/external/article/226769/enable-http-strict-transport-security-hs.html
\ No newline at end of file
+---
+title: Cybersecurity risk mitigations (Windows)
+---
+
+# Security recommendations for Tomcat 9 deployments (Windows 10/11)
+
+## Priority Summary
+
+1. Run Tomcat as a non-administrative user and enforce strict file permissions.
+2. Add TLS 1.2/1.3 with FIPS-validated ciphers and HSTS.
+3. Harden cookie security (HttpOnly and Secure flags).
+4. Enable AccessLogValve and log client IPs behind proxies.
+5. Patch Tomcat regularly and enable LockOutRealm.
+6. Remove unnecessary applications and connectors and harden the shutdown port.
+7. Disable auto-deployment and stack tracing.
+8. Audit file changes and protect the keystore.
+
+The rest of this guide assumes that you installed Tomcat 9 as a service (https://tomcat.apache.org/download-90.cgi).
+
+## 1. Run Tomcat as a Non-Administrative User
+
+Running Tomcat as a non-administrative user on Windows limits the damage that can occur if the Tomcat instance is compromised. By default, Tomcat runs as a system service on Windows. You should configure it to run under a less privileged user account.
+
+- **Why it matters**: Minimizes privileges and limits the damage an attacker can inflict if they gain access to Tomcat.
+
+**How-to**
+
+1. Create a new local user called `Tomcat` with limited privileges.
+2. Ensure that the user has the `Log on as a service` right (Local Security Policy > Local Policies > User Rights Assignment > Log on as a service).
+3. Open the **Services** management console (`services.msc`).
+4. Locate the `Apache Tomcat` service.
+5. Right-click on the service and select **Properties**.
+6. Go to the **Log On** tab.
+7. Select **This account**, then enter the `Tomcat` account credentials.
+8. Restart the service.
+
+## 2. Set proper file permissions
+
+Proper file permissions ensure only authorized users can access or modify critical Tomcat files. On Windows, this involves setting file permissions using the Security tab in the file properties.
+
+- **Why it matters**: Prevents unauthorized access or modification of Tomcat’s critical configuration files and logs.
+
+**How-to**
+
+1. Navigate to the Tomcat installation directory (`%CATALINA_HOME%`), typically ``C:\Program Files\Apache Software Foundation\Tomcat 9.0``.
+2. Right-click on the `conf`, `bin`, `logs`, `webapps`, and `temp` folders and select Properties.
+3. Go to the Security tab and assign the following permissions:
+    - `Tomcat` user: Full Control for logs, temp, work, and webapps.
+    - Administrators: Full Control for conf and bin.
+    - Others: Read and execute only.
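+
+The same ACLs can be scripted from an elevated prompt with `icacls` instead of the GUI; a sketch, assuming the default installation path and the local `Tomcat` account:
+```shell
+icacls "C:\Program Files\Apache Software Foundation\Tomcat 9.0\logs" /grant "Tomcat:(OI)(CI)F"
+icacls "C:\Program Files\Apache Software Foundation\Tomcat 9.0\webapps" /grant "Tomcat:(OI)(CI)F"
+```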
+
+## 3. Add TLS v1.2 or v1.3 with FIPS-validated ciphers
+
+TLS (Transport Layer Security) is essential for encrypting data transmitted between clients and servers. By enforcing TLS 1.2 or 1.3 with strong, FIPS-validated ciphers, you ensure secure communication that meets strict compliance standards.
+
+**How-to**
+
+1. Open the ``server.xml`` file located in ``%CATALINA_HOME%\conf\server.xml``.
+2. Add or modify the SSL connector (the keystore path and password are examples to adapt):
+```xml
+<Connector port="8443" protocol="org.apache.coyote.http11.Http11NioProtocol"
+           maxThreads="150" SSLEnabled="true">
+    <SSLHostConfig protocols="TLSv1.2,TLSv1.3"
+                   ciphers="TLS_AES_128_GCM_SHA256, TLS_AES_256_GCM_SHA384, TLS_CHACHA20_POLY1305_SHA256">
+        <Certificate certificateKeystoreFile="conf/keystore.jks"
+                     certificateKeystorePassword="changeit"
+                     type="RSA" />
+    </SSLHostConfig>
+</Connector>
+```
+3. Restart the service.
+
+## 4. Add HTTP Strict Transport Security (HSTS)
+
+HSTS instructs browsers to only interact with your server using HTTPS. It prevents downgrade attacks where an attacker could force a user to connect via HTTP. Once a browser receives the HSTS header, it will automatically convert all future requests to HTTPS, ensuring secure communication.
+
+- **Why it matters**: Protects against protocol downgrade attacks and ensures that all communications use HTTPS.
+
+**How-to**
+
+1. Open the `web.xml` file in `%CATALINA_HOME%\conf\web.xml`.
+2. Change the filter values:
+
+```xml
+<filter>
+    <filter-name>HSTSFilter</filter-name>
+    <filter-class>org.apache.catalina.filters.HttpHeaderSecurityFilter</filter-class>
+    <init-param>
+        <param-name>hstsEnabled</param-name>
+        <param-value>true</param-value>
+    </init-param>
+    <init-param>
+        <param-name>hstsMaxAgeSeconds</param-name>
+        <param-value>31536000</param-value>
+    </init-param>
+</filter>
+<filter-mapping>
+    <filter-name>HSTSFilter</filter-name>
+    <url-pattern>/*</url-pattern>
+</filter-mapping>
+```
+
+Restart the `Tomcat` service.
+
+## 5. Disable X-Powered-By Header
+
+By default, Tomcat may expose the `X-Powered-By` header, which discloses information about the server and its version. Disabling this prevents attackers from easily identifying the version of Tomcat being used, reducing the risk of targeted attacks based on known vulnerabilities.
+
+- **Why it matters**: Hides unnecessary information from potential attackers and reduces the risk of exploit-based attacks.
+
+**How-to**
+
+1. Open the `server.xml` file in `%CATALINA_HOME%\conf\server.xml`.
+2. Make sure `xpoweredBy` is absent or set to `false` on every connector (keep your other connector attributes as they are):
+
+```xml
+<Connector port="8443" protocol="org.apache.coyote.http11.Http11NioProtocol"
+           SSLEnabled="true" xpoweredBy="false" />
+```
+
+And restart the `Tomcat` service.
+
+## 6. Harden Cookie Security (HttpOnly & Secure flags)
+
+- **HttpOnly flag**: Ensures that cookies cannot be accessed via JavaScript, protecting them from cross-site scripting (XSS) attacks.
+- **Secure flag**: Ensures that cookies are only transmitted over HTTPS, preventing them from being exposed during plaintext transmission.
+- **Why it matters**: Protects session cookies from being stolen or accessed by malicious scripts.
+
+**How-to**
+
+1. Open `web.xml` in `%CATALINA_HOME%\conf\web.xml`.
+2. Set the cookie properties:
+
+```xml
+<session-config>
+    <cookie-config>
+        <http-only>true</http-only>
+        <secure>true</secure>
+    </cookie-config>
+</session-config>
+```
+
+## 7. Enable AccessLogValve for each virtual host
+
+Configuring **AccessLogValve** for each virtual host in Tomcat allows you to log incoming requests for auditing and troubleshooting purposes. These logs provide valuable insights into who is accessing the server and can help detect potential malicious activity.
+
+- **Why it matters**: Provides detailed logs for auditing, forensics, and detecting suspicious activity.
+
+**How-to**
+
+1. Open `server.xml` in `%CATALINA_HOME%\conf\server.xml`.
+2. Add an `AccessLogValve` inside each `<Host>` element:
+
+```xml
+<Valve className="org.apache.catalina.valves.AccessLogValve" directory="logs"
+       prefix="localhost_access_log" suffix=".txt"
+       pattern="%h %l %u %t &quot;%r&quot; %s %b" />
+```
+
+## 8. Regularly patch Tomcat for security vulnerabilities
+
+Tomcat, like any other software, can have security vulnerabilities. Regularly applying patches and updates ensures that your installation is protected against known threats. It’s crucial to stay up to date with the latest security patches to mitigate risks.
+
+- **Why it matters**: Fixes known vulnerabilities and prevents attackers from exploiting outdated software.
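+
+To see which version is currently installed, compare the output of the `version.bat` script (it ships with Tomcat) with the latest 9.0.x release:
+```shell
+"%CATALINA_HOME%\bin\version.bat"
+```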
+
+## 9. Use LockOutRealm to prevent brute-force attacks
+
+The **LockOutRealm** helps prevent brute-force login attacks by locking out users after a set number of failed login attempts. Configuring this for admin accounts is particularly important.
+
+- **Why it matters**: Prevents attackers from brute-forcing admin credentials.
+
+**How-to**
+
+1. Open `server.xml` in `%CATALINA_HOME%\conf\server.xml`.
+2. Add `failureCount` and `lockOutTime` to the `LockOutRealm`:
+
+```xml
+<Realm className="org.apache.catalina.realm.LockOutRealm"
+       failureCount="5" lockOutTime="300">
+    <Realm className="org.apache.catalina.realm.UserDatabaseRealm"
+           resourceName="UserDatabase" />
+</Realm>
+```
+3. Restart the `Tomcat` service.
+
+## 10. Secure the keystore file
+
+The keystore contains sensitive information, such as private keys used for SSL/TLS. Ensuring that the keystore file is properly protected by strict file permissions and password protection prevents unauthorized access to critical security credentials.
+
+- **Why it matters**: Protects sensitive private keys from unauthorized access or tampering.
+
+**How-to**
+
+1. Right-click on the keystore file and select **Properties**.
+2. Go to the **Security** tab.
+3. Ensure only the `Tomcat` user has access, and set the permissions to **Read** and **Execute**.
+
+## 11. Disable Auto-Deployment and Stack Tracing
+
+Auto-deployment introduces security risks by allowing new applications to be deployed without manual intervention. Disabling stack traces prevents attackers from seeing sensitive system information.
+
+- **Why it matters**: Prevents unauthorized deployments and hides sensitive details during errors.
+
+**How-to**
+
+1. Open `server.xml` in `%CATALINA_HOME%\conf\server.xml`.
+2. Modify the `<Host>` block to disable `autoDeploy` and `deployOnStartup`:
+
+```xml
+<Host name="localhost" appBase="webapps"
+      unpackWARs="true" autoDeploy="false" deployOnStartup="false">
+```
+3. To disable stack traces, open `web.xml` in `%CATALINA_HOME%\conf\web.xml` and add:
+```xml
+<error-page>
+    <exception-type>java.lang.Throwable</exception-type>
+    <location>/error.jsp</location>
+</error-page>
+```
+
+## 12. Remove unnecessary applications and connectors
+
+- **Remove folders**: Unused folders such as `documentation`, `examples`, `manager`, and `host-manager` should be deleted from the production environment to minimize the attack surface.
+- **Remove unused connectors**: Disable and remove any unused connectors (e.g., the plain HTTP connector) in `server.xml` to reduce exposure to attacks.
+- **Why it matters**: Minimizes the attack surface and reduces potential entry points for attackers.
+
+## 13. Harden the shutdown port
+
+The shutdown port allows Tomcat to be stopped remotely. Disabling this port or restricting access ensures that attackers cannot shut down your server unexpectedly.
+
+- **Why it matters**: Prevents unauthorised shutdown of Tomcat, ensuring continuous availability.
+
+**How-to**
+
+1. Open `server.xml` in `%CATALINA_HOME%\conf\server.xml`.
+2. Set the shutdown port to `-1` to disable it:
+
+```xml
+<Server port="-1" shutdown="SHUTDOWN">
+```
+You can also set the port to `-1` during the installation of the service.
+
+---
+
+## Additional Recommendations
+
+1. **Replace the ROOT web application**: It is a best practice to replace the default Tomcat ROOT web application with a custom one or remove it if not needed. The default ROOT application can expose unnecessary information.
+2. **Use a Java Security Manager**: The Java Security Manager enforces restrictions on the permissions of Java code running within Tomcat. Enabling it adds an additional layer of security by limiting what code can do.
+3. **Set `failureCount` to 5 failed login attempts** for admin accounts: This limits the number of attempts to brute-force credentials, protecting admin accounts from attacks.
+
+## Sources
+* https://www.stigviewer.com/stig/apache_tomcat_application_server_9/
+* https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-131Ar2.pdf
+* https://tomcat.apache.org/tomcat-9.0-doc/security-howto.html
+* https://www.openlogic.com/blog/apache-tomcat-security-best-practices
+* https://knowledge.broadcom.com/external/article/226769/enable-http-strict-transport-security-hs.html
+
+
+
+
+
diff --git a/docs/getting-started/install.md b/docs/getting-started/install.md
new file mode 100644
index 00000000..ef949ee0
--- /dev/null
+++ b/docs/getting-started/install.md
@@ -0,0 +1,111 @@
+---
+title: Install
+---
+
+## Software prerequisites
+
+* 64-bit Windows or Linux operating system
+* Java 17 (Oracle or OpenJDK full version, not Headless)
+* PostgreSQL 13 or above (Extract may work with older versions that are no longer supported)
+* Tomcat 9 (Extract is not currently compatible with Tomcat 10 due to the use of Spring Boot 2)
+* A “modern” web browser (less than 2 years old) to access the application
+
+## Installation
+
+!!! Info
+    In the given paths, **[ZIP]** indicates the root of the delivered ZIP file in the release assets. Otherwise, relative paths are expressed in relation to the directory in which the application is deployed.
+
+!!! Tip
+    The latest release can be downloaded [here](https://github.com/asit-asso/extract/releases/latest){target="_blank"}.
+
+1. Create a PostgreSQL database to store the application data. The encoding must be UTF-8.
+2. The database must be accessible by a user with read, write and object creation/deletion privileges. The database content is automatically generated when the application is first started.
+3. Deploy the `[ZIP]\application\extract##x.x.x.war` file to the Tomcat server.
+It is normal for any automatic start-up of the application after deployment to fail.
+4. In the folder of the deployed application, go to the `WEB-INF\classes` directory and modify the ``application.properties`` file.
+5. Modify the database connection properties:
+    * **spring.datasource.url**: JDBC database connection string
+    * **spring.datasource.username**: Database user name
+    * **spring.datasource.password**: Database user password
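+
+    For example, for a local PostgreSQL instance (all values are placeholders):
+    ```properties
+    spring.datasource.url=jdbc:postgresql://localhost:5432/extract
+    spring.datasource.username=extract_user
+    spring.datasource.password=change_me
+    ```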
+
+6. (Optional) By default, Extract is in French, but you can set it to German or to multilingual mode (French and German). In multilingual mode, users can choose their language preference. To do that, you must change the `extract.i18n.language` property in the `application.properties` file. The value must be the ISO code of the desired language (e.g. `de`). To set it up in multilingual mode, all language ISO codes must be given, separated by commas. The first one acts as the default language (e.g. for fallback, creation of the first user, etc.). Here are a couple of examples:
+
+    * `extract.i18n.language=fr` Extract is in French
+    * `extract.i18n.language=de` Extract is in German
+    * `extract.i18n.language=fr,de` Extract is in French and German, French being the default language
+
+    To add a new language, please refer to the [Add a new language section](../customize/#add-a-new-language).
+
+7. Enter the application URL using the `application.external.url` property. The value of this property is used to generate links in notification e-mails.
+8. Enter a random 32-character ISO-8859-1 string for the `database.encryption.secret` and `database.encryption.salt` properties. These strings are used as keys to encrypt certain database values.
+9. Save the file.
+10. In the same folder, log properties can be defined in the `logback-spring.xml` file.
+11. Start the application.
+12. Connect to the database.
+13. Execute the `[ZIP]\sql\update_db.sql` script to apply any database schema modifications that are not automatically performed by the application.
+14. Connect to the application. When logging in for the first time, the user must create an admin account:
+
+    ![Create admin user](../assets/install/create_admin.png)
+
+    If the application is in multilingual mode, the language preference of the admin user will be the language of the browser, if it exists in the Extract languages. If not, it will be the default language of Extract. The language can be changed later on (see [Current user administration](../features/user-guide.md#current-user-administration)).
+
+15. Fill in the fields and click `Créer le compte`.
+16. Go to `Paramètres`.
+17. Enter the storage path. This is the directory in which data relating to request processing will be written. The Tomcat user must therefore have read and write rights to this directory.
+18. Enter the SMTP server parameters for sending notifications.
+19. Click on ``Enregistrer``.
+20. Congratulations! The application is now operational.
+
+## Application Update
+
+**If you already have a previous version of the application installed, please follow the procedure below.**
+
+1. Back up the following files so you don't have to redefine your configuration:
+    * ``WEB-INF\classes\application.properties``
+    * ``WEB-INF\classes\logback-spring.xml``
+    * ``WEB-INF\classes\static\js\requestMap\map.custom.js`` (if you've set it)
+    * ``WEB-INF\classes\templates\email\html\*`` (if you've customized the e-mail templates)
+    * ``WEB-INF\classes\static\lang\*\message.properties``
+
+2. Undeploy the existing Extract application.
+3. For Tomcat on Windows: go to the directory where Tomcat is installed and delete all directories named ``[digit]-extract`` (e.g. ``0-extract``, ``1-extract``,...) under the ``temp`` sub-directory.
+4. Deploy the new WAR.
+5. Compare the ``WEB-INF\classes\application.properties`` file with the one you backed up, and transfer any new or modified properties to the latter.
+> !!! Info "Updating to version 2.1"
+    Enter a random 32-character ISO-8859-1 string for the `database.encryption.secret` and `database.encryption.salt` properties. These strings are used as keys to encrypt certain database values.
+
+6. Overwrite the following file with your backup:
+    * ``WEB-INF\classes\logback-spring.xml``
+
+7. Copy your backup ``map.custom.js`` file (if set) to the ``WEB-INF\classes\static\js\requestMap`` directory.
+> !!! Info "Updating to version 2.1"
+    The OpenLayers library used to display the map underwent a major update (to 9.1.0) when Extract was upgraded to version 2.1. The code used to register a projection system has changed (see the new version of the ``map.custom.js`` example file). Depending on the functionality used, further modifications may be necessary. Please refer to the official documentation if necessary: [https://openlayers.org/en/v9.1.0/apidoc/](https://openlayers.org/en/v9.1.0/apidoc/){target="_blank"}
+
+8. Compare the e-mail templates in ``WEB-INF\classes\templates\email\html\`` with those you've backed up, to merge any changes you've made with the new version.
+9. Compare the language files in ``WEB-INF\classes\static\lang\*\`` with those you've backed up, to merge any changes you've made with the new version.
+10. Restart the Tomcat Extract application to take the updated configuration files into account.
+11. Connect to the database.
+12. Run the ``[ZIP]\sql\update_db.sql`` script to apply any changes to the structure that are not made automatically by the application.
+
+## Server Migration
+
+!!! Info
+    The procedure below details how to proceed if the application needs to be migrated to another server.
+
+### Actions on the old server
+
+1. Make a backup of the directory containing the current processing data. The path of this directory is defined in the application parameters (``Paramètres`` > ``Emplacement des fichiers temporaires``).
+2. Make a backup of the configuration and customization files as per **step 1** [in the Application Update section](#application-update).
+3. If the database is also being migrated, make a dump of the Extract database (a minimal example follows below).
+
+### Actions on the new server
+
+1. If the database has also been migrated, restore the Extract database dump.
+2. Restore the backup of the current processing data.
+3. Follow the [update procedure](#application-update) from **step 4**.
+4. If the path to the folder containing the current processing data has changed, don't forget to modify it in the application (``Paramètres`` > ``Emplacement des fichiers temporaires``).
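+
+For the dump and restore steps, a minimal sketch with the standard PostgreSQL tools (database name and user are placeholders):
+```shell
+# On the old server
+pg_dump -Fc -U extract_user -d extract -f extract.dump
+# On the new server, after creating an empty database
+pg_restore -U extract_user -d extract extract.dump
+```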
+
+
+
+
\ No newline at end of file
diff --git a/docs/getting-started/network-integration.md b/docs/getting-started/network-integration.md
new file mode 100644
index 00000000..bed5d4b1
--- /dev/null
+++ b/docs/getting-started/network-integration.md
@@ -0,0 +1,20 @@
+---
+title: Integrate Extract in your network
+---
+
+## Web interface - Internal vs exposed
+
+Extract _can_ be exposed to the Internet.
+But if all Operators and Administrators are on your internal network, Extract can reside on your LAN and does not need to be exposed to the Internet.
+
+## Connectors (Get order and send results)
+
+To get orders and return responses with files, Extract uses "Connectors".
+The default connector (easySDI v4) is used to interact with the [viageo.ch API](https://viageo.ch/api/legacyOrder/doc){target="_blank"}.
+The connector uses `HTTP GET` to get orders and `HTTP POST` to send responses with files and remarks. It always acts as an HTTP client and does not need to be exposed to the Internet.
+Authentication is done by BASIC AUTH. All traffic to viageo is TLS encrypted.
+
+## Use network shares
+
+If you store files on network drives (scripts, archive files, etc.), you must run the Tomcat service with a user that has access rights on the desired share, for example as shown below.
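+
+On Windows, for instance, the service account can be changed from an elevated prompt (the service name and account are placeholders):
+```shell
+sc config Tomcat9 obj= "DOMAIN\svc-extract" password= "********"
+```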
+See [Tomcat user access rights](./configure.md#tomcat-user-access-rights)
\ No newline at end of file
diff --git a/docs/how-to/extract-viageo.md b/docs/how-to/extract-viageo.md
new file mode 100644
index 00000000..f53c2257
--- /dev/null
+++ b/docs/how-to/extract-viageo.md
@@ -0,0 +1,48 @@
+# Connect Extract to viageo.ch
+!!! Warning
+    This section is only useful if your organization provides geodata on the platform [viageo.ch](https://viageo.ch){target="_blank"} or [plans-reseaux.ch](https://plans-reseaux.ch){target="_blank"}. You must be an [ASIT](https://asit-asso.ch){target="_blank"} member to distribute geodata through these platforms.
+
+!!! Info
+    If you want to test the connection to a non-productive instance of viageo.ch, you can use the validation instance. See [viageo.ch Validation Instance](../misc/viageo-test.md)
+
+This section explains how to connect Extract to the data portal viageo.ch, to read requests from the platform and send extracted geodata back to it.
+
+## Configure your resources on viageo.ch
+
+1. Log in to your viageo.ch account.
+2. Go to your resources (`Diffuser` tab > `Mes ressources`).
+3. For each geodata resource you want to distribute via Extract, go to edit mode and set the `Traitement` parameter to `Automatique` under the `Commande en ligne` section.
+
+    ![metadata](../assets/how-to/viageo/metadata.png){width="600"}
+
+## Configure a connector on Extract
+
+1. Log in to your Extract instance with an administrator role.
+2. Create a new easySDI v4 connector (see [Admin guide > Connectors](../features/admin-guide.md#connectors)).
+3. The `URL du service`, `Login distant` and `Mot de passe` parameters can be found on viageo.ch under [`Diffuser` > `Automatiser le processus`](https://viageo.ch/diffuser/extract/){target="_blank"}
+
+    !!! Warning
+        These parameters are only accessible for the main account of your viageo.ch organization.
+
+## Configure a process and link it to a viageo resource
+
+1. Usually, you want to create one process per resource from viageo.ch. The process will be in charge of extracting the corresponding geodata. To create a new process, see [Processes](../features/admin-guide.md#processes).
+2. Once the process is created, create a new connector rule (see [Connector rules](../features/admin-guide.md#connector-rules)) to connect the process to the corresponding resource from viageo.ch. To do that, the rule syntax is the following:
+
+    ```
+    productguid == "viageo resource id"
+    ```
+
+    !!! Info
+        Viageo resource IDs can be found on viageo.ch under [`Diffuser` > `Automatiser le processus`](https://viageo.ch/diffuser/extract/){target="_blank"} (only accessible for the main account of your organization) in the resources list.
+ + ![resources id](../assets/how-to/viageo/resources-id.png){width="1000"} + + + +
+
+
+
+
\ No newline at end of file diff --git a/docs/how-to/fme-flow.md b/docs/how-to/fme-flow.md new file mode 100644 index 00000000..ed15ce3d --- /dev/null +++ b/docs/how-to/fme-flow.md @@ -0,0 +1,81 @@ +# Set up an extraction using FME Flow (Server) + +!!! info + + This tutorial uses the extraction plugin `Extraction FME Flow (Version 2)` which allows unlimited parameters length, thus allowing request with precise geometries like municipalities boundary. + +To follow along with this tutorial, download the sample script and data available [here](https://github.com/asit-asso/extraction-samples). Either by cloning the repo : + +``` +git clone https://github.com/asit-asso/extraction-samples.git +``` + +Or downloading a ZIP archive with the following link : https://github.com/asit-asso/extraction-samples/archive/refs/heads/main.zip + +## Set up the FME workspace inside FME Flow + +1. In the downloaded folder, go to the subfolder `fme` and open up the `sample-script-FME2024.fmw` FME workspace. This tutorial uses the same workspace as the previous tutorial for FME Form. + +2. First, we need to uncheck the `required` option of the `parametersFile` User Parameter in order to let FME Flow override the `parameters.json`. + + ![user-parameter](../assets/how-to/fme-flow/user-parameter.png) + +2. Then, connect to your FME Flow instance. + + ![connection](../assets/how-to/fme-flow/connection.png) + +3. Next, publish the workspace to FME Flow + + ![publish](../assets/how-to/fme-flow/publish.png) + + Choose a valid FME Flow connection and create a new repository where to publish the workspace, e.g. `extract` or use an existing one. + + ![publish-repo](../assets/how-to/fme-flow/publish-repo.png) + +4. Make sure the `Upload data files` option is checked ans select only the `swissTLMRegio_Buildings_2056.zip` file. Indeed, the latter is our business data to be extracted and must be uploaded to FME Flow. But the `parameters.json` file can be omitted since the parameters will be overridden by the `FME Data Download` body request (see below). + + ![publish-files](../assets/how-to/fme-flow/publish-files.png) + +5. The `Extraction FME Flow (Version 2)` plugin sends all request parameters encoded as a `GeoJSON` through the body of the `FME Data Download` POST request. To tell FME to use the request's body as GeoJSON input for the `FeatureReader`, we need to do the following : + + * In the last dialog, select the `Data Download` Service. + * Click on the edit button and under `Send HTTP Message Body to Reader`, select the GeoJSON Reader (in our case, this is the `FeatureReader` "Read_Parameters"). + * Under `Writers in Download`, select all available writers. Therefore, data outputted in all formats will be available in the Data Download Service response. + + ![data-download](../assets/how-to/fme-flow/data-download.png) + +7. Once we have uploaded the workspace and the necessary data to FME Flow, we need to create an API token to authenticate the request that will be posted by Extract. + + In FME Flow, under Workspaces, go to your extract repository. Select the corresponding workspace (`sample-script-FME2024.fmw` in our example). Under the `Advanced` tab, click on `Create a Webhook`. + + ![create-webhook](../assets/how-to/fme-flow/create-webhook.png) + + In the next dialog, make sure the `User Can Upload` option is checked and click on `Create`. + + In the next dialog, click on `Download Webhook`. This will download a text file with the URL to call the request and the API token to authenticate it. 
+
+7. Once we have uploaded the workspace and the necessary data to FME Flow, we need to create an API token to authenticate the request that will be posted by Extract.
+
+    In FME Flow, under Workspaces, go to your extract repository. Select the corresponding workspace (`sample-script-FME2024.fmw` in our example). Under the `Advanced` tab, click on `Create a Webhook`.
+
+    ![create-webhook](../assets/how-to/fme-flow/create-webhook.png)
+
+    In the next dialog, make sure the `User Can Upload` option is checked and click on `Create`.
+
+    In the next dialog, click on `Download Webhook`. This will download a text file with the URL to call the request and the API token to authenticate it. This is the **only** chance you get to retrieve this information!
+
+    ![download-webhook](../assets/how-to/fme-flow/download-webhook.png)
+
+8. In the downloaded text file, note the URL used to call the `FME Data Download Service` (without the query parameters) and the API token.
+
+    ![token](../assets/how-to/fme-flow/token.png)
+
+## Connect the workspace to Extract
+
+1. In Extract, create a new process or use an existing one (see [processes](../features/admin-guide.md#processes)).
+
+2. Drag and drop the task plugin `Extraction FME Flow (Version 2)` into the process task list.
+
+3. In the field `URL du service FME Server Data Download`, type the URL of your service. In the field `Token API FME`, type the API token.
+
+    ![plugin](../assets/how-to/fme-flow/plugin.png)
+
+4. To test the extraction, you can link this process to viageo (see [Connect Extract to viageo.ch](../how-to/extract-viageo.md)). With our example, the linked resource can be ordered over the entire Switzerland in the following formats: `shapefile`, `geopackage`, `GeoJSON` and `DXF`.
+
+5. Finally, you can order the linked product on viageo; Extract will retrieve the request and run the FME workspace by calling the FME Flow Data Download Service, with all request parameters carried in the body of the API request.
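+
+For troubleshooting, the same webhook can be exercised outside Extract. A sketch with `curl` (host, repository and token are placeholders; FME Flow expects the token in an `Authorization: fmetoken` header):
+```shell
+curl -X POST "https://fmeflow.example.org/fmedatadownload/extract/sample-script-FME2024.fmw" \
+  -H "Authorization: fmetoken token=<API_TOKEN>" \
+  -H "Content-Type: application/json" \
+  --data @parameters.json
+```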
+
+
+
+
\ No newline at end of file diff --git a/docs/how-to/fme-form.md b/docs/how-to/fme-form.md new file mode 100644 index 00000000..bf3b60ec --- /dev/null +++ b/docs/how-to/fme-form.md @@ -0,0 +1,67 @@ +# Set up an extraction using FME Form (desktop) + +!!! info + + This tutorial uses the extraction plugin `FME Form (Version 2)` which allows unlimited parameters length, thus allowing request with precise geometries like municipalities boundary. + +To follow along with this tutorial, download the sample script and data available [here](https://github.com/asit-asso/extraction-samples). Either by cloning the repo : + +``` +git clone https://github.com/asit-asso/extraction-samples.git +``` + +Or downloading a ZIP archive with the following link : https://github.com/asit-asso/extraction-samples/archive/refs/heads/main.zip + +## Set up the FME workspace + +1. In the downloaded folder, go to the subfolder `fme` and open up the `sample-script-FME2024.fmw` FME workspace. + +2. The workspace uses a `creator` as a starting point. + +3. The `FME Form (Version 2)` plugin sends all request parameters in a `GeoJSON` file. The latter is given as a CLI argument when running the workspace where the key is `--parametersFile` and the value is the path of the `GeoJSON` file. + + To tell FME to use this argument, we need to create a User Parameter : + + Under `User Parameters`, create a new `File/Folder/URL` parameter. The identifier must be `parametersFile`. For convenience, you can allocate a default value `$(FME_MF_DIR)\parameters.json` to the parameter. Thus, when run inside FME, the default value is taken and a local GeoJSON file is used. But when run by Extract, the `parametersFile` will be overridden by the CLI argument and the GeoJSON from Extract will be used. + + ![user-parameter](../assets/how-to/fme-form/user-parameter.png){width="800"} + +4. Then we need to read the `GeoJSON` file with a `FeatureReader`. The format is `GeoJSON` and the dataset is the `$(parametersFile)` parameter. The feature of the `GeoJSON` file is the request perimeter and can be use to extract or clip data. All other attributes are the request parameters. + + ![user-parameter](../assets/how-to/fme-form/read-parameters.png){width="400"} + +5. Before reading your actual data, you can set global variables from the GeoJSON attributes like the FolderOut (to know where to save the output data), the desired Format and CRS using a `VariableSetter` transformer. Thus, it will be possible to use them later after reading your business data. + + ![set-variable](../assets/how-to/fme-form/set-variable.png){width="300"} + +6. Then you can read your business data with a `FeatureReader` transformer. In our example, we use the feature from the GeoJSON parameters file (request's perimeter) to read only the data that intersect with it. + + ![read-data](../assets/how-to/fme-form/read-data.png){width="400"} + +7. Once your data are read, you can retrieve the previously set variables to know for example in which format and CRS the client wants the data. + + ![retrieve-variable](../assets/how-to/fme-form/retrieve-variable.png){width="300"} + +8. Reproject the data accordingly and save them in the given format. Remember, the data must be saved in the `FolderOut` parameter given in the `GeoJSON` file. + + ![save-data](../assets/how-to/fme-form/save-data.png){width="800"} + +## Connect the workspace to Extract + +1. In Extract, create a new or use an existing process (see [processes](../features/admin-guide.md#processes)). + +2. 
+2. Drag and drop the task plugin `Extraction FME Form (Version 2)` into the process task list.
+
+3. In the field `Chemin du workspace FME`, type the path to your FME workspace. In the field `Chemin du programme FME (fme.exe)`, type the path to your FME executable. In the field `Nombre de fme.exe lancés par ce workspace (défaut = 1, max = 8)`, type the number of FME executables this workspace may run in parallel (for our example, only one is needed).
+
+    ![plugin](../assets/how-to/fme-form/plugin.png)
+
+4. To test the extraction, you can link this process to viageo (see [Connect Extract to viageo.ch](../how-to/extract-viageo.md)). With our example, the linked resource can be ordered over the entire Switzerland in the following formats: `shapefile`, `geopackage`, `GeoJSON` and `DXF`.
+
+5. Finally, you can order the linked product on viageo; Extract will retrieve the request and run the FME workspace with all request parameters carried in the GeoJSON parameters file.
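+
+To check the workspace outside Extract, you can run it the same way the plugin does, i.e. from the command line, passing the parameters file through the `--parametersFile` argument (all paths are examples):
+```shell
+"C:\Program Files\FME\fme.exe" "C:\extract\workspaces\sample-script-FME2024.fmw" --parametersFile "C:\extract\orders\1234\parameters.json"
+```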
+
+
+
+
\ No newline at end of file diff --git a/docs/how-to/image.png b/docs/how-to/image.png new file mode 100644 index 00000000..bf2bb354 Binary files /dev/null and b/docs/how-to/image.png differ diff --git a/docs/how-to/python.md b/docs/how-to/python.md new file mode 100644 index 00000000..9eea2069 --- /dev/null +++ b/docs/how-to/python.md @@ -0,0 +1,257 @@ +# Set up an extraction using Python + +!!! info + + This tutorial uses the extraction plugin `Extraction Python` that allows you to run a custom Python script. A functional Python interpreter for the script (version, dependencies, etc.) must be provided. + +To follow along with this tutorial, you can download the sample script and data available [here](https://github.com/asit-asso/extraction-samples). Either by cloning the repo : + +``` +git clone https://github.com/asit-asso/extraction-samples.git +``` + +Or downloading a ZIP archive with the following link : https://github.com/asit-asso/extraction-samples/archive/refs/heads/main.zip + +In the subfolder `python`, you will find a Python script example `main.py`, data to extract `inputData.gpkg` and a parameters GeoJSON file example `parameters.json`. + +!!! warning + + In addition to Python 3, this example uses a third party OGR2OGR executable. The latter must be installed and available on your system if you want to make the example work. + +## Set up the Python script : + +1. First we need to import necessary Python packages to run the script. All used packages are provided by the standard python distribution, so ne need to install them. + + ```python + import subprocess + import json + import os + import sys + ``` + +2. Then we setup 2 global variables, the path where the business data to extract are saved and the path of the OGR2OGR executable tu use. All paths can be absolute or relative to the Python script that will be run. + + ```python + OGR2OGR_PATH = "C:\\OSGeo4W\\bin\\ogr2ogr.exe" + INPUT_DATA = "inputData.gpkg" + ``` + +3. The Python extraction plugin sends all request parameters in a `GeoJSON` file. The path of the latter is given as the first CLI argument when running the python script. + + Therefore, the Python script must retrieve this parameters file by reading the CLI argument and opening and parsing the `GeoJSON` file. + + ```python + # Get parameters file path from command line argument + parameters_file = sys.argv[1] + + # Read and parse the parameters.json file + with open(parameters_file, "r", encoding="utf-8") as json_file: + parameters = json.load(json_file) + ``` + + Once done, all request parameters are available in the `parameters` variable as a python dictionary. + +4. Since our business data are in ``EPSG:2056``, we need to reproject the parameters file from ``EPSG:4326`` to ``EPSG:2056`` in order to use it to clip our data. We will save it as a temporary `clipper.geojson` file in FolderOut. + + ```python + cmd = [ + OGR2OGR_PATH, + "-s_srs", + "EPSG:4326", + "-t_srs", + "EPSG:2056", + os.path.join( + parameters["properties"]["FolderOut"], + "clipper.geojson" + ), + parameters_file + ] + + # Run the ogr2ogr command while catching and raising errors + try: + subprocess.check_output(cmd) + except subprocess.CalledProcessError as e: + print("Processing parameters file failed") + raise subprocess.CalledProcessError(1, cmd) from e + ``` + +5. Before running the last OGR2OGR command, we need to translate the format and projection from the parameters file into the GDAL vocabulary. 
+ + ```python + # Translate format to OGR format name + if parameters["properties"]["Parameters"]["FORMAT"] == "GEOJSON": + format = "GeoJSON" + elif parameters["properties"]["Parameters"]["FORMAT"] == "GPKG": + format = "GPKG" + elif parameters["properties"]["Parameters"]["FORMAT"] == "DXF": + format = "DXF" + + # Translate Projection to OGR CRS name + if parameters["properties"]["Parameters"]["PROJECTION"] == "SWITZERLAND95": + output_proj = "EPSG:2056" + elif parameters["properties"]["Parameters"]["PROJECTION"] == "SWITZERLAND": + output_proj = "EPSG:21781" + else: + output_proj = "EPSG:2056" + ``` + +6. We can now run the final OGR2OGR command to extract the data. The command will clip, reproject and save the data into FolderOut. + + ```python + cmd = [ + OGR2OGR_PATH, + "-s_srs", + "EPSG:2056", + "-t_srs", + output_proj, + "-clipsrc", + os.path.join( + parameters["properties"]["FolderOut"], + "clipper.geojson" + ), + "-f", + format, + os.path.join( + parameters["properties"]["FolderOut"], + f"result.{parameters['properties']['Parameters']['FORMAT'].lower()}" + ), + INPUT_DATA + ] + + # Run the ogr2ogr command while catching and raising errors + try: + subprocess.check_output(cmd) + except subprocess.CalledProcessError as e: + print("Processing data failed") + raise subprocess.CalledProcessError(1, cmd) from e + ``` + +7. Finally, we delete the temporary `clipper.geojson` to avoid sending it to the client. + + ```python + os.remove(os.path.join( + parameters["properties"]["FolderOut"], + "clipper.geojson" + ) + ) + ``` + +Here is the final Python script: + +```python +import subprocess +import json +import os +import sys + +OGR2OGR_PATH = "C:\\OSGeo4W\\bin\\ogr2ogr.exe" +INPUT_DATA = "inputData.gpkg" + +if __name__ == "__main__": + + # Get parameters file path from command line argument + parameters_file = sys.argv[1] + + # Read and parse the parameters.json file + with open(parameters_file, "r", encoding="utf-8") as json_file: + parameters = json.load(json_file) + + # Reproject the parameters.json to business data CRS (EPSG:2056) + # and save it as a temporary clipper.geojson file in FolderOut + cmd = [ + OGR2OGR_PATH, + "-s_srs", + "EPSG:4326", + "-t_srs", + "EPSG:2056", + os.path.join( + parameters["properties"]["FolderOut"], + "clipper.geojson" + ), + parameters_file + ] + + # Run the ogr2ogr command while catching and raising errors + try: + subprocess.check_output(cmd) + except subprocess.CalledProcessError as e: + print("Processing parameters file failed") + raise subprocess.CalledProcessError(1, cmd) from e + + # Translate format to OGR format name + if parameters["properties"]["Parameters"]["FORMAT"] == "GEOJSON": + format = "GeoJSON" + elif parameters["properties"]["Parameters"]["FORMAT"] == "GPKG": + format = "GPKG" + elif parameters["properties"]["Parameters"]["FORMAT"] == "DXF": + format = "DXF" + + # Translate Projection to OGR CRS name + if parameters["properties"]["Parameters"]["PROJECTION"] == "SWITZERLAND95": + output_proj = "EPSG:2056" + elif parameters["properties"]["Parameters"]["PROJECTION"] == "SWITZERLAND": + output_proj = "EPSG:21781" + else: + output_proj = "EPSG:2056" + + # Clip, reproject and save the data in FolderOut + cmd = [ + OGR2OGR_PATH, + "-s_srs", + "EPSG:2056", + "-t_srs", + output_proj, + "-clipsrc", + os.path.join( + parameters["properties"]["FolderOut"], + "clipper.geojson" + ), + "-f", + format, + os.path.join( + parameters["properties"]["FolderOut"], + f"result.{parameters['properties']['Parameters']['FORMAT'].lower()}" + ), + INPUT_DATA + ] + 
+ # Run the ogr2ogr command while catching and raising errors + try: + subprocess.check_output(cmd) + except subprocess.CalledProcessError as e: + print("Processing data failed") + raise subprocess.CalledProcessError(1, cmd) from e + + # Delete temporary clipper.geojson file + os.remove(os.path.join( + parameters["properties"]["FolderOut"], + "clipper.geojson" + ) + ) +``` + +To test the script and run it locally, you can use the given example `parameters.json` and run the following command: + +```powershell +python main.py parameters.json +``` + +## Connect the workspace to Extract + +1. In Extract, create a new process or use an existing one (see [processes](../features/admin-guide.md#processes)). + +2. Drag and drop the task plugin `Extraction Python` into the process task list. + +3. In the field `Chemin de l'interpréteur Python`, type the path of the Python interpreter you want to use. If you use a virtual environment, you can give the path of its interpreter, usually something like `.../.venv/Scripts/python.exe`. In the field `Chemin du script python`, type the path of the Python script to run. + + ![plugin](../assets/how-to/python/plugin.png) + +4. To test the extraction, you can link this process to viageo (see [Connect Extract to viageo.ch](../how-to/extract-viageo.md)). With our example, the linked resource can be ordered over all of Switzerland in the following formats: `geopackage`, `GeoJSON` and `DXF`. + +5. Finally, you can order the linked product on viageo; Extract will retrieve the request and run the Python script with all request parameters provided in the GeoJSON parameters file. + +
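You can also craft a parameters file by hand if you want to test without a real request. The sketch below is only an illustration under assumptions: it writes just the fields that `main.py` actually reads (`properties.FolderOut` and `properties.Parameters.FORMAT` / `PROJECTION`) plus a WGS84 polygon used as the clip perimeter; the file produced by Extract for a real request contains additional information.

```python
# Minimal sketch (assumed structure): hand-craft a parameters file with only
# the fields read by main.py. All concrete values below are hypothetical.
import json

parameters = {
    "type": "Feature",
    "properties": {
        "FolderOut": ".",  # write results to the current folder
        "Parameters": {
            "FORMAT": "GPKG",              # GEOJSON, GPKG or DXF
            "PROJECTION": "SWITZERLAND95"  # translated to EPSG:2056 by main.py
        }
    },
    # Request perimeter in EPSG:4326, as sent by the platform
    "geometry": {
        "type": "Polygon",
        "coordinates": [[
            [6.56, 46.51], [6.66, 46.51], [6.66, 46.56],
            [6.56, 46.56], [6.56, 46.51]
        ]]
    }
}

with open("parameters.json", "w", encoding="utf-8") as json_file:
    json.dump(parameters, json_file, indent=2)
```

Running `python main.py parameters.json` against such a file should then produce a `result.gpkg` clipped to the polygon.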
+
+
+
+
\ No newline at end of file diff --git a/docs/how-to/qgis-server-atlas.md new file mode 100644 index 00000000..a8b8f743 --- /dev/null +++ b/docs/how-to/qgis-server-atlas.md @@ -0,0 +1,83 @@ +# Set up an extraction using QGIS Server (atlas print layout) + +!!! info + + This tutorial uses the extraction plugin `Impression Atlas QGIS Server` which allows you to print out a QGIS project (e.g. in `PDF`) using a standard layout size (e.g. `A4`). Since the plugin uses a `GetPrint` request, the QGIS project must be hosted on a QGIS Server instance. + + The QGIS Atlas tool allows you to use a fixed scale and a fixed layout size by using an additional vector layer containing a grid that will be used to divide your project into multiple files. Hence, when a client requests your data, the plugin will intersect the desired perimeter with your grid and print out as many files as needed to cover the request's perimeter, each with the fixed layout size and scale. + +To follow along with this tutorial, you can download the sample script and data available [here](https://github.com/asit-asso/extraction-samples), either by cloning the repo: + +``` +git clone https://github.com/asit-asso/extraction-samples.git +``` + +or by downloading a ZIP archive from the following link: https://github.com/asit-asso/extraction-samples/archive/refs/heads/main.zip + +!!! warning + + This tutorial uses `Docker` and `Docker Compose` to quickly get a running QGIS Server. Of course, you can use your own server. + +## Set up the QGIS Project + +1. In the downloaded folder, navigate to `qgis-atlas-server` > `qgis-server` > `data` and open up the QGIS project `sample.qgz`. This project will be hosted on a QGIS Server and be used by the plugin. + + The project contains only two layers: the building footprints from swissTLMRegio and the `Division` layer. The latter is our *grid* layer, used by the Atlas tool to divide our project into multiple output files. + +2. First, we need to create a print layout. In this example, there is already a print layout available (the `atlasLayout` that you can open by going to ``Project`` > ``Layouts`` > ``atlasLayout``). But you can create your own by going to `Project` > `New Print Layout`, giving it a name and clicking `Ok`. + +3. In the print composer, you can add a map, a legend, a scale bar, some labels, etc. You can also set the map scale, the layout size and the export resolution. In our `atlasLayout` example, we set the map scale to 1:100'000, the export resolution to 300 DPI and the layout size to A4 in landscape orientation. We've also added a title, some labels and a scale bar. + +4. Once you have your layout ready, we need to generate an Atlas. Still in the print composer, go to the Atlas tab and check `Generate an Atlas`. + + Select the `Division` layer as the `Coverage layer`; this sets it up as the *grid* layer used to export fixed *pages* of our project. + + Make sure to check the option `Hidden coverage layer`; it hides the `Division` layer in the output. + + You can set up a name for each generated page under `Page name`. In our example, we set it to `to_string( "left") + ' - ' + to_string( "bottom" )`. It uses attributes from the `Division` layer so that the bottom-left corner coordinates serve as the page name. + + Finally, we sort the `Division` layer by its `id` attribute. + + ![atlas](../assets/how-to/qgis/atlas.png) + +5. Once the Atlas configuration is set, we need to configure the map to use it. 
In the print composer, select your map item, go to the `Item Properties` tab and check the option `Controlled by Atlas`. + + ![control](../assets/how-to/qgis/control.png) + +6. Now we can test it out inside the print composer. Go to the `Atlas` menu and click on `Preview Atlas`. You can now click on the left and right arrows to preview the different pages generated by the Atlas. + + ![preview](../assets/how-to/qgis/preview.png) + +7. Go back to your QGIS project and make sure that the project properties allow generating more than one feature per Atlas print request. Go to `Project` > `Properties` > `QGIS Server` > `WMS` tab and under `Map and Legend Options`, adapt the number next to `Maximum features for Atlas print request` (e.g. 10). + + ![max-features](../assets/how-to/qgis/max-features.png) + +8. Now that our print layout using the Atlas tool is ready, we can publish our QGIS project on QGIS Server. If you have Docker installed and running, you can simply run `docker compose up` from the root of the `qgis-atlas-server` folder. + + When all containers are running, we can test an export by calling the following URL in a browser: + + ``` + http://127.0.0.1:8888/qgis-server?SERVICE=WMS&REQUEST=GetPrint&CRS=EPSG:2056&TEMPLATE=atlasLayout&FORMAT=pdf&ATLAS_PK=122,123,124,125 + ``` + + This will export 4 PDF pages corresponding to the ids `122`, `123`, `124` and `125` of the `Division` layer. + +## Connect the workspace to Extract + +1. In Extract, create a new process or use an existing one (see [processes](../features/admin-guide.md#processes)). + +2. Drag and drop the task plugin `Impression Atlas QGIS Server` into the process task list. + +3. In the field `Url de base QGIS Server`, type the URL of your QGIS Server instance. In our example using Docker, you can type `http://127.0.0.1:8888/qgis-server`. In the field `Template à utiliser pour le layout`, you need to give the name of the print layout that uses the Atlas tool. In our example, this is `atlasLayout`. + + ![plugin](../assets/how-to/qgis/plugin.png) + +4. To test the extraction, you can link this process to viageo (see [Connect Extract to viageo.ch](../how-to/extract-viageo.md)). With our example, the linked resource can be ordered over all of Switzerland. The format does not really matter and won't be read by Extract or QGIS, but the plugin always exports PDF files, so for clarity towards clients the format should be set to PDF in viageo.ch. + +5. Finally, you can order the linked product on viageo; Extract will retrieve the request and send a GetPrint request to your QGIS Server. + +
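To run the same test from a script rather than a browser, here is a minimal sketch, assuming the local Docker setup above; it simply reuses the query parameters of the example URL and saves the returned document:

```python
# Minimal sketch: issue the GetPrint request and save the returned PDF.
# Assumes the tutorial's local Docker setup (QGIS Server on port 8888).
from urllib.parse import urlencode
from urllib.request import urlopen

query = urlencode({
    "SERVICE": "WMS",
    "REQUEST": "GetPrint",
    "CRS": "EPSG:2056",
    "TEMPLATE": "atlasLayout",
    "FORMAT": "pdf",
    "ATLAS_PK": "122,123,124,125",  # ids of the Division features to print
})

with urlopen(f"http://127.0.0.1:8888/qgis-server?{query}") as response:
    with open("atlas.pdf", "wb") as pdf_file:
        pdf_file.write(response.read())
```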
+
+
+
+
\ No newline at end of file diff --git a/docs/index.md new file mode 100644 index 00000000..916d8179 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,28 @@ +--- +title: Introduction +--- + +## What is Extract?

Extract is a web application that facilitates the extraction and delivery of your geodata!

+ +The Extract application **imports data requests** from a platform or data portal (such as the ASIT portals viageo.ch and plans-reseaux.ch), then executes a series of pre-configured tasks to **extract the requested data** and returns the result to the client. + +And all of that with or without human intervention, as you choose! + +## Key features + +* :material-reload: Automate the process of extracting, transforming and delivering your geodata +* :material-truck-fast: Reduce order processing times +* :high_brightness: Increase delivered data quality +* :smiling_face_with_3_hearts: Increase customer satisfaction + +## Open Source project + +Extract is a modular and extensible web application, **to be installed in the IT environment of each data provider**, and can be accessed with a web browser. + +
+
+
+
+
\ No newline at end of file diff --git a/docs/misc/viageo-test.md new file mode 100644 index 00000000..e9ab10e9 --- /dev/null +++ b/docs/misc/viageo-test.md @@ -0,0 +1,23 @@ +# viageo.ch Validation Instance +!!! warning + This section is only useful if your organization provides geodata on the platform [viageo.ch](https://viageo.ch){target="_blank"} or [plans-reseaux.ch](https://plans-reseaux.ch){target="_blank"}. You must be an [ASIT](https://asit-asso.ch){target="_blank"} member to distribute geodata through these platforms. + +## Fundamentals + +The viageo.ch portal consists of two separate environments: + +* A production instance (PROD): **[https://viageo.ch](https://viageo.ch){target="_blank"}**
It is available to all users 24/7 (except for exceptional maintenance) +* A validation instance (VAL): **[https://val.viageo.ch](https://val.viageo.ch){target="_blank"}**
+ Its operation is not guaranteed at all times. It is only available to suppliers (ASIT members) for testing purposes. The sending of notification emails is disabled. + +## Request for access to the validation instance + +Access to the validation instance is restricted by IP address. + +Please send any access requests to support@viageo.ch, specifying the public IP address (single or range) of the hosts that will need to access it (your workstation, the server where EXTRACT will be installed, etc.). + +## Data replication between instances + +Occasionally, data from the **production** instance is replicated to the **validation** instance. All your resources will then be updated and replaced with production data. + +![Misc](../assets/misc/data-replication.png){width="300"} \ No newline at end of file diff --git a/doc/mitigations/WP1. Add a timeout (5s) to the getOrder method of the easySDI connector.md b/docs/mitigations/WP1. Add a timeout (5s) to the getOrder method of the easySDI connector.md similarity index 100% rename from doc/mitigations/WP1. Add a timeout (5s) to the getOrder method of the easySDI connector.md rename to docs/mitigations/WP1. Add a timeout (5s) to the getOrder method of the easySDI connector.md diff --git a/doc/mitigations/WP2. Customize the User-Agent of the easySDIv4 connector.md b/docs/mitigations/WP2. Customize the User-Agent of the easySDIv4 connector.md similarity index 100% rename from doc/mitigations/WP2. Customize the User-Agent of the easySDIv4 connector.md rename to docs/mitigations/WP2. Customize the User-Agent of the easySDIv4 connector.md diff --git a/doc/mitigations/WP3. Mitigations/M1. Extraction of information to a vault or external configuration.md b/docs/mitigations/WP3. Mitigations/M1. Extraction of information to a vault or external configuration.md similarity index 100% rename from doc/mitigations/WP3. Mitigations/M1. Extraction of information to a vault or external configuration.md rename to docs/mitigations/WP3. Mitigations/M1. Extraction of information to a vault or external configuration.md diff --git a/doc/mitigations/WP3. Mitigations/M2. Fix command line for FME launch.md b/docs/mitigations/WP3. Mitigations/M2. Fix command line for FME launch.md similarity index 100% rename from doc/mitigations/WP3. Mitigations/M2. Fix command line for FME launch.md rename to docs/mitigations/WP3. Mitigations/M2. Fix command line for FME launch.md diff --git a/doc/mitigations/WP3. Mitigations/M3. XML parser configuration corrected to mitigate XXE risk.md b/docs/mitigations/WP3. Mitigations/M3. XML parser configuration corrected to mitigate XXE risk.md similarity index 100% rename from doc/mitigations/WP3. Mitigations/M3. XML parser configuration corrected to mitigate XXE risk.md rename to docs/mitigations/WP3. Mitigations/M3. XML parser configuration corrected to mitigate XXE risk.md diff --git a/doc/mitigations/WP3. Mitigations/M4. XML parser configuration corrected to prevent Xpath injections.md b/docs/mitigations/WP3. Mitigations/M4. XML parser configuration corrected to prevent Xpath injections.md similarity index 100% rename from doc/mitigations/WP3. Mitigations/M4. XML parser configuration corrected to prevent Xpath injections.md rename to docs/mitigations/WP3. Mitigations/M4. XML parser configuration corrected to prevent Xpath injections.md diff --git a/doc/mitigations/WP3. Mitigations/M5. Download configuration corrected to mitigate risk of SSRF attack.md b/docs/mitigations/WP3. Mitigations/M5. 
Download configuration corrected to mitigate risk of SSRF attack.md similarity index 100% rename from doc/mitigations/WP3. Mitigations/M5. Download configuration corrected to mitigate risk of SSRF attack.md rename to docs/mitigations/WP3. Mitigations/M5. Download configuration corrected to mitigate risk of SSRF attack.md diff --git a/doc/mitigations/WP3. Mitigations/Workpackage.md b/docs/mitigations/WP3. Mitigations/Workpackage.md similarity index 100% rename from doc/mitigations/WP3. Mitigations/Workpackage.md rename to docs/mitigations/WP3. Mitigations/Workpackage.md diff --git a/doc/mitigations/WP5. Implementing the todos/Classes/PasswordValidator.md b/docs/mitigations/WP5. Implementing the todos/Classes/PasswordValidator.md similarity index 100% rename from doc/mitigations/WP5. Implementing the todos/Classes/PasswordValidator.md rename to docs/mitigations/WP5. Implementing the todos/Classes/PasswordValidator.md diff --git a/doc/mitigations/WP5. Implementing the todos/T1. Remove user operator.md b/docs/mitigations/WP5. Implementing the todos/T1. Remove user operator.md similarity index 100% rename from doc/mitigations/WP5. Implementing the todos/T1. Remove user operator.md rename to docs/mitigations/WP5. Implementing the todos/T1. Remove user operator.md diff --git a/doc/mitigations/WP5. Implementing the todos/T2. Validation of LDAP parameters.md b/docs/mitigations/WP5. Implementing the todos/T2. Validation of LDAP parameters.md similarity index 100% rename from doc/mitigations/WP5. Implementing the todos/T2. Validation of LDAP parameters.md rename to docs/mitigations/WP5. Implementing the todos/T2. Validation of LDAP parameters.md diff --git a/doc/mitigations/WP5. Implementing the todos/T3. Modify LDAP user synchronization.md b/docs/mitigations/WP5. Implementing the todos/T3. Modify LDAP user synchronization.md similarity index 100% rename from doc/mitigations/WP5. Implementing the todos/T3. Modify LDAP user synchronization.md rename to docs/mitigations/WP5. Implementing the todos/T3. Modify LDAP user synchronization.md diff --git a/doc/mitigations/WP5. Implementing the todos/T4. Notify admins if synchronization is still launched.md b/docs/mitigations/WP5. Implementing the todos/T4. Notify admins if synchronization is still launched.md similarity index 100% rename from doc/mitigations/WP5. Implementing the todos/T4. Notify admins if synchronization is still launched.md rename to docs/mitigations/WP5. Implementing the todos/T4. Notify admins if synchronization is still launched.md diff --git a/doc/mitigations/WP5. Implementing the todos/T5. Centralize the password management.md b/docs/mitigations/WP5. Implementing the todos/T5. Centralize the password management.md similarity index 100% rename from doc/mitigations/WP5. Implementing the todos/T5. Centralize the password management.md rename to docs/mitigations/WP5. Implementing the todos/T5. Centralize the password management.md diff --git a/doc/mitigations/WP5. Implementing the todos/T6. Centralize the task history management.md b/docs/mitigations/WP5. Implementing the todos/T6. Centralize the task history management.md similarity index 100% rename from doc/mitigations/WP5. Implementing the todos/T6. Centralize the task history management.md rename to docs/mitigations/WP5. Implementing the todos/T6. Centralize the task history management.md diff --git a/doc/mitigations/WP5. Implementing the todos/T7. Improving error handling in the TwoFactorController.md b/docs/mitigations/WP5. Implementing the todos/T7. 
Improving error handling in the TwoFactorController.md similarity index 100% rename from doc/mitigations/WP5. Implementing the todos/T7. Improving error handling in the TwoFactorController.md rename to docs/mitigations/WP5. Implementing the todos/T7. Improving error handling in the TwoFactorController.md diff --git a/doc/mitigations/WP5. Implementing the todos/Workpackage.md b/docs/mitigations/WP5. Implementing the todos/Workpackage.md similarity index 100% rename from doc/mitigations/WP5. Implementing the todos/Workpackage.md rename to docs/mitigations/WP5. Implementing the todos/Workpackage.md diff --git a/doc/mitigations/WP6. Administration page/Classes/Annotations/FieldsValueMatch.md b/docs/mitigations/WP6. Administration page/Classes/Annotations/FieldsValueMatch.md similarity index 100% rename from doc/mitigations/WP6. Administration page/Classes/Annotations/FieldsValueMatch.md rename to docs/mitigations/WP6. Administration page/Classes/Annotations/FieldsValueMatch.md diff --git a/doc/mitigations/WP6. Administration page/Classes/Annotations/PasswordPolicy.md b/docs/mitigations/WP6. Administration page/Classes/Annotations/PasswordPolicy.md similarity index 100% rename from doc/mitigations/WP6. Administration page/Classes/Annotations/PasswordPolicy.md rename to docs/mitigations/WP6. Administration page/Classes/Annotations/PasswordPolicy.md diff --git a/doc/mitigations/WP6. Administration page/Classes/Annotations/ReservedWords.md b/docs/mitigations/WP6. Administration page/Classes/Annotations/ReservedWords.md similarity index 100% rename from doc/mitigations/WP6. Administration page/Classes/Annotations/ReservedWords.md rename to docs/mitigations/WP6. Administration page/Classes/Annotations/ReservedWords.md diff --git a/doc/mitigations/WP6. Administration page/Classes/Controllers/SetupController.md b/docs/mitigations/WP6. Administration page/Classes/Controllers/SetupController.md similarity index 100% rename from doc/mitigations/WP6. Administration page/Classes/Controllers/SetupController.md rename to docs/mitigations/WP6. Administration page/Classes/Controllers/SetupController.md diff --git a/doc/mitigations/WP6. Administration page/Classes/Filters/SetupRedirectFilter.md b/docs/mitigations/WP6. Administration page/Classes/Filters/SetupRedirectFilter.md similarity index 100% rename from doc/mitigations/WP6. Administration page/Classes/Filters/SetupRedirectFilter.md rename to docs/mitigations/WP6. Administration page/Classes/Filters/SetupRedirectFilter.md diff --git a/doc/mitigations/WP6. Administration page/Classes/Services/AppInitializationService.md b/docs/mitigations/WP6. Administration page/Classes/Services/AppInitializationService.md similarity index 100% rename from doc/mitigations/WP6. Administration page/Classes/Services/AppInitializationService.md rename to docs/mitigations/WP6. Administration page/Classes/Services/AppInitializationService.md diff --git a/doc/mitigations/WP6. Administration page/How it works.md b/docs/mitigations/WP6. Administration page/How it works.md similarity index 100% rename from doc/mitigations/WP6. Administration page/How it works.md rename to docs/mitigations/WP6. Administration page/How it works.md diff --git a/doc/mitigations/WP6. Administration page/Security.md b/docs/mitigations/WP6. Administration page/Security.md similarity index 100% rename from doc/mitigations/WP6. Administration page/Security.md rename to docs/mitigations/WP6. 
Administration page/Security.md diff --git a/docs/stylesheets/extra.css b/docs/stylesheets/extra.css new file mode 100644 index 00000000..70ce1c22 --- /dev/null +++ b/docs/stylesheets/extra.css @@ -0,0 +1,3 @@ +:root { + --md-primary-fg-color: #37a3e1; +} \ No newline at end of file diff --git a/fme/GDB/ASIT-VD.gdb/a00000001.TablesByName.atx b/fme/GDB/ASIT-VD.gdb/a00000001.TablesByName.atx deleted file mode 100644 index 90ecfca7..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000001.TablesByName.atx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000001.freelist b/fme/GDB/ASIT-VD.gdb/a00000001.freelist deleted file mode 100644 index e8d16899..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000001.freelist and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000001.gdbindexes b/fme/GDB/ASIT-VD.gdb/a00000001.gdbindexes deleted file mode 100644 index b02aa751..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000001.gdbindexes and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000001.gdbtable b/fme/GDB/ASIT-VD.gdb/a00000001.gdbtable deleted file mode 100644 index 0e443344..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000001.gdbtable and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000001.gdbtablx b/fme/GDB/ASIT-VD.gdb/a00000001.gdbtablx deleted file mode 100644 index 051352ee..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000001.gdbtablx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000002.gdbtable b/fme/GDB/ASIT-VD.gdb/a00000002.gdbtable deleted file mode 100644 index a0af90ea..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000002.gdbtable and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000002.gdbtablx b/fme/GDB/ASIT-VD.gdb/a00000002.gdbtablx deleted file mode 100644 index 7c12c568..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000002.gdbtablx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000003.gdbindexes b/fme/GDB/ASIT-VD.gdb/a00000003.gdbindexes deleted file mode 100644 index 58df68d5..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000003.gdbindexes and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000003.gdbtable b/fme/GDB/ASIT-VD.gdb/a00000003.gdbtable deleted file mode 100644 index 41a116dd..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000003.gdbtable and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000003.gdbtablx b/fme/GDB/ASIT-VD.gdb/a00000003.gdbtablx deleted file mode 100644 index 2f80ed4f..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000003.gdbtablx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000004.CatItemsByPhysicalName.atx b/fme/GDB/ASIT-VD.gdb/a00000004.CatItemsByPhysicalName.atx deleted file mode 100644 index 433bd9bb..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000004.CatItemsByPhysicalName.atx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000004.CatItemsByType.atx b/fme/GDB/ASIT-VD.gdb/a00000004.CatItemsByType.atx deleted file mode 100644 index c2511740..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000004.CatItemsByType.atx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000004.FDO_UUID.atx b/fme/GDB/ASIT-VD.gdb/a00000004.FDO_UUID.atx deleted file mode 100644 index a3f787de..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000004.FDO_UUID.atx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000004.freelist b/fme/GDB/ASIT-VD.gdb/a00000004.freelist deleted file mode 100644 index 8f282acf..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000004.freelist and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000004.gdbindexes 
b/fme/GDB/ASIT-VD.gdb/a00000004.gdbindexes deleted file mode 100644 index a4f334d7..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000004.gdbindexes and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000004.gdbtable b/fme/GDB/ASIT-VD.gdb/a00000004.gdbtable deleted file mode 100644 index 979fa9e1..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000004.gdbtable and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000004.gdbtablx b/fme/GDB/ASIT-VD.gdb/a00000004.gdbtablx deleted file mode 100644 index 2251b4bf..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000004.gdbtablx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000004.spx b/fme/GDB/ASIT-VD.gdb/a00000004.spx deleted file mode 100644 index fd6dc13d..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000004.spx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000005.CatItemTypesByName.atx b/fme/GDB/ASIT-VD.gdb/a00000005.CatItemTypesByName.atx deleted file mode 100644 index f9ec6abf..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000005.CatItemTypesByName.atx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000005.CatItemTypesByParentTypeID.atx b/fme/GDB/ASIT-VD.gdb/a00000005.CatItemTypesByParentTypeID.atx deleted file mode 100644 index 09451e85..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000005.CatItemTypesByParentTypeID.atx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000005.CatItemTypesByUUID.atx b/fme/GDB/ASIT-VD.gdb/a00000005.CatItemTypesByUUID.atx deleted file mode 100644 index 4afa2fcd..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000005.CatItemTypesByUUID.atx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000005.gdbindexes b/fme/GDB/ASIT-VD.gdb/a00000005.gdbindexes deleted file mode 100644 index bc887093..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000005.gdbindexes and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000005.gdbtable b/fme/GDB/ASIT-VD.gdb/a00000005.gdbtable deleted file mode 100644 index 63d1aeb1..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000005.gdbtable and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000005.gdbtablx b/fme/GDB/ASIT-VD.gdb/a00000005.gdbtablx deleted file mode 100644 index e4b70dc6..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000005.gdbtablx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000006.CatRelsByDestinationID.atx b/fme/GDB/ASIT-VD.gdb/a00000006.CatRelsByDestinationID.atx deleted file mode 100644 index 74bb187b..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000006.CatRelsByDestinationID.atx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000006.CatRelsByOriginID.atx b/fme/GDB/ASIT-VD.gdb/a00000006.CatRelsByOriginID.atx deleted file mode 100644 index 0c1d5105..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000006.CatRelsByOriginID.atx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000006.CatRelsByType.atx b/fme/GDB/ASIT-VD.gdb/a00000006.CatRelsByType.atx deleted file mode 100644 index 391b31b6..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000006.CatRelsByType.atx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000006.FDO_UUID.atx b/fme/GDB/ASIT-VD.gdb/a00000006.FDO_UUID.atx deleted file mode 100644 index dec9bfb1..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000006.FDO_UUID.atx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000006.freelist b/fme/GDB/ASIT-VD.gdb/a00000006.freelist deleted file mode 100644 index e8d16899..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000006.freelist and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000006.gdbindexes 
b/fme/GDB/ASIT-VD.gdb/a00000006.gdbindexes deleted file mode 100644 index c608a88b..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000006.gdbindexes and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000006.gdbtable b/fme/GDB/ASIT-VD.gdb/a00000006.gdbtable deleted file mode 100644 index 084ca8c4..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000006.gdbtable and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000006.gdbtablx b/fme/GDB/ASIT-VD.gdb/a00000006.gdbtablx deleted file mode 100644 index e897ecd6..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000006.gdbtablx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000007.CatRelTypesByBackwardLabel.atx b/fme/GDB/ASIT-VD.gdb/a00000007.CatRelTypesByBackwardLabel.atx deleted file mode 100644 index 96bb2941..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000007.CatRelTypesByBackwardLabel.atx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000007.CatRelTypesByDestItemTypeID.atx b/fme/GDB/ASIT-VD.gdb/a00000007.CatRelTypesByDestItemTypeID.atx deleted file mode 100644 index ed9036c3..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000007.CatRelTypesByDestItemTypeID.atx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000007.CatRelTypesByForwardLabel.atx b/fme/GDB/ASIT-VD.gdb/a00000007.CatRelTypesByForwardLabel.atx deleted file mode 100644 index 2f7867c6..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000007.CatRelTypesByForwardLabel.atx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000007.CatRelTypesByName.atx b/fme/GDB/ASIT-VD.gdb/a00000007.CatRelTypesByName.atx deleted file mode 100644 index 36f59f3d..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000007.CatRelTypesByName.atx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000007.CatRelTypesByOriginItemTypeID.atx b/fme/GDB/ASIT-VD.gdb/a00000007.CatRelTypesByOriginItemTypeID.atx deleted file mode 100644 index 8f009ef3..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000007.CatRelTypesByOriginItemTypeID.atx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000007.CatRelTypesByUUID.atx b/fme/GDB/ASIT-VD.gdb/a00000007.CatRelTypesByUUID.atx deleted file mode 100644 index e3fc4413..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000007.CatRelTypesByUUID.atx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000007.gdbindexes b/fme/GDB/ASIT-VD.gdb/a00000007.gdbindexes deleted file mode 100644 index 2a98c93a..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000007.gdbindexes and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000007.gdbtable b/fme/GDB/ASIT-VD.gdb/a00000007.gdbtable deleted file mode 100644 index becb7ef1..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000007.gdbtable and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000007.gdbtablx b/fme/GDB/ASIT-VD.gdb/a00000007.gdbtablx deleted file mode 100644 index 5900a7f7..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000007.gdbtablx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000009.gdbindexes b/fme/GDB/ASIT-VD.gdb/a00000009.gdbindexes deleted file mode 100644 index c9d0caa2..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000009.gdbindexes and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000009.gdbtable b/fme/GDB/ASIT-VD.gdb/a00000009.gdbtable deleted file mode 100644 index 42fd8142..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000009.gdbtable and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000009.gdbtablx b/fme/GDB/ASIT-VD.gdb/a00000009.gdbtablx deleted file mode 100644 index 1a74b2f9..00000000 Binary files 
a/fme/GDB/ASIT-VD.gdb/a00000009.gdbtablx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000009.spx b/fme/GDB/ASIT-VD.gdb/a00000009.spx deleted file mode 100644 index aa9852f2..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000009.spx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000a.gdbindexes b/fme/GDB/ASIT-VD.gdb/a0000000a.gdbindexes deleted file mode 100644 index c9d0caa2..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000a.gdbindexes and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000a.gdbtable b/fme/GDB/ASIT-VD.gdb/a0000000a.gdbtable deleted file mode 100644 index 8b84faf8..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000a.gdbtable and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000a.gdbtablx b/fme/GDB/ASIT-VD.gdb/a0000000a.gdbtablx deleted file mode 100644 index 06e6e489..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000a.gdbtablx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000a.spx b/fme/GDB/ASIT-VD.gdb/a0000000a.spx deleted file mode 100644 index 6d236bde..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000a.spx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000b.gdbindexes b/fme/GDB/ASIT-VD.gdb/a0000000b.gdbindexes deleted file mode 100644 index c9d0caa2..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000b.gdbindexes and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000b.gdbtable b/fme/GDB/ASIT-VD.gdb/a0000000b.gdbtable deleted file mode 100644 index 6e536cbb..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000b.gdbtable and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000b.gdbtablx b/fme/GDB/ASIT-VD.gdb/a0000000b.gdbtablx deleted file mode 100644 index d3f695ce..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000b.gdbtablx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000b.spx b/fme/GDB/ASIT-VD.gdb/a0000000b.spx deleted file mode 100644 index 278b5b5c..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000b.spx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000c.gdbindexes b/fme/GDB/ASIT-VD.gdb/a0000000c.gdbindexes deleted file mode 100644 index c9d0caa2..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000c.gdbindexes and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000c.gdbtable b/fme/GDB/ASIT-VD.gdb/a0000000c.gdbtable deleted file mode 100644 index fb0fc046..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000c.gdbtable and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000c.gdbtablx b/fme/GDB/ASIT-VD.gdb/a0000000c.gdbtablx deleted file mode 100644 index f0d07ffa..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000c.gdbtablx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000c.spx b/fme/GDB/ASIT-VD.gdb/a0000000c.spx deleted file mode 100644 index 1b442105..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000c.spx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000d.gdbindexes b/fme/GDB/ASIT-VD.gdb/a0000000d.gdbindexes deleted file mode 100644 index c9d0caa2..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000d.gdbindexes and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000d.gdbtable b/fme/GDB/ASIT-VD.gdb/a0000000d.gdbtable deleted file mode 100644 index 9f88ed38..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000d.gdbtable and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000d.gdbtablx b/fme/GDB/ASIT-VD.gdb/a0000000d.gdbtablx deleted file mode 100644 index d315ac43..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000d.gdbtablx and /dev/null differ diff --git 
a/fme/GDB/ASIT-VD.gdb/a0000000d.spx b/fme/GDB/ASIT-VD.gdb/a0000000d.spx deleted file mode 100644 index adc5c641..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000d.spx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000e.gdbindexes b/fme/GDB/ASIT-VD.gdb/a0000000e.gdbindexes deleted file mode 100644 index c9d0caa2..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000e.gdbindexes and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000e.gdbtable b/fme/GDB/ASIT-VD.gdb/a0000000e.gdbtable deleted file mode 100644 index 92080b73..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000e.gdbtable and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000e.gdbtablx b/fme/GDB/ASIT-VD.gdb/a0000000e.gdbtablx deleted file mode 100644 index be89b288..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000e.gdbtablx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000e.spx b/fme/GDB/ASIT-VD.gdb/a0000000e.spx deleted file mode 100644 index 5ca7ac59..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000e.spx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000f.gdbindexes b/fme/GDB/ASIT-VD.gdb/a0000000f.gdbindexes deleted file mode 100644 index c9d0caa2..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000f.gdbindexes and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000f.gdbtable b/fme/GDB/ASIT-VD.gdb/a0000000f.gdbtable deleted file mode 100644 index 43c0efe8..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000f.gdbtable and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000f.gdbtablx b/fme/GDB/ASIT-VD.gdb/a0000000f.gdbtablx deleted file mode 100644 index e42f3296..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000f.gdbtablx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a0000000f.spx b/fme/GDB/ASIT-VD.gdb/a0000000f.spx deleted file mode 100644 index b8757238..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a0000000f.spx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000010.gdbindexes b/fme/GDB/ASIT-VD.gdb/a00000010.gdbindexes deleted file mode 100644 index c9d0caa2..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000010.gdbindexes and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000010.gdbtable b/fme/GDB/ASIT-VD.gdb/a00000010.gdbtable deleted file mode 100644 index ee6750fa..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000010.gdbtable and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000010.gdbtablx b/fme/GDB/ASIT-VD.gdb/a00000010.gdbtablx deleted file mode 100644 index a47ca10c..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000010.gdbtablx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000010.spx b/fme/GDB/ASIT-VD.gdb/a00000010.spx deleted file mode 100644 index 171b66f5..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000010.spx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000011.gdbindexes b/fme/GDB/ASIT-VD.gdb/a00000011.gdbindexes deleted file mode 100644 index c9d0caa2..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000011.gdbindexes and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000011.gdbtable b/fme/GDB/ASIT-VD.gdb/a00000011.gdbtable deleted file mode 100644 index 2fdb0c0b..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000011.gdbtable and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000011.gdbtablx b/fme/GDB/ASIT-VD.gdb/a00000011.gdbtablx deleted file mode 100644 index 99dcee23..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000011.gdbtablx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000011.spx b/fme/GDB/ASIT-VD.gdb/a00000011.spx deleted file mode 100644 index 
b2071ef0..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000011.spx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000013.freelist b/fme/GDB/ASIT-VD.gdb/a00000013.freelist deleted file mode 100644 index e060cf28..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000013.freelist and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000013.gdbindexes b/fme/GDB/ASIT-VD.gdb/a00000013.gdbindexes deleted file mode 100644 index 0449ced6..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000013.gdbindexes and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000013.gdbtable b/fme/GDB/ASIT-VD.gdb/a00000013.gdbtable deleted file mode 100644 index 42fa0672..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000013.gdbtable and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/a00000013.gdbtablx b/fme/GDB/ASIT-VD.gdb/a00000013.gdbtablx deleted file mode 100644 index 18fd78cd..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/a00000013.gdbtablx and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/gdb b/fme/GDB/ASIT-VD.gdb/gdb deleted file mode 100644 index a786e127..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/gdb and /dev/null differ diff --git a/fme/GDB/ASIT-VD.gdb/timestamps b/fme/GDB/ASIT-VD.gdb/timestamps deleted file mode 100644 index 6243a8a4..00000000 Binary files a/fme/GDB/ASIT-VD.gdb/timestamps and /dev/null differ diff --git a/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.dbf b/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.dbf deleted file mode 100644 index 80e5efc3..00000000 Binary files a/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.dbf and /dev/null differ diff --git a/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.prj b/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.prj deleted file mode 100644 index 2bcfe09d..00000000 --- a/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.prj +++ /dev/null @@ -1 +0,0 @@ -PROJCS["CH1903+_LV95",GEOGCS["GCS_CH1903+",DATUM["D_CH1903+",SPHEROID["Bessel_1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Hotine_Oblique_Mercator_Azimuth_Center"],PARAMETER["False_Easting",2600000.0],PARAMETER["False_Northing",1200000.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Azimuth",90.0],PARAMETER["Longitude_Of_Center",7.439583333333333],PARAMETER["Latitude_Of_Center",46.95240555555556],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.sbn b/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.sbn deleted file mode 100644 index 81036f59..00000000 Binary files a/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.sbn and /dev/null differ diff --git a/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.sbx b/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.sbx deleted file mode 100644 index 73c69c1d..00000000 Binary files a/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.sbx and /dev/null differ diff --git a/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.shp b/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.shp deleted file mode 100644 index 510565da..00000000 Binary files a/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.shp and /dev/null differ diff --git a/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.shp.xml b/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.shp.xml deleted file mode 100644 index 3dd18f04..00000000 --- a/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.shp.xml +++ /dev/null @@ -1,2 +0,0 @@ - -20160913080552001.0ISO 19139 Metadata Implementation SpecificationFALSEMN95_DGMR_TPR_RC_ARRDT0020.000file://\\T918044\E$\DATA\Telechargement_libre\dgmr\DGMR_TPR_RC_ARRDT\MN95_DGMR_TPR_RC_ARRDT.shpLocal Area NetworkProjectedGCS_CH1903+Linear Unit: Meter (1.000000)CH1903+_LV95<ProjectedCoordinateSystem xsi:type='typens:ProjectedCoordinateSystem' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xmlns:xs='http://www.w3.org/2001/XMLSchema' 
xmlns:typens='http://www.esri.com/schemas/ArcGIS/10.1'><WKT>PROJCS[&quot;CH1903+_LV95&quot;,GEOGCS[&quot;GCS_CH1903+&quot;,DATUM[&quot;D_CH1903+&quot;,SPHEROID[&quot;Bessel_1841&quot;,6377397.155,299.1528128]],PRIMEM[&quot;Greenwich&quot;,0.0],UNIT[&quot;Degree&quot;,0.0174532925199433]],PROJECTION[&quot;Hotine_Oblique_Mercator_Azimuth_Center&quot;],PARAMETER[&quot;False_Easting&quot;,2600000.0],PARAMETER[&quot;False_Northing&quot;,1200000.0],PARAMETER[&quot;Scale_Factor&quot;,1.0],PARAMETER[&quot;Azimuth&quot;,90.0],PARAMETER[&quot;Longitude_Of_Center&quot;,7.439583333333333],PARAMETER[&quot;Latitude_Of_Center&quot;,46.95240555555556],UNIT[&quot;Meter&quot;,1.0],AUTHORITY[&quot;EPSG&quot;,2056]]</WKT><XOrigin>-27386400</XOrigin><YOrigin>-32067900</YOrigin><XYScale>140996569.55187955</XYScale><ZOrigin>-100000</ZOrigin><ZScale>10000</ZScale><MOrigin>-100000</MOrigin><MScale>10000</MScale><XYTolerance>0.001</XYTolerance><ZTolerance>0.001</ZTolerance><MTolerance>0.001</MTolerance><HighPrecision>true</HighPrecision><WKID>2056</WKID><LatestWKID>2056</LatestWKID></ProjectedCoordinateSystem>CopyFeatures "Connexions aux bases de données\Connexion dinf_ims-sde_prod-slv1729p.sde\DGMR.MN95_DGMR_TPR_RC_ARRDT" E:\DATA\Telechargement_libre\dgmr\DGMR_TPR_RC_ARRDT\MN95_DGMR_TPR_RC_ARRDT.shp # 0 0 020160913080552002016091308055200Microsoft Windows 7 Version 6.1 (Build 7601) Service Pack 1; Esri ArcGIS 10.1.1.3143MN95_DGMR_TPR_RC_ARRDTFichier de formes0.000datasetEPSG7.11.20SimpleFALSE0FALSEFALSEMN95_DGMR_TPR_RC_ARRDTFeature Class0FIDFIDOID400Internal feature number.EsriSequential unique whole numbers that are automatically generated.ShapeShapeGeometry000Feature geometry.EsriCoordinates defining the features.ARRDTARRDTString800SHAPE_AREASHAPE_AREADouble1900Area of feature in internal units squared.EsriPositive real numbers that are automatically generated.SHAPE_LENSHAPE_LENDouble190020160913 diff --git a/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.shx b/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.shx deleted file mode 100644 index 57af8ed7..00000000 Binary files a/fme/SHP/MN95_DGMR_TPR_RC_ARRDT.shx and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_CANTON.dbf b/fme/SHP/MN95_LAD_TPR_CANTON.dbf deleted file mode 100644 index 156e6b45..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_CANTON.dbf and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_CANTON.prj b/fme/SHP/MN95_LAD_TPR_CANTON.prj deleted file mode 100644 index 2bcfe09d..00000000 --- a/fme/SHP/MN95_LAD_TPR_CANTON.prj +++ /dev/null @@ -1 +0,0 @@ -PROJCS["CH1903+_LV95",GEOGCS["GCS_CH1903+",DATUM["D_CH1903+",SPHEROID["Bessel_1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Hotine_Oblique_Mercator_Azimuth_Center"],PARAMETER["False_Easting",2600000.0],PARAMETER["False_Northing",1200000.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Azimuth",90.0],PARAMETER["Longitude_Of_Center",7.439583333333333],PARAMETER["Latitude_Of_Center",46.95240555555556],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/fme/SHP/MN95_LAD_TPR_CANTON.sbn b/fme/SHP/MN95_LAD_TPR_CANTON.sbn deleted file mode 100644 index 4c6f4ce4..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_CANTON.sbn and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_CANTON.sbx b/fme/SHP/MN95_LAD_TPR_CANTON.sbx deleted file mode 100644 index 0f88f1c3..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_CANTON.sbx and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_CANTON.shp b/fme/SHP/MN95_LAD_TPR_CANTON.shp deleted file mode 100644 index 6338797f..00000000 Binary files 
a/fme/SHP/MN95_LAD_TPR_CANTON.shp and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_CANTON.shp.xml b/fme/SHP/MN95_LAD_TPR_CANTON.shp.xml deleted file mode 100644 index 1ecbea9c..00000000 --- a/fme/SHP/MN95_LAD_TPR_CANTON.shp.xml +++ /dev/null @@ -1,2 +0,0 @@ - -20170109155447001.0ISO 19139 Metadata Implementation SpecificationFALSEMN95_LAD_TPR_CANTON0020.000file://\\T918044\E$\DATA\Telechargement_libre\cop\COP_LAD_TPR_XXX\SHP\MN95\MN95_LAD_TPR_CANTON.shpLocal Area NetworkProjectedGCS_CH1903+Linear Unit: Meter (1.000000)CH1903+_LV95<ProjectedCoordinateSystem xsi:type='typens:ProjectedCoordinateSystem' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xmlns:xs='http://www.w3.org/2001/XMLSchema' xmlns:typens='http://www.esri.com/schemas/ArcGIS/10.1'><WKT>PROJCS[&quot;CH1903+_LV95&quot;,GEOGCS[&quot;GCS_CH1903+&quot;,DATUM[&quot;D_CH1903+&quot;,SPHEROID[&quot;Bessel_1841&quot;,6377397.155,299.1528128]],PRIMEM[&quot;Greenwich&quot;,0.0],UNIT[&quot;Degree&quot;,0.0174532925199433]],PROJECTION[&quot;Hotine_Oblique_Mercator_Azimuth_Center&quot;],PARAMETER[&quot;False_Easting&quot;,2600000.0],PARAMETER[&quot;False_Northing&quot;,1200000.0],PARAMETER[&quot;Scale_Factor&quot;,1.0],PARAMETER[&quot;Azimuth&quot;,90.0],PARAMETER[&quot;Longitude_Of_Center&quot;,7.439583333333333],PARAMETER[&quot;Latitude_Of_Center&quot;,46.95240555555556],UNIT[&quot;Meter&quot;,1.0],AUTHORITY[&quot;EPSG&quot;,2056]]</WKT><XOrigin>-27386400</XOrigin><YOrigin>-32067900</YOrigin><XYScale>140996569.55187955</XYScale><ZOrigin>-100000</ZOrigin><ZScale>10000</ZScale><MOrigin>-100000</MOrigin><MScale>10000</MScale><XYTolerance>0.001</XYTolerance><ZTolerance>0.001</ZTolerance><MTolerance>0.001</MTolerance><HighPrecision>true</HighPrecision><WKID>2056</WKID><LatestWKID>2056</LatestWKID></ProjectedCoordinateSystem>CopyFeatures "Connexions aux bases de données\Connexion dinf_ims-sde_prod-slv1729p.sde\COP.MN95_LAD_TPR_CANTON" E:\DATA\Telechargement_libre\cop\COP_LAD_TPR_XXX\SHP\MN95\MN95_LAD_TPR_CANTON.shp # 0 0 020170109155447002017010915544700Microsoft Windows 7 Version 6.1 (Build 7601) Service Pack 1; Esri ArcGIS 10.1.1.3143MN95_LAD_TPR_CANTONFichier de formes0.000datasetEPSG7.11.20SimpleFALSE0FALSEFALSEMN95_LAD_TPR_CANTONFeature Class0FIDFIDOID400Internal feature number.EsriSequential unique whole numbers that are automatically generated.ShapeShapeGeometry000Feature geometry.EsriCoordinates defining the features.SIGLE_CANTSIGLE_CANTString300NO_CANTONNO_CANTONDouble10100NOM_LANG_ONOM_LANG_OString1700NOM_LANG_1NOM_LANG_1String1700NOM_FRANCANOM_FRANCAString1900NOM_MAJNOM_MAJString3200NOM_MINNOM_MINString3200NO_CHEF_LINO_CHEF_LIDouble10100PERIMETREPERIMETREDouble191815SURFACESURFACEDouble191815SHAPE_AREASHAPE_AREADouble1900Area of feature in internal units squared.EsriPositive real numbers that are automatically generated.SHAPE_LENSHAPE_LENDouble190020170109 diff --git a/fme/SHP/MN95_LAD_TPR_CANTON.shx b/fme/SHP/MN95_LAD_TPR_CANTON.shx deleted file mode 100644 index 9a385b26..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_CANTON.shx and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_COMMUNE.dbf b/fme/SHP/MN95_LAD_TPR_COMMUNE.dbf deleted file mode 100644 index 105af858..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_COMMUNE.dbf and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_COMMUNE.prj b/fme/SHP/MN95_LAD_TPR_COMMUNE.prj deleted file mode 100644 index 2bcfe09d..00000000 --- a/fme/SHP/MN95_LAD_TPR_COMMUNE.prj +++ /dev/null @@ -1 +0,0 @@ 
-PROJCS["CH1903+_LV95",GEOGCS["GCS_CH1903+",DATUM["D_CH1903+",SPHEROID["Bessel_1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Hotine_Oblique_Mercator_Azimuth_Center"],PARAMETER["False_Easting",2600000.0],PARAMETER["False_Northing",1200000.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Azimuth",90.0],PARAMETER["Longitude_Of_Center",7.439583333333333],PARAMETER["Latitude_Of_Center",46.95240555555556],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/fme/SHP/MN95_LAD_TPR_COMMUNE.sbn b/fme/SHP/MN95_LAD_TPR_COMMUNE.sbn deleted file mode 100644 index 94983fb8..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_COMMUNE.sbn and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_COMMUNE.sbx b/fme/SHP/MN95_LAD_TPR_COMMUNE.sbx deleted file mode 100644 index cb0a186b..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_COMMUNE.sbx and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_COMMUNE.shp b/fme/SHP/MN95_LAD_TPR_COMMUNE.shp deleted file mode 100644 index 7a5ad754..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_COMMUNE.shp and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_COMMUNE.shp.xml b/fme/SHP/MN95_LAD_TPR_COMMUNE.shp.xml deleted file mode 100644 index c22a823f..00000000 --- a/fme/SHP/MN95_LAD_TPR_COMMUNE.shp.xml +++ /dev/null @@ -1,2 +0,0 @@ - -20170109155449001.0ISO 19139 Metadata Implementation SpecificationFALSEMN95_LAD_TPR_COMMUNE0020.000file://\\T918044\E$\DATA\Telechargement_libre\cop\COP_LAD_TPR_XXX\SHP\MN95\MN95_LAD_TPR_COMMUNE.shpLocal Area NetworkProjectedGCS_CH1903+Linear Unit: Meter (1.000000)CH1903+_LV95<ProjectedCoordinateSystem xsi:type='typens:ProjectedCoordinateSystem' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xmlns:xs='http://www.w3.org/2001/XMLSchema' xmlns:typens='http://www.esri.com/schemas/ArcGIS/10.1'><WKT>PROJCS[&quot;CH1903+_LV95&quot;,GEOGCS[&quot;GCS_CH1903+&quot;,DATUM[&quot;D_CH1903+&quot;,SPHEROID[&quot;Bessel_1841&quot;,6377397.155,299.1528128]],PRIMEM[&quot;Greenwich&quot;,0.0],UNIT[&quot;Degree&quot;,0.0174532925199433]],PROJECTION[&quot;Hotine_Oblique_Mercator_Azimuth_Center&quot;],PARAMETER[&quot;False_Easting&quot;,2600000.0],PARAMETER[&quot;False_Northing&quot;,1200000.0],PARAMETER[&quot;Scale_Factor&quot;,1.0],PARAMETER[&quot;Azimuth&quot;,90.0],PARAMETER[&quot;Longitude_Of_Center&quot;,7.439583333333333],PARAMETER[&quot;Latitude_Of_Center&quot;,46.95240555555556],UNIT[&quot;Meter&quot;,1.0],AUTHORITY[&quot;EPSG&quot;,2056]]</WKT><XOrigin>-27386400</XOrigin><YOrigin>-32067900</YOrigin><XYScale>140996569.55187955</XYScale><ZOrigin>-100000</ZOrigin><ZScale>10000</ZScale><MOrigin>-100000</MOrigin><MScale>10000</MScale><XYTolerance>0.001</XYTolerance><ZTolerance>0.001</ZTolerance><MTolerance>0.001</MTolerance><HighPrecision>true</HighPrecision><WKID>2056</WKID><LatestWKID>2056</LatestWKID></ProjectedCoordinateSystem>CopyFeatures "Connexions aux bases de données\Connexion dinf_ims-sde_prod-slv1729p.sde\COP.MN95_LAD_TPR_COMMUNE" E:\DATA\Telechargement_libre\cop\COP_LAD_TPR_XXX\SHP\MN95\MN95_LAD_TPR_COMMUNE.shp # 0 0 020170109155449002017010915544900Microsoft Windows 7 Version 6.1 (Build 7601) Service Pack 1; Esri ArcGIS 10.1.1.3143MN95_LAD_TPR_COMMUNEFichier de formes0.000datasetEPSG7.11.20SimpleFALSE0FALSEFALSEMN95_LAD_TPR_COMMUNEFeature Class0FIDFIDOID400Internal feature number.EsriSequential unique whole numbers that are automatically generated.ShapeShapeGeometry000Feature geometry.EsriCoordinates defining the 
features.NO_COM_FEDNO_COM_FEDDouble10100NO_COM_CANNO_COM_CANDouble10100NOM_MAJNOM_MAJString3200NOM_MINNOM_MINString3200N_A_MAJ_15N_A_MAJ_15String1600N_A_MIN_15N_A_MIN_15String1600NO_DIS_CANNO_DIS_CANDouble10100NO_DIS_FEDNO_DIS_FEDDouble10100NO_CANTONNO_CANTONDouble10100NO_SECTEURNO_SECTEURDouble10100NOM_MAJ_OFNOM_MAJ_OFString3200NOM_MIN_OFNOM_MIN_OFString3200NOM_MAJ_15NOM_MAJ_15String1600NOM_MIN_15NOM_MIN_15String1600PERIMETREPERIMETREDouble191815SURFACESURFACEDouble191815SHAPE_AREASHAPE_AREADouble1900Area of feature in internal units squared.EsriPositive real numbers that are automatically generated.SHAPE_LENSHAPE_LENDouble190020170109 diff --git a/fme/SHP/MN95_LAD_TPR_COMMUNE.shx b/fme/SHP/MN95_LAD_TPR_COMMUNE.shx deleted file mode 100644 index 0f108d1c..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_COMMUNE.shx and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_DISTRICT.dbf b/fme/SHP/MN95_LAD_TPR_DISTRICT.dbf deleted file mode 100644 index 1bc93d12..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_DISTRICT.dbf and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_DISTRICT.prj b/fme/SHP/MN95_LAD_TPR_DISTRICT.prj deleted file mode 100644 index 2bcfe09d..00000000 --- a/fme/SHP/MN95_LAD_TPR_DISTRICT.prj +++ /dev/null @@ -1 +0,0 @@ -PROJCS["CH1903+_LV95",GEOGCS["GCS_CH1903+",DATUM["D_CH1903+",SPHEROID["Bessel_1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Hotine_Oblique_Mercator_Azimuth_Center"],PARAMETER["False_Easting",2600000.0],PARAMETER["False_Northing",1200000.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Azimuth",90.0],PARAMETER["Longitude_Of_Center",7.439583333333333],PARAMETER["Latitude_Of_Center",46.95240555555556],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/fme/SHP/MN95_LAD_TPR_DISTRICT.sbn b/fme/SHP/MN95_LAD_TPR_DISTRICT.sbn deleted file mode 100644 index 373e5771..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_DISTRICT.sbn and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_DISTRICT.sbx b/fme/SHP/MN95_LAD_TPR_DISTRICT.sbx deleted file mode 100644 index 82f43cb1..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_DISTRICT.sbx and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_DISTRICT.shp b/fme/SHP/MN95_LAD_TPR_DISTRICT.shp deleted file mode 100644 index 01a03cd0..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_DISTRICT.shp and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_DISTRICT.shp.xml b/fme/SHP/MN95_LAD_TPR_DISTRICT.shp.xml deleted file mode 100644 index 174469b6..00000000 --- a/fme/SHP/MN95_LAD_TPR_DISTRICT.shp.xml +++ /dev/null @@ -1,2 +0,0 @@ - -20170109155450001.0ISO 19139 Metadata Implementation SpecificationFALSEMN95_LAD_TPR_DISTRICT0020.000file://\\T918044\E$\DATA\Telechargement_libre\cop\COP_LAD_TPR_XXX\SHP\MN95\MN95_LAD_TPR_DISTRICT.shpLocal Area NetworkProjectedGCS_CH1903+Linear Unit: Meter (1.000000)CH1903+_LV95<ProjectedCoordinateSystem xsi:type='typens:ProjectedCoordinateSystem' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xmlns:xs='http://www.w3.org/2001/XMLSchema' 
xmlns:typens='http://www.esri.com/schemas/ArcGIS/10.1'><WKT>PROJCS[&quot;CH1903+_LV95&quot;,GEOGCS[&quot;GCS_CH1903+&quot;,DATUM[&quot;D_CH1903+&quot;,SPHEROID[&quot;Bessel_1841&quot;,6377397.155,299.1528128]],PRIMEM[&quot;Greenwich&quot;,0.0],UNIT[&quot;Degree&quot;,0.0174532925199433]],PROJECTION[&quot;Hotine_Oblique_Mercator_Azimuth_Center&quot;],PARAMETER[&quot;False_Easting&quot;,2600000.0],PARAMETER[&quot;False_Northing&quot;,1200000.0],PARAMETER[&quot;Scale_Factor&quot;,1.0],PARAMETER[&quot;Azimuth&quot;,90.0],PARAMETER[&quot;Longitude_Of_Center&quot;,7.439583333333333],PARAMETER[&quot;Latitude_Of_Center&quot;,46.95240555555556],UNIT[&quot;Meter&quot;,1.0],AUTHORITY[&quot;EPSG&quot;,2056]]</WKT><XOrigin>-27386400</XOrigin><YOrigin>-32067900</YOrigin><XYScale>140996569.55187955</XYScale><ZOrigin>-100000</ZOrigin><ZScale>10000</ZScale><MOrigin>-100000</MOrigin><MScale>10000</MScale><XYTolerance>0.001</XYTolerance><ZTolerance>0.001</ZTolerance><MTolerance>0.001</MTolerance><HighPrecision>true</HighPrecision><WKID>2056</WKID><LatestWKID>2056</LatestWKID></ProjectedCoordinateSystem>CopyFeatures "Connexions aux bases de données\Connexion dinf_ims-sde_prod-slv1729p.sde\COP.MN95_LAD_TPR_DISTRICT" E:\DATA\Telechargement_libre\cop\COP_LAD_TPR_XXX\SHP\MN95\MN95_LAD_TPR_DISTRICT.shp # 0 0 020170109155450002017010915545000Microsoft Windows 7 Version 6.1 (Build 7601) Service Pack 1; Esri ArcGIS 10.1.1.3143MN95_LAD_TPR_DISTRICTFichier de formes0.000datasetEPSG7.11.20SimpleFALSE0FALSEFALSEMN95_LAD_TPR_DISTRICTFeature Class0FIDFIDOID400Internal feature number.EsriSequential unique whole numbers that are automatically generated.ShapeShapeGeometry000Feature geometry.EsriCoordinates defining the features.NO_DIS_FEDNO_DIS_FEDDouble10100NO_DIS_CANNO_DIS_CANDouble10100NOM_MAJNOM_MAJString3200NOM_MINNOM_MINString3200CODECODEString400CODE_AUTOCODE_AUTOString300NO_CANTONNO_CANTONDouble10100NOM_COMPL_NOM_COMPL_String2300NOM_COMPL1NOM_COMPL1String2300PERIMETREPERIMETREDouble191815SURFACESURFACEDouble191815SHAPE_AREASHAPE_AREADouble1900Area of feature in internal units squared.EsriPositive real numbers that are automatically generated.SHAPE_LENSHAPE_LENDouble190020170109 diff --git a/fme/SHP/MN95_LAD_TPR_DISTRICT.shx b/fme/SHP/MN95_LAD_TPR_DISTRICT.shx deleted file mode 100644 index 1ff97ac7..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_DISTRICT.shx and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_LAC.dbf b/fme/SHP/MN95_LAD_TPR_LAC.dbf deleted file mode 100644 index 68134ef0..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_LAC.dbf and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_LAC.prj b/fme/SHP/MN95_LAD_TPR_LAC.prj deleted file mode 100644 index 2bcfe09d..00000000 --- a/fme/SHP/MN95_LAD_TPR_LAC.prj +++ /dev/null @@ -1 +0,0 @@ -PROJCS["CH1903+_LV95",GEOGCS["GCS_CH1903+",DATUM["D_CH1903+",SPHEROID["Bessel_1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Hotine_Oblique_Mercator_Azimuth_Center"],PARAMETER["False_Easting",2600000.0],PARAMETER["False_Northing",1200000.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Azimuth",90.0],PARAMETER["Longitude_Of_Center",7.439583333333333],PARAMETER["Latitude_Of_Center",46.95240555555556],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/fme/SHP/MN95_LAD_TPR_LAC.sbn b/fme/SHP/MN95_LAD_TPR_LAC.sbn deleted file mode 100644 index 08cbae38..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_LAC.sbn and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_LAC.sbx b/fme/SHP/MN95_LAD_TPR_LAC.sbx deleted file mode 100644 
index b0b4e392..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_LAC.sbx and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_LAC.shp b/fme/SHP/MN95_LAD_TPR_LAC.shp deleted file mode 100644 index 340ddc7b..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_LAC.shp and /dev/null differ diff --git a/fme/SHP/MN95_LAD_TPR_LAC.shp.xml b/fme/SHP/MN95_LAD_TPR_LAC.shp.xml deleted file mode 100644 index 83e2c1dc..00000000 --- a/fme/SHP/MN95_LAD_TPR_LAC.shp.xml +++ /dev/null @@ -1,2 +0,0 @@ - -20170109155452001.0ISO 19139 Metadata Implementation SpecificationFALSEMN95_LAD_TPR_LAC0020.000file://\\T918044\E$\DATA\Telechargement_libre\cop\COP_LAD_TPR_XXX\SHP\MN95\MN95_LAD_TPR_LAC.shpLocal Area NetworkProjectedGCS_CH1903+Linear Unit: Meter (1.000000)CH1903+_LV95<ProjectedCoordinateSystem xsi:type='typens:ProjectedCoordinateSystem' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xmlns:xs='http://www.w3.org/2001/XMLSchema' xmlns:typens='http://www.esri.com/schemas/ArcGIS/10.1'><WKT>PROJCS[&quot;CH1903+_LV95&quot;,GEOGCS[&quot;GCS_CH1903+&quot;,DATUM[&quot;D_CH1903+&quot;,SPHEROID[&quot;Bessel_1841&quot;,6377397.155,299.1528128]],PRIMEM[&quot;Greenwich&quot;,0.0],UNIT[&quot;Degree&quot;,0.0174532925199433]],PROJECTION[&quot;Hotine_Oblique_Mercator_Azimuth_Center&quot;],PARAMETER[&quot;False_Easting&quot;,2600000.0],PARAMETER[&quot;False_Northing&quot;,1200000.0],PARAMETER[&quot;Scale_Factor&quot;,1.0],PARAMETER[&quot;Azimuth&quot;,90.0],PARAMETER[&quot;Longitude_Of_Center&quot;,7.439583333333333],PARAMETER[&quot;Latitude_Of_Center&quot;,46.95240555555556],UNIT[&quot;Meter&quot;,1.0],AUTHORITY[&quot;EPSG&quot;,2056]]</WKT><XOrigin>-27386400</XOrigin><YOrigin>-32067900</YOrigin><XYScale>140996569.55187955</XYScale><ZOrigin>-100000</ZOrigin><ZScale>10000</ZScale><MOrigin>-100000</MOrigin><MScale>10000</MScale><XYTolerance>0.001</XYTolerance><ZTolerance>0.001</ZTolerance><MTolerance>0.001</MTolerance><HighPrecision>true</HighPrecision><WKID>2056</WKID><LatestWKID>2056</LatestWKID></ProjectedCoordinateSystem>CopyFeatures "Connexions aux bases de données\Connexion dinf_ims-sde_prod-slv1729p.sde\COP.MN95_LAD_TPR_LAC" E:\DATA\Telechargement_libre\cop\COP_LAD_TPR_XXX\SHP\MN95\MN95_LAD_TPR_LAC.shp # 0 0 020170109155452002017010915545200Microsoft Windows 7 Version 6.1 (Build 7601) Service Pack 1; Esri ArcGIS 10.1.1.3143MN95_LAD_TPR_LACFichier de formes0.000datasetEPSG7.11.20SimpleFALSE0FALSEFALSEMN95_LAD_TPR_LACFeature Class0FIDFIDOID400Internal feature number.EsriSequential unique whole numbers that are automatically generated.ShapeShapeGeometry000Feature geometry.EsriCoordinates defining the features.NOM_MAJNOM_MAJString3200NOM_MINNOM_MINString3200PERIMETREPERIMETREDouble191815SURFACESURFACEDouble191815SHAPE_AREASHAPE_AREADouble1900Area of feature in internal units squared.EsriPositive real numbers that are automatically generated.SHAPE_LENSHAPE_LENDouble190020170109 diff --git a/fme/SHP/MN95_LAD_TPR_LAC.shx b/fme/SHP/MN95_LAD_TPR_LAC.shx deleted file mode 100644 index 2234ba03..00000000 Binary files a/fme/SHP/MN95_LAD_TPR_LAC.shx and /dev/null differ diff --git a/fme/SHP/MN95_OAJE_TPR_INST_ACC.dbf b/fme/SHP/MN95_OAJE_TPR_INST_ACC.dbf deleted file mode 100644 index e370a345..00000000 Binary files a/fme/SHP/MN95_OAJE_TPR_INST_ACC.dbf and /dev/null differ diff --git a/fme/SHP/MN95_OAJE_TPR_INST_ACC.prj b/fme/SHP/MN95_OAJE_TPR_INST_ACC.prj deleted file mode 100644 index 2bcfe09d..00000000 --- a/fme/SHP/MN95_OAJE_TPR_INST_ACC.prj +++ /dev/null @@ -1 +0,0 @@ 
-PROJCS["CH1903+_LV95",GEOGCS["GCS_CH1903+",DATUM["D_CH1903+",SPHEROID["Bessel_1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Hotine_Oblique_Mercator_Azimuth_Center"],PARAMETER["False_Easting",2600000.0],PARAMETER["False_Northing",1200000.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Azimuth",90.0],PARAMETER["Longitude_Of_Center",7.439583333333333],PARAMETER["Latitude_Of_Center",46.95240555555556],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/fme/SHP/MN95_OAJE_TPR_INST_ACC.shp b/fme/SHP/MN95_OAJE_TPR_INST_ACC.shp deleted file mode 100644 index 6f98fa76..00000000 Binary files a/fme/SHP/MN95_OAJE_TPR_INST_ACC.shp and /dev/null differ diff --git a/fme/SHP/MN95_OAJE_TPR_INST_ACC.shp.xml b/fme/SHP/MN95_OAJE_TPR_INST_ACC.shp.xml deleted file mode 100644 index c1029004..00000000 --- a/fme/SHP/MN95_OAJE_TPR_INST_ACC.shp.xml +++ /dev/null @@ -1,2 +0,0 @@ - -20161014095608001.0ISO 19139 Metadata Implementation SpecificationFALSEMN95_OAJE_TPR_INST_ACC0020.000file://\\T918044\E$\DATA\Telechargement_libre\oaje\OAJE_TPR_XXX_ACC\MN95_OAJE_TPR_INST_ACC.shpLocal Area NetworkProjectedGCS_CH1903+Linear Unit: Meter (1.000000)CH1903+_LV95<ProjectedCoordinateSystem xsi:type='typens:ProjectedCoordinateSystem' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xmlns:xs='http://www.w3.org/2001/XMLSchema' xmlns:typens='http://www.esri.com/schemas/ArcGIS/10.1'><WKT>PROJCS[&quot;CH1903+_LV95&quot;,GEOGCS[&quot;GCS_CH1903+&quot;,DATUM[&quot;D_CH1903+&quot;,SPHEROID[&quot;Bessel_1841&quot;,6377397.155,299.1528128]],PRIMEM[&quot;Greenwich&quot;,0.0],UNIT[&quot;Degree&quot;,0.0174532925199433]],PROJECTION[&quot;Hotine_Oblique_Mercator_Azimuth_Center&quot;],PARAMETER[&quot;False_Easting&quot;,2600000.0],PARAMETER[&quot;False_Northing&quot;,1200000.0],PARAMETER[&quot;Scale_Factor&quot;,1.0],PARAMETER[&quot;Azimuth&quot;,90.0],PARAMETER[&quot;Longitude_Of_Center&quot;,7.439583333333333],PARAMETER[&quot;Latitude_Of_Center&quot;,46.95240555555556],UNIT[&quot;Meter&quot;,1.0],AUTHORITY[&quot;EPSG&quot;,2056]]</WKT><XOrigin>-27386400</XOrigin><YOrigin>-32067900</YOrigin><XYScale>140996569.55187955</XYScale><ZOrigin>-100000</ZOrigin><ZScale>10000</ZScale><MOrigin>-100000</MOrigin><MScale>10000</MScale><XYTolerance>0.001</XYTolerance><ZTolerance>0.001</ZTolerance><MTolerance>0.001</MTolerance><HighPrecision>true</HighPrecision><WKID>2056</WKID><LatestWKID>2056</LatestWKID></ProjectedCoordinateSystem>FeatureClassToFeatureClass "Connexions aux bases de données\Connexion dinf_ims-sde_prod-slv1729p.sde\OAJE.MN95_OAJE_TPR_INST_ACC" E:\DATA\Telechargement_libre\oaje\OAJE_TPR_XXX_ACC MN95_OAJE_TPR_INST_ACC.shp # "NOM "NOM" true true false 100 Text 0 0 ,First,#,Connexions aux bases de données\Connexion dinf_ims-sde_prod-slv1729p.sde\OAJE.MN95_OAJE_TPR_INST_ACC,NOM,-1,-1;PLAGE_OUVE "PLAGE_OUVE" true true false 60 Text 0 0 ,First,#,Connexions aux bases de données\Connexion dinf_ims-sde_prod-slv1729p.sde\OAJE.MN95_OAJE_TPR_INST_ACC,PLAGE_OUVERTURE,-1,-1;ADRESSE "ADRESSE" true true false 75 Text 0 0 ,First,#,Connexions aux bases de données\Connexion dinf_ims-sde_prod-slv1729p.sde\OAJE.MN95_OAJE_TPR_INST_ACC,ADRESSE,-1,-1;NPA "NPA" true true false 8 Double 15 31 ,First,#,Connexions aux bases de données\Connexion dinf_ims-sde_prod-slv1729p.sde\OAJE.MN95_OAJE_TPR_INST_ACC,NPA,-1,-1;LOCALITE "LOCALITE" true true false 30 Text 0 0 ,First,#,Connexions aux bases de données\Connexion dinf_ims-sde_prod-slv1729p.sde\OAJE.MN95_OAJE_TPR_INST_ACC,LOCALITE,-1,-1;RESEAU "RESEAU" true 
true false 20 Text 0 0 ,First,#,Connexions aux bases de données\Connexion dinf_ims-sde_prod-slv1729p.sde\OAJE.MN95_OAJE_TPR_INST_ACC,RESEAU,-1,-1;TELEPHONE "TELEPHONE" true true false 50 Text 0 0 ,First,#,Connexions aux bases de données\Connexion dinf_ims-sde_prod-slv1729p.sde\OAJE.MN95_OAJE_TPR_INST_ACC,TELEPHONE,-1,-1;FORME_JURI "FORME_JURI" true true false 25 Text 0 0 ,First,#,Connexions aux bases de données\Connexion dinf_ims-sde_prod-slv1729p.sde\OAJE.MN95_OAJE_TPR_INST_ACC,FORME_JURIDIQUE,-1,-1;PLAGE_AGE "PLAGE_AGE" true true false 50 Text 0 0 ,First,#,Connexions aux bases de données\Connexion dinf_ims-sde_prod-slv1729p.sde\OAJE.MN95_OAJE_TPR_INST_ACC,PLAGE_AGE,-1,-1;CAPACITE_T "CAPACITE_T" true true false 8 Double 15 31 ,First,#,Connexions aux bases de données\Connexion dinf_ims-sde_prod-slv1729p.sde\OAJE.MN95_OAJE_TPR_INST_ACC,CAPACITE_TOTALE,-1,-1" #20161014095608002016101409560800Microsoft Windows 7 Version 6.1 (Build 7601) Service Pack 1; Esri ArcGIS 10.1.1.3143MN95_OAJE_TPR_INST_ACCFichier de formes0.000datasetEPSG7.11.20SimpleFALSE0FALSEFALSEMN95_OAJE_TPR_INST_ACCFeature Class0FIDFIDOID400Internal feature number.EsriSequential unique whole numbers that are automatically generated.ShapeShapeGeometry000Feature geometry.EsriCoordinates defining the features.NOMNOMString10000PLAGE_OUVEPLAGE_OUVEString6000ADRESSEADRESSEString7500NPANPADouble191815LOCALITELOCALITEString3000RESEAURESEAUString2000TELEPHONETELEPHONEString5000FORME_JURIFORME_JURIString2500PLAGE_AGEPLAGE_AGEString5000CAPACITE_TCAPACITE_TDouble19181520161014 diff --git a/fme/SHP/MN95_OAJE_TPR_INST_ACC.shx b/fme/SHP/MN95_OAJE_TPR_INST_ACC.shx deleted file mode 100644 index 9962bd89..00000000 Binary files a/fme/SHP/MN95_OAJE_TPR_INST_ACC.shx and /dev/null differ diff --git a/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.dbf b/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.dbf deleted file mode 100644 index 831329ca..00000000 Binary files a/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.dbf and /dev/null differ diff --git a/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.prj b/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.prj deleted file mode 100644 index 2bcfe09d..00000000 --- a/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.prj +++ /dev/null @@ -1 +0,0 @@ -PROJCS["CH1903+_LV95",GEOGCS["GCS_CH1903+",DATUM["D_CH1903+",SPHEROID["Bessel_1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Hotine_Oblique_Mercator_Azimuth_Center"],PARAMETER["False_Easting",2600000.0],PARAMETER["False_Northing",1200000.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Azimuth",90.0],PARAMETER["Longitude_Of_Center",7.439583333333333],PARAMETER["Latitude_Of_Center",46.95240555555556],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.sbn b/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.sbn deleted file mode 100644 index 40c44733..00000000 Binary files a/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.sbn and /dev/null differ diff --git a/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.sbx b/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.sbx deleted file mode 100644 index 860d60a6..00000000 Binary files a/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.sbx and /dev/null differ diff --git a/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.shp b/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.shp deleted file mode 100644 index 00a21246..00000000 Binary files a/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.shp and /dev/null differ diff --git a/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.shp.xml b/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.shp.xml deleted file mode 100644 index 55387bd4..00000000 --- a/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.shp.xml +++ /dev/null @@ -1,2 +0,0 @@ - 
-20160819141341001.0ISO 19139 Metadata Implementation SpecificationFALSEMN95_OAJE_TPR_RESEAU_ACC0020.000file://\\T918044\E$\DATA\Telechargement_libre\oaje\OAJE_TPR_XXX_ACC\MN95_OAJE_TPR_RESEAU_ACC.shpLocal Area NetworkProjectedGCS_CH1903+Linear Unit: Meter (1.000000)CH1903+_LV95<ProjectedCoordinateSystem xsi:type='typens:ProjectedCoordinateSystem' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xmlns:xs='http://www.w3.org/2001/XMLSchema' xmlns:typens='http://www.esri.com/schemas/ArcGIS/10.1'><WKT>PROJCS[&quot;CH1903+_LV95&quot;,GEOGCS[&quot;GCS_CH1903+&quot;,DATUM[&quot;D_CH1903+&quot;,SPHEROID[&quot;Bessel_1841&quot;,6377397.155,299.1528128]],PRIMEM[&quot;Greenwich&quot;,0.0],UNIT[&quot;Degree&quot;,0.0174532925199433]],PROJECTION[&quot;Hotine_Oblique_Mercator_Azimuth_Center&quot;],PARAMETER[&quot;False_Easting&quot;,2600000.0],PARAMETER[&quot;False_Northing&quot;,1200000.0],PARAMETER[&quot;Scale_Factor&quot;,1.0],PARAMETER[&quot;Azimuth&quot;,90.0],PARAMETER[&quot;Longitude_Of_Center&quot;,7.439583333333333],PARAMETER[&quot;Latitude_Of_Center&quot;,46.95240555555556],UNIT[&quot;Meter&quot;,1.0],AUTHORITY[&quot;EPSG&quot;,2056]]</WKT><XOrigin>-27386400</XOrigin><YOrigin>-32067900</YOrigin><XYScale>140996569.55187955</XYScale><ZOrigin>-100000</ZOrigin><ZScale>10000</ZScale><MOrigin>-100000</MOrigin><MScale>10000</MScale><XYTolerance>0.001</XYTolerance><ZTolerance>0.001</ZTolerance><MTolerance>0.001</MTolerance><HighPrecision>true</HighPrecision><WKID>2056</WKID><LatestWKID>2056</LatestWKID></ProjectedCoordinateSystem>CopyFeatures "Connexions aux bases de données\Connexion dinf_ims-sde_prod-slv1729p.sde\OAJE.MN95_OAJE_TPR_RESEAU_ACC" E:\DATA\Telechargement_libre\oaje\OAJE_TPR_XXX_ACC\MN95_OAJE_TPR_RESEAU_ACC.shp # 0 0 020160819141341002016081914134100Microsoft Windows 7 Version 6.1 (Build 7601) Service Pack 1; Esri ArcGIS 10.1.1.3143MN95_OAJE_TPR_RESEAU_ACCFichier de formes0.000datasetEPSG7.11.20SimpleFALSE0FALSEFALSEMN95_OAJE_TPR_RESEAU_ACCFeature Class0FIDFIDOID400Internal feature number.EsriSequential unique whole numbers that are automatically generated.ShapeShapeGeometry000Feature geometry.EsriCoordinates defining the features.RESEAURESEAUString2000URLURLString20000SHAPE_AREASHAPE_AREADouble1900Area of feature in internal units squared.EsriPositive real numbers that are automatically generated.SHAPE_LENSHAPE_LENDouble190020160819 diff --git a/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.shx b/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.shx deleted file mode 100644 index ac00a037..00000000 Binary files a/fme/SHP/MN95_OAJE_TPR_RESEAU_ACC.shx and /dev/null differ diff --git a/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.dbf b/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.dbf deleted file mode 100644 index 447e5737..00000000 Binary files a/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.dbf and /dev/null differ diff --git a/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.prj b/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.prj deleted file mode 100644 index 2bcfe09d..00000000 --- a/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.prj +++ /dev/null @@ -1 +0,0 @@ -PROJCS["CH1903+_LV95",GEOGCS["GCS_CH1903+",DATUM["D_CH1903+",SPHEROID["Bessel_1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Hotine_Oblique_Mercator_Azimuth_Center"],PARAMETER["False_Easting",2600000.0],PARAMETER["False_Northing",1200000.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Azimuth",90.0],PARAMETER["Longitude_Of_Center",7.439583333333333],PARAMETER["Latitude_Of_Center",46.95240555555556],UNIT["Meter",1.0]] \ No newline at end of file diff --git 
a/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.sbn b/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.sbn deleted file mode 100644 index 3a5e8bc5..00000000 Binary files a/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.sbn and /dev/null differ diff --git a/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.sbx b/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.sbx deleted file mode 100644 index 5b2f8179..00000000 Binary files a/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.sbx and /dev/null differ diff --git a/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.shp b/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.shp deleted file mode 100644 index a314857d..00000000 Binary files a/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.shp and /dev/null differ diff --git a/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.shp.xml b/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.shp.xml deleted file mode 100644 index 3ce814d4..00000000 --- a/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.shp.xml +++ /dev/null @@ -1,2 +0,0 @@ - -20160913080425001.0ISO 19139 Metadata Implementation SpecificationFALSEMN95_SFFN_TPR_ARR_FORESTIER0020.000file://\\T918044\E$\DATA\Telechargement_libre\sffn\SFFN_TPR_ARR_FORESTIER\MN95_SFFN_TPR_ARR_FORESTIER.shpLocal Area NetworkProjectedGCS_CH1903+Linear Unit: Meter (1.000000)CH1903+_LV95<ProjectedCoordinateSystem xsi:type='typens:ProjectedCoordinateSystem' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xmlns:xs='http://www.w3.org/2001/XMLSchema' xmlns:typens='http://www.esri.com/schemas/ArcGIS/10.1'><WKT>PROJCS[&quot;CH1903+_LV95&quot;,GEOGCS[&quot;GCS_CH1903+&quot;,DATUM[&quot;D_CH1903+&quot;,SPHEROID[&quot;Bessel_1841&quot;,6377397.155,299.1528128]],PRIMEM[&quot;Greenwich&quot;,0.0],UNIT[&quot;Degree&quot;,0.0174532925199433]],PROJECTION[&quot;Hotine_Oblique_Mercator_Azimuth_Center&quot;],PARAMETER[&quot;False_Easting&quot;,2600000.0],PARAMETER[&quot;False_Northing&quot;,1200000.0],PARAMETER[&quot;Scale_Factor&quot;,1.0],PARAMETER[&quot;Azimuth&quot;,90.0],PARAMETER[&quot;Longitude_Of_Center&quot;,7.439583333333333],PARAMETER[&quot;Latitude_Of_Center&quot;,46.95240555555556],UNIT[&quot;Meter&quot;,1.0],AUTHORITY[&quot;EPSG&quot;,2056]]</WKT><XOrigin>-27386400</XOrigin><YOrigin>-32067900</YOrigin><XYScale>140996569.55187955</XYScale><ZOrigin>-100000</ZOrigin><ZScale>10000</ZScale><MOrigin>-100000</MOrigin><MScale>10000</MScale><XYTolerance>0.001</XYTolerance><ZTolerance>0.001</ZTolerance><MTolerance>0.001</MTolerance><HighPrecision>true</HighPrecision><WKID>2056</WKID><LatestWKID>2056</LatestWKID></ProjectedCoordinateSystem>CopyFeatures "Connexions aux bases de données\Connexion dinf_ims-sde_prod-slv1729p.sde\SFFN.MN95_SFFN_TPR_ARR_FORESTIER" E:\DATA\Telechargement_libre\sffn\SFFN_TPR_ARR_FORESTIER\MN95_SFFN_TPR_ARR_FORESTIER.shp # 0 0 020160913080425002016091308042500Microsoft Windows 7 Version 6.1 (Build 7601) Service Pack 1; Esri ArcGIS 10.1.1.3143MN95_SFFN_TPR_ARR_FORESTIERFichier de formes0.000datasetEPSG7.11.20SimpleFALSE0FALSEFALSEMN95_SFFN_TPR_ARR_FORESTIERFeature Class0FIDFIDOID400Internal feature number.EsriSequential unique whole numbers that are automatically generated.ShapeShapeGeometry000Feature geometry.EsriCoordinates defining the features.NO_ARR_FORNO_ARR_FORSmallInteger440NOM_ARR_FONOM_ARR_FOString5000SHAPE_AREASHAPE_AREADouble1900Area of feature in internal units squared.EsriPositive real numbers that are automatically generated.SHAPE_LENSHAPE_LENDouble190020160913 diff --git a/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.shx b/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.shx deleted file mode 100644 index 7c379068..00000000 Binary files a/fme/SHP/MN95_SFFN_TPR_ARR_FORESTIER.shx and /dev/null differ diff 
--git a/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.dbf b/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.dbf deleted file mode 100644 index 8d99dc5f..00000000 Binary files a/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.dbf and /dev/null differ diff --git a/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.prj b/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.prj deleted file mode 100644 index 2bcfe09d..00000000 --- a/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.prj +++ /dev/null @@ -1 +0,0 @@ -PROJCS["CH1903+_LV95",GEOGCS["GCS_CH1903+",DATUM["D_CH1903+",SPHEROID["Bessel_1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Hotine_Oblique_Mercator_Azimuth_Center"],PARAMETER["False_Easting",2600000.0],PARAMETER["False_Northing",1200000.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Azimuth",90.0],PARAMETER["Longitude_Of_Center",7.439583333333333],PARAMETER["Latitude_Of_Center",46.95240555555556],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.sbn b/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.sbn deleted file mode 100644 index bdb4be0d..00000000 Binary files a/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.sbn and /dev/null differ diff --git a/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.sbx b/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.sbx deleted file mode 100644 index cd6f5d5d..00000000 Binary files a/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.sbx and /dev/null differ diff --git a/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.shp b/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.shp deleted file mode 100644 index 2556b31f..00000000 Binary files a/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.shp and /dev/null differ diff --git a/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.shp.xml b/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.shp.xml deleted file mode 100644 index 62d31ca0..00000000 --- a/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.shp.xml +++ /dev/null @@ -1,2 +0,0 @@ - -20160926155417001.0ISO 19139 Metadata Implementation SpecificationFALSEMN95_SFFN_TPR_CIRC_FAUNE0020.000file://\\T918044\E$\DATA\Telechargement_libre\sffn\SFFN_TPR_CIRC_FAUNE\MN95_SFFN_TPR_CIRC_FAUNE.shpLocal Area NetworkProjectedGCS_CH1903+Linear Unit: Meter (1.000000)CH1903+_LV95<ProjectedCoordinateSystem xsi:type='typens:ProjectedCoordinateSystem' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xmlns:xs='http://www.w3.org/2001/XMLSchema' xmlns:typens='http://www.esri.com/schemas/ArcGIS/10.1'><WKT>PROJCS[&quot;CH1903+_LV95&quot;,GEOGCS[&quot;GCS_CH1903+&quot;,DATUM[&quot;D_CH1903+&quot;,SPHEROID[&quot;Bessel_1841&quot;,6377397.155,299.1528128]],PRIMEM[&quot;Greenwich&quot;,0.0],UNIT[&quot;Degree&quot;,0.0174532925199433]],PROJECTION[&quot;Hotine_Oblique_Mercator_Azimuth_Center&quot;],PARAMETER[&quot;False_Easting&quot;,2600000.0],PARAMETER[&quot;False_Northing&quot;,1200000.0],PARAMETER[&quot;Scale_Factor&quot;,1.0],PARAMETER[&quot;Azimuth&quot;,90.0],PARAMETER[&quot;Longitude_Of_Center&quot;,7.439583333333333],PARAMETER[&quot;Latitude_Of_Center&quot;,46.95240555555556],UNIT[&quot;Meter&quot;,1.0],AUTHORITY[&quot;EPSG&quot;,2056]]</WKT><XOrigin>-27386400</XOrigin><YOrigin>-32067900</YOrigin><XYScale>140996569.55187955</XYScale><ZOrigin>-100000</ZOrigin><ZScale>10000</ZScale><MOrigin>-100000</MOrigin><MScale>10000</MScale><XYTolerance>0.001</XYTolerance><ZTolerance>0.001</ZTolerance><MTolerance>0.001</MTolerance><HighPrecision>true</HighPrecision><WKID>2056</WKID><LatestWKID>2056</LatestWKID></ProjectedCoordinateSystem>CopyFeatures "Connexions aux bases de données\Connexion sffn-sde_prod-slv1729p.sde\SFFN.MN95_SFFN_TPR_CIRC_FAUNE" E:\DATA\Telechargement_libre\sffn\SFFN_TPR_CIRC_FAUNE\MN95_SFFN_TPR_CIRC_FAUNE.shp # 0 0 
020160926155417002016092615541700Microsoft Windows 7 Version 6.1 (Build 7601) Service Pack 1; Esri ArcGIS 10.1.1.3143MN95_SFFN_TPR_CIRC_FAUNEFichier de formes0.000datasetEPSG7.11.20SimpleFALSE0FALSEFALSEMN95_SFFN_TPR_CIRC_FAUNEFeature Class0FIDFIDOID400Internal feature number.EsriSequential unique whole numbers that are automatically generated.ShapeShapeGeometry000Feature geometry.EsriCoordinates defining the features.NO_CIRC_FANO_CIRC_FASmallInteger440NOM_CIRC_FNOM_CIRC_FString5000SHAPE_AREASHAPE_AREADouble1900Area of feature in internal units squared.EsriPositive real numbers that are automatically generated.SHAPE_LENSHAPE_LENDouble190020160926 diff --git a/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.shx b/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.shx deleted file mode 100644 index 3ce05fc8..00000000 Binary files a/fme/SHP/MN95_SFFN_TPR_CIRC_FAUNE.shx and /dev/null differ diff --git a/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.dbf b/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.dbf deleted file mode 100644 index 4ba2fca8..00000000 Binary files a/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.dbf and /dev/null differ diff --git a/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.prj b/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.prj deleted file mode 100644 index 2bcfe09d..00000000 --- a/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.prj +++ /dev/null @@ -1 +0,0 @@ -PROJCS["CH1903+_LV95",GEOGCS["GCS_CH1903+",DATUM["D_CH1903+",SPHEROID["Bessel_1841",6377397.155,299.1528128]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Hotine_Oblique_Mercator_Azimuth_Center"],PARAMETER["False_Easting",2600000.0],PARAMETER["False_Northing",1200000.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Azimuth",90.0],PARAMETER["Longitude_Of_Center",7.439583333333333],PARAMETER["Latitude_Of_Center",46.95240555555556],UNIT["Meter",1.0]] \ No newline at end of file diff --git a/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.sbn b/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.sbn deleted file mode 100644 index cad2d6c2..00000000 Binary files a/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.sbn and /dev/null differ diff --git a/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.sbx b/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.sbx deleted file mode 100644 index cd6f5d5d..00000000 Binary files a/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.sbx and /dev/null differ diff --git a/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.shp b/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.shp deleted file mode 100644 index 5dd3628f..00000000 Binary files a/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.shp and /dev/null differ diff --git a/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.shp.xml b/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.shp.xml deleted file mode 100644 index 60b625e4..00000000 --- a/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.shp.xml +++ /dev/null @@ -1,2 +0,0 @@ - -20160926155459001.0ISO 19139 Metadata Implementation SpecificationFALSEMN95_SFFN_TPR_CIRC_PECHE0020.000file://\\T918044\E$\DATA\Telechargement_libre\sffn\SFFN_TPR_CIRC_PECHE\MN95_SFFN_TPR_CIRC_PECHE.shpLocal Area NetworkProjectedGCS_CH1903+Linear Unit: Meter (1.000000)CH1903+_LV95<ProjectedCoordinateSystem xsi:type='typens:ProjectedCoordinateSystem' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xmlns:xs='http://www.w3.org/2001/XMLSchema' 
xmlns:typens='http://www.esri.com/schemas/ArcGIS/10.1'><WKT>PROJCS[&quot;CH1903+_LV95&quot;,GEOGCS[&quot;GCS_CH1903+&quot;,DATUM[&quot;D_CH1903+&quot;,SPHEROID[&quot;Bessel_1841&quot;,6377397.155,299.1528128]],PRIMEM[&quot;Greenwich&quot;,0.0],UNIT[&quot;Degree&quot;,0.0174532925199433]],PROJECTION[&quot;Hotine_Oblique_Mercator_Azimuth_Center&quot;],PARAMETER[&quot;False_Easting&quot;,2600000.0],PARAMETER[&quot;False_Northing&quot;,1200000.0],PARAMETER[&quot;Scale_Factor&quot;,1.0],PARAMETER[&quot;Azimuth&quot;,90.0],PARAMETER[&quot;Longitude_Of_Center&quot;,7.439583333333333],PARAMETER[&quot;Latitude_Of_Center&quot;,46.95240555555556],UNIT[&quot;Meter&quot;,1.0],AUTHORITY[&quot;EPSG&quot;,2056]]</WKT><XOrigin>-27386400</XOrigin><YOrigin>-32067900</YOrigin><XYScale>140996569.55187955</XYScale><ZOrigin>-100000</ZOrigin><ZScale>10000</ZScale><MOrigin>-100000</MOrigin><MScale>10000</MScale><XYTolerance>0.001</XYTolerance><ZTolerance>0.001</ZTolerance><MTolerance>0.001</MTolerance><HighPrecision>true</HighPrecision><WKID>2056</WKID><LatestWKID>2056</LatestWKID></ProjectedCoordinateSystem>CopyFeatures "Connexions aux bases de données\Connexion sffn-sde_prod-slv1729p.sde\SFFN.MN95_SFFN_TPR_CIRC_PECHE" E:\DATA\Telechargement_libre\sffn\SFFN_TPR_CIRC_PECHE\MN95_SFFN_TPR_CIRC_PECHE.shp # 0 0 020160926155459002016092615545900Microsoft Windows 7 Version 6.1 (Build 7601) Service Pack 1; Esri ArcGIS 10.1.1.3143MN95_SFFN_TPR_CIRC_PECHEFichier de formes0.000datasetEPSG7.11.20SimpleFALSE0FALSEFALSEMN95_SFFN_TPR_CIRC_PECHEFeature Class0FIDFIDOID400Internal feature number.EsriSequential unique whole numbers that are automatically generated.ShapeShapeGeometry000Feature geometry.EsriCoordinates defining the features.NO_CIRC_PENO_CIRC_PESmallInteger440NOM_CIRC_PNOM_CIRC_PString5000SHAPE_AREASHAPE_AREADouble1900Area of feature in internal units squared.EsriPositive real numbers that are automatically generated.SHAPE_LENSHAPE_LENDouble190020160926 diff --git a/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.shx b/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.shx deleted file mode 100644 index d1cd6ea5..00000000 Binary files a/fme/SHP/MN95_SFFN_TPR_CIRC_PECHE.shx and /dev/null differ diff --git a/fme/SHP/liste_input.txt b/fme/SHP/liste_input.txt deleted file mode 100644 index 1ebf2fb1..00000000 --- a/fme/SHP/liste_input.txt +++ /dev/null @@ -1,8 +0,0 @@ -https://www.asitvd.ch/chercher/catalogue.html?view=sheet&guid=569&catalog=main&type=complete&preview=search_list - Arrondissements des routes cantonales -https://www.asitvd.ch/chercher/catalogue.html?view=sheet&guid=336&catalog=main&type=complete&preview=search_list - Arrondissements forestiers territoriaux -https://www.asitvd.ch/chercher/catalogue.html?view=sheet&guid=36&catalog=main&type=complete&preview=search_list - Circonscriptions de faune -https://www.asitvd.ch/chercher/catalogue.html?view=sheet&guid=393&catalog=main&type=complete&preview=search_list - Circonscriptions de pêche -https://www.asitvd.ch/chercher/catalogue.html?view=sheet&guid=128ba8b7-1b53-d804-2904-009bc942942e&catalog=main&type=complete&preview=search_list - Institutions d'accueil de jour des enfants - - -+ https://www.asitvd.ch/chercher/catalogue.html?view=sheet&guid=196&catalog=main&type=complete&preview=search_list - Limites administratives du Canton \ No newline at end of file diff --git a/fme/SHP/products.txt b/fme/SHP/products.txt deleted file mode 100644 index a67d593e..00000000 --- a/fme/SHP/products.txt +++ /dev/null @@ -1,7 +0,0 @@ -guid;fc -569;DGMR_TPR_RC_ARRDT -336;SFFN_TPR_ARR_FORESTIER 
-36;SFFN_TPR_CIRC_FAUNE -393;SFFN_TPR_CIRC_PECHE -128ba8b7-1b53-d804-2904-009bc942942e;OAJE_TPR_INST_ACC -128ba8b7-1b53-d804-2904-009bc942942e;OAJE_TPR_RESEAU_ACC \ No newline at end of file diff --git a/fme/WKT/WKT_MN95.txt b/fme/WKT/WKT_MN95.txt deleted file mode 100644 index 47f7d78d..00000000 --- a/fme/WKT/WKT_MN95.txt +++ /dev/null @@ -1,3 +0,0 @@ -POLYGON((2494305.462 1115147.198,2494305.462 1203315.792,2585466.093 1203315.792,2585466.093 1115147.198)) - Emprise rectangulaire Canton de Vaud -POLYGON((2494305.462 1127836.641,2494305.462 1155695.119,2518659.715 1155695.119,2518659.715 1127836.641)) - Emprise rectangulaire District de Nyon -POLYGON((2534436.993 1150670.462,2534436.993 1161550.702,2544978.907 1161550.702,2544978.907 1150670.462)) - Emprise rectangulaire Commune de Lausanne \ No newline at end of file diff --git a/fme/WKT/canton_Vaud_wkt_WGS84.txt b/fme/WKT/canton_Vaud_wkt_WGS84.txt deleted file mode 100644 index a9761e9b..00000000 --- a/fme/WKT/canton_Vaud_wkt_WGS84.txt +++ /dev/null @@ -1 +0,0 @@ -POLYGON((6.06384544077808 46.1870571742486,6.06384544077808 46.9801999077985,7.24923786069061 46.9801999077985,7.24923786069061 46.1870571742486)) diff --git a/fme/WKT/commune_Lausanne_wkt_WGS84.txt b/fme/WKT/commune_Lausanne_wkt_WGS84.txt deleted file mode 100644 index 12f348c3..00000000 --- a/fme/WKT/commune_Lausanne_wkt_WGS84.txt +++ /dev/null @@ -1 +0,0 @@ -POLYGON((6.5838791192197 46.5044344288519,6.5838791192197 46.6025470130881,6.72082104901253 46.6025470130881,6.72082104901253 46.5044344288519)) diff --git a/fme/WKT/district_Nyon_wkt_WGS84.txt b/fme/WKT/district_Nyon_wkt_WGS84.txt deleted file mode 100644 index 5cd1f505..00000000 --- a/fme/WKT/district_Nyon_wkt_WGS84.txt +++ /dev/null @@ -1 +0,0 @@ -POLYGON((6.06384544077808 46.2948146242382,6.06384544077808 46.5452999981282,6.37970692618508 46.5452999981282,6.37970692618508 46.2948146242382)) diff --git a/fme/WKT/geodatabase_file2textline.fmw b/fme/WKT/geodatabase_file2textline.fmw deleted file mode 100644 index 2379523d..00000000 --- a/fme/WKT/geodatabase_file2textline.fmw +++ /dev/null @@ -1,1591 +0,0 @@ -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! 
-#! -#! -#! -#! -FME_PYTHON_VERSION 27 -GUI IGNORE SourceDataset_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_WHERE_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_TRANSLATE_SPATIAL_DATA_ONLY_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_RESOLVE_DOMAINS_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_RESOLVE_SUBTYPE_NAMES_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_IGNORE_NETWORK_INFO_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_IGNORE_RELATIONSHIP_INFO_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SPLIT_COMPLEX_EDGES_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SPLIT_MULTI_PART_ANNOS_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_EXPOSE_ATTRS_GROUP_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINX_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINY_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXX_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXY_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_CLIP_TO_ENVELOPE_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_GEODB_SHARED_RDR_ADV_PARM_GROUP_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SPLIT_COMPLEX_ANNOS_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_CACHE_MULTIPATCH_TEXTURES_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_FEATURE_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_ORDER_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_METHOD_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_CHECK_SIMPLE_GEOM_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_MERGE_FEAT_LINKED_ANNOS_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_BEGIN_SQL{0}_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_END_SQL{0}_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_NETWORK_AUTHENTICATION_GEODATABASE_FILE_1,_GEODB_IN_SEARCH_ENVELOPE_COORDINATE_SYSTEM_GEODATABASE_FILE_1,_GEODB_IN_ALIAS_MODE_GEODATABASE_FILE_1,DestDataset_TEXTLINE_1,TEXTLINE_OUT_OVERWRITE_FILE_TEXTLINE_1,TEXTLINE_OUT_END_OF_LINE_TEXTLINE_1,TEXTLINE_OUT_WRITE_LAST_EOL_TEXTLINE_1,TEXTLINE_OUT_ENCODING_TEXTLINE_1,TEXTLINE_OUT_WRITE_UTF8_BOM_TEXTLINE_1,TEXTLINE_OUT_MIME_TYPE_TEXTLINE_1 -DEFAULT_MACRO SourceDataset_GEODATABASE_FILE D:\Mandats\ASIT-VD\GDB\ASIT-VD.gdb -GUI SOURCE_GEODATABASE SourceDataset_GEODATABASE_FILE Source Esri File Geodatabase: -DEFAULT_MACRO DestDataset_TEXTLINE D:\Mandats\ASIT-VD\WKT\commune_Lausanne_wkt.txt -GUI FILENAME DestDataset_TEXTLINE Text/Compressed_Text_Files(*.txt;*.txt.gz)|*.txt;*.txt.gz|Text_Files(*.txt)|*.txt|Compressed_Text_Files(*.txt.gz)|*.txt.gz|All_files(*)|* Destination Text File: -DEFAULT_MACRO WKT POLYGON((2494305.462 1115147.198,2494305.462 1203315.792,2585466.093 1203315.792,2585466.093 1115147.198)) -GUI TEXT WKT WKT: -INCLUDE [ if {{$(SourceDataset_GEODATABASE_FILE)} == {}} { puts_real {Parameter 'SourceDataset_GEODATABASE_FILE' must be given a value.}; exit 1; }; ] -INCLUDE [ if {{$(DestDataset_TEXTLINE)} == {}} { puts_real {Parameter 'DestDataset_TEXTLINE' must be given a value.}; exit 1; }; ] -INCLUDE [ if {{$(WKT)} == {}} { puts_real {Parameter 'WKT' must be given a value.}; exit 1; }; ] -#! START_HEADER -#! 
START_WB_HEADER -READER_TYPE GEODATABASE_FILE -READER_KEYWORD GEODATABASE_FILE_1 -READER_GEN_DIRECTIVES ALIAS_MODE,NONE,SEARCH_ENVELOPE_MINX,0,SPLIT_COMPLEX_ANNOS,no,RESOLVE_DOMAINS,no,CACHE_MULTIPATCH_TEXTURES,yes,SEARCH_ORDER,SPATIAL_FIRST,END_SQL0,,IGNORE_NETWORK_INFO,yes,WHERE,,SEARCH_FEATURE,,FME_CONNECTION_GROUP,,GEODB_SHARED_RDR_ADV_PARM_GROUP,,IGNORE_RELATIONSHIP_INFO,yes,EXPOSE_ATTRS_GROUP,,FEATURE_READ_MODE,Features,SEARCH_ENVELOPE_MAXX,0,CLIP_TO_ENVELOPE,NO,MERGE_FEAT_LINKED_ANNOS,no,SPLIT_COMPLEX_EDGES,no,SPLIT_MULTI_PART_ANNOS,no,BEGIN_SQL0,,SEARCH_ENVELOPE_MINY,0,QUERY_FEATURE_TYPES_FOR_MERGE_FILTERS,Yes,CHECK_SIMPLE_GEOM,no,RESOLVE_SUBTYPE_NAMES,yes,TABLELIST,LAD_TPR_CANTONLAD_TPR_COMMUNELAD_TPR_DISTRICT,SEARCH_METHOD,GEODB_INTERSECTS,GEODATABASE_FILE_EXPOSE_FORMAT_ATTRS,,TRANSLATE_SPATIAL_DATA_ONLY,no,SEARCH_ENVELOPE_MAXY,0,USE_SEARCH_ENVELOPE,NO,_MERGE_SCHEMAS,YES - -WRITER_TYPE MULTI_WRITER -MULTI_WRITER_DATASET_ORDER BY_ID -MULTI_WRITER_FIRST_WRITER_ID 0 -MULTI_WRITER_TYPE{0} TEXTLINE -MULTI_WRITER_KEYWORD{0} TEXTLINE_1 -#! END_WB_HEADER - -#! START_WB_HEADER -MACRO WB_KEYWORD "GEODATABASE_FILE_1" -#! END_WB_HEADER -#! START_SOURCE_HEADER GEODATABASE_FILE GEODATABASE_FILE_1 -# ============================================================================ -# The following GUI line prompts for a file to be used as the source. -# The user input is stored in a macro, which is then used to define -# the dataset to be read. -# The dataset this mapping file was generated from was: -#! END_SOURCE_HEADER -#! START_WB_HEADER -DEFAULT_MACRO SourceDataset -INCLUDE [ if {{$(SourceDataset)} != ""} { \ - puts {DEFAULT_MACRO SourceDataset_GEODATABASE_FILE_1 $(SourceDataset)} \ - } ] -#! END_WB_HEADER -#! START_SOURCE_HEADER GEODATABASE_FILE GEODATABASE_FILE_1 -DEFAULT_MACRO SourceDataset_GEODATABASE_FILE_1 $(SourceDataset_GEODATABASE_FILE) -GUI SOURCE_GEODATABASE SourceDataset_GEODATABASE_FILE_1 Source Esri File Geodatabase: -# Translation flags. -# -DEFAULT_MACRO GEODATABASE_FILE_IN_QUERY_FEATURE_TYPES_FOR_MERGE_FILTERS_GEODATABASE_FILE_1 Yes -GEODATABASE_FILE_1_QUERY_FEATURE_TYPES_FOR_MERGE_FILTERS "$(GEODATABASE_FILE_IN_QUERY_FEATURE_TYPES_FOR_MERGE_FILTERS_GEODATABASE_FILE_1)" -# ======================================================================== -# The following defines the global where clause to be used when retrieving -# features from the Geodatabase. 
-DEFAULT_MACRO GEODATABASE_FILE_IN_WHERE_GEODATABASE_FILE_1 -GEODATABASE_FILE_1_WHERE "$(GEODATABASE_FILE_IN_WHERE_GEODATABASE_FILE_1)" -GUI OPTIONAL TEXT GEODATABASE_FILE_IN_WHERE_GEODATABASE_FILE_1 WHERE Clause: -#============================================================================== -# Determines whether non-spatial data, such as tables, subtypes, domains, -# and relationship classes, get translated -DEFAULT_MACRO GEODATABASE_FILE_IN_TRANSLATE_SPATIAL_DATA_ONLY_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_TRANSLATE_SPATIAL_DATA_ONLY "$(GEODATABASE_FILE_IN_TRANSLATE_SPATIAL_DATA_ONLY_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_TRANSLATE_SPATIAL_DATA_ONLY_GEODATABASE_FILE_1 yes%no Spatial Data Only -#============================================================================== -# Determines whether to resolve domains -DEFAULT_MACRO GEODATABASE_FILE_IN_RESOLVE_DOMAINS_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_RESOLVE_DOMAINS "$(GEODATABASE_FILE_IN_RESOLVE_DOMAINS_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_RESOLVE_DOMAINS_GEODATABASE_FILE_1 yes%no Resolve Domains -#============================================================================== -# Determines whether to resolve the subtype associated with a feature to the -# text description linked to the subtype value. The text description will -# be added as an attribute on the feature. -DEFAULT_MACRO GEODATABASE_FILE_IN_RESOLVE_SUBTYPE_NAMES_GEODATABASE_FILE_1 yes -GEODATABASE_FILE_1_RESOLVE_SUBTYPE_NAMES "$(GEODATABASE_FILE_IN_RESOLVE_SUBTYPE_NAMES_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_RESOLVE_SUBTYPE_NAMES_GEODATABASE_FILE_1 yes%no Resolve Subtypes -#============================================================================== -# Determines whether to ignore the network info associated with a feature -# coming from a network feature class. When set to 'yes', junctions will be -# treated as point features, and edges will be treated as polyline features, -# with the geodb_type being set to geodb_point and geodb_polyline, respectively. -# Caution should be taken when changing the value for this keyword to a different -# value than was specified when creating the mapping file/workspace. -DEFAULT_MACRO GEODATABASE_FILE_IN_IGNORE_NETWORK_INFO_GEODATABASE_FILE_1 yes -GEODATABASE_FILE_1_IGNORE_NETWORK_INFO "$(GEODATABASE_FILE_IN_IGNORE_NETWORK_INFO_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_IGNORE_NETWORK_INFO_GEODATABASE_FILE_1 yes%no Ignore Network Info -# ======================================================================== -# Determines whether to ignore the relationship info associated with a feature -# coming from a feature class containing relationships. When set to 'yes', simple -# relationships will not be read, and attributed relationships will be read as -# tables. Caution should be taken when changing the value for this keyword to a -# different value than was specified when creating the mapping file/workspace. -DEFAULT_MACRO GEODATABASE_FILE_IN_IGNORE_RELATIONSHIP_INFO_GEODATABASE_FILE_1 yes -GEODATABASE_FILE_1_IGNORE_RELATIONSHIP_INFO "$(GEODATABASE_FILE_IN_IGNORE_RELATIONSHIP_INFO_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_IGNORE_RELATIONSHIP_INFO_GEODATABASE_FILE_1 yes%no Ignore Relationship Info -#============================================================================== -# Determines whether complex edge features should be split up and each edge -# element read as a separate feature. 
The default behaviour is to treat an -# entire edge *feature* as an FME feature, rather than an edge *element* as an -# FME feature. Returning edge elements ensures that all network connectivity -# information on complex edges is read. -DEFAULT_MACRO GEODATABASE_FILE_IN_SPLIT_COMPLEX_EDGES_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_SPLIT_COMPLEX_EDGES "$(GEODATABASE_FILE_IN_SPLIT_COMPLEX_EDGES_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_SPLIT_COMPLEX_EDGES_GEODATABASE_FILE_1 yes%no Split Complex Edges -#============================================================================== -# Determines whether annotation features should be split up and each part -# element read as a separate feature. The default behaviour is to treat an -# entire annotation *feature* as an FME feature, rather than an annotation -# *element* as an FME feature. Returning edge elements ensures that all -# rotation and positional information for each element is preserved. -DEFAULT_MACRO GEODATABASE_FILE_IN_SPLIT_MULTI_PART_ANNOS_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_SPLIT_MULTI_PART_ANNOS "$(GEODATABASE_FILE_IN_SPLIT_MULTI_PART_ANNOS_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_SPLIT_MULTI_PART_ANNOS_GEODATABASE_FILE_1 yes%no Split Multi-Part Annotations -# ======================================================================== -# The following determines whether to read features from the geodatabase -# tables/feature classes or to read metadata from those tables/feature -# classes. -DEFAULT_MACRO GEODATABASE_FILE_IN_FEATURE_READ_MODE_GEODATABASE_FILE_1 Features -GEODATABASE_FILE_1_FEATURE_READ_MODE "$(GEODATABASE_FILE_IN_FEATURE_READ_MODE_GEODATABASE_FILE_1)" -DEFAULT_MACRO GEODATABASE_FILE_IN_EXPOSE_ATTRS_GROUP_GEODATABASE_FILE_1 -GEODATABASE_FILE_1_EXPOSE_ATTRS_GROUP "$(GEODATABASE_FILE_IN_EXPOSE_ATTRS_GROUP_GEODATABASE_FILE_1)" -GUI DISCLOSUREGROUP GEODATABASE_FILE_IN_EXPOSE_ATTRS_GROUP_GEODATABASE_FILE_1 GEODATABASE_FILE_EXPOSE_FORMAT_ATTRS%ALIAS_MODE Schema Attributes -# Include this file in source setting section to add native search envelope processing -# Zero as a default means we don't do any search -- this makes workbench happier -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINX_GEODATABASE_FILE_1 0 -GEODATABASE_FILE_1_SEARCH_ENVELOPE "$(GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINX_GEODATABASE_FILE_1)" -GUI OPTIONAL FLOAT GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINX_GEODATABASE_FILE_1 Minimum X: -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINY_GEODATABASE_FILE_1 0 -GEODATABASE_FILE_1_SEARCH_ENVELOPE "$(GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINY_GEODATABASE_FILE_1)" -GUI OPTIONAL FLOAT GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINY_GEODATABASE_FILE_1 Minimum Y: -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXX_GEODATABASE_FILE_1 0 -GEODATABASE_FILE_1_SEARCH_ENVELOPE "$(GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXX_GEODATABASE_FILE_1)" -GUI OPTIONAL FLOAT GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXX_GEODATABASE_FILE_1 Maximum X: -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXY_GEODATABASE_FILE_1 0 -GEODATABASE_FILE_1_SEARCH_ENVELOPE "$(GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXY_GEODATABASE_FILE_1)" -GUI OPTIONAL FLOAT 
GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXY_GEODATABASE_FILE_1 Maximum Y: -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_CLIP_TO_ENVELOPE_GEODATABASE_FILE_1 NO -GEODATABASE_FILE_1_CLIP_TO_ENVELOPE "$(GEODATABASE_FILE_IN_CLIP_TO_ENVELOPE_GEODATABASE_FILE_1)" -GUI OPTIONAL CHECKBOX GEODATABASE_FILE_IN_CLIP_TO_ENVELOPE_GEODATABASE_FILE_1 YES%NO Clip to Search Envelope -GUI DISCLOSUREGROUP GEODATABASE_FILE_IN_GEODB_SHARED_RDR_ADV_PARM_GROUP_GEODATABASE_FILE_1 SPLIT_COMPLEX_ANNOS%CACHE_MULTIPATCH_TEXTURES%SEARCH_FEATURE%SEARCH_ORDER%SEARCH_METHOD%CHECK_SIMPLE_GEOM%MERGE_FEAT_LINKED_ANNOS%BEGIN_SQL{0}%END_SQL{0} Advanced -#============================================================================== -# The following specifies that complex representations for annotation should -# be split into simpler representations. -DEFAULT_MACRO GEODATABASE_FILE_IN_SPLIT_COMPLEX_ANNOS_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_SPLIT_COMPLEX_ANNOS "$(GEODATABASE_FILE_IN_SPLIT_COMPLEX_ANNOS_GEODATABASE_FILE_1)" -GUI CHOICE GEODATABASE_FILE_IN_SPLIT_COMPLEX_ANNOS_GEODATABASE_FILE_1 yes%no Split Complex Annotations: -#============================================================================== -# The following specifies whether to keep (YES) or clean up (NO) Esri's -# multipatch texture caches. -DEFAULT_MACRO GEODATABASE_FILE_IN_CACHE_MULTIPATCH_TEXTURES_GEODATABASE_FILE_1 yes -GEODATABASE_FILE_1_CACHE_MULTIPATCH_TEXTURES "$(GEODATABASE_FILE_IN_CACHE_MULTIPATCH_TEXTURES_GEODATABASE_FILE_1)" -GUI CHOICE GEODATABASE_FILE_IN_CACHE_MULTIPATCH_TEXTURES_GEODATABASE_FILE_1 yes%no Cache Multipatch Textures: -# ======================================================================== -# The following defines the search feature for the query. It defines an -# arbitrarily complex search feature as a spatial constraint. It is in the -# form _SEARCH_FEATURE [ ]+. -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_FEATURE_GEODATABASE_FILE_1 -GEODATABASE_FILE_1_SEARCH_FEATURE "$(GEODATABASE_FILE_IN_SEARCH_FEATURE_GEODATABASE_FILE_1)" -GUI OPTIONAL TEXT GEODATABASE_FILE_IN_SEARCH_FEATURE_GEODATABASE_FILE_1 Search Feature: -# ======================================================================== -# Determines whether the spatial component or the attribute component of a -# query is performed first. This is only applicable when both spatial and -# non-spatial searches are being performed. -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_ORDER_GEODATABASE_FILE_1 SPATIAL_FIRST -GEODATABASE_FILE_1_SEARCH_ORDER "$(GEODATABASE_FILE_IN_SEARCH_ORDER_GEODATABASE_FILE_1)" -GUI OPTIONAL CHOICE GEODATABASE_FILE_IN_SEARCH_ORDER_GEODATABASE_FILE_1 SPATIAL_FIRST%ATTRIBUTE_FIRST Search Order: -# ======================================================================== -# Determines which type of search method to use. This keyword is only applicable -# when either the keyword SEARCH_ENVELOPE or the keyword SEARCH_FEATURE is specified. -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_METHOD_GEODATABASE_FILE_1 GEODB_INTERSECTS -GEODATABASE_FILE_1_SEARCH_METHOD "$(GEODATABASE_FILE_IN_SEARCH_METHOD_GEODATABASE_FILE_1)" -GUI OPTIONAL CHOICE GEODATABASE_FILE_IN_SEARCH_METHOD_GEODATABASE_FILE_1 GEODB_INTERSECTS%GEODB_ENVELOPE_INTERSECTS%GEODB_TOUCHES%GEODB_OVERLAPS%GEODB_CROSSES%GEODB_WITHIN%GEODB_CONTAINS Search Method -# ======================================================================== -# The following specifies that simple geometry should be checked for -# when reading features. 
-DEFAULT_MACRO GEODATABASE_FILE_IN_CHECK_SIMPLE_GEOM_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_CHECK_SIMPLE_GEOM "$(GEODATABASE_FILE_IN_CHECK_SIMPLE_GEOM_GEODATABASE_FILE_1)" -GUI CHOICE GEODATABASE_FILE_IN_CHECK_SIMPLE_GEOM_GEODATABASE_FILE_1 yes%no Check for Simple Geometry: -# ======================================================================== -# The following specifies whether feature-linked annotations should -# be merged onto the main feature as a text list attribute when reading. -DEFAULT_MACRO GEODATABASE_FILE_IN_MERGE_FEAT_LINKED_ANNOS_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_MERGE_FEAT_LINKED_ANNOS "$(GEODATABASE_FILE_IN_MERGE_FEAT_LINKED_ANNOS_GEODATABASE_FILE_1)" -GUI CHOICE GEODATABASE_FILE_IN_MERGE_FEAT_LINKED_ANNOS_GEODATABASE_FILE_1 yes%no Merge Feature Linked Annotations: -# ============================================================================== -# The following specifies an SQL command to execute before opening the first -# Geodatabase table. -DEFAULT_MACRO GEODATABASE_FILE_IN_BEGIN_SQL{0}_GEODATABASE_FILE_1 -GEODATABASE_FILE_1_BEGIN_SQL{0} "$(GEODATABASE_FILE_IN_BEGIN_SQL{0}_GEODATABASE_FILE_1)" -GUI OPTIONAL TEXT_EDIT_SQL GEODATABASE_FILE_IN_BEGIN_SQL{0}_GEODATABASE_FILE_1 SQL Statement To Execute Before Translation: -# ============================================================================== -# The following specifies an SQL command to execute after closing all the -# Geodatabase tables. -DEFAULT_MACRO GEODATABASE_FILE_IN_END_SQL{0}_GEODATABASE_FILE_1 -GEODATABASE_FILE_1_END_SQL{0} "$(GEODATABASE_FILE_IN_END_SQL{0}_GEODATABASE_FILE_1)" -GUI OPTIONAL TEXT_EDIT_SQL GEODATABASE_FILE_IN_END_SQL{0}_GEODATABASE_FILE_1 SQL Statement To Execute After Translation: -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_NETWORK_AUTHENTICATION_GEODATABASE_FILE_1 -GEODATABASE_FILE_1_NETWORK_AUTHENTICATION "$(GEODATABASE_FILE_IN_NETWORK_AUTHENTICATION_GEODATABASE_FILE_1)" -GUI OPTIONAL AUTHENTICATOR GEODATABASE_FILE_IN_NETWORK_AUTHENTICATION_GEODATABASE_FILE_1 CONTAINER%GROUP%CONTAINER_TITLE%"Network Authentication"%PROMPT_TYPE%NETWORK Network Authentication -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_ATTRIBUTE_READING_GEODATABASE_FILE_1 ALL -GEODATABASE_FILE_1_ATTRIBUTE_READING "$(GEODATABASE_FILE_IN_ATTRIBUTE_READING_GEODATABASE_FILE_1)" -# ============================================================================ -# Search Envelope Coordinate System -DEFAULT_MACRO _GEODB_IN_SEARCH_ENVELOPE_COORDINATE_SYSTEM_GEODATABASE_FILE_1 -GUI OPTIONAL COORDSYS _GEODB_IN_SEARCH_ENVELOPE_COORDINATE_SYSTEM_GEODATABASE_FILE_1 Search Envelope Coordinate System: -GEODATABASE_FILE_1_SEARCH_ENVELOPE_COORDINATE_SYSTEM "$(_GEODB_IN_SEARCH_ENVELOPE_COORDINATE_SYSTEM_GEODATABASE_FILE_1)" -DEFAULT_MACRO _GEODB_IN_ALIAS_MODE_GEODATABASE_FILE_1 NONE -GUI LOOKUP_CHOICE _GEODB_IN_ALIAS_MODE_GEODATABASE_FILE_1 None,NONE%ExposeAliasesasMetadataAttributesname_alias,ON_DATA_FEATURES Alias Mode: -GEODATABASE_FILE_1_ALIAS_MODE "$(_GEODB_IN_ALIAS_MODE_GEODATABASE_FILE_1)" -GEODATABASE_FILE_1_DATASET "$(SourceDataset_GEODATABASE_FILE_1)" -#! END_SOURCE_HEADER -#! START_WB_HEADER -MACRO WB_KEYWORD "TEXTLINE_1" -#! END_WB_HEADER -#! START_DEST_HEADER TEXTLINE TEXTLINE_1 -# ===================================================================== -# The following GUI line prompts for a folder to be used as the -# the destination for the TextLine files. 
The user input is stored in -# a macro, which is then used to define the dataset to be written. -#! END_DEST_HEADER -#! START_WB_HEADER -DEFAULT_MACRO DestDataset -INCLUDE [ if {"$(DestDataset)" != ""} { \ - puts {DEFAULT_MACRO DestDataset_TEXTLINE_1 $(DestDataset)} \ - } ] -#! END_WB_HEADER -#! START_DEST_HEADER TEXTLINE TEXTLINE_1 -DEFAULT_MACRO DestDataset_TEXTLINE_1 $(DestDataset_TEXTLINE) -GUI FILENAME DestDataset_TEXTLINE_1 Text/Compressed_Text_Files(*.txt;*.txt.gz)|*.txt;*.txt.gz|Text_Files(*.txt)|*.txt|Compressed_Text_Files(*.txt.gz)|*.txt.gz|All_files(*)|* Destination Text File: -# ===================================================================== -# The following GUI line prompts for the overwrite flag. The default is -# YES. Possible values are YES or NO. Any value other than NO or -# no is interpreted as YES. -DEFAULT_MACRO TEXTLINE_OUT_OVERWRITE_FILE_TEXTLINE_1 YES -TEXTLINE_1_OVERWRITE_FILE "$(TEXTLINE_OUT_OVERWRITE_FILE_TEXTLINE_1)" -GUI CHOICE TEXTLINE_OUT_OVERWRITE_FILE_TEXTLINE_1 YES%NO Overwrite Existing File: -# ===================================================================== -# Set the type of encoding for the output file -# ===================================================================== -# Set line termination character -DEFAULT_MACRO TEXTLINE_OUT_END_OF_LINE_TEXTLINE_1 System -TEXTLINE_1_END_OF_LINE "$(TEXTLINE_OUT_END_OF_LINE_TEXTLINE_1)" -GUI CHOICE TEXTLINE_OUT_END_OF_LINE_TEXTLINE_1 System%Windows%Unix%Macintosh%Source%None Line Termination: -# ===================================================================== -# Set whether to write the last line termination character -DEFAULT_MACRO TEXTLINE_OUT_WRITE_LAST_EOL_TEXTLINE_1 YES -TEXTLINE_1_WRITE_LAST_EOL "$(TEXTLINE_OUT_WRITE_LAST_EOL_TEXTLINE_1)" -GUI CHOICE TEXTLINE_OUT_WRITE_LAST_EOL_TEXTLINE_1 YES%NO Write Last Line Terminator: -DEFAULT_MACRO TEXTLINE_OUT_ENCODING_TEXTLINE_1 SYSTEM -TEXTLINE_1_ENCODING "$(TEXTLINE_OUT_ENCODING_TEXTLINE_1)" -GUI STRING_OR_ENCODING TEXTLINE_OUT_ENCODING_TEXTLINE_1 SYSTEM%* Character Encoding: -# ===================================================================== -# The following GUI line prompts for the overwrite flag. The default is -# YES. Possible values are YES or NO. Any value other than NO or -# no is interpreted as YES. -DEFAULT_MACRO TEXTLINE_OUT_WRITE_UTF8_BOM_TEXTLINE_1 YES -TEXTLINE_1_WRITE_UTF8_BOM "$(TEXTLINE_OUT_WRITE_UTF8_BOM_TEXTLINE_1)" -GUI CHOICE TEXTLINE_OUT_WRITE_UTF8_BOM_TEXTLINE_1 YES%NO Write UTF Byte Order Mark -# ===================================================================== -# Set the mime type. Note -- it is actually only used inside of workspaces -# as a FORMAT_PARAMETER which even then is parsed only by the FME Server -# Repository Manager -DEFAULT_MACRO TEXTLINE_OUT_MIME_TYPE_TEXTLINE_1 text/plain -TEXTLINE_1_MIME_TYPE "$(TEXTLINE_OUT_MIME_TYPE_TEXTLINE_1)" -GUI STRING_OR_CHOICE TEXTLINE_OUT_MIME_TYPE_TEXTLINE_1 application/json%application/octet-stream%application/xml%application/x-www-form-urlencoded%image/gif%image/jpeg%image/png%image/tiff%multipart/mixed%multipart/alternative%multipart/related%text/html%text/plain%text/xml MIME Type: -# ============================================================================ -# Opt in for destination dataset type vs format type validation -DEFAULT_MACRO TEXTLINE_OUT_DESTINATION_DATASETTYPE_VALIDATION_TEXTLINE_1 Yes -TEXTLINE_1_DESTINATION_DATASETTYPE_VALIDATION "$(TEXTLINE_OUT_DESTINATION_DATASETTYPE_VALIDATION_TEXTLINE_1)" -TEXTLINE_1_DATASET "$(DestDataset_TEXTLINE_1)" -#! 
END_DEST_HEADER -#! START_WB_HEADER -#! END_WB_HEADER - -#! END_HEADER - -LOG_FILENAME "$(FME_MF_DIR)geodatabase_file2textline.log" -LOG_APPEND NO -LOG_MAX_FEATURES 200 -LOG_MAX_RECORDED_FEATURES 200 -FME_REPROJECTION_ENGINE FME -FME_IMPLICIT_CSMAP_REPROJECTION_MODE Auto -FME_GEOMETRY_HANDLING Enhanced -FME_STROKE_MAX_DEVIATION 0 -LOG_FILTER_MASK -1 -DEFAULT_MACRO DATASET_KEYWORD_GEODATABASE_FILE_1 GEODATABASE_FILE_1 -DEFAULT_MACRO DATASET_KEYWORD_TEXTLINE_1 TEXTLINE_1 -# ------------------------------------------------------------------------- - -GEODATABASE_FILE_1_READER_META_ATTRIBUTES fme_feature_type - -# ------------------------------------------------------------------------- - -MULTI_READER_CONTINUE_ON_READER_FAILURE No - -# ------------------------------------------------------------------------- - -MACRO WORKSPACE_NAME geodatabase_file2textline -MACRO FME_VIEWER_APP fmedatainspector -# ------------------------------------------------------------------------- - -FACTORY_DEF * RoutingFactory FACTORY_NAME "Router and Unexpected Input Remover" COMMAND_PARM_EVALUATION SINGLE_PASS MULTI_READER_KEYWORD GEODATABASE_FILE_1 INPUT FEATURE_TYPE * ROUTE GEODATABASE_FILE GEODATABASE_FILE_1::LAD_TPR_COMMUNE TO FME_GENERIC ::LAD_TPR_COMMUNE ALIAS_GEOMETRY ROUTE GEODATABASE_FILE GEODATABASE_FILE_1::LAD_TPR_DISTRICT TO FME_GENERIC ::LAD_TPR_DISTRICT ALIAS_GEOMETRY ROUTE GEODATABASE_FILE GEODATABASE_FILE_1::LAD_TPR_CANTON TO FME_GENERIC ::LAD_TPR_CANTON ALIAS_GEOMETRY MERGE_INPUT Yes OUTPUT ROUTED FEATURE_TYPE * -GEODATABASE_FILE_1_MERGE_DEF GEODATABASE_FILE_1::LAD_TPR_COMMUNE EXACT LAD_TPR_COMMUNE DEFLINE geodb_type,geodb_polygon,OBJECTID,integer,NO_COM_FED,integer,NO_COM_CAN,integer,NOM_MAJ,char32,NOM_MIN,char32,N_A_MAJ_15,char16,N_A_MIN_15,char16,NO_DIS_CAN,integer,NO_DIS_FED,integer,NO_CANTON,integer,NO_SECTEUR,integer,NOM_MAJ_OF,char32,NOM_MIN_OF,char32,NOM_MAJ_15,char16,NOM_MIN_15,char16,PERIMETRE,double,SURFACE,double,Shape_Length,double,Shape_Area,double -GEODATABASE_FILE_1_MERGE_DEF GEODATABASE_FILE_1::LAD_TPR_DISTRICT EXACT LAD_TPR_DISTRICT DEFLINE geodb_type,geodb_polygon,OBJECTID,integer,NO_DIS_FED,integer,NO_DIS_CAN,integer,NOM_MAJ,char32,NOM_MIN,char32,CODE,char4,CODE_AUTO,char3,NO_CANTON,integer,NOM_COMPL_,char23,NOM_COMPL1,char23,PERIMETRE,double,SURFACE,double,Shape_Length,double,Shape_Area,double -GEODATABASE_FILE_1_MERGE_DEF GEODATABASE_FILE_1::LAD_TPR_CANTON EXACT LAD_TPR_CANTON DEFLINE geodb_type,geodb_polygon,OBJECTID,integer,SIGLE_CANT,char3,NO_CANTON,integer,NOM_LANG_O,char17,NOM_LANG_1,char17,NOM_FRANCA,char19,NOM_MAJ,char32,NOM_MIN,char32,NO_CHEF_LI,integer,PERIMETRE,double,SURFACE,double,Shape_Length,double,Shape_Area,double -# ------------------------------------------------------------------------- - -FACTORY_DEF * TeeFactory FACTORY_NAME "LAD_TPR_COMMUNE (GEODATABASE_FILE_1) Splitter" INPUT FEATURE_TYPE LAD_TPR_COMMUNE OUTPUT FEATURE_TYPE LAD_TPR_COMMUNE_GEODATABASE_FILE_1 -# ------------------------------------------------------------------------- - -FACTORY_DEF * TeeFactory FACTORY_NAME "LAD_TPR_DISTRICT (GEODATABASE_FILE_1) Splitter" INPUT FEATURE_TYPE LAD_TPR_DISTRICT OUTPUT FEATURE_TYPE LAD_TPR_DISTRICT_GEODATABASE_FILE_1 -# ------------------------------------------------------------------------- - -FACTORY_DEF * TeeFactory FACTORY_NAME "LAD_TPR_CANTON (GEODATABASE_FILE_1) Splitter" INPUT FEATURE_TYPE LAD_TPR_CANTON OUTPUT FEATURE_TYPE LAD_TPR_CANTON_GEODATABASE_FILE_1 -FACTORY_DEF * TeeFactory FACTORY_NAME "LAD_TPR_CANTON_GEODATABASE_FILE_1 Disabled Nuker -1 29" 
INPUT FEATURE_TYPE LAD_TPR_CANTON_GEODATABASE_FILE_1 -DEFAULT_MACRO WB_CURRENT_CONTEXT -# ------------------------------------------------------------------------- - -FACTORY_DEF * TestFactory FACTORY_NAME Tester_2 INPUT FEATURE_TYPE LAD_TPR_COMMUNE_GEODATABASE_FILE_1 TEST @EvaluateExpression(FDIV,STRING_ENCODED,ValueNOM_MAJ,Tester_2) = LAUSANNE ENCODED BOOLEAN_OPERATOR OR OUTPUT PASSED FEATURE_TYPE Tester_2_PASSED -# ------------------------------------------------------------------------- - -FACTORY_DEF * TestFactory FACTORY_NAME Tester INPUT FEATURE_TYPE LAD_TPR_DISTRICT_GEODATABASE_FILE_1 TEST @EvaluateExpression(FDIV,STRING_ENCODED,ValueNOM_MAJ,Tester) = NYON ENCODED BOOLEAN_OPERATOR OR OUTPUT PASSED FEATURE_TYPE Tester_PASSED -FACTORY_DEF * TeeFactory FACTORY_NAME "Tester_PASSED Disabled Nuker -1 37" INPUT FEATURE_TYPE Tester_PASSED -# ------------------------------------------------------------------------- - -INCLUDE [ set macroLine "MACRO AttributeKeeper_4_9910dbb9_9f37_41da_98e2_9a5e3a6e1f5b2_LIST_EXP "; foreach attr [split ""] { set attr [FME_DecodeText $attr]; set attr [regsub "{}$" $attr "{}.*"]; set attr [regsub -all "{}" $attr "\\{\[0-9\]+\\}"]; append macroLine ",^$attr$"; }; puts $macroLine; ] - -FACTORY_DEF * TeeFactory FACTORY_NAME AttributeKeeper_4 INPUT FEATURE_TYPE Tester_PASSED INPUT FEATURE_TYPE Tester_2_PASSED INPUT FEATURE_TYPE LAD_TPR_CANTON_GEODATABASE_FILE_1 OUTPUT FEATURE_TYPE AttributeKeeper_4_OUTPUT @KeepAttributes(fme_encoded,,fme_pcre_match,"^fme_$(AttributeKeeper_4_9910dbb9_9f37_41da_98e2_9a5e3a6e1f5b2_LIST_EXP)") - -# ------------------------------------------------------------------------- - -INCLUDE [ if { {NO} == {YES} } { puts {MACRO DO_TOPFER TOPFER_INDEX }; } else { puts {MACRO DO_TOPFER }; } ] -FACTORY_DEF * BoundingBoxFactory FACTORY_NAME BoundingBoxAccumulator INPUT FEATURE_TYPE AttributeKeeper_4_OUTPUT BOUNDING_BOX_TYPE GEOMETRIC ACCUMULATE_ATTRIBUTES One $(DO_TOPFER) OUTPUT BOUNDING_BOX FEATURE_TYPE BoundingBoxAccumulator_BOUNDING_BOX - -# ------------------------------------------------------------------------- - -FACTORY_DEF * TeeFactory FACTORY_NAME BoundsExtractor INPUT FEATURE_TYPE BoundingBoxAccumulator_BOUNDING_BOX OUTPUT FEATURE_TYPE BoundsExtractor_OUTPUT @Bounds(fme_encoded,_xmin,_xmax,_ymin,_ymax,,,GEOMETRIC) - -# ------------------------------------------------------------------------- - -FACTORY_DEF * AttrSetFactory FACTORY_NAME AttributeCreator INPUT FEATURE_TYPE BoundsExtractor_OUTPUT MULTI_FEATURE_MODE NO NULL_ATTR_MODE NO_OP ATTRSET_CREATE_DIRECTIVES _PROPAGATE_MISSING_FDIV ATTR_ACTION "" "text_line_data" "SET_TO" "POLYGONValue_xminValue_yminValue_xminValue_ymaxValue_xmaxValue_ymaxValue_xmaxValue_ymin" OUTPUT OUTPUT FEATURE_TYPE AttributeCreator_OUTPUT - - -# ------------------------------------------------------------------------- - -INCLUDE [ set macroLine "MACRO AttributeKeeper_2_37ea2c0b_19ef_4b44_ace3_15aa645751b12_LIST_EXP "; foreach attr [split ""] { set attr [FME_DecodeText $attr]; set attr [regsub "{}$" $attr "{}.*"]; set attr [regsub -all "{}" $attr "\\{\[0-9\]+\\}"]; append macroLine ",^$attr$"; }; puts $macroLine; ] - -FACTORY_DEF * TeeFactory FACTORY_NAME AttributeKeeper_2 INPUT FEATURE_TYPE AttributeCreator_OUTPUT OUTPUT FEATURE_TYPE AttributeKeeper_2_OUTPUT @KeepAttributes(fme_encoded,text_line_data,fme_pcre_match,"^fme_$(AttributeKeeper_2_37ea2c0b_19ef_4b44_ace3_15aa645751b12_LIST_EXP)") - -# ------------------------------------------------------------------------- - -FACTORY_DEF * RoutingFactory FACTORY_NAME 
"Destination Feature Type Routing Correlator" COMMAND_PARM_EVALUATION SINGLE_PASS INPUT FEATURE_TYPE * ROUTE FME_GENERIC AttributeKeeper_2_OUTPUT TO TEXTLINE __GO_TO_FINAL_OUTPUT_ROUTER__ multi_writer_id,0,SupplyAttributesENCODED__wb_out_feat_type__text_line GEOMETRY FEATURE_TYPE_ATTRIBUTE __wb_out_feat_type__ OUTPUT ROUTED FEATURE_TYPE * OUTPUT NOT_ROUTED FEATURE_TYPE __nuke_me__ @Tcl2("FME_StatMessage 818059 [FME_GetAttribute fme_template_feature_type] 818060 818061 fme_warn") -# ------------------------------------------------------------------------- - -FACTORY_DEF * TeeFactory FACTORY_NAME "Final Output Nuker" INPUT FEATURE_TYPE __nuke_me__ - diff --git a/fme/input_parameters_2.txt b/fme/input_parameters_2.txt deleted file mode 100644 index b18c362f..00000000 --- a/fme/input_parameters_2.txt +++ /dev/null @@ -1,25 +0,0 @@ -Exemples [Product] : - -569 -336 -36 -393 -a049fecb-30d9-9124-ed41-068b566a0855 - - - -Exemples [Perimeter] : - -POLYGON((5.976309680234988 46.20905354672258,5.956271564095702 46.94430720241732,7.578490777845252 46.95383189708638,7.576613659637764 46.21845063501095,5.976309680234988 46.20905354672258)) -POLYGON((6.06384544077808 46.1870571742486,6.06384544077808 46.9801999077985,7.24923786069061 46.9801999077985,7.24923786069061 46.1870571742486)) - Emprise rectangulaire Canton de Vaud -POLYGON((6.06384544077808 46.2948146242382,6.06384544077808 46.5452999981282,6.37970692618508 46.5452999981282,6.37970692618508 46.2948146242382)) - Emprise rectangulaire District de Nyon -POLYGON((6.5838791192197 46.5044344288519,6.5838791192197 46.6025470130881,6.72082104901253 46.6025470130881,6.72082104901253 46.5044344288519)) - Emprise rectangulaire Commune de Lausanne - - -Exemples [Parameters] : - -{"FORMAT" : "SHP","SELECTION" : "PASS_THROUGH","PROJECTION" : "SWITZERLAND"} -{"FORMAT" : "GDB","SELECTION" : "PASS_THROUGH","PROJECTION" : "SWITZERLAND"} -{"FORMAT" : "SHP","SELECTION" : "PASS_THROUGH","PROJECTION" : "SWITZERLAND95"} -{"FORMAT" : "GDB","SELECTION" : "PASS_THROUGH","PROJECTION" : "SWITZERLAND95"} - diff --git a/fme/sample_script_fmeserver.fmw b/fme/sample_script_fmeserver.fmw deleted file mode 100644 index 00483921..00000000 --- a/fme/sample_script_fmeserver.fmw +++ /dev/null @@ -1,3471 +0,0 @@ -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! -#! 
-#! [... several hundred more "#!" workspace-header lines with no recoverable content ...]
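Past that header, the body of the deleted `sample_script_fmeserver.fmw` opens with `FME_PYTHON_VERSION` and the published parameters Extract feeds it (`Perimeter`, `Product`, `FolderOut`, `Parameters`, `SourceDataset_FILEGDB`, `OrderLabel`, `Request`, `Client`, `Organism`). As a hedged sketch of how such a workspace is driven from outside Workbench — the parameter values below are hypothetical and `fme` being on the PATH is an assumption; only the parameter names come from the workspace itself:

```python
import subprocess

# Hypothetical values for the workspace's published parameters; the names are
# the DEFAULT_MACRO/GUI pairs declared below, everything else is illustrative.
params = {
    "Perimeter": "POLYGON((6.06 46.18,6.06 46.98,7.25 46.98,7.25 46.18,6.06 46.18))",
    "Product": "6f5afa6b-7f0a-ec54-a9a7-44f004d6edda",
    "FolderOut": r"C:\Temp\extract\orders\demo\output",
    "Parameters": '{"FORMAT":"SHP","SELECTION":"PASS_THROUGH","PROJECTION":"SWITZERLAND95"}',
    "OrderLabel": "259547",
    "Request": "365",
}

cmd = ["fme", "sample_script_fmeserver.fmw"]
for name, value in params.items():
    cmd += [f"--{name}", value]   # FME's standard --PARAMETER value syntax

subprocess.run(cmd, check=True)   # a non-zero exit code would mark the run as failed
```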
-FME_PYTHON_VERSION 27 -GUI IGNORE SourceDataset_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_WHERE_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_TRANSLATE_SPATIAL_DATA_ONLY_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_RESOLVE_DOMAINS_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_RESOLVE_SUBTYPE_NAMES_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_IGNORE_NETWORK_INFO_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_IGNORE_RELATIONSHIP_INFO_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SPLIT_COMPLEX_EDGES_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SPLIT_MULTI_PART_ANNOS_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_EXPOSE_ATTRS_GROUP_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINX_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINY_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXX_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXY_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_CLIP_TO_ENVELOPE_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_GEODB_SHARED_RDR_ADV_PARM_GROUP_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SPLIT_COMPLEX_ANNOS_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_CACHE_MULTIPATCH_TEXTURES_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_FEATURE_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_ORDER_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_METHOD_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_CHECK_SIMPLE_GEOM_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_MERGE_FEAT_LINKED_ANNOS_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_BEGIN_SQL{0}_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_END_SQL{0}_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_NETWORK_AUTHENTICATION_GEODATABASE_FILE_1,_GEODB_IN_SEARCH_ENVELOPE_COORDINATE_SYSTEM_GEODATABASE_FILE_1,_GEODB_IN_ALIAS_MODE_GEODATABASE_FILE_1,SourceDataset_FILEGDB_1,FILEGDB_IN_EXPOSE_ATTRS_GROUP_FILEGDB_1,FILEGDB_IN_SEARCH_ENVELOPE_MINX_FILEGDB_1,FILEGDB_IN_SEARCH_ENVELOPE_MINY_FILEGDB_1,FILEGDB_IN_SEARCH_ENVELOPE_MAXX_FILEGDB_1,FILEGDB_IN_SEARCH_ENVELOPE_MAXY_FILEGDB_1,FILEGDB_IN_SEARCH_ENVELOPE_COORDINATE_SYSTEM_FILEGDB_1,FILEGDB_IN_CLIP_TO_ENVELOPE_FILEGDB_1,FILEGDB_IN_NETWORK_AUTHENTICATION_FILEGDB_1,DestDataset_ESRISHAPE_1,ESRISHAPE_OUT_ENCODING_ESRISHAPE_1,ESRISHAPE_OUT_WRITE_SPATIAL_INDEX_ESRISHAPE_1,ESRISHAPE_OUT_COMPRESSED_SHAPE_FILE_ESRISHAPE_1,ESRISHAPE_OUT_DIMENSION_GROUP_ESRISHAPE_1,ESRISHAPE_OUT_SHAPE_WRT_ADV_PARM_GROUP_ESRISHAPE_1,ESRISHAPE_OUT_STRICT_COMPATIBILITY_ESRISHAPE_1,ESRISHAPE_OUT_PRESERVE_RING_VERTEX_ORDER_ESRISHAPE_1,_MEASURES_AS_Z_ESRISHAPE_1,DestDataset_FILEGDB_2,FILEGDB_OUT_OVERWRITE_GEODB_FILEGDB_2,FILEGDB_OUT_DATASET_TEMPLATE_FILEGDB_2 -DEFAULT_MACRO Perimeter POLYGON((6.295363612819898 46.32659377501397,6.288481575413728 46.65130021757865,7.055293317619941 46.656470518789625,7.057591178865101 46.33173342736276,6.295363612819898 46.32659377501397)) -GUI TEXT Perimeter Polygone d'export (au format WKT) : -DEFAULT_MACRO Product 6f5afa6b-7f0a-ec54-a9a7-44f004d6edda -GUI TEXT Product Identifiant du produit : -DEFAULT_MACRO FolderOut C:\Temp\extract\orders\20acf36a-58c6-49d2-ad5b-b27fa1457b91\output -GUI DIRNAME FolderOut Répertoire de sortie : -DEFAULT_MACRO Parameters {"FORMAT":"SHP","PROJECTION":"SWITZERLAND","SELECTION":"PASS_THROUGH","GROUPMENT":"YES","RAISON":"AVARIE","Communes":"","REMARK":"Parcelle N°3658 à Clarens \u0022Montreux\u0022"} -GUI OPTIONAL TEXT Parameters Liste dynamique de paramètres au format json: -DEFAULT_MACRO SourceDataset_FILEGDB .\ASIT-VD.gdb -GUI SOURCE_GEODATABASE SourceDataset_FILEGDB File Geodatabase: -DEFAULT_MACRO OrderLabel 259547 -GUI TEXT OrderLabel Libellé de la commande : -DEFAULT_MACRO Request 365 -GUI TEXT Request Identifiant de la requête : -DEFAULT_MACRO 
Client 2edc1a50-4837-4c44-1519-3ebc85f14588 -GUI TEXT Client Identifiant du client : -DEFAULT_MACRO Organism 4a142a5d-16bf-84d4-111f-57d76c834493 -GUI TEXT Organism Identifiant de l'organisme : -DEFAULT_MACRO INPUT_GDB $(FME_MF_DIR)ASIT-VD.gdb -GUI IGNORE OPTIONAL SOURCE_GEODATABASE INPUT_GDB Géodatabase Esri Source : -INCLUDE [ if {{$(Perimeter)} == {}} { puts_real {Parameter 'Perimeter' must be given a value.}; exit 1; }; ] -INCLUDE [ if {{$(Product)} == {}} { puts_real {Parameter 'Product' must be given a value.}; exit 1; }; ] -INCLUDE [ if {{$(FolderOut)} == {}} { puts_real {Parameter 'FolderOut' must be given a value.}; exit 1; }; ] -INCLUDE [ if {{$(SourceDataset_FILEGDB)} == {}} { puts_real {Parameter 'SourceDataset_FILEGDB' must be given a value.}; exit 1; }; ] -INCLUDE [ if {{$(OrderLabel)} == {}} { puts_real {Parameter 'OrderLabel' must be given a value.}; exit 1; }; ] -INCLUDE [ if {{$(Request)} == {}} { puts_real {Parameter 'Request' must be given a value.}; exit 1; }; ] -INCLUDE [ if {{$(Client)} == {}} { puts_real {Parameter 'Client' must be given a value.}; exit 1; }; ] -INCLUDE [ if {{$(Organism)} == {}} { puts_real {Parameter 'Organism' must be given a value.}; exit 1; }; ] -#! START_HEADER -#! START_WB_HEADER -READER_TYPE MULTI_READER -MULTI_READER_TYPE{0} NULL -MULTI_READER_FORMAT{0} GEODATABASE_FILE -MULTI_READER_KEYWORD{0} GEODATABASE_FILE_1 -MULTI_READER_GEN_DIRECTIVES{0} ALIAS_MODE,NONE,SEARCH_ENVELOPE_MINX,0,SPLIT_COMPLEX_ANNOS,no,RESOLVE_DOMAINS,no,CACHE_MULTIPATCH_TEXTURES,yes,SEARCH_ORDER,SPATIAL_FIRST,END_SQL0,,IGNORE_NETWORK_INFO,yes,WHERE,,SEARCH_FEATURE,,FME_CONNECTION_GROUP,,GEODB_SHARED_RDR_ADV_PARM_GROUP,,IGNORE_RELATIONSHIP_INFO,yes,EXPOSE_ATTRS_GROUP,,FEATURE_READ_MODE,Features,SEARCH_ENVELOPE_MAXX,0,CLIP_TO_ENVELOPE,NO,MERGE_FEAT_LINKED_ANNOS,no,SPLIT_COMPLEX_EDGES,no,SPLIT_MULTI_PART_ANNOS,no,BEGIN_SQL0,,SEARCH_ENVELOPE_MINY,0,QUERY_FEATURE_TYPES_FOR_MERGE_FILTERS,Yes,CHECK_SIMPLE_GEOM,no,RESOLVE_SUBTYPE_NAMES,yes,TABLELIST,Products,SEARCH_METHOD,GEODB_INTERSECTS,GEODATABASE_FILE_EXPOSE_FORMAT_ATTRS,,TRANSLATE_SPATIAL_DATA_ONLY,no,SEARCH_ENVELOPE_MAXY,0,USE_SEARCH_ENVELOPE,NO,_MERGE_SCHEMAS,YES -MULTI_READER_TYPE{1} FILEGDB -MULTI_READER_KEYWORD{1} FILEGDB_1 -MULTI_READER_GEN_DIRECTIVES{1} SEARCH_ENVELOPE_MINX,0,SEARCH_ENVELOPE_COORDINATE_SYSTEM,,FME_CONNECTION_GROUP,,TABLELIST,,SEARCH_ENVELOPE_MAXX,0,_MERGE_SCHEMAS,YES,QUERY_FEATURE_TYPES_FOR_MERGE_FILTERS,Yes,EXPOSE_ATTRS_GROUP,,USE_SEARCH_ENVELOPE,NO,SEARCH_ENVELOPE_MAXY,0,SEARCH_ENVELOPE_MINY,0,FILEGDB_EXPOSE_FORMAT_ATTRS,,CLIP_TO_ENVELOPE,NO - -WRITER_TYPE MULTI_WRITER -MULTI_WRITER_DATASET_ORDER BY_ID -MULTI_WRITER_FIRST_WRITER_ID 0 -MULTI_WRITER_TYPE{0} ESRISHAPE -MULTI_WRITER_KEYWORD{0} ESRISHAPE_1 -MULTI_WRITER_TYPE{1} FILEGDB -MULTI_WRITER_KEYWORD{1} FILEGDB_2 -#! END_WB_HEADER - -#! START_WB_HEADER -MACRO WB_KEYWORD "GEODATABASE_FILE_1" -#! END_WB_HEADER -#! START_SOURCE_HEADER GEODATABASE_FILE GEODATABASE_FILE_1 -# ============================================================================ -# The following GUI line prompts for a file to be used as the source. -# The user input is stored in a macro, which is then used to define -# the dataset to be read. -# The dataset this mapping file was generated from was: -#! END_SOURCE_HEADER -#! START_WB_HEADER -DEFAULT_MACRO SourceDataset -INCLUDE [ if {{$(SourceDataset)} != ""} { \ - puts {DEFAULT_MACRO SourceDataset_GEODATABASE_FILE_1 $(SourceDataset)} \ - } ] -#! END_WB_HEADER -#! 
START_SOURCE_HEADER GEODATABASE_FILE GEODATABASE_FILE_1 -DEFAULT_MACRO SourceDataset_GEODATABASE_FILE_1 $(INPUT_GDB) -GUI SOURCE_GEODATABASE SourceDataset_GEODATABASE_FILE_1 Source Esri File Geodatabase: -# Translation flags. -# -DEFAULT_MACRO GEODATABASE_FILE_IN_QUERY_FEATURE_TYPES_FOR_MERGE_FILTERS_GEODATABASE_FILE_1 Yes -GEODATABASE_FILE_1_QUERY_FEATURE_TYPES_FOR_MERGE_FILTERS "$(GEODATABASE_FILE_IN_QUERY_FEATURE_TYPES_FOR_MERGE_FILTERS_GEODATABASE_FILE_1)" -# ======================================================================== -# The following defines the global where clause to be used when retrieving -# features from the Geodatabase. -DEFAULT_MACRO GEODATABASE_FILE_IN_WHERE_GEODATABASE_FILE_1 -GEODATABASE_FILE_1_WHERE "$(GEODATABASE_FILE_IN_WHERE_GEODATABASE_FILE_1)" -GUI OPTIONAL TEXT GEODATABASE_FILE_IN_WHERE_GEODATABASE_FILE_1 WHERE Clause: -#============================================================================== -# Determines whether non-spatial data, such as tables, subtypes, domains, -# and relationship classes, get translated -DEFAULT_MACRO GEODATABASE_FILE_IN_TRANSLATE_SPATIAL_DATA_ONLY_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_TRANSLATE_SPATIAL_DATA_ONLY "$(GEODATABASE_FILE_IN_TRANSLATE_SPATIAL_DATA_ONLY_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_TRANSLATE_SPATIAL_DATA_ONLY_GEODATABASE_FILE_1 yes%no Spatial Data Only -#============================================================================== -# Determines whether to resolve domains -DEFAULT_MACRO GEODATABASE_FILE_IN_RESOLVE_DOMAINS_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_RESOLVE_DOMAINS "$(GEODATABASE_FILE_IN_RESOLVE_DOMAINS_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_RESOLVE_DOMAINS_GEODATABASE_FILE_1 yes%no Resolve Domains -#============================================================================== -# Determines whether to resolve the subtype associated with a feature to the -# text description linked to the subtype value. The text description will -# be added as an attribute on the feature. -DEFAULT_MACRO GEODATABASE_FILE_IN_RESOLVE_SUBTYPE_NAMES_GEODATABASE_FILE_1 yes -GEODATABASE_FILE_1_RESOLVE_SUBTYPE_NAMES "$(GEODATABASE_FILE_IN_RESOLVE_SUBTYPE_NAMES_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_RESOLVE_SUBTYPE_NAMES_GEODATABASE_FILE_1 yes%no Resolve Subtypes -#============================================================================== -# Determines whether to ignore the network info associated with a feature -# coming from a network feature class. When set to 'yes', junctions will be -# treated as point features, and edges will be treated as polyline features, -# with the geodb_type being set to geodb_point and geodb_polyline, respectively. -# Caution should be taken when changing the value for this keyword to a different -# value than was specified when creating the mapping file/workspace. -DEFAULT_MACRO GEODATABASE_FILE_IN_IGNORE_NETWORK_INFO_GEODATABASE_FILE_1 yes -GEODATABASE_FILE_1_IGNORE_NETWORK_INFO "$(GEODATABASE_FILE_IN_IGNORE_NETWORK_INFO_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_IGNORE_NETWORK_INFO_GEODATABASE_FILE_1 yes%no Ignore Network Info -# ======================================================================== -# Determines whether to ignore the relationship info associated with a feature -# coming from a feature class containing relationships. When set to 'yes', simple -# relationships will not be read, and attributed relationships will be read as -# tables. 
Caution should be taken when changing the value for this keyword to a -# different value than was specified when creating the mapping file/workspace. -DEFAULT_MACRO GEODATABASE_FILE_IN_IGNORE_RELATIONSHIP_INFO_GEODATABASE_FILE_1 yes -GEODATABASE_FILE_1_IGNORE_RELATIONSHIP_INFO "$(GEODATABASE_FILE_IN_IGNORE_RELATIONSHIP_INFO_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_IGNORE_RELATIONSHIP_INFO_GEODATABASE_FILE_1 yes%no Ignore Relationship Info -#============================================================================== -# Determines whether complex edge features should be split up and each edge -# element read as a separate feature. The default behaviour is to treat an -# entire edge *feature* as an FME feature, rather than an edge *element* as an -# FME feature. Returning edge elements ensures that all network connectivity -# information on complex edges is read. -DEFAULT_MACRO GEODATABASE_FILE_IN_SPLIT_COMPLEX_EDGES_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_SPLIT_COMPLEX_EDGES "$(GEODATABASE_FILE_IN_SPLIT_COMPLEX_EDGES_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_SPLIT_COMPLEX_EDGES_GEODATABASE_FILE_1 yes%no Split Complex Edges -#============================================================================== -# Determines whether annotation features should be split up and each part -# element read as a separate feature. The default behaviour is to treat an -# entire annotation *feature* as an FME feature, rather than an annotation -# *element* as an FME feature. Returning edge elements ensures that all -# rotation and positional information for each element is preserved. -DEFAULT_MACRO GEODATABASE_FILE_IN_SPLIT_MULTI_PART_ANNOS_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_SPLIT_MULTI_PART_ANNOS "$(GEODATABASE_FILE_IN_SPLIT_MULTI_PART_ANNOS_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_SPLIT_MULTI_PART_ANNOS_GEODATABASE_FILE_1 yes%no Split Multi-Part Annotations -# ======================================================================== -# The following determines whether to read features from the geodatabase -# tables/feature classes or to read metadata from those tables/feature -# classes. 
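Every option in this generated header — including the `FEATURE_READ_MODE` directive that follows — repeats the same three-line pattern: a `DEFAULT_MACRO` fallback, a format keyword that dereferences it with `$(NAME)`, and a `GUI` line that exposes it in Workbench. A minimal sketch of the observable expansion semantics; this is an illustration of the pattern, not FME's actual parser:

```python
import re

def apply_default_macro(macros: dict[str, str], name: str, value: str) -> None:
    # DEFAULT_MACRO only takes effect if nothing (e.g. a --NAME command-line
    # override) has already defined the macro.
    macros.setdefault(name, value)

def expand(macros: dict[str, str], text: str) -> str:
    # Replace each $(NAME) reference with the macro's current value.
    return re.sub(r"\$\(([A-Za-z0-9_{}]+)\)",
                  lambda m: macros.get(m.group(1), ""), text)

macros: dict[str, str] = {}
apply_default_macro(macros, "GEODATABASE_FILE_IN_RESOLVE_SUBTYPE_NAMES_GEODATABASE_FILE_1", "yes")
line = 'GEODATABASE_FILE_1_RESOLVE_SUBTYPE_NAMES "$(GEODATABASE_FILE_IN_RESOLVE_SUBTYPE_NAMES_GEODATABASE_FILE_1)"'
print(expand(macros, line))  # -> GEODATABASE_FILE_1_RESOLVE_SUBTYPE_NAMES "yes"
```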
-DEFAULT_MACRO GEODATABASE_FILE_IN_FEATURE_READ_MODE_GEODATABASE_FILE_1 Features -GEODATABASE_FILE_1_FEATURE_READ_MODE "$(GEODATABASE_FILE_IN_FEATURE_READ_MODE_GEODATABASE_FILE_1)" -DEFAULT_MACRO GEODATABASE_FILE_IN_EXPOSE_ATTRS_GROUP_GEODATABASE_FILE_1 -GEODATABASE_FILE_1_EXPOSE_ATTRS_GROUP "$(GEODATABASE_FILE_IN_EXPOSE_ATTRS_GROUP_GEODATABASE_FILE_1)" -GUI DISCLOSUREGROUP GEODATABASE_FILE_IN_EXPOSE_ATTRS_GROUP_GEODATABASE_FILE_1 GEODATABASE_FILE_EXPOSE_FORMAT_ATTRS%ALIAS_MODE Schema Attributes -# Include this file in source setting section to add native search envelope processing -# Zero as a default means we don't do any search -- this makes workbench happier -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINX_GEODATABASE_FILE_1 0 -GEODATABASE_FILE_1_SEARCH_ENVELOPE "$(GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINX_GEODATABASE_FILE_1)" -GUI OPTIONAL FLOAT GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINX_GEODATABASE_FILE_1 Minimum X: -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINY_GEODATABASE_FILE_1 0 -GEODATABASE_FILE_1_SEARCH_ENVELOPE "$(GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINY_GEODATABASE_FILE_1)" -GUI OPTIONAL FLOAT GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINY_GEODATABASE_FILE_1 Minimum Y: -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXX_GEODATABASE_FILE_1 0 -GEODATABASE_FILE_1_SEARCH_ENVELOPE "$(GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXX_GEODATABASE_FILE_1)" -GUI OPTIONAL FLOAT GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXX_GEODATABASE_FILE_1 Maximum X: -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXY_GEODATABASE_FILE_1 0 -GEODATABASE_FILE_1_SEARCH_ENVELOPE "$(GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXY_GEODATABASE_FILE_1)" -GUI OPTIONAL FLOAT GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXY_GEODATABASE_FILE_1 Maximum Y: -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_CLIP_TO_ENVELOPE_GEODATABASE_FILE_1 NO -GEODATABASE_FILE_1_CLIP_TO_ENVELOPE "$(GEODATABASE_FILE_IN_CLIP_TO_ENVELOPE_GEODATABASE_FILE_1)" -GUI OPTIONAL CHECKBOX GEODATABASE_FILE_IN_CLIP_TO_ENVELOPE_GEODATABASE_FILE_1 YES%NO Clip to Search Envelope -GUI DISCLOSUREGROUP GEODATABASE_FILE_IN_GEODB_SHARED_RDR_ADV_PARM_GROUP_GEODATABASE_FILE_1 SPLIT_COMPLEX_ANNOS%CACHE_MULTIPATCH_TEXTURES%SEARCH_FEATURE%SEARCH_ORDER%SEARCH_METHOD%CHECK_SIMPLE_GEOM%MERGE_FEAT_LINKED_ANNOS%BEGIN_SQL{0}%END_SQL{0} Advanced -#============================================================================== -# The following specifies that complex representations for annotation should -# be split into simpler representations. -DEFAULT_MACRO GEODATABASE_FILE_IN_SPLIT_COMPLEX_ANNOS_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_SPLIT_COMPLEX_ANNOS "$(GEODATABASE_FILE_IN_SPLIT_COMPLEX_ANNOS_GEODATABASE_FILE_1)" -GUI CHOICE GEODATABASE_FILE_IN_SPLIT_COMPLEX_ANNOS_GEODATABASE_FILE_1 yes%no Split Complex Annotations: -#============================================================================== -# The following specifies whether to keep (YES) or clean up (NO) Esri's -# multipatch texture caches. 
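The search-envelope block above defaults every bound to 0, which the generated comment glosses as "we don't do any search". A pure-Python sketch of the test this implies, with the all-zero envelope treated as a sentinel for "no spatial filter" — an illustration of the semantics, not FME's implementation:

```python
from dataclasses import dataclass

@dataclass
class Envelope:
    minx: float
    miny: float
    maxx: float
    maxy: float

    def is_unset(self) -> bool:
        # The generated defaults above leave all four bounds at 0 = "no search".
        return (self.minx, self.miny, self.maxx, self.maxy) == (0, 0, 0, 0)

    def intersects(self, other: "Envelope") -> bool:
        return not (other.minx > self.maxx or other.maxx < self.minx or
                    other.miny > self.maxy or other.maxy < self.miny)

search = Envelope(0, 0, 0, 0)                   # the workspace's defaults
feature = Envelope(6.58, 46.50, 6.72, 46.60)    # a feature's bounding box
if search.is_unset() or search.intersects(feature):
    print("feature is read")  # CLIP_TO_ENVELOPE=YES would additionally clip geometry
```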
-DEFAULT_MACRO GEODATABASE_FILE_IN_CACHE_MULTIPATCH_TEXTURES_GEODATABASE_FILE_1 yes -GEODATABASE_FILE_1_CACHE_MULTIPATCH_TEXTURES "$(GEODATABASE_FILE_IN_CACHE_MULTIPATCH_TEXTURES_GEODATABASE_FILE_1)" -GUI CHOICE GEODATABASE_FILE_IN_CACHE_MULTIPATCH_TEXTURES_GEODATABASE_FILE_1 yes%no Cache Multipatch Textures: -# ======================================================================== -# The following defines the search feature for the query. It defines an -# arbitrarily complex search feature as a spatial contraint. It is in the -# form _SEARCH_FEATURE [ ]+. -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_FEATURE_GEODATABASE_FILE_1 -GEODATABASE_FILE_1_SEARCH_FEATURE "$(GEODATABASE_FILE_IN_SEARCH_FEATURE_GEODATABASE_FILE_1)" -GUI OPTIONAL TEXT GEODATABASE_FILE_IN_SEARCH_FEATURE_GEODATABASE_FILE_1 Search Feature: -# ======================================================================== -# Determines whether the spatial component or the attribute component of a -# query is performed first. This is only applicable when both spatial and -# non-spatial searches are being performed. -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_ORDER_GEODATABASE_FILE_1 SPATIAL_FIRST -GEODATABASE_FILE_1_SEARCH_ORDER "$(GEODATABASE_FILE_IN_SEARCH_ORDER_GEODATABASE_FILE_1)" -GUI OPTIONAL CHOICE GEODATABASE_FILE_IN_SEARCH_ORDER_GEODATABASE_FILE_1 SPATIAL_FIRST%ATTRIBUTE_FIRST Search Order: -# ======================================================================== -# Determines which type of search method to use. This keyword is only applicable -# when either the keyword SEARCH_ENVELOPE or the keyword SEARCH_FEATURE is specified. -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_METHOD_GEODATABASE_FILE_1 GEODB_INTERSECTS -GEODATABASE_FILE_1_SEARCH_METHOD "$(GEODATABASE_FILE_IN_SEARCH_METHOD_GEODATABASE_FILE_1)" -GUI OPTIONAL CHOICE GEODATABASE_FILE_IN_SEARCH_METHOD_GEODATABASE_FILE_1 GEODB_INTERSECTS%GEODB_ENVELOPE_INTERSECTS%GEODB_TOUCHES%GEODB_OVERLAPS%GEODB_CROSSES%GEODB_WITHIN%GEODB_CONTAINS Search Method -# ======================================================================== -# The following specifies that simple geometry should be checked for -# when reading features. -DEFAULT_MACRO GEODATABASE_FILE_IN_CHECK_SIMPLE_GEOM_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_CHECK_SIMPLE_GEOM "$(GEODATABASE_FILE_IN_CHECK_SIMPLE_GEOM_GEODATABASE_FILE_1)" -GUI CHOICE GEODATABASE_FILE_IN_CHECK_SIMPLE_GEOM_GEODATABASE_FILE_1 yes%no Check for Simple Geometry: -# ======================================================================== -# The following specifies whether feature-linked annotations should -# be merged onto the main feature as a text list attribute when reading. -DEFAULT_MACRO GEODATABASE_FILE_IN_MERGE_FEAT_LINKED_ANNOS_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_MERGE_FEAT_LINKED_ANNOS "$(GEODATABASE_FILE_IN_MERGE_FEAT_LINKED_ANNOS_GEODATABASE_FILE_1)" -GUI CHOICE GEODATABASE_FILE_IN_MERGE_FEAT_LINKED_ANNOS_GEODATABASE_FILE_1 yes%no Merge Feature Linked Annotations: -# ============================================================================== -# The following specifies an SQL command to execute before opening the first -# Geodatabase table. 
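The comment above introduces the `BEGIN_SQL{0}` directive that follows (with `END_SQL{0}` right after it): one statement runs before the first Geodatabase table is opened, one after all tables are closed. A sketch of that pre/post-hook pattern, using `sqlite3` purely as a stand-in for the Geodatabase connection:

```python
import sqlite3

def run_with_sql_hooks(db_path: str, begin_sql: str, end_sql: str, translate) -> None:
    # Mirrors BEGIN_SQL{0}/END_SQL{0}: one statement before the first table is
    # opened, one after all tables are closed. sqlite3 is only a stand-in here.
    conn = sqlite3.connect(db_path)
    try:
        if begin_sql:
            conn.execute(begin_sql)
        translate(conn)
        if end_sql:
            conn.execute(end_sql)
    finally:
        conn.close()

run_with_sql_hooks(
    ":memory:",
    begin_sql="CREATE TEMP TABLE run_log(msg TEXT)",
    end_sql="DROP TABLE run_log",
    translate=lambda conn: conn.execute("INSERT INTO run_log VALUES ('reading features')"),
)
```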
-DEFAULT_MACRO GEODATABASE_FILE_IN_BEGIN_SQL{0}_GEODATABASE_FILE_1 -GEODATABASE_FILE_1_BEGIN_SQL{0} "$(GEODATABASE_FILE_IN_BEGIN_SQL{0}_GEODATABASE_FILE_1)" -GUI OPTIONAL TEXT_EDIT_SQL GEODATABASE_FILE_IN_BEGIN_SQL{0}_GEODATABASE_FILE_1 SQL Statement To Execute Before Translation: -# ============================================================================== -# The following specifies an SQL command to execute after closing all the -# Geodatabase tables. -DEFAULT_MACRO GEODATABASE_FILE_IN_END_SQL{0}_GEODATABASE_FILE_1 -GEODATABASE_FILE_1_END_SQL{0} "$(GEODATABASE_FILE_IN_END_SQL{0}_GEODATABASE_FILE_1)" -GUI OPTIONAL TEXT_EDIT_SQL GEODATABASE_FILE_IN_END_SQL{0}_GEODATABASE_FILE_1 SQL Statement To Execute After Translation: -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_NETWORK_AUTHENTICATION_GEODATABASE_FILE_1 -GEODATABASE_FILE_1_NETWORK_AUTHENTICATION "$(GEODATABASE_FILE_IN_NETWORK_AUTHENTICATION_GEODATABASE_FILE_1)" -GUI OPTIONAL AUTHENTICATOR GEODATABASE_FILE_IN_NETWORK_AUTHENTICATION_GEODATABASE_FILE_1 CONTAINER%GROUP%CONTAINER_TITLE%"Network Authentication"%PROMPT_TYPE%NETWORK Network Authentication -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_ATTRIBUTE_READING_GEODATABASE_FILE_1 ALL -GEODATABASE_FILE_1_ATTRIBUTE_READING "$(GEODATABASE_FILE_IN_ATTRIBUTE_READING_GEODATABASE_FILE_1)" -# ============================================================================ -# Search Envelope Coordinate System -DEFAULT_MACRO _GEODB_IN_SEARCH_ENVELOPE_COORDINATE_SYSTEM_GEODATABASE_FILE_1 -GUI OPTIONAL COORDSYS _GEODB_IN_SEARCH_ENVELOPE_COORDINATE_SYSTEM_GEODATABASE_FILE_1 Search Envelope Coordinate System: -GEODATABASE_FILE_1_SEARCH_ENVELOPE_COORDINATE_SYSTEM "$(_GEODB_IN_SEARCH_ENVELOPE_COORDINATE_SYSTEM_GEODATABASE_FILE_1)" -DEFAULT_MACRO _GEODB_IN_ALIAS_MODE_GEODATABASE_FILE_1 NONE -GUI LOOKUP_CHOICE _GEODB_IN_ALIAS_MODE_GEODATABASE_FILE_1 None,NONE%ExposeAliasesasMetadataAttributesname_alias,ON_DATA_FEATURES Alias Mode: -GEODATABASE_FILE_1_ALIAS_MODE "$(_GEODB_IN_ALIAS_MODE_GEODATABASE_FILE_1)" -GEODATABASE_FILE_1_DATASET "$(SourceDataset_GEODATABASE_FILE_1)" -#! END_SOURCE_HEADER -#! START_WB_HEADER -MACRO WB_KEYWORD "FILEGDB_1" -#! END_WB_HEADER -#! START_SOURCE_HEADER FILEGDB FILEGDB_1 -# The dataset this mapping file was generated from was: -#! END_SOURCE_HEADER -#! START_WB_HEADER -DEFAULT_MACRO SourceDataset -INCLUDE [ if {{$(SourceDataset)} != ""} { \ - puts {DEFAULT_MACRO SourceDataset_FILEGDB_1 $(SourceDataset)} \ - } ] -#! END_WB_HEADER -#! 
START_SOURCE_HEADER FILEGDB FILEGDB_1 -DEFAULT_MACRO SourceDataset_FILEGDB_1 $(SourceDataset_FILEGDB) -GUI SOURCE_GEODATABASE SourceDataset_FILEGDB_1 File Geodatabase: -DEFAULT_MACRO FILEGDB_IN_QUERY_FEATURE_TYPES_FOR_MERGE_FILTERS_FILEGDB_1 Yes -FILEGDB_1_QUERY_FEATURE_TYPES_FOR_MERGE_FILTERS "$(FILEGDB_IN_QUERY_FEATURE_TYPES_FOR_MERGE_FILTERS_FILEGDB_1)" -DEFAULT_MACRO FILEGDB_IN_EXPOSE_ATTRS_GROUP_FILEGDB_1 -FILEGDB_1_EXPOSE_ATTRS_GROUP "$(FILEGDB_IN_EXPOSE_ATTRS_GROUP_FILEGDB_1)" -GUI DISCLOSUREGROUP FILEGDB_IN_EXPOSE_ATTRS_GROUP_FILEGDB_1 FILEGDB_EXPOSE_FORMAT_ATTRS Schema Attributes -# Include this file in source setting section to add native search envelope processing -# Zero as a default means we don't do any search -- this makes workbench happier -# =========================================================================== -DEFAULT_MACRO FILEGDB_IN_SEARCH_ENVELOPE_MINX_FILEGDB_1 0 -FILEGDB_1_SEARCH_ENVELOPE "$(FILEGDB_IN_SEARCH_ENVELOPE_MINX_FILEGDB_1)" -GUI OPTIONAL FLOAT FILEGDB_IN_SEARCH_ENVELOPE_MINX_FILEGDB_1 Minimum X: -# =========================================================================== -DEFAULT_MACRO FILEGDB_IN_SEARCH_ENVELOPE_MINY_FILEGDB_1 0 -FILEGDB_1_SEARCH_ENVELOPE "$(FILEGDB_IN_SEARCH_ENVELOPE_MINY_FILEGDB_1)" -GUI OPTIONAL FLOAT FILEGDB_IN_SEARCH_ENVELOPE_MINY_FILEGDB_1 Minimum Y: -# =========================================================================== -DEFAULT_MACRO FILEGDB_IN_SEARCH_ENVELOPE_MAXX_FILEGDB_1 0 -FILEGDB_1_SEARCH_ENVELOPE "$(FILEGDB_IN_SEARCH_ENVELOPE_MAXX_FILEGDB_1)" -GUI OPTIONAL FLOAT FILEGDB_IN_SEARCH_ENVELOPE_MAXX_FILEGDB_1 Maximum X: -# =========================================================================== -DEFAULT_MACRO FILEGDB_IN_SEARCH_ENVELOPE_MAXY_FILEGDB_1 0 -FILEGDB_1_SEARCH_ENVELOPE "$(FILEGDB_IN_SEARCH_ENVELOPE_MAXY_FILEGDB_1)" -GUI OPTIONAL FLOAT FILEGDB_IN_SEARCH_ENVELOPE_MAXY_FILEGDB_1 Maximum Y: -# =========================================================================== -DEFAULT_MACRO FILEGDB_IN_SEARCH_ENVELOPE_COORDINATE_SYSTEM_FILEGDB_1 -FILEGDB_1_SEARCH_ENVELOPE "$(FILEGDB_IN_SEARCH_ENVELOPE_COORDINATE_SYSTEM_FILEGDB_1)" -GUI OPTIONAL COORDSYS FILEGDB_IN_SEARCH_ENVELOPE_COORDINATE_SYSTEM_FILEGDB_1 Search Envelope Coordinate System: -# =========================================================================== -DEFAULT_MACRO FILEGDB_IN_CLIP_TO_ENVELOPE_FILEGDB_1 NO -FILEGDB_1_CLIP_TO_ENVELOPE "$(FILEGDB_IN_CLIP_TO_ENVELOPE_FILEGDB_1)" -GUI OPTIONAL CHECKBOX FILEGDB_IN_CLIP_TO_ENVELOPE_FILEGDB_1 YES%NO Clip to Search Envelope -# -# =========================================================================== -DEFAULT_MACRO FILEGDB_IN_NETWORK_AUTHENTICATION_FILEGDB_1 -FILEGDB_1_NETWORK_AUTHENTICATION "$(FILEGDB_IN_NETWORK_AUTHENTICATION_FILEGDB_1)" -GUI OPTIONAL AUTHENTICATOR FILEGDB_IN_NETWORK_AUTHENTICATION_FILEGDB_1 CONTAINER%GROUP%CONTAINER_TITLE%"Network Authentication"%PROMPT_TYPE%NETWORK Network Authentication -# =========================================================================== -DEFAULT_MACRO FILEGDB_IN_ATTRIBUTE_READING_FILEGDB_1 ALL -FILEGDB_1_ATTRIBUTE_READING "$(FILEGDB_IN_ATTRIBUTE_READING_FILEGDB_1)" -FILEGDB_1_DATASET "$(SourceDataset_FILEGDB_1)" -#! END_SOURCE_HEADER -#! START_WB_HEADER -MACRO WB_KEYWORD "ESRISHAPE_1" -#! END_WB_HEADER -#! START_DEST_HEADER ESRISHAPE ESRISHAPE_1 -# ============================================================================ -# The following GUI line prompts for a folder to be used as the -# the destination for the Esri Shapefiles. 
-# The user input is stored in a macro, which is then used to define -# the dataset to be written. -#! END_DEST_HEADER -#! START_WB_HEADER -DEFAULT_MACRO DestDataset -INCLUDE [ if {"$(DestDataset)" != ""} { \ - puts {DEFAULT_MACRO DestDataset_ESRISHAPE_1 $(DestDataset)} \ - } ] -#! END_WB_HEADER -#! START_DEST_HEADER ESRISHAPE ESRISHAPE_1 -DEFAULT_MACRO DestDataset_ESRISHAPE_1 $(FolderOut) -GUI DIRNAME DestDataset_ESRISHAPE_1 Destination Esri Shapefile Folder: -ESRISHAPE_1_COORDINATE_SYSTEM_GRANULARITY FEATURE_TYPE -# ============================================================================ -# Determines whether the attribute names should be uppercased, or whether they -# should stay as specified in the shapefile. The default will be Yes for -# backwards compatibility. Once the mapping file/workspace has been generated, -# the value for this keyword should not be changed. -DEFAULT_MACRO ESRISHAPE_OUT_UPPER_CASE_ATTR_NAMES_ESRISHAPE_1 No -ESRISHAPE_1_UPPER_CASE_ATTR_NAMES "$(ESRISHAPE_OUT_UPPER_CASE_ATTR_NAMES_ESRISHAPE_1)" -# ============================================================================ -# The following keyword allows the user to choose what encoding to -# use for outputting the shapefile -# BUG31194: For backwards compatibility and not outputting a .cpg file -# we have chosen to make the writer default encoding system (ANSI) -DEFAULT_MACRO ESRISHAPE_OUT_ENCODING_ESRISHAPE_1 SYSTEM -ESRISHAPE_1_ENCODING "$(ESRISHAPE_OUT_ENCODING_ESRISHAPE_1)" -GUI OPTIONAL ENCODING ESRISHAPE_OUT_ENCODING_ESRISHAPE_1 ANSI%SYSTEM%BIG5%EUC%HKBIG5%ISO%OEM%SJIS%UTF-8%CP437%CP708%CP720%CP737%CP775%CP850%CP852%CP855%CP857%CP860%CP861%CP862%CP863%CP864%CP865%CP866%CP869%CP874%CP932%CP936%CP950%CP1250%CP1251%CP1252%CP1253%CP1254%CP1255%CP1256%CP1257%CP1258%ISO8859-1%ISO8859-2%ISO8859-3%ISO8859-4%ISO8859-5%ISO8859-6%ISO8859-7%ISO8859-8%ISO8859-9%ISO-8859-11%ISO8859-13%ISO8859-15%WINDOWS-874 Character Encoding: -# ============================================================================ -# PR2557: Specifies whether or not the reader will generate spatial index files -DEFAULT_MACRO ESRISHAPE_OUT_WRITE_SPATIAL_INDEX_ESRISHAPE_1 No -ESRISHAPE_1_WRITE_SPATIAL_INDEX "$(ESRISHAPE_OUT_WRITE_SPATIAL_INDEX_ESRISHAPE_1)" -GUI CHECKBOX ESRISHAPE_OUT_WRITE_SPATIAL_INDEX_ESRISHAPE_1 Yes%No Write Spatial Index -DEFAULT_MACRO ESRISHAPE_OUT_COMPRESSED_SHAPE_FILE_ESRISHAPE_1 No -ESRISHAPE_1_COMPRESSED_SHAPE_FILE "$(ESRISHAPE_OUT_COMPRESSED_SHAPE_FILE_ESRISHAPE_1)" -GUI CHECKBOX ESRISHAPE_OUT_COMPRESSED_SHAPE_FILE_ESRISHAPE_1 Yes%No Create Compressed Shapefile (.shz) -GUI DISCLOSUREGROUP ESRISHAPE_OUT_DIMENSION_GROUP_ESRISHAPE_1 DIMENSION Dimension Settings -DEFAULT_MACRO ESRISHAPE_OUT_DIMENSION_ESRISHAPE_1 auto -ESRISHAPE_1_DIMENSION "$(ESRISHAPE_OUT_DIMENSION_ESRISHAPE_1)" -GUI DISCLOSUREGROUP ESRISHAPE_OUT_SHAPE_WRT_ADV_PARM_GROUP_ESRISHAPE_1 STRICT_COMPATIBILITY%PRESERVE_RING_VERTEX_ORDER Advanced -# ============================================================================ -# BUG31474: Add an Strict compatibility flag to prevent some apps from -# crashing when record lengths are too long. By default, we want to write -# compatible files going forward. 
-DEFAULT_MACRO ESRISHAPE_OUT_STRICT_COMPATIBILITY_ESRISHAPE_1 Yes -ESRISHAPE_1_STRICT_COMPATIBILITY "$(ESRISHAPE_OUT_STRICT_COMPATIBILITY_ESRISHAPE_1)" -GUI CHECKBOX ESRISHAPE_OUT_STRICT_COMPATIBILITY_ESRISHAPE_1 Yes%No Strict Compatibility -# ============================================================ -# BUG39095: Add an option to preserve input ring vertex order. -DEFAULT_MACRO ESRISHAPE_OUT_PRESERVE_RING_VERTEX_ORDER_ESRISHAPE_1 No -ESRISHAPE_1_PRESERVE_RING_VERTEX_ORDER "$(ESRISHAPE_OUT_PRESERVE_RING_VERTEX_ORDER_ESRISHAPE_1)" -GUI CHECKBOX ESRISHAPE_OUT_PRESERVE_RING_VERTEX_ORDER_ESRISHAPE_1 Yes%No Preserve Ring Vertex Order -# ============================================================================ -# Opt in for destination dataset type vs format type validation -DEFAULT_MACRO ESRISHAPE_OUT_DESTINATION_DATASETTYPE_VALIDATION_ESRISHAPE_1 Yes -ESRISHAPE_1_DESTINATION_DATASETTYPE_VALIDATION "$(ESRISHAPE_OUT_DESTINATION_DATASETTYPE_VALIDATION_ESRISHAPE_1)" -# ============================================================================ -# The following GUI line sets whether measure values in the shapefiles should -# be treated as elevations. -DEFAULT_MACRO _MEASURES_AS_Z_ESRISHAPE_1 no -GUI CHOICE _MEASURES_AS_Z_ESRISHAPE_1 yes%no Treat Measures as Elevation: -ESRISHAPE_1_MEASURES_AS_Z $(_MEASURES_AS_Z_ESRISHAPE_1) -ESRISHAPE_1_ENCODING $(ESRISHAPE_OUT_ENCODING_ESRISHAPE_1) -ESRISHAPE_1_DATASET "$(DestDataset_ESRISHAPE_1)" -#! END_DEST_HEADER -#! START_WB_HEADER -MACRO WB_KEYWORD "FILEGDB_2" -#! END_WB_HEADER -#! START_DEST_HEADER FILEGDB FILEGDB_2 -GUI SOURCE_GEODATABASE DestDataset_FILEGDB_2 File Geodatabase: -DEFAULT_MACRO FILEGDB_OUT_OVERWRITE_GEODB_FILEGDB_2 YES -FILEGDB_2_OVERWRITE_GEODB "$(FILEGDB_OUT_OVERWRITE_GEODB_FILEGDB_2)" -GUI ACTIVECHECK FILEGDB_OUT_OVERWRITE_GEODB_FILEGDB_2 YES%NO,FILEGDB_OUT_DATASET_TEMPLATE Overwrite Existing Geodatabase: -DEFAULT_MACRO FILEGDB_OUT_DATASET_TEMPLATE_FILEGDB_2 -FILEGDB_2_DATASET_TEMPLATE "$(FILEGDB_OUT_DATASET_TEMPLATE_FILEGDB_2)" -GUI OPTIONAL SOURCE_GEODATABASE_CFG FILEGDB_OUT_DATASET_TEMPLATE_FILEGDB_2 SWIZZLER:NO Template File Geodatabase: -# ============================================================================ -# Opt in for destination dataset type vs format type validation -DEFAULT_MACRO FILEGDB_OUT_DESTINATION_DATASETTYPE_VALIDATION_FILEGDB_2 Yes -FILEGDB_2_DESTINATION_DATASETTYPE_VALIDATION "$(FILEGDB_OUT_DESTINATION_DATASETTYPE_VALIDATION_FILEGDB_2)" -DEFAULT_MACRO FILEGDB_OUT_COORDINATE_SYSTEM_GRANULARITY_FILEGDB_2 FEATURE_TYPE -FILEGDB_2_COORDINATE_SYSTEM_GRANULARITY "$(FILEGDB_OUT_COORDINATE_SYSTEM_GRANULARITY_FILEGDB_2)" -#! END_DEST_HEADER -#! START_WB_HEADER -DEFAULT_MACRO DestDataset -INCLUDE [ if {"$(DestDataset)" != ""} { \ - puts {DEFAULT_MACRO DestDataset_FILEGDB_2 $(DestDataset)} \ - } ] -#! END_WB_HEADER -#! START_DEST_HEADER FILEGDB FILEGDB_2 -DEFAULT_MACRO DestDataset_FILEGDB_2 $(FolderOut)\output -FILEGDB_2_DATASET "$(DestDataset_FILEGDB_2)" -#! END_DEST_HEADER -#! START_WB_HEADER -#! END_WB_HEADER - -#! 
END_HEADER - -LOG_FILENAME "$(FME_MF_DIR)fme_extraction_new_params.log" -LOG_APPEND NO -LOG_MAX_FEATURES 200 -LOG_MAX_RECORDED_FEATURES 200 -FME_REPROJECTION_ENGINE FME -FME_IMPLICIT_CSMAP_REPROJECTION_MODE Auto -FME_GEOMETRY_HANDLING Enhanced -FME_STROKE_MAX_DEVIATION 0 -LOG_FILTER_MASK -1 -DEFAULT_MACRO DATASET_KEYWORD_GEODATABASE_FILE_1 GEODATABASE_FILE_1 -DEFAULT_MACRO DATASET_KEYWORD_FILEGDB_1 FILEGDB_1 -DEFAULT_MACRO DATASET_KEYWORD_ESRISHAPE_1 ESRISHAPE_1 -DEFAULT_MACRO DATASET_KEYWORD_FILEGDB_2 FILEGDB_2 -# ------------------------------------------------------------------------- - -GEODATABASE_FILE_1_READER_META_ATTRIBUTES fme_feature_type - -# ------------------------------------------------------------------------- - -FILEGDB_1_READER_META_ATTRIBUTES fme_feature_type - -# ------------------------------------------------------------------------- - -MULTI_READER_CONTINUE_ON_READER_FAILURE No - -# ------------------------------------------------------------------------- - -MACRO WORKSPACE_NAME fme_extraction_new_params -MACRO FME_VIEWER_APP fmedatainspector -# ------------------------------------------------------------------------- - -FACTORY_DEF * RoutingFactory FACTORY_NAME "Router and Unexpected Input Remover" COMMAND_PARM_EVALUATION SINGLE_PASS INPUT FEATURE_TYPE * ROUTE FILEGDB FILEGDB_1::Products multi_reader_keyword,$(DATASET_KEYWORD_FILEGDB_1) TO FME_GENERIC ::Products ALIAS_GEOMETRY MERGE_INPUT Yes OUTPUT ROUTED FEATURE_TYPE * -GEODATABASE_FILE_1_MERGE_DEF GEODATABASE_FILE_1::Products EXACT Products -FILEGDB_1_MERGE_DEF FILEGDB_1::Products EXACT Products DEFLINE filegdb_type,geodb_no_geom,filegdb_where,,OBJECTID,objectid,guid,text8000,fc,text8000 -# ------------------------------------------------------------------------- - -FACTORY_DEF * TeeFactory FACTORY_NAME "Products (FILEGDB_1) Splitter" INPUT FEATURE_TYPE Products multi_reader_keyword $(DATASET_KEYWORD_FILEGDB_1) OUTPUT FEATURE_TYPE Products_FILEGDB_1 -# ------------------------------------------------------------------------- - -FACTORY_DEF * TeeFactory FACTORY_NAME "Products (GEODATABASE_FILE_1) Splitter" INPUT FEATURE_TYPE Products multi_reader_keyword $(DATASET_KEYWORD_GEODATABASE_FILE_1) OUTPUT FEATURE_TYPE Products_GEODATABASE_FILE_1 -DEFAULT_MACRO WB_CURRENT_CONTEXT -# ------------------------------------------------------------------------- - -Tcl2 proc Creator_CoordSysRemover {} { global FME_CoordSys; set FME_CoordSys {}; } -MACRO Creator_XML NOT_ACTIVATED -MACRO Creator_CLASSIC NOT_ACTIVATED -MACRO Creator_2D3D 2D_GEOMETRY -MACRO Creator_COORDS -INCLUDE [ if { {Geometry Object} == {Geometry Object} } { puts {MACRO Creator_XML *} } ] -INCLUDE [ if { {Geometry Object} == {2D Coordinate List} } { puts {MACRO Creator_2D3D 2D_GEOMETRY}; puts {MACRO Creator_CLASSIC *} } ] -INCLUDE [ if { {Geometry Object} == {3D Coordinate List} } { puts {MACRO Creator_2D3D 3D_GEOMETRY}; puts {MACRO Creator_CLASSIC *} } ] -INCLUDE [ if { {Geometry Object} == {2D Min/Max Box} } { set comment { We need to turn the COORDS which are minX minY maxX maxY into a full polygon list of coordinates }; set splitCoords [split [string trim {}]]; if { [llength $splitCoords] > 4} { set trimmedCoords {}; foreach item $splitCoords { if { $item != {} } {lappend trimmedCoords $item} }; set splitCoords $trimmedCoords; }; if { [llength $splitCoords] != 4 } { error {Creator: Coordinate list is expected to be a space delimited list of four numbers as 'minx miny maxx maxy' - `' is invalid}; }; set minX [lindex $splitCoords 0]; set minY [lindex 
$splitCoords 1]; set maxX [lindex $splitCoords 2]; set maxY [lindex $splitCoords 3]; puts "MACRO Creator_COORDS $minX $minY $minX $maxY $maxX $maxY $maxX $minY $minX $minY"; puts {MACRO Creator_2D3D 2D_GEOMETRY}; puts {MACRO Creator_CLASSIC *} } ] -FACTORY_DEF $(Creator_XML) CreationFactory FACTORY_NAME Creator_XML_Creator CREATE_AT_END no OUTPUT FEATURE_TYPE _____CREATED______ @Geometry(FROM_ENCODED_STRING,?xmlversion=1.0encoding=US_ASCIIstandalone=no?geometrydimension=2nullgeometry) -FACTORY_DEF $(Creator_CLASSIC) CreationFactory FACTORY_NAME Creator_CLASSIC_Creator $(Creator_2D3D) $(Creator_COORDS) CREATE_AT_END no OUTPUT FEATURE_TYPE _____CREATED______ -FACTORY_DEF * TeeFactory FACTORY_NAME Creator_Cloner INPUT FEATURE_TYPE _____CREATED______ NUMBER_OF_COPIES 1 COPY_NUMBER_ATTRIBUTE "_creation_instance" OUTPUT FEATURE_TYPE Creator_CREATED @Tcl2(Creator_CoordSysRemover) @CoordSys() fme_feature_type Creator - -FACTORY_DEF * BranchingFactory FACTORY_NAME "Creator_CREATED Brancher -1 4" INPUT FEATURE_TYPE Creator_CREATED TARGET_FACTORY "$(WB_CURRENT_CONTEXT)_CREATOR_BRANCH_TARGET" OUTPUT PASSED FEATURE_TYPE * @RemoveAttributes("Creator_CREATED Brancher -1 4".BranchingFactory.Count) -# ------------------------------------------------------------------------- - -Tcl2 proc Creator_2_CoordSysRemover {} { global FME_CoordSys; set FME_CoordSys {}; } -MACRO Creator_2_XML NOT_ACTIVATED -MACRO Creator_2_CLASSIC NOT_ACTIVATED -MACRO Creator_2_2D3D 2D_GEOMETRY -MACRO Creator_2_COORDS -INCLUDE [ if { {Geometry Object} == {Geometry Object} } { puts {MACRO Creator_2_XML *} } ] -INCLUDE [ if { {Geometry Object} == {2D Coordinate List} } { puts {MACRO Creator_2_2D3D 2D_GEOMETRY}; puts {MACRO Creator_2_CLASSIC *} } ] -INCLUDE [ if { {Geometry Object} == {3D Coordinate List} } { puts {MACRO Creator_2_2D3D 3D_GEOMETRY}; puts {MACRO Creator_2_CLASSIC *} } ] -INCLUDE [ if { {Geometry Object} == {2D Min/Max Box} } { set comment { We need to turn the COORDS which are minX minY maxX maxY into a full polygon list of coordinates }; set splitCoords [split [string trim {}]]; if { [llength $splitCoords] > 4} { set trimmedCoords {}; foreach item $splitCoords { if { $item != {} } {lappend trimmedCoords $item} }; set splitCoords $trimmedCoords; }; if { [llength $splitCoords] != 4 } { error {Creator_2: Coordinate list is expected to be a space delimited list of four numbers as 'minx miny maxx maxy' - `' is invalid}; }; set minX [lindex $splitCoords 0]; set minY [lindex $splitCoords 1]; set maxX [lindex $splitCoords 2]; set maxY [lindex $splitCoords 3]; puts "MACRO Creator_2_COORDS $minX $minY $minX $maxY $maxX $maxY $maxX $minY $minX $minY"; puts {MACRO Creator_2_2D3D 2D_GEOMETRY}; puts {MACRO Creator_2_CLASSIC *} } ] -FACTORY_DEF $(Creator_2_XML) CreationFactory FACTORY_NAME Creator_2_XML_Creator CREATE_AT_END no OUTPUT FEATURE_TYPE _____CREATED______ @Geometry(FROM_ENCODED_STRING,?xmlversion=1.0encoding=US_ASCIIstandalone=no?geometrydimension=2nullgeometry) -FACTORY_DEF $(Creator_2_CLASSIC) CreationFactory FACTORY_NAME Creator_2_CLASSIC_Creator $(Creator_2_2D3D) $(Creator_2_COORDS) CREATE_AT_END no OUTPUT FEATURE_TYPE _____CREATED______ -FACTORY_DEF * TeeFactory FACTORY_NAME Creator_2_Cloner INPUT FEATURE_TYPE _____CREATED______ NUMBER_OF_COPIES 1 COPY_NUMBER_ATTRIBUTE "_creation_instance" OUTPUT FEATURE_TYPE Creator_2_CREATED @Tcl2(Creator_2_CoordSysRemover) @CoordSys() fme_feature_type Creator_2 - -FACTORY_DEF * BranchingFactory FACTORY_NAME "Creator_2_CREATED Brancher -1 37" INPUT FEATURE_TYPE Creator_2_CREATED 
TARGET_FACTORY "$(WB_CURRENT_CONTEXT)_CREATOR_BRANCH_TARGET" OUTPUT PASSED FEATURE_TYPE * @RemoveAttributes("Creator_2_CREATED Brancher -1 37".BranchingFactory.Count) -# ------------------------------------------------------------------------- -FACTORY_DEF * TeeFactory FACTORY_NAME "$(WB_CURRENT_CONTEXT)_CREATOR_BRANCH_TARGET" INPUT FEATURE_TYPE * OUTPUT FEATURE_TYPE * -# ------------------------------------------------------------------------- - -FACTORY_DEF * JSONQueryFactory FACTORY_NAME JSONFlattener INPUT FEATURE_TYPE Creator_2_CREATED MODE FLATTEN PREF_STRING "json_" JSON_DOCUMENT "@EvaluateExpression(FDIV,STRING_ENCODED,$(Parameters$encode),JSONFlattener)" EXPLODE_QUERY "json" RECURSIVE_FLATTEN YES ERROR_LIST_ATTR "_json_error" OUTPUT EXPLODED FEATURE_TYPE JSONFlattener_OUTPUT OUTPUT REJECTED FEATURE_TYPE JSONFlattener_ - -FACTORY_DEF * TeeFactory FACTORY_NAME "JSONFlattener Output Nuker" INPUT FEATURE_TYPE JSONFlattener_ -DEFAULT_MACRO _WB_BYPASS_TERMINATION No -FACTORY_DEF * TeeFactory FACTORY_NAME JSONFlattener_ INPUT FEATURE_TYPE JSONFlattener_ OUTPUT FAILED FEATURE_TYPE * @Abort(ENCODED, JSONFlatteneroutputaRejectedfeature.TocontinueiffeaturesarerejectedchangetheworkspaceparameterRejectedFeatureHandlingtoContinueTranslation) -# ------------------------------------------------------------------------- - -DEFAULT_MACRO WB_CURRENT_CONTEXT -MACRO Parameter_format_VAR FORMAT -INCLUDE [ if {{Global} == {Local}} { puts {MACRO Parameter_format_VAR $(WB_CURRENT_CONTEXT)_FORMAT}; } ] -FACTORY_DEF * TeeFactory FACTORY_NAME Parameter_format INPUT FEATURE_TYPE JSONFlattener_OUTPUT OUTPUT FEATURE_TYPE Parameter_format_OUTPUT @GlobalVariable(fme_encoded,$(Parameter_format_VAR),"@EvaluateExpression(FDIV,STRING_ENCODED,Valuejson_FORMAT,Parameter_format)") - -# ------------------------------------------------------------------------- - -DEFAULT_MACRO WB_CURRENT_CONTEXT -MACRO Parameter_format_2_VAR SELECTION -INCLUDE [ if {{Global} == {Local}} { puts {MACRO Parameter_format_2_VAR $(WB_CURRENT_CONTEXT)_SELECTION}; } ] -FACTORY_DEF * TeeFactory FACTORY_NAME Parameter_format_2 INPUT FEATURE_TYPE Parameter_format_OUTPUT OUTPUT FEATURE_TYPE Parameter_format_2_OUTPUT @GlobalVariable(fme_encoded,$(Parameter_format_2_VAR),"@EvaluateExpression(FDIV,STRING_ENCODED,Valuejson_SELECTION,Parameter_format_2)") - -# ------------------------------------------------------------------------- - -DEFAULT_MACRO WB_CURRENT_CONTEXT -MACRO Parameter_projection_VAR PROJECTION_EPSG -INCLUDE [ if {{Global} == {Local}} { puts {MACRO Parameter_projection_VAR $(WB_CURRENT_CONTEXT)_PROJECTION_EPSG}; } ] -FACTORY_DEF * TeeFactory FACTORY_NAME Parameter_projection INPUT FEATURE_TYPE Parameter_format_2_OUTPUT OUTPUT FEATURE_TYPE Parameter_projection_OUTPUT @GlobalVariable(fme_encoded,$(Parameter_projection_VAR),"@EvaluateExpression(FDIV,STRING_ENCODED,FME_CONDITIONAL:DEFAULT_VALUE'AbortNovalidprojectiondefined'BOOL_OP;OR;COMPOSITE_TEST;;TEST Valuejson_PROJECTION = SWITZERLAND'EPSG:21781'BOOL_OP;OR;COMPOSITE_TEST;;TEST Valuejson_PROJECTION = SWITZERLAND95'EPSG:2056'FME_NUM_CONDITIONS3___,Parameter_projection)") - -# ------------------------------------------------------------------------- - -DEFAULT_MACRO WB_CURRENT_CONTEXT -MACRO Parameter_projection_2_VAR PROJECTION_NOM -INCLUDE [ if {{Global} == {Local}} { puts {MACRO Parameter_projection_2_VAR $(WB_CURRENT_CONTEXT)_PROJECTION_NOM}; } ] -FACTORY_DEF * TeeFactory FACTORY_NAME Parameter_projection_2 INPUT FEATURE_TYPE Parameter_projection_OUTPUT OUTPUT FEATURE_TYPE 
Parameter_projection_2_OUTPUT @GlobalVariable(fme_encoded,$(Parameter_projection_2_VAR),"@EvaluateExpression(FDIV,STRING_ENCODED,FME_CONDITIONAL:DEFAULT_VALUE'AbortNovalidprojectiondefined'BOOL_OP;OR;COMPOSITE_TEST;;TEST Valuejson_PROJECTION = SWITZERLAND'MN03'BOOL_OP;OR;COMPOSITE_TEST;;TEST Valuejson_PROJECTION = SWITZERLAND95'MN95'FME_NUM_CONDITIONS3___,Parameter_projection_2)") - -# ------------------------------------------------------------------------- - -DEFAULT_MACRO WB_CURRENT_CONTEXT -MACRO Parameter_format_3_VAR REMARK -INCLUDE [ if {{Global} == {Local}} { puts {MACRO Parameter_format_3_VAR $(WB_CURRENT_CONTEXT)_REMARK}; } ] -FACTORY_DEF * TeeFactory FACTORY_NAME Parameter_format_3 INPUT FEATURE_TYPE Parameter_projection_2_OUTPUT OUTPUT FEATURE_TYPE Parameter_format_3_OUTPUT @GlobalVariable(fme_encoded,$(Parameter_format_3_VAR),"@EvaluateExpression(FDIV,STRING_ENCODED,Valuejson_REMARK,Parameter_format_3)") - -# ------------------------------------------------------------------------- - -DEFAULT_MACRO WB_CURRENT_CONTEXT -MACRO VariableRetriever_4_VAR REMARK -INCLUDE [if {{Global} == {Local}} { puts {MACRO VariableRetriever_4_VAR $(WB_CURRENT_CONTEXT)_REMARK}; }; -FACTORY_DEF * TeeFactory FACTORY_NAME VariableRetriever_4 INPUT FEATURE_TYPE Parameter_format_3_OUTPUT OUTPUT FEATURE_TYPE VariableRetriever_4_OUTPUT @GlobalVariable(fme_encoded,$(VariableRetriever_4_VAR),fme_result_attribute,REMARK) - -# ------------------------------------------------------------------------- - -FACTORY_DEF * TeeFactory FACTORY_NAME Logger_Logger INPUT FEATURE_TYPE VariableRetriever_4_OUTPUT OUTPUT FEATURE_TYPE Logger_LOGGED @Log("Logger: @EvaluateExpression(FDIV,STRING,Remarkis:ValueREMARK,Logger)","20","20") @FeatureType(Logger_LOGGED) - -FACTORY_DEF * TeeFactory FACTORY_NAME "Logger LOGGED Output Nuker" INPUT FEATURE_TYPE Logger_LOGGED -# ------------------------------------------------------------------------- - -# First determine which function we are going to be using -- we can do -# this statically at parse time for efficiency sake - -MACRO GeometryReplacer_SetTempAttr -MACRO GeometryReplacer_Function -MACRO GeometryReplacer_RemoveTempAttr -MACRO GeometryReplacer_RemoveSourceAttr -INCLUDE [ set sourceText {@EvaluateExpression(FDIV,STRING_ENCODED,$(Perimeter$encode),GeometryReplacer)}; set decodedText [FME_DecodeText "$sourceText"]; if ![regexp {@EvaluateExpression\([^)]*STRING_ENCODED,@Value\(([^()]*)\),[^()]*\)} "$decodedText" dummy geomSrcAttr] { puts "MACRO GeometryReplacer_SetTempAttr @SupplyAttributes(ENCODED,__GeometryReplacerTemp__,\"$sourceText\")"; puts "MACRO GeometryReplacer_RemoveTempAttr @RemoveAttributes(__GeometryReplacerTemp__)"; set geomSrcAttr __GeometryReplacerTemp__; } elseif {{Yes} == {Yes} } { puts "MACRO GeometryReplacer_RemoveSourceAttr @RemoveAttributes(\"$geomSrcAttr\")"; }; if {{wkt} == {fmebinary} } { puts "MACRO GeometryReplacer_Function @Geometry(FROM_ATTRIBUTE_BINARY,\"$geomSrcAttr\")"; } elseif {{wkt} == {fmehex} } { puts "MACRO GeometryReplacer_Function @Geometry(FROM_ATTRIBUTE_BINARY_HEX,\"$geomSrcAttr\")"; } elseif {{wkt} == {fmexml} } { puts "MACRO GeometryReplacer_Function @Geometry(FROM_ATTRIBUTE,\"$geomSrcAttr\")"; } elseif {{wkt} == {fmexmlencoded} } { puts "MACRO GeometryReplacer_Function @Geometry(FROM_ENCODED_STRING,&\"$geomSrcAttr\")"; } elseif {{wkt} == {polyline} } { puts "MACRO GeometryReplacer_Function @Geometry(FROM_ATTRIBUTE_POLYLINE,\"$geomSrcAttr\")"; } elseif {{wkt} == {wkt} } { puts "MACRO GeometryReplacer_Function 
@OGCGeometry(from_attribute,wkt,\"$geomSrcAttr\",FLOAT_64)"; } elseif {{wkt} == {wkb} || {wkt} == {wkbhex} } { puts "MACRO GeometryReplacer_Function @OGCGeometry(from_attribute,wkt,\"$geomSrcAttr\")"; } elseif {{wkt} == {GEOJSON} || {wkt} == {ESRIJSON} } { puts "MACRO GeometryReplacer_Function @JSONGeometry(FROM_ATTRIBUTE,wkt,\"$geomSrcAttr\")"; } elseif {{wkt} == {GEORSS} } { puts "MACRO GeometryReplacer_Function @GeoRSSGeometry(FROM_ATTRIBUTE,\"$geomSrcAttr\")"; } elseif {{wkt} == {KML} } { puts "MACRO GeometryReplacer_Function @KMLGeometry(FROM_ATTRIBUTE,\"$geomSrcAttr\")"; } elseif {{wkt} == {GML} } { puts "MACRO GeometryReplacer_Function @GMLGeometry(FROM_ATTRIBUTE,\"$geomSrcAttr\",)"; } elseif {{wkt} == {geohash} } { puts "MACRO GeometryReplacer_Function @GeoHash(FROM_ATTRIBUTE,\"$geomSrcAttr\",)"; } elseif {{wkt} == {ogeosms} || {wkt} == {geosms} || {wkt} == {geo} } { puts "MACRO GeometryReplacer_Function @OGCGeometry(from_attribute,wkt,\"$geomSrcAttr\")"; } elseif {{wkt} == {mgrs}} { puts "MACRO GeometryReplacer_Function @MGRS(FROM_MGRS, , ,\"@EvaluateExpression(FDIV,STRING_ENCODED,$(Perimeter$encode),GeometryReplacer)\")" } elseif {{wkt} == {mssql} } { puts "MACRO GeometryReplacer_Function @SerializeGeometry(from_attribute,wkt,\"$geomSrcAttr\",)"; } elseif {{wkt} == {iso6709} } { puts "MACRO GeometryReplacer_Function @GeographicPoint(FROM_ATTRIBUTE,\"$geomSrcAttr\")"; } elseif {{wkt} == {qlikmaps} } { puts "MACRO GeometryReplacer_Function @Geometry(FROM_ATTRIBUTE_QLIKMAPS_POLYLINE,\"$geomSrcAttr\")"; }; ] -FACTORY_DEF * TeeFactory FACTORY_NAME GeometryReplacer INPUT FEATURE_TYPE Creator_CREATED OUTPUT FEATURE_TYPE ___TOREJECTOR___ @RenameAttributes(FME_STRICT,___fme_rejection_code___,fme_rejection_code) $(GeometryReplacer_SetTempAttr) $(GeometryReplacer_Function) $(GeometryReplacer_RemoveTempAttr) -FACTORY_DEF * TestFactory FACTORY_NAME GeometryReplacer_Rejector INPUT FEATURE_TYPE ___TOREJECTOR___ TEST @Value(fme_rejection_code) != "" OUTPUT PASSED FEATURE_TYPE GeometryReplacer_ @RemoveAttributes(___fme_rejection_code___) OUTPUT FAILED FEATURE_TYPE GeometryReplacer_OUTPUT @RenameAttributes(FME_STRICT,fme_rejection_code,___fme_rejection_code___) $(GeometryReplacer_RemoveSourceAttr) - -DEFAULT_MACRO _WB_BYPASS_TERMINATION No -FACTORY_DEF * TeeFactory FACTORY_NAME GeometryReplacer_ INPUT FEATURE_TYPE GeometryReplacer_ OUTPUT FAILED FEATURE_TYPE * @Abort(ENCODED, GeometryReplaceroutputaRejectedfeature.TocontinueiffeaturesarerejectedchangetheworkspaceparameterRejectedFeatureHandlingtoContinueTranslation) -# ------------------------------------------------------------------------- - -# Wipe out the source setting if it was untouched from the default setting - -DEFAULT_MACRO Reprojection_WGS84_MN95_SOURCE "EPSG:4326" -INCLUDE [if { {EPSG:4326} == {Read from feature} } { puts {MACRO Reprojection_WGS84_MN95_SOURCE} } ] -FACTORY_DEF * TeeFactory FACTORY_NAME Reprojection_WGS84_MN95 INPUT FEATURE_TYPE GeometryReplacer_OUTPUT OUTPUT FEATURE_TYPE Reprojection_WGS84_MN95_REPROJECTED @Reproject($(Reprojection_WGS84_MN95_SOURCE),"EPSG:2056",NearestNeighbor,PreserveCells,Reprojection_WGS84_MN95,"COORD_SYS_WARNING") - -# ------------------------------------------------------------------------- - -FACTORY_DEF * TestFactory FACTORY_NAME Tester INPUT FEATURE_TYPE Products_GEODATABASE_FILE_1 INPUT FEATURE_TYPE Products_FILEGDB_1 TEST @EvaluateExpression(FDIV,STRING_ENCODED,Valueguid,Tester) = @EvaluateExpression(FDIV,STRING_ENCODED,$(Product$encode),Tester) ENCODED BOOLEAN_OPERATOR OR OUTPUT PASSED 
FEATURE_TYPE Tester_PASSED -# ------------------------------------------------------------------------- - -FACTORY_DEF * TeeFactory FACTORY_NAME ListBuilder_fme_type_remover INPUT FEATURE_TYPE Tester_PASSED OUTPUT FEATURE_TYPE ListBuilder_no_fme_type @RemoveAttributes(fme_type,fme_geometry) -FACTORY_DEF * ListFactory FACTORY_NAME ListBuilder INPUT FEATURE_TYPE ListBuilder_no_fme_type LIST_NAME "list_fc{}" OUTPUT LIST FEATURE_TYPE ListBuilder_OUTPUT - -# ------------------------------------------------------------------------- - -Tcl2 set ListConcatenator__separator [FME_DecodeText {}]; regsub -all \"{}\" [FME_DecodeText {list_fc.fc}] \"{*}\" ListConcatenator__listPattern; -Tcl2 proc ListConcatenator__Concatenate {} { upvar \#0 ListConcatenator__separator separator ListConcatenator__listPattern listPattern; set allAttrs [lsort -dictionary [FME_AttributeNames]]; set keepEmptyParts [string equal {Yes} {No}]; set result {}; foreach attrName $allAttrs { if {[string match $listPattern $attrName]} { set attrValue [FME_GetAttribute $attrName]; if {$keepEmptyParts || $attrValue != {}} { lappend result $attrValue; }; }; }; FME_SetAttribute {"feature_type_to_read"} [join $result $separator]; } -FACTORY_DEF * TeeFactory FACTORY_NAME ListConcatenator INPUT FEATURE_TYPE ListBuilder_OUTPUT OUTPUT FEATURE_TYPE ListConcatenator_OUTPUT @Tcl2(ListConcatenator__Concatenate) - -# ------------------------------------------------------------------------- -MACRO FeatureReader_OUTPUT_PORTS_ENCODED -MACRO FeatureReader_DIRECTIVES USE_SEARCH_ENVELOPE,NO,CLIP_TO_ENVELOPE,NO,QUERY_FEATURE_TYPES_FOR_MERGE_FILTERS,Yes - -# Values come in encoded and come out decoded. -Tcl2 proc FeatureReader_findInListElseDefault { valueEncoded defaultValue } { if { [lsearch [split {$(FeatureReader_OUTPUT_PORTS_ENCODED)}] $valueEncoded] >= 0} { return FeatureReader_[FME_DecodeText $valueEncoded] } else { return FeatureReader_$defaultValue } } -# Always provide an INTERACTION, otherwise the factory defaults to ENVELOPE_INTERSECTS -INCLUDE [if { ( {NONE} == {} ) || ( {($INTERACT_OPTIONS)} == {} ) } { puts {MACRO FCTQUERY_INTERACTION_LINE FCTQUERY_INTERACTION NONE}; } else { puts {MACRO FCTQUERY_INTERACTION_LINE FCTQUERY_INTERACTION "NONE"}; } ] -# Consolidate the attribute merge options to what the factory expects -DEFAULT_MACRO FeatureReader_COMBINE_ATTRS -INCLUDE [ if { {RESULT_ONLY} == {MERGE} } { puts "MACRO FeatureReader_COMBINE_ATTRS "; } else { puts "MACRO FeatureReader_COMBINE_ATTRS RESULT_ONLY"; }; ] -FACTORY_DEF * QueryFactory FACTORY_NAME FeatureReader INPUT FEATURE_TYPE ListConcatenator_OUTPUT $(FCTQUERY_INTERACTION_LINE) QUERYFCT_TABLE_SEPARATOR SPACE COMBINE_ATTRIBUTES $(FeatureReader_COMBINE_ATTRS) QUERYFCT_ATTRIBUTE_PREFIX COMBINE_GEOMETRY RESULT_ONLY ENABLE_CACHE NO READER_TYPE FILEGDB READER_DATASET "@EvaluateExpression(FDIV,STRING_ENCODED,$(INPUT_GDB$encode),FeatureReader)" QUERYFCT_IDS "@EvaluateExpression(FDIV,STRING_ENCODED,Valuefeature_type_to_read,FeatureReader)" READER_DIRECTIVES META_MACROS,SourceQUERY_FEATURE_TYPES_FOR_MERGE_FILTERSYesSourceFME_CONNECTION_GROUPSourceEXPOSE_ATTRS_GROUPSourceFILEGDB_EXPOSE_FORMAT_ATTRSSourceUSE_SEARCH_ENVELOPENOSourceSEARCH_ENVELOPE_MINX0SourceSEARCH_ENVELOPE_MINY0SourceSEARCH_ENVELOPE_MAXX0SourceSEARCH_ENVELOPE_MAXY0SourceSEARCH_ENVELOPE_COORDINATE_SYSTEMSourceCLIP_TO_ENVELOPENO,METAFILE,FILEGDB QUERYFCT_OUTPUT "BASED_ON_CONNECTIONS" CONTINUE_ON_READER_ERROR YES READER_PARAMS_WWJD $(FeatureReader_DIRECTIVES) OUTPUT RESULT FEATURE_TYPE * 
@SupplyAttributes(fme_feature_type,@FeatureType()) @Tcl2("set FME_FeatureType [FeatureReader_findInListElseDefault [FME_EncodeText $FME_FeatureType] {}]") @Transform(FILEGDB,FME_GENERIC,ALIAS_GEOMETRY) OUTPUT SCHEMA FEATURE_TYPE FeatureReader_ OUTPUT READER_ERROR FEATURE_TYPE FeatureReader_ - -FACTORY_DEF * TeeFactory FACTORY_NAME "FeatureReader Splitter" INPUT FEATURE_TYPE FeatureReader_ OUTPUT FEATURE_TYPE FeatureReader__0_DYpm1DTmZVo= OUTPUT FEATURE_TYPE FeatureReader__1_O2b4R8WeWNw= -DEFAULT_MACRO _WB_BYPASS_TERMINATION No -FACTORY_DEF * TeeFactory FACTORY_NAME FeatureReader_ INPUT FEATURE_TYPE FeatureReader_ OUTPUT FAILED FEATURE_TYPE * @Abort(ENCODED, FeatureReaderoutputaRejectedfeature.TocontinueiffeaturesarerejectedchangetheworkspaceparameterRejectedFeatureHandlingtoContinueTranslation) -# ------------------------------------------------------------------------- - -FACTORY_DEF * SpatialFilterFactory FACTORY_NAME SpatialFilter INPUT BASE FEATURE_TYPE Reprojection_WGS84_MN95_REPROJECTED INPUT CANDIDATE FEATURE_TYPE FeatureReader_ PREDICATE "INTERSECTS" USE_BOUNDING_BOX NO MULTIPLE_BASES YES MERGE_BASE_ATTR YES ATTR_ACCUM_MODE "HANDLE_CONFLICT" ATTR_CONFLICT_RES "CANDIDATE_IF_CONFLICT" BASE_ATTR_PREFIX "" PREDICATE_ATTR "_predicate" BOOLEAN_OPERATOR OR REJECT_INVALID_GEOM Yes REJECT_INVALID_PREDICATES Yes CUSTOM_MULTI_HANDLING Yes DIMENSION 2 CURVE_BOUNDARY_RULE ENDPOINTS_MOD2 OUTPUT PASSED FEATURE_TYPE SpatialFilter_PASSED OUTPUT REJECTED FEATURE_TYPE SpatialFilter_ - -DEFAULT_MACRO _WB_BYPASS_TERMINATION No -FACTORY_DEF * TeeFactory FACTORY_NAME SpatialFilter_ INPUT FEATURE_TYPE SpatialFilter_ OUTPUT FAILED FEATURE_TYPE * @Abort(ENCODED, SpatialFilteroutputaRejectedfeature.TocontinueiffeaturesarerejectedchangetheworkspaceparameterRejectedFeatureHandlingtoContinueTranslation) -# ------------------------------------------------------------------------- - -DEFAULT_MACRO WB_CURRENT_CONTEXT -MACRO VariableRetriever_VAR PROJECTION_EPSG -INCLUDE [if {{Global} == {Local}} { puts {MACRO VariableRetriever_VAR $(WB_CURRENT_CONTEXT)_PROJECTION_EPSG}; }; -FACTORY_DEF * TeeFactory FACTORY_NAME VariableRetriever INPUT FEATURE_TYPE SpatialFilter_PASSED OUTPUT FEATURE_TYPE VariableRetriever_OUTPUT @GlobalVariable(fme_encoded,$(VariableRetriever_VAR),fme_result_attribute,PROJECTION_EPSG) - -# ------------------------------------------------------------------------- - -DEFAULT_MACRO WB_CURRENT_CONTEXT -MACRO VariableRetriever_3_VAR PROJECTION_NOM -INCLUDE [if {{Global} == {Local}} { puts {MACRO VariableRetriever_3_VAR $(WB_CURRENT_CONTEXT)_PROJECTION_NOM}; }; -FACTORY_DEF * TeeFactory FACTORY_NAME VariableRetriever_3 INPUT FEATURE_TYPE VariableRetriever_OUTPUT OUTPUT FEATURE_TYPE VariableRetriever_3_OUTPUT @GlobalVariable(fme_encoded,$(VariableRetriever_3_VAR),fme_result_attribute,PROJECTION_NOM) - -# ------------------------------------------------------------------------- - -DEFAULT_MACRO WB_CURRENT_CONTEXT -MACRO VariableRetriever_2_VAR FORMAT -INCLUDE [if {{Global} == {Local}} { puts {MACRO VariableRetriever_2_VAR $(WB_CURRENT_CONTEXT)_FORMAT}; }; -FACTORY_DEF * TeeFactory FACTORY_NAME VariableRetriever_2 INPUT FEATURE_TYPE VariableRetriever_3_OUTPUT OUTPUT FEATURE_TYPE VariableRetriever_2_OUTPUT @GlobalVariable(fme_encoded,$(VariableRetriever_2_VAR),fme_result_attribute,FORMAT) - -# ------------------------------------------------------------------------- - -# Wipe out the source setting if it was untouched from the default setting - -DEFAULT_MACRO Reprojector_SOURCE "Read from feature" -INCLUDE 
[if { {Read from feature} == {Read from feature} } { puts {MACRO Reprojector_SOURCE} } ] -FACTORY_DEF * TeeFactory FACTORY_NAME Reprojector INPUT FEATURE_TYPE VariableRetriever_2_OUTPUT OUTPUT FEATURE_TYPE Reprojector_REPROJECTED @Reproject($(Reprojector_SOURCE),"@EvaluateExpression(FDIV,STRING,ValuePROJECTION_EPSG,Reprojector)",NearestNeighbor,PreserveCells,Reprojector,"COORD_SYS_WARNING") - -# ------------------------------------------------------------------------- - -FACTORY_DEF * TeeFactory FACTORY_NAME AttributeFilter INPUT FEATURE_TYPE Reprojector_REPROJECTED OUTPUT FEATURE_TYPE AttributeFilter_FILTER - -FACTORY_DEF * AttributeFilterFactory FACTORY_NAME AttributeFilter_Filter COMMAND_PARM_EVALUATION SINGLE_PASS INPUT FEATURE_TYPE AttributeFilter_FILTER FILTER_ATTRIBUTE FORMAT FILTER_VALUES EMPTY MISSING NULL UNFILTERED GDB SHP OUTPUT EMPTY FEATURE_TYPE AttributeFilter_EMPTY OUTPUT MISSING FEATURE_TYPE AttributeFilter_MISSING OUTPUT NULL FEATURE_TYPE AttributeFilter_NULL OUTPUT UNFILTERED FEATURE_TYPE AttributeFilter_UNFILTERED OUTPUT GDB FEATURE_TYPE AttributeFilter_GDB OUTPUT SHP FEATURE_TYPE AttributeFilter_SHP -FACTORY_DEF * TeeFactory FACTORY_NAME "AttributeFilter EMPTY Output Nuker" INPUT FEATURE_TYPE AttributeFilter_EMPTY -FACTORY_DEF * TeeFactory FACTORY_NAME "AttributeFilter MISSING Output Nuker" INPUT FEATURE_TYPE AttributeFilter_MISSING -FACTORY_DEF * TeeFactory FACTORY_NAME "AttributeFilter NULL Output Nuker" INPUT FEATURE_TYPE AttributeFilter_NULL -FACTORY_DEF * TeeFactory FACTORY_NAME "AttributeFilter UNFILTERED Output Nuker" INPUT FEATURE_TYPE AttributeFilter_UNFILTERED -# ------------------------------------------------------------------------- -MACRO OUTPUT_POINT_FTYPE not_selected -MACRO OUTPUT_POINT_FTYPE GeometryFilter_POINT -MACRO OUTPUT_INSTANCE_FTYPE not_selected - -Lookup GeometryFilter_RouterTable "" "not_selected" fme_point $(OUTPUT_POINT_FTYPE) - -MACRO COLLECTION_HANDLING NOT_THIS_TIME -MACRO INSTANCE_FILTERING * -INCLUDE [ if 0 { puts "MACRO COLLECTION_HANDLING *"; if { puts "MACRO INSTANCE_FILTERING NOT_THIS_TIME"; }; }; if {{$(OUTPUT_INSTANCE_FTYPE)} == "not_selected"} { puts "MACRO INSTANCE_FILTERING NOT_THIS_TIME"; }; ] -FACTORY_DEF * TeeFactory FACTORY_NAME GeometryFilter_InputPassThrough INPUT FEATURE_TYPE AttributeFilter_GDB OUTPUT FEATURE_TYPE GeometryFilter___Input___ @Geometry(FIX_FMETYPE_FMEGEOMETRY) -FACTORY_DEF $(INSTANCE_FILTERING) TestFactory FACTORY_NAME GeometryFilter_InstanceTest INPUT FEATURE_TYPE GeometryFilter___Input___ TEST @Geometry(IS_GEOMETRY_INSTANCE) == 1 OUTPUT PASSED FEATURE_TYPE $(OUTPUT_INSTANCE_FTYPE) OUTPUT FAILED FEATURE_TYPE GeometryFilter___Input___ -FACTORY_DEF $(COLLECTION_HANDLING) TestFactory FACTORY_NAME GeometryFilter_CollectionTest INPUT FEATURE_TYPE GeometryFilter___Input___ TEST &fme_type == "fme_collection" OUTPUT FAILED FEATURE_TYPE GeometryFilter___Not_Collection___ OUTPUT PASSED FEATURE_TYPE GeometryFilter___Collection___ -FACTORY_DEF $(COLLECTION_HANDLING) DeaggregateFactory FACTORY_NAME GeometryFilter_Deagg INPUT FEATURE_TYPE GeometryFilter___Collection___ HOMOGENIZE_COLLECTIONS YES RECURSIVE YES SET_FME_TYPE YES INSTANTIATE_GEOMETRY_INSTANCES_NEW OUTPUT POINT FEATURE_TYPE GeometryFilter___HomogeneousAggregate___ OUTPUT LINE FEATURE_TYPE GeometryFilter___HomogeneousAggregate___ OUTPUT POLYGON FEATURE_TYPE GeometryFilter___HomogeneousAggregate___ OUTPUT DONUT FEATURE_TYPE GeometryFilter___HomogeneousAggregate___ OUTPUT AGGREGATE FEATURE_TYPE GeometryFilter___HomogeneousAggregate___ -FACTORY_DEF * 
TeeFactory FACTORY_NAME GeometryFilter_RouterPrepper INPUT FEATURE_TYPE GeometryFilter___HomogeneousAggregate___ INPUT FEATURE_TYPE GeometryFilter___Not_Collection___ INPUT FEATURE_TYPE GeometryFilter___Input___ OUTPUT FEATURE_TYPE GeometryFilter___Prepped___ GeometryFilter_7dd27183_2f22_4f9a_b0a0_67f98fb533332_targetFeatureType @Lookup(GeometryFilter_RouterTable,&fme_type) -FACTORY_DEF * TestFactory FACTORY_NAME GeometryFilter_Router INPUT FEATURE_TYPE GeometryFilter___Prepped___ TEST @Value(GeometryFilter_7dd27183_2f22_4f9a_b0a0_67f98fb533332_targetFeatureType) != "not_selected" OUTPUT PASSED FEATURE_TYPE * @FeatureType(@Value(GeometryFilter_7dd27183_2f22_4f9a_b0a0_67f98fb533332_targetFeatureType)) @RemoveAttributes(GeometryFilter_7dd27183_2f22_4f9a_b0a0_67f98fb533332_targetFeatureType) OUTPUT FAILED FEATURE_TYPE GeometryFilter_ @RemoveAttributes(GeometryFilter_7dd27183_2f22_4f9a_b0a0_67f98fb533332_targetFeatureType) - -# ------------------------------------------------------------------------- - -FACTORY_DEF * DeaggregateFactory FACTORY_NAME Deaggregator INPUT FEATURE_TYPE GeometryFilter_POINT RECURSIVE No SPLIT_COMPOSITES No INSTANTIATE_GEOMETRY_INSTANCES_NEW No SET_FME_TYPE Yes PART_NUMBER_FIELD _part_number GEOMETRY_NAME_FIELD _geometry_name OUTPUT POINT FEATURE_TYPE Deaggregator_DEAGGREGATED OUTPUT LINE FEATURE_TYPE Deaggregator_DEAGGREGATED OUTPUT POLYGON FEATURE_TYPE Deaggregator_DEAGGREGATED OUTPUT DONUT FEATURE_TYPE Deaggregator_DEAGGREGATED OUTPUT AGGREGATE FEATURE_TYPE Deaggregator_DEAGGREGATED - -# ------------------------------------------------------------------------- - -FACTORY_DEF * RoutingFactory FACTORY_NAME "Destination Feature Type Routing Correlator" COMMAND_PARM_EVALUATION SINGLE_PASS INPUT FEATURE_TYPE * ROUTE FME_GENERIC FeatureReader__0_DYpm1DTmZVo= TO ESRISHAPE __GO_TO_FINAL_OUTPUT_ROUTER__ multi_writer_id,0,SupplyAttributesENCODEDfme_template_feature_typeShapefile1,SupplyAttributesENCODED__wb_out_feat_type__Shapefile1,EvaluateExpressionATTR_CREATE_EXPR__wb_out_feat_type__atValueopenparenfme_feature_typecloseparen_atValueopenparenPROJECTION_NOMcloseparen_$(OrderLabel$encode$encode)Shapefile1 GEOMETRY ROUTE FME_GENERIC AttributeFilter_SHP TO ESRISHAPE __GO_TO_FINAL_OUTPUT_ROUTER__ multi_writer_id,0,SupplyAttributesENCODEDfme_template_feature_typeShapefile1,SupplyAttributesENCODED__wb_out_feat_type__Shapefile1,EvaluateExpressionATTR_CREATE_EXPR__wb_out_feat_type__atValueopenparenfme_feature_typecloseparen_atValueopenparenPROJECTION_NOMcloseparen_$(OrderLabel$encode$encode)Shapefile1 GEOMETRY ROUTE FME_GENERIC FeatureReader__1_O2b4R8WeWNw= TO FILEGDB __GO_TO_FINAL_OUTPUT_ROUTER__ multi_writer_id,1,SupplyAttributesENCODEDfme_template_feature_typeProducts,SupplyAttributesENCODED__wb_out_feat_type__Products,EvaluateExpressionATTR_CREATE_EXPR__wb_out_feat_type__atValueopenparenfme_feature_typecloseparen_atValueopenparenPROJECTION_NOMcloseparen_$(OrderLabel$encode$encode)Products GEOMETRY ROUTE FME_GENERIC Deaggregator_DEAGGREGATED TO FILEGDB __GO_TO_FINAL_OUTPUT_ROUTER__ multi_writer_id,1,SupplyAttributesENCODEDfme_template_feature_typeProducts,SupplyAttributesENCODED__wb_out_feat_type__Products,EvaluateExpressionATTR_CREATE_EXPR__wb_out_feat_type__atValueopenparenfme_feature_typecloseparen_atValueopenparenPROJECTION_NOMcloseparen_$(OrderLabel$encode$encode)Products GEOMETRY ROUTE FME_GENERIC GeometryFilter_ TO FILEGDB __GO_TO_FINAL_OUTPUT_ROUTER__ 
multi_writer_id,1,SupplyAttributesENCODEDfme_template_feature_typeProducts,SupplyAttributesENCODED__wb_out_feat_type__Products,EvaluateExpressionATTR_CREATE_EXPR__wb_out_feat_type__atValueopenparenfme_feature_typecloseparen_atValueopenparenPROJECTION_NOMcloseparen_$(OrderLabel$encode$encode)Products GEOMETRY FEATURE_TYPE_ATTRIBUTE __wb_out_feat_type__ OUTPUT ROUTED FEATURE_TYPE * OUTPUT NOT_ROUTED FEATURE_TYPE __nuke_me__ @Tcl2("FME_StatMessage 818059 [FME_GetAttribute fme_template_feature_type] 818060 818061 fme_warn") -# ------------------------------------------------------------------------- - -FACTORY_DEF * TeeFactory FACTORY_NAME "Final Output Nuker" INPUT FEATURE_TYPE __nuke_me__ - -# ------------------------------------------------------------------------- -ESRISHAPE_1_DEF_TEMPLATE Shapefile1 SHAPE_GEOMETRY {FME_GEN_GEOMETRY} shape_dimension auto fme_schema_feature_first Yes fme_schema_name_expression @EvaluateExpression(FDIV,STRING,Valuefme_feature_type,Valuefme_feature_type_ValuePROJECTION_NOM_$(OrderLabel$encode)) fme_schema_attributes_to_remove "" -# ------------------------------------------------------------------------- -FILEGDB_2_DEF_TEMPLATE Products filegdb_type {FME_GEN_GEOMETRY} filegdb_drop_table "" filegdb_truncate_table "" fme_feature_operation INSERT fme_table_handling CREATE_IF_MISSING filegdb_object_id_field OBJECTID filegdb_object_id_alias OBJECTID filegdb_shape_field SHAPE filegdb_shape_alias SHAPE filegdb_config_keyword DEFAULTS filegdb_xy_tolerance "" filegdb_z_tolerance 0.001 filegdb_m_tolerance 0.001 fme_schema_feature_first Yes fme_schema_name_expression @EvaluateExpression(FDIV,STRING,Valuefme_feature_type,Valuefme_feature_type_ValuePROJECTION_NOM_$(OrderLabel$encode)) fme_schema_attributes_to_remove "" diff --git a/fme/sample_script_fmeserver_old_params.fmw b/fme/sample_script_fmeserver_old_params.fmw deleted file mode 100644 index ca2bebfc..00000000 --- a/fme/sample_script_fmeserver_old_params.fmw +++ /dev/null @@ -1,2989 +0,0 @@ -#! [... several hundred repeated blank "-#!" workspace header lines elided ...]
-FME_PYTHON_VERSION 27 -GUI IGNORE SourceDataset_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_WHERE_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_TRANSLATE_SPATIAL_DATA_ONLY_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_RESOLVE_DOMAINS_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_RESOLVE_SUBTYPE_NAMES_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_IGNORE_NETWORK_INFO_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_IGNORE_RELATIONSHIP_INFO_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SPLIT_COMPLEX_EDGES_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SPLIT_MULTI_PART_ANNOS_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_EXPOSE_ATTRS_GROUP_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINX_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINY_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXX_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXY_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_CLIP_TO_ENVELOPE_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_GEODB_SHARED_RDR_ADV_PARM_GROUP_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SPLIT_COMPLEX_ANNOS_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_CACHE_MULTIPATCH_TEXTURES_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_FEATURE_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_ORDER_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_SEARCH_METHOD_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_CHECK_SIMPLE_GEOM_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_MERGE_FEAT_LINKED_ANNOS_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_BEGIN_SQL{0}_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_END_SQL{0}_GEODATABASE_FILE_1,GEODATABASE_FILE_IN_NETWORK_AUTHENTICATION_GEODATABASE_FILE_1,_GEODB_IN_SEARCH_ENVELOPE_COORDINATE_SYSTEM_GEODATABASE_FILE_1,_GEODB_IN_ALIAS_MODE_GEODATABASE_FILE_1,DestDataset_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_OVERWRITE_GEODB_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_DATASET_TEMPLATE_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_TEMPLATEFILE_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_TRANSACTION_TYPE_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_SIMPLIFY_GEOM_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_HAS_Z_VALUES_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_GEODB_SHARED_WRT_ADV_PARM_GROUP_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_REQUESTED_GEODATABASE_VERSION_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_DEFAULT_Z_VALUE_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_WRITER_MODE_GEODATABASE_FILE_2,GEODATABASE_F
ILE_OUT_TRANSACTION_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_TRANSACTION_INTERVAL_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_IGNORE_FAILED_FEATURE_ENTRY_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_MAX_NUMBER_FAILED_FEATURES_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_DUMP_FAILED_FEATURES_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_FFS_DUMP_FILE_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_ANNOTATION_UNITS_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_COMPRESS_AT_END_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_VALIDATE_FEATURES_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_SIMPLIFY_NETWORK_FEATURES_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_BEGIN_SQL{0}_GEODATABASE_FILE_2,GEODATABASE_FILE_OUT_END_SQL{0}_GEODATABASE_FILE_2,_GEODBOutMeasures_GEODATABASE_FILE_2,DestDataset_ESRISHAPE_1,ESRISHAPE_OUT_ENCODING_ESRISHAPE_1,ESRISHAPE_OUT_WRITE_SPATIAL_INDEX_ESRISHAPE_1,ESRISHAPE_OUT_COMPRESSED_SHAPE_FILE_ESRISHAPE_1,ESRISHAPE_OUT_DIMENSION_GROUP_ESRISHAPE_1,ESRISHAPE_OUT_SHAPE_WRT_ADV_PARM_GROUP_ESRISHAPE_1,ESRISHAPE_OUT_STRICT_COMPATIBILITY_ESRISHAPE_1,ESRISHAPE_OUT_PRESERVE_RING_VERTEX_ORDER_ESRISHAPE_1,_MEASURES_AS_Z_ESRISHAPE_1 -DEFAULT_MACRO Perimeter POLYGON((6.5838791192197 46.5044344288519,6.5838791192197 46.6025470130881,6.72082104901253 46.6025470130881,6.72082104901253 46.5044344288519)) -GUI TEXT Perimeter Polygone d'export (au format WKT) : -DEFAULT_MACRO Product a049fecb-30d9-9124-ed41-068b566a0855 -GUI TEXT Product Identifiant du produit : -DEFAULT_MACRO FolderOut D:\Temp\FME_test\OUTPUT -GUI DIRNAME FolderOut Répertoire de sortie : -DEFAULT_MACRO Parameters {"FORMAT" : "SHP","SELECTION" : "PASS_THROUGH","PROJECTION" : "SWITZERLAND95"} -GUI OPTIONAL TEXT Parameters Liste dynamique de paramètres au format json: -DEFAULT_MACRO INPUT_GDB D:\Temp\FME_test\GDB_INPUT\ASIT-VD.gdb -GUI IGNORE OPTIONAL SOURCE_GEODATABASE INPUT_GDB Géodatabase Esri Source : -INCLUDE [ if {{$(Perimeter)} == {}} { puts_real {Parameter 'Perimeter' must be given a value.}; exit 1; }; ] -INCLUDE [ if {{$(Product)} == {}} { puts_real {Parameter 'Product' must be given a value.}; exit 1; }; ] -INCLUDE [ if {{$(FolderOut)} == {}} { puts_real {Parameter 'FolderOut' must be given a value.}; exit 1; }; ] -#! START_HEADER -#! START_WB_HEADER -READER_TYPE GEODATABASE_FILE -READER_KEYWORD GEODATABASE_FILE_1 -READER_GEN_DIRECTIVES ALIAS_MODE,NONE,SEARCH_ENVELOPE_MINX,0,SPLIT_COMPLEX_ANNOS,no,RESOLVE_DOMAINS,no,CACHE_MULTIPATCH_TEXTURES,yes,SEARCH_ORDER,SPATIAL_FIRST,END_SQL0,,IGNORE_NETWORK_INFO,yes,WHERE,,SEARCH_FEATURE,,FME_CONNECTION_GROUP,,GEODB_SHARED_RDR_ADV_PARM_GROUP,,IGNORE_RELATIONSHIP_INFO,yes,EXPOSE_ATTRS_GROUP,,FEATURE_READ_MODE,Features,SEARCH_ENVELOPE_MAXX,0,CLIP_TO_ENVELOPE,NO,MERGE_FEAT_LINKED_ANNOS,no,SPLIT_COMPLEX_EDGES,no,SPLIT_MULTI_PART_ANNOS,no,BEGIN_SQL0,,SEARCH_ENVELOPE_MINY,0,QUERY_FEATURE_TYPES_FOR_MERGE_FILTERS,Yes,CHECK_SIMPLE_GEOM,no,RESOLVE_SUBTYPE_NAMES,yes,TABLELIST,Products,SEARCH_METHOD,GEODB_INTERSECTS,GEODATABASE_FILE_EXPOSE_FORMAT_ATTRS,,TRANSLATE_SPATIAL_DATA_ONLY,no,SEARCH_ENVELOPE_MAXY,0,USE_SEARCH_ENVELOPE,NO,_MERGE_SCHEMAS,YES - -WRITER_TYPE MULTI_WRITER -MULTI_WRITER_DATASET_ORDER BY_ID -MULTI_WRITER_FIRST_WRITER_ID 0 -MULTI_WRITER_TYPE{0} GEODATABASE_FILE -MULTI_WRITER_KEYWORD{0} GEODATABASE_FILE_2 -MULTI_WRITER_TYPE{1} ESRISHAPE -MULTI_WRITER_KEYWORD{1} ESRISHAPE_1 -#! END_WB_HEADER - -#! START_WB_HEADER -MACRO WB_KEYWORD "GEODATABASE_FILE_1" -#! END_WB_HEADER -#! 
START_SOURCE_HEADER GEODATABASE_FILE GEODATABASE_FILE_1 -# ============================================================================ -# The following GUI line prompts for a file to be used as the source. -# The user input is stored in a macro, which is then used to define -# the dataset to be read. -# The dataset this mapping file was generated from was: -#! END_SOURCE_HEADER -#! START_WB_HEADER -DEFAULT_MACRO SourceDataset -INCLUDE [ if {{$(SourceDataset)} != ""} { \ - puts {DEFAULT_MACRO SourceDataset_GEODATABASE_FILE_1 $(SourceDataset)} \ - } ] -#! END_WB_HEADER -#! START_SOURCE_HEADER GEODATABASE_FILE GEODATABASE_FILE_1 -DEFAULT_MACRO SourceDataset_GEODATABASE_FILE_1 $(INPUT_GDB) -GUI SOURCE_GEODATABASE SourceDataset_GEODATABASE_FILE_1 Source Esri File Geodatabase: -# Translation flags. -# -DEFAULT_MACRO GEODATABASE_FILE_IN_QUERY_FEATURE_TYPES_FOR_MERGE_FILTERS_GEODATABASE_FILE_1 Yes -GEODATABASE_FILE_1_QUERY_FEATURE_TYPES_FOR_MERGE_FILTERS "$(GEODATABASE_FILE_IN_QUERY_FEATURE_TYPES_FOR_MERGE_FILTERS_GEODATABASE_FILE_1)" -# ======================================================================== -# The following defines the global where clause to be used when retrieving -# features from the Geodatabase. -DEFAULT_MACRO GEODATABASE_FILE_IN_WHERE_GEODATABASE_FILE_1 -GEODATABASE_FILE_1_WHERE "$(GEODATABASE_FILE_IN_WHERE_GEODATABASE_FILE_1)" -GUI OPTIONAL TEXT GEODATABASE_FILE_IN_WHERE_GEODATABASE_FILE_1 WHERE Clause: -#============================================================================== -# Determines whether non-spatial data, such as tables, subtypes, domains, -# and relationship classes, get translated -DEFAULT_MACRO GEODATABASE_FILE_IN_TRANSLATE_SPATIAL_DATA_ONLY_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_TRANSLATE_SPATIAL_DATA_ONLY "$(GEODATABASE_FILE_IN_TRANSLATE_SPATIAL_DATA_ONLY_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_TRANSLATE_SPATIAL_DATA_ONLY_GEODATABASE_FILE_1 yes%no Spatial Data Only -#============================================================================== -# Determines whether to resolve domains -DEFAULT_MACRO GEODATABASE_FILE_IN_RESOLVE_DOMAINS_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_RESOLVE_DOMAINS "$(GEODATABASE_FILE_IN_RESOLVE_DOMAINS_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_RESOLVE_DOMAINS_GEODATABASE_FILE_1 yes%no Resolve Domains -#============================================================================== -# Determines whether to resolve the subtype associated with a feature to the -# text description linked to the subtype value. The text description will -# be added as an attribute on the feature. -DEFAULT_MACRO GEODATABASE_FILE_IN_RESOLVE_SUBTYPE_NAMES_GEODATABASE_FILE_1 yes -GEODATABASE_FILE_1_RESOLVE_SUBTYPE_NAMES "$(GEODATABASE_FILE_IN_RESOLVE_SUBTYPE_NAMES_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_RESOLVE_SUBTYPE_NAMES_GEODATABASE_FILE_1 yes%no Resolve Subtypes -#============================================================================== -# Determines whether to ignore the network info associated with a feature -# coming from a network feature class. When set to 'yes', junctions will be -# treated as point features, and edges will be treated as polyline features, -# with the geodb_type being set to geodb_point and geodb_polyline, respectively. -# Caution should be taken when changing the value for this keyword to a different -# value than was specified when creating the mapping file/workspace. 
-DEFAULT_MACRO GEODATABASE_FILE_IN_IGNORE_NETWORK_INFO_GEODATABASE_FILE_1 yes -GEODATABASE_FILE_1_IGNORE_NETWORK_INFO "$(GEODATABASE_FILE_IN_IGNORE_NETWORK_INFO_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_IGNORE_NETWORK_INFO_GEODATABASE_FILE_1 yes%no Ignore Network Info -# ======================================================================== -# Determines whether to ignore the relationship info associated with a feature -# coming from a feature class containing relationships. When set to 'yes', simple -# relationships will not be read, and attributed relationships will be read as -# tables. Caution should be taken when changing the value for this keyword to a -# different value than was specified when creating the mapping file/workspace. -DEFAULT_MACRO GEODATABASE_FILE_IN_IGNORE_RELATIONSHIP_INFO_GEODATABASE_FILE_1 yes -GEODATABASE_FILE_1_IGNORE_RELATIONSHIP_INFO "$(GEODATABASE_FILE_IN_IGNORE_RELATIONSHIP_INFO_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_IGNORE_RELATIONSHIP_INFO_GEODATABASE_FILE_1 yes%no Ignore Relationship Info -#============================================================================== -# Determines whether complex edge features should be split up and each edge -# element read as a separate feature. The default behaviour is to treat an -# entire edge *feature* as an FME feature, rather than an edge *element* as an -# FME feature. Returning edge elements ensures that all network connectivity -# information on complex edges is read. -DEFAULT_MACRO GEODATABASE_FILE_IN_SPLIT_COMPLEX_EDGES_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_SPLIT_COMPLEX_EDGES "$(GEODATABASE_FILE_IN_SPLIT_COMPLEX_EDGES_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_SPLIT_COMPLEX_EDGES_GEODATABASE_FILE_1 yes%no Split Complex Edges -#============================================================================== -# Determines whether annotation features should be split up and each part -# element read as a separate feature. The default behaviour is to treat an -# entire annotation *feature* as an FME feature, rather than an annotation -# *element* as an FME feature. Returning edge elements ensures that all -# rotation and positional information for each element is preserved. -DEFAULT_MACRO GEODATABASE_FILE_IN_SPLIT_MULTI_PART_ANNOS_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_SPLIT_MULTI_PART_ANNOS "$(GEODATABASE_FILE_IN_SPLIT_MULTI_PART_ANNOS_GEODATABASE_FILE_1)" -GUI CHECKBOX GEODATABASE_FILE_IN_SPLIT_MULTI_PART_ANNOS_GEODATABASE_FILE_1 yes%no Split Multi-Part Annotations -# ======================================================================== -# The following determines whether to read features from the geodatabase -# tables/feature classes or to read metadata from those tables/feature -# classes. 
-DEFAULT_MACRO GEODATABASE_FILE_IN_FEATURE_READ_MODE_GEODATABASE_FILE_1 Features -GEODATABASE_FILE_1_FEATURE_READ_MODE "$(GEODATABASE_FILE_IN_FEATURE_READ_MODE_GEODATABASE_FILE_1)" -DEFAULT_MACRO GEODATABASE_FILE_IN_EXPOSE_ATTRS_GROUP_GEODATABASE_FILE_1 -GEODATABASE_FILE_1_EXPOSE_ATTRS_GROUP "$(GEODATABASE_FILE_IN_EXPOSE_ATTRS_GROUP_GEODATABASE_FILE_1)" -GUI DISCLOSUREGROUP GEODATABASE_FILE_IN_EXPOSE_ATTRS_GROUP_GEODATABASE_FILE_1 GEODATABASE_FILE_EXPOSE_FORMAT_ATTRS%ALIAS_MODE Schema Attributes -# Include this file in source setting section to add native search envelope processing -# Zero as a default means we don't do any search -- this makes workbench happier -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINX_GEODATABASE_FILE_1 0 -GEODATABASE_FILE_1_SEARCH_ENVELOPE "$(GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINX_GEODATABASE_FILE_1)" -GUI OPTIONAL FLOAT GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINX_GEODATABASE_FILE_1 Minimum X: -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINY_GEODATABASE_FILE_1 0 -GEODATABASE_FILE_1_SEARCH_ENVELOPE "$(GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINY_GEODATABASE_FILE_1)" -GUI OPTIONAL FLOAT GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MINY_GEODATABASE_FILE_1 Minimum Y: -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXX_GEODATABASE_FILE_1 0 -GEODATABASE_FILE_1_SEARCH_ENVELOPE "$(GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXX_GEODATABASE_FILE_1)" -GUI OPTIONAL FLOAT GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXX_GEODATABASE_FILE_1 Maximum X: -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXY_GEODATABASE_FILE_1 0 -GEODATABASE_FILE_1_SEARCH_ENVELOPE "$(GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXY_GEODATABASE_FILE_1)" -GUI OPTIONAL FLOAT GEODATABASE_FILE_IN_SEARCH_ENVELOPE_MAXY_GEODATABASE_FILE_1 Maximum Y: -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_CLIP_TO_ENVELOPE_GEODATABASE_FILE_1 NO -GEODATABASE_FILE_1_CLIP_TO_ENVELOPE "$(GEODATABASE_FILE_IN_CLIP_TO_ENVELOPE_GEODATABASE_FILE_1)" -GUI OPTIONAL CHECKBOX GEODATABASE_FILE_IN_CLIP_TO_ENVELOPE_GEODATABASE_FILE_1 YES%NO Clip to Search Envelope -GUI DISCLOSUREGROUP GEODATABASE_FILE_IN_GEODB_SHARED_RDR_ADV_PARM_GROUP_GEODATABASE_FILE_1 SPLIT_COMPLEX_ANNOS%CACHE_MULTIPATCH_TEXTURES%SEARCH_FEATURE%SEARCH_ORDER%SEARCH_METHOD%CHECK_SIMPLE_GEOM%MERGE_FEAT_LINKED_ANNOS%BEGIN_SQL{0}%END_SQL{0} Advanced -#============================================================================== -# The following specifies that complex representations for annotation should -# be split into simpler representations. -DEFAULT_MACRO GEODATABASE_FILE_IN_SPLIT_COMPLEX_ANNOS_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_SPLIT_COMPLEX_ANNOS "$(GEODATABASE_FILE_IN_SPLIT_COMPLEX_ANNOS_GEODATABASE_FILE_1)" -GUI CHOICE GEODATABASE_FILE_IN_SPLIT_COMPLEX_ANNOS_GEODATABASE_FILE_1 yes%no Split Complex Annotations: -#============================================================================== -# The following specifies whether to keep (YES) or clean up (NO) Esri's -# multipatch texture caches. 
-DEFAULT_MACRO GEODATABASE_FILE_IN_CACHE_MULTIPATCH_TEXTURES_GEODATABASE_FILE_1 yes -GEODATABASE_FILE_1_CACHE_MULTIPATCH_TEXTURES "$(GEODATABASE_FILE_IN_CACHE_MULTIPATCH_TEXTURES_GEODATABASE_FILE_1)" -GUI CHOICE GEODATABASE_FILE_IN_CACHE_MULTIPATCH_TEXTURES_GEODATABASE_FILE_1 yes%no Cache Multipatch Textures: -# ======================================================================== -# The following defines the search feature for the query. It defines an -# arbitrarily complex search feature as a spatial contraint. It is in the -# form _SEARCH_FEATURE [ ]+. -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_FEATURE_GEODATABASE_FILE_1 -GEODATABASE_FILE_1_SEARCH_FEATURE "$(GEODATABASE_FILE_IN_SEARCH_FEATURE_GEODATABASE_FILE_1)" -GUI OPTIONAL TEXT GEODATABASE_FILE_IN_SEARCH_FEATURE_GEODATABASE_FILE_1 Search Feature: -# ======================================================================== -# Determines whether the spatial component or the attribute component of a -# query is performed first. This is only applicable when both spatial and -# non-spatial searches are being performed. -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_ORDER_GEODATABASE_FILE_1 SPATIAL_FIRST -GEODATABASE_FILE_1_SEARCH_ORDER "$(GEODATABASE_FILE_IN_SEARCH_ORDER_GEODATABASE_FILE_1)" -GUI OPTIONAL CHOICE GEODATABASE_FILE_IN_SEARCH_ORDER_GEODATABASE_FILE_1 SPATIAL_FIRST%ATTRIBUTE_FIRST Search Order: -# ======================================================================== -# Determines which type of search method to use. This keyword is only applicable -# when either the keyword SEARCH_ENVELOPE or the keyword SEARCH_FEATURE is specified. -DEFAULT_MACRO GEODATABASE_FILE_IN_SEARCH_METHOD_GEODATABASE_FILE_1 GEODB_INTERSECTS -GEODATABASE_FILE_1_SEARCH_METHOD "$(GEODATABASE_FILE_IN_SEARCH_METHOD_GEODATABASE_FILE_1)" -GUI OPTIONAL CHOICE GEODATABASE_FILE_IN_SEARCH_METHOD_GEODATABASE_FILE_1 GEODB_INTERSECTS%GEODB_ENVELOPE_INTERSECTS%GEODB_TOUCHES%GEODB_OVERLAPS%GEODB_CROSSES%GEODB_WITHIN%GEODB_CONTAINS Search Method -# ======================================================================== -# The following specifies that simple geometry should be checked for -# when reading features. -DEFAULT_MACRO GEODATABASE_FILE_IN_CHECK_SIMPLE_GEOM_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_CHECK_SIMPLE_GEOM "$(GEODATABASE_FILE_IN_CHECK_SIMPLE_GEOM_GEODATABASE_FILE_1)" -GUI CHOICE GEODATABASE_FILE_IN_CHECK_SIMPLE_GEOM_GEODATABASE_FILE_1 yes%no Check for Simple Geometry: -# ======================================================================== -# The following specifies whether feature-linked annotations should -# be merged onto the main feature as a text list attribute when reading. -DEFAULT_MACRO GEODATABASE_FILE_IN_MERGE_FEAT_LINKED_ANNOS_GEODATABASE_FILE_1 no -GEODATABASE_FILE_1_MERGE_FEAT_LINKED_ANNOS "$(GEODATABASE_FILE_IN_MERGE_FEAT_LINKED_ANNOS_GEODATABASE_FILE_1)" -GUI CHOICE GEODATABASE_FILE_IN_MERGE_FEAT_LINKED_ANNOS_GEODATABASE_FILE_1 yes%no Merge Feature Linked Annotations: -# ============================================================================== -# The following specifies an SQL command to execute before opening the first -# Geodatabase table. 
-DEFAULT_MACRO GEODATABASE_FILE_IN_BEGIN_SQL{0}_GEODATABASE_FILE_1 -GEODATABASE_FILE_1_BEGIN_SQL{0} "$(GEODATABASE_FILE_IN_BEGIN_SQL{0}_GEODATABASE_FILE_1)" -GUI OPTIONAL TEXT_EDIT_SQL GEODATABASE_FILE_IN_BEGIN_SQL{0}_GEODATABASE_FILE_1 SQL Statement To Execute Before Translation: -# ============================================================================== -# The following specifies an SQL command to execute after closing all the -# Geodatabase tables. -DEFAULT_MACRO GEODATABASE_FILE_IN_END_SQL{0}_GEODATABASE_FILE_1 -GEODATABASE_FILE_1_END_SQL{0} "$(GEODATABASE_FILE_IN_END_SQL{0}_GEODATABASE_FILE_1)" -GUI OPTIONAL TEXT_EDIT_SQL GEODATABASE_FILE_IN_END_SQL{0}_GEODATABASE_FILE_1 SQL Statement To Execute After Translation: -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_NETWORK_AUTHENTICATION_GEODATABASE_FILE_1 -GEODATABASE_FILE_1_NETWORK_AUTHENTICATION "$(GEODATABASE_FILE_IN_NETWORK_AUTHENTICATION_GEODATABASE_FILE_1)" -GUI OPTIONAL AUTHENTICATOR GEODATABASE_FILE_IN_NETWORK_AUTHENTICATION_GEODATABASE_FILE_1 CONTAINER%GROUP%CONTAINER_TITLE%"Network Authentication"%PROMPT_TYPE%NETWORK Network Authentication -# =========================================================================== -DEFAULT_MACRO GEODATABASE_FILE_IN_ATTRIBUTE_READING_GEODATABASE_FILE_1 ALL -GEODATABASE_FILE_1_ATTRIBUTE_READING "$(GEODATABASE_FILE_IN_ATTRIBUTE_READING_GEODATABASE_FILE_1)" -# ============================================================================ -# Search Envelope Coordinate System -DEFAULT_MACRO _GEODB_IN_SEARCH_ENVELOPE_COORDINATE_SYSTEM_GEODATABASE_FILE_1 -GUI OPTIONAL COORDSYS _GEODB_IN_SEARCH_ENVELOPE_COORDINATE_SYSTEM_GEODATABASE_FILE_1 Search Envelope Coordinate System: -GEODATABASE_FILE_1_SEARCH_ENVELOPE_COORDINATE_SYSTEM "$(_GEODB_IN_SEARCH_ENVELOPE_COORDINATE_SYSTEM_GEODATABASE_FILE_1)" -DEFAULT_MACRO _GEODB_IN_ALIAS_MODE_GEODATABASE_FILE_1 NONE -GUI LOOKUP_CHOICE _GEODB_IN_ALIAS_MODE_GEODATABASE_FILE_1 None,NONE%ExposeAliasesasMetadataAttributesname_alias,ON_DATA_FEATURES Alias Mode: -GEODATABASE_FILE_1_ALIAS_MODE "$(_GEODB_IN_ALIAS_MODE_GEODATABASE_FILE_1)" -GEODATABASE_FILE_1_DATASET "$(SourceDataset_GEODATABASE_FILE_1)" -#! END_SOURCE_HEADER -#! START_WB_HEADER -MACRO WB_KEYWORD "GEODATABASE_FILE_2" -#! END_WB_HEADER -#! START_DEST_HEADER GEODATABASE_FILE GEODATABASE_FILE_2 -# ============================================================================ -# The following GUI line prompts for a folder to be used as the -# the destination for the GEODB files. -# The user input is stored in a macro, which is then used to define -# the dataset to be written. -#! END_DEST_HEADER -#! START_WB_HEADER -DEFAULT_MACRO DestDataset -INCLUDE [ if {"$(DestDataset)" != ""} { \ - puts {DEFAULT_MACRO DestDataset_GEODATABASE_FILE_2 $(DestDataset)} \ - } ] -#! END_WB_HEADER -#! 
START_DEST_HEADER GEODATABASE_FILE GEODATABASE_FILE_2 -DEFAULT_MACRO DestDataset_GEODATABASE_FILE_2 $(FolderOut) -GUI DEST_GEODATABASE DestDataset_GEODATABASE_FILE_2 Destination Esri File Geodatabase: -# ============================================================================ -# Determines if dataset has to be overwritten -DEFAULT_MACRO GEODATABASE_FILE_OUT_OVERWRITE_GEODB_GEODATABASE_FILE_2 YES -GEODATABASE_FILE_2_OVERWRITE_GEODB "$(GEODATABASE_FILE_OUT_OVERWRITE_GEODB_GEODATABASE_FILE_2)" -GUI ACTIVECHECK GEODATABASE_FILE_OUT_OVERWRITE_GEODB_GEODATABASE_FILE_2 YES%NO,GEODATABASE_FILE_OUT_DATASET_TEMPLATE Overwrite Existing Geodatabase -DEFAULT_MACRO GEODATABASE_FILE_OUT_DATASET_TEMPLATE_GEODATABASE_FILE_2 -GEODATABASE_FILE_2_DATASET_TEMPLATE "$(GEODATABASE_FILE_OUT_DATASET_TEMPLATE_GEODATABASE_FILE_2)" -GUI OPTIONAL SOURCE_GEODATABASE_CFG GEODATABASE_FILE_OUT_DATASET_TEMPLATE_GEODATABASE_FILE_2 SWIZZLER:NO Template File Geodatabase -# ============================================================================__ -# Optional template file can be specified. This will load the schema -# of the template file into the database before any writing occurs. -DEFAULT_MACRO GEODATABASE_FILE_OUT_TEMPLATEFILE_GEODATABASE_FILE_2 -GEODATABASE_FILE_2_TEMPLATEFILE "$(GEODATABASE_FILE_OUT_TEMPLATEFILE_GEODATABASE_FILE_2)" -GUI OPTIONAL FILENAME_MUSTEXIST GEODATABASE_FILE_OUT_TEMPLATEFILE_GEODATABASE_FILE_2 XML_Geodatabase_Files(*.xml)|*.xml|(*.zip)|*.zip|(*.z)|*.z|All_files(*)|* Import XML Workspace Document (Schema Only): -# ============================================================================ -# This option specifies the transaction type. This should be set to EDIT_SESSION -# when edits are made to tables/feature classes that have custom behaviour -# associated with them - i.e. when writing to feature classes that are involved -# in relationships. Both TRANSACTIONS and NONE can be chosen only when writing -# to tables/feature classes that do not have custom behaviour. -DEFAULT_MACRO GEODATABASE_FILE_OUT_TRANSACTION_TYPE_GEODATABASE_FILE_2 TRANSACTIONS -GEODATABASE_FILE_2_TRANSACTION_TYPE "$(GEODATABASE_FILE_OUT_TRANSACTION_TYPE_GEODATABASE_FILE_2)" -GUI LOOKUP_CHOICE GEODATABASE_FILE_OUT_TRANSACTION_TYPE_GEODATABASE_FILE_2 EditSession,EDIT_SESSION%Transactions,TRANSACTIONS%None,NONE Transaction Type: -# ============================================================================ -# Specifies if the geomtery is to be simplified or not. Default is no -DEFAULT_MACRO GEODATABASE_FILE_OUT_SIMPLIFY_GEOM_GEODATABASE_FILE_2 No -GEODATABASE_FILE_2_SIMPLIFY_GEOM "$(GEODATABASE_FILE_OUT_SIMPLIFY_GEOM_GEODATABASE_FILE_2)" -GUI LOOKUP_CHOICE GEODATABASE_FILE_OUT_SIMPLIFY_GEOM_GEODATABASE_FILE_2 Yes%No Simplify Geometry: -DEFAULT_MACRO GEODATABASE_FILE_OUT_X_ORIGIN_GEODATABASE_FILE_2 0 -GEODATABASE_FILE_2_X_ORIGIN "$(GEODATABASE_FILE_OUT_X_ORIGIN_GEODATABASE_FILE_2)" -DEFAULT_MACRO GEODATABASE_FILE_OUT_Y_ORIGIN_GEODATABASE_FILE_2 0 -GEODATABASE_FILE_2_Y_ORIGIN "$(GEODATABASE_FILE_OUT_Y_ORIGIN_GEODATABASE_FILE_2)" -DEFAULT_MACRO GEODATABASE_FILE_OUT_XY_SCALE_GEODATABASE_FILE_2 0 -GEODATABASE_FILE_2_XY_SCALE "$(GEODATABASE_FILE_OUT_XY_SCALE_GEODATABASE_FILE_2)" -# ============================================================================ -# Determines if dataset contains Z values. This value will be used if DEF lines -# don't have anything set. The auto_detect option will use the first feature -# headed for each feature class to determine whether the feature class should be -# 2D or 3D. 
-DEFAULT_MACRO GEODATABASE_FILE_OUT_HAS_Z_VALUES_GEODATABASE_FILE_2 auto_detect -GEODATABASE_FILE_2_HAS_Z_VALUES "$(GEODATABASE_FILE_OUT_HAS_Z_VALUES_GEODATABASE_FILE_2)" -GUI LOOKUP_CHOICE GEODATABASE_FILE_OUT_HAS_Z_VALUES_GEODATABASE_FILE_2 Yes,yes%No,no%AutoDetect,auto_detect Contains Z Values: -DEFAULT_MACRO GEODATABASE_FILE_OUT_Z_ORIGIN_GEODATABASE_FILE_2 0 -GEODATABASE_FILE_2_Z_ORIGIN "$(GEODATABASE_FILE_OUT_Z_ORIGIN_GEODATABASE_FILE_2)" -DEFAULT_MACRO GEODATABASE_FILE_OUT_Z_SCALE_GEODATABASE_FILE_2 0 -GEODATABASE_FILE_2_Z_SCALE "$(GEODATABASE_FILE_OUT_Z_SCALE_GEODATABASE_FILE_2)" -DEFAULT_MACRO GEODATABASE_FILE_OUT_GRID_1_GEODATABASE_FILE_2 0 -GEODATABASE_FILE_2_GRID_1 "$(GEODATABASE_FILE_OUT_GRID_1_GEODATABASE_FILE_2)" -GUI DISCLOSUREGROUP GEODATABASE_FILE_OUT_GEODB_SHARED_WRT_ADV_PARM_GROUP_GEODATABASE_FILE_2 REQUESTED_GEODATABASE_VERSION%DEFAULT_Z_VALUE%WRITER_MODE%TRANSACTION%TRANSACTION_INTERVAL%IGNORE_FAILED_FEATURE_ENTRY%MAX_NUMBER_FAILED_FEATURES%DUMP_FAILED_FEATURES%FFS_DUMP_FILE%ANNOTATION_UNITS%MEASURES_ORIGIN%MEASURES_SCALE%COMPRESS_AT_END%VALIDATE_FEATURES%SIMPLIFY_NETWORK_FEATURES%BEGIN_SQL{0}%END_SQL{0} Advanced -# ============================================================================ -# Determines version of geodatabase to create -DEFAULT_MACRO GEODATABASE_FILE_OUT_REQUESTED_GEODATABASE_VERSION_GEODATABASE_FILE_2 CURRENT -GEODATABASE_FILE_2_REQUESTED_GEODATABASE_VERSION "$(GEODATABASE_FILE_OUT_REQUESTED_GEODATABASE_VERSION_GEODATABASE_FILE_2)" -GUI CHOICE GEODATABASE_FILE_OUT_REQUESTED_GEODATABASE_VERSION_GEODATABASE_FILE_2 CURRENT%10.0%9.3 Geodatabase Version: -# ============================================================================ -# Determines default Z value -DEFAULT_MACRO GEODATABASE_FILE_OUT_DEFAULT_Z_VALUE_GEODATABASE_FILE_2 0 -GEODATABASE_FILE_2_DEFAULT_Z_VALUE "$(GEODATABASE_FILE_OUT_DEFAULT_Z_VALUE_GEODATABASE_FILE_2)" -GUI FLOAT GEODATABASE_FILE_OUT_DEFAULT_Z_VALUE_GEODATABASE_FILE_2 Default Z Value: -# ============================================================================ -# This option specifies the writer mode. If set to UPDATE or DELETE, the -# mode can be overwritten by each individual feature using the attribute -# 'fme_db_operation'. -DEFAULT_MACRO GEODATABASE_FILE_OUT_WRITER_MODE_GEODATABASE_FILE_2 INSERT -GEODATABASE_FILE_2_WRITER_MODE "$(GEODATABASE_FILE_OUT_WRITER_MODE_GEODATABASE_FILE_2)" -GUI CHOICE GEODATABASE_FILE_OUT_WRITER_MODE_GEODATABASE_FILE_2 INSERT%UPDATE%DELETE Writer Mode: -# ============================================================================ -# This option specifies the starting transaction number. Normally -# this will be zero, which will write every feature to the Geodatabase, -# but it might be another value if a previous translation was interrupted. -DEFAULT_MACRO GEODATABASE_FILE_OUT_TRANSACTION_GEODATABASE_FILE_2 0 -GEODATABASE_FILE_2_TRANSACTION "$(GEODATABASE_FILE_OUT_TRANSACTION_GEODATABASE_FILE_2)" -GUI OPTIONAL INTEGER GEODATABASE_FILE_OUT_TRANSACTION_GEODATABASE_FILE_2 Transaction Number: -# ============================================================================ -# This option specifies how many features will be written to the Geodatabase on -# each transaction. 
-DEFAULT_MACRO GEODATABASE_FILE_OUT_TRANSACTION_INTERVAL_GEODATABASE_FILE_2 1000 -GEODATABASE_FILE_2_TRANSACTION_INTERVAL "$(GEODATABASE_FILE_OUT_TRANSACTION_INTERVAL_GEODATABASE_FILE_2)" -GUI OPTIONAL INTEGER GEODATABASE_FILE_OUT_TRANSACTION_INTERVAL_GEODATABASE_FILE_2 Features to Write Per Transaction: -# ============================================================================ -# Determines whether to continue the translation, despite having encountered -# a feature that would normally stop the translation. -DEFAULT_MACRO GEODATABASE_FILE_OUT_IGNORE_FAILED_FEATURE_ENTRY_GEODATABASE_FILE_2 no -GEODATABASE_FILE_2_IGNORE_FAILED_FEATURE_ENTRY "$(GEODATABASE_FILE_OUT_IGNORE_FAILED_FEATURE_ENTRY_GEODATABASE_FILE_2)" -GUI OPTIONAL CHOICE GEODATABASE_FILE_OUT_IGNORE_FAILED_FEATURE_ENTRY_GEODATABASE_FILE_2 yes%no Ignore Failed Features: -# ============================================================================ -# The number of features to ignore before stopping a translation due to a -# problematic feature. Used only if IGNORE_FAILED_FEATURE_ENTRY is 'yes'. -DEFAULT_MACRO GEODATABASE_FILE_OUT_MAX_NUMBER_FAILED_FEATURES_GEODATABASE_FILE_2 -1 -GEODATABASE_FILE_2_MAX_NUMBER_FAILED_FEATURES "$(GEODATABASE_FILE_OUT_MAX_NUMBER_FAILED_FEATURES_GEODATABASE_FILE_2)" -GUI OPTIONAL INTEGER GEODATABASE_FILE_OUT_MAX_NUMBER_FAILED_FEATURES_GEODATABASE_FILE_2 Max number of features to ignore: -# ============================================================================ -# Determines whether to store failed features in an FFS file, so that they -# can be viewed/manipulated at a later time. Only applicable if the keyword -# IGNORE_FAILED_FEATURE_ENTRY is set to 'yes'. -DEFAULT_MACRO GEODATABASE_FILE_OUT_DUMP_FAILED_FEATURES_GEODATABASE_FILE_2 no -GEODATABASE_FILE_2_DUMP_FAILED_FEATURES "$(GEODATABASE_FILE_OUT_DUMP_FAILED_FEATURES_GEODATABASE_FILE_2)" -GUI OPTIONAL CHOICE GEODATABASE_FILE_OUT_DUMP_FAILED_FEATURES_GEODATABASE_FILE_2 yes%no Dump Failed Features to File: -# ============================================================================ -# If the keyword DUMP_FAILED_FEATURES is set to 'yes', determines where to -# store the failed features. The location must specify both the path and the -# filename. The filename must have an extension of 'ffs'. -DEFAULT_MACRO GEODATABASE_FILE_OUT_FFS_DUMP_FILE_GEODATABASE_FILE_2 -GEODATABASE_FILE_2_FFS_DUMP_FILE "$(GEODATABASE_FILE_OUT_FFS_DUMP_FILE_GEODATABASE_FILE_2)" -GUI OPTIONAL FILENAME GEODATABASE_FILE_OUT_FFS_DUMP_FILE_GEODATABASE_FILE_2 FME_Feature_Store_Files(*.ffs)|*.ffs|All_files(*)|* Failed Feature Dump filename: -# ============================================================================ -# Specifies the units to use when creating a new annotation feature class. -DEFAULT_MACRO GEODATABASE_FILE_OUT_ANNOTATION_UNITS_GEODATABASE_FILE_2 unknown_units -GEODATABASE_FILE_2_ANNOTATION_UNITS "$(GEODATABASE_FILE_OUT_ANNOTATION_UNITS_GEODATABASE_FILE_2)" -GUI OPTIONAL CHOICE GEODATABASE_FILE_OUT_ANNOTATION_UNITS_GEODATABASE_FILE_2 unknown_units%decimal_degrees%inches%points%feet%yards%miles%nautical_miles%millimeters%centimeters%meters%kilometers%decimeters Annotation Units: -# ============================================================================ -# Determines if the output Geodatabase is to be compacted or not. Note: This -# directive is incorrectly called COMPRESS when in fact it is doing COMPACT. 
-DEFAULT_MACRO GEODATABASE_FILE_OUT_COMPRESS_AT_END_GEODATABASE_FILE_2 no -GEODATABASE_FILE_2_COMPRESS_AT_END "$(GEODATABASE_FILE_OUT_COMPRESS_AT_END_GEODATABASE_FILE_2)" -GUI OPTIONAL CHOICE GEODATABASE_FILE_OUT_COMPRESS_AT_END_GEODATABASE_FILE_2 yes%no Compact Database When Done: -# ============================================================================ -# Determines whether to perform validation on features being written to the -# geodatabase. Default is no. -DEFAULT_MACRO GEODATABASE_FILE_OUT_VALIDATE_FEATURES_GEODATABASE_FILE_2 no -GEODATABASE_FILE_2_VALIDATE_FEATURES "$(GEODATABASE_FILE_OUT_VALIDATE_FEATURES_GEODATABASE_FILE_2)" -GUI OPTIONAL CHOICE GEODATABASE_FILE_OUT_VALIDATE_FEATURES_GEODATABASE_FILE_2 yes%no Validate Features to Write: -# ============================================================================ -# Determines whether to perform simplification on network features being -# written to the geodatabase. Default is no. -DEFAULT_MACRO GEODATABASE_FILE_OUT_SIMPLIFY_NETWORK_FEATURES_GEODATABASE_FILE_2 no -GEODATABASE_FILE_2_SIMPLIFY_NETWORK_FEATURES "$(GEODATABASE_FILE_OUT_SIMPLIFY_NETWORK_FEATURES_GEODATABASE_FILE_2)" -GUI OPTIONAL CHOICE GEODATABASE_FILE_OUT_SIMPLIFY_NETWORK_FEATURES_GEODATABASE_FILE_2 yes%no Simplify Network Features: -# ============================================================================== -# The following specifies an SQL command to execute before opening the first -# Geodatabase table. -DEFAULT_MACRO GEODATABASE_FILE_OUT_BEGIN_SQL{0}_GEODATABASE_FILE_2 -GEODATABASE_FILE_2_BEGIN_SQL{0} "$(GEODATABASE_FILE_OUT_BEGIN_SQL{0}_GEODATABASE_FILE_2)" -GUI OPTIONAL TEXT_EDIT_SQL GEODATABASE_FILE_OUT_BEGIN_SQL{0}_GEODATABASE_FILE_2 SQL Statement To Execute Before Translation: -# ============================================================================== -# The following specifies an SQL command to execute after closing all the -# Geodatabase tables. -DEFAULT_MACRO GEODATABASE_FILE_OUT_END_SQL{0}_GEODATABASE_FILE_2 -GEODATABASE_FILE_2_END_SQL{0} "$(GEODATABASE_FILE_OUT_END_SQL{0}_GEODATABASE_FILE_2)" -GUI OPTIONAL TEXT_EDIT_SQL GEODATABASE_FILE_OUT_END_SQL{0}_GEODATABASE_FILE_2 SQL Statement To Execute After Translation: -# ============================================================================ -# Opt in for destination dataset type vs format type validation -DEFAULT_MACRO GEODATABASE_FILE_OUT_DESTINATION_DATASETTYPE_VALIDATION_GEODATABASE_FILE_2 Yes -GEODATABASE_FILE_2_DESTINATION_DATASETTYPE_VALIDATION "$(GEODATABASE_FILE_OUT_DESTINATION_DATASETTYPE_VALIDATION_GEODATABASE_FILE_2)" -DEFAULT_MACRO GEODATABASE_FILE_OUT_COORDINATE_SYSTEM_GRANULARITY_GEODATABASE_FILE_2 FEATURE_TYPE -GEODATABASE_FILE_2_COORDINATE_SYSTEM_GRANULARITY "$(GEODATABASE_FILE_OUT_COORDINATE_SYSTEM_GRANULARITY_GEODATABASE_FILE_2)" -# ============================================================================ -# Determines if dataset contains Measures. This value will be used if DEF lines -# don't have anything set. -DEFAULT_MACRO _GEODBOutMeasures_GEODATABASE_FILE_2 no -GUI OPTIONAL CHOICE _GEODBOutMeasures_GEODATABASE_FILE_2 yes%no Contains Measures: -GEODATABASE_FILE_2_HAS_MEASURES $(_GEODBOutMeasures_GEODATABASE_FILE_2) -GEODATABASE_FILE_2_DATASET "$(DestDataset_GEODATABASE_FILE_2)" -#! END_DEST_HEADER -#! START_WB_HEADER -MACRO WB_KEYWORD "ESRISHAPE_1" -#! END_WB_HEADER -#! 
START_DEST_HEADER ESRISHAPE ESRISHAPE_1 -# ============================================================================ -# The following GUI line prompts for a folder to be used as the -# the destination for the Esri Shapefiles. -# The user input is stored in a macro, which is then used to define -# the dataset to be written. -#! END_DEST_HEADER -#! START_WB_HEADER -DEFAULT_MACRO DestDataset -INCLUDE [ if {"$(DestDataset)" != ""} { \ - puts {DEFAULT_MACRO DestDataset_ESRISHAPE_1 $(DestDataset)} \ - } ] -#! END_WB_HEADER -#! START_DEST_HEADER ESRISHAPE ESRISHAPE_1 -DEFAULT_MACRO DestDataset_ESRISHAPE_1 $(FolderOut) -GUI DIRNAME DestDataset_ESRISHAPE_1 Destination Esri Shapefile Folder: -ESRISHAPE_1_COORDINATE_SYSTEM_GRANULARITY FEATURE_TYPE -# ============================================================================ -# Determines whether the attribute names should be uppercased, or whether they -# should stay as specified in the shapefile. The default will be Yes for -# backwards compatibility. Once the mapping file/workspace has been generated, -# the value for this keyword should not be changed. -DEFAULT_MACRO ESRISHAPE_OUT_UPPER_CASE_ATTR_NAMES_ESRISHAPE_1 No -ESRISHAPE_1_UPPER_CASE_ATTR_NAMES "$(ESRISHAPE_OUT_UPPER_CASE_ATTR_NAMES_ESRISHAPE_1)" -# ============================================================================ -# The following keyword allows the user to choose what encoding to -# use for outputting the shapefile -# BUG31194: For backwards compatibility and not outputting a .cpg file -# we have chosen to make the writer default encoding system (ANSI) -DEFAULT_MACRO ESRISHAPE_OUT_ENCODING_ESRISHAPE_1 SYSTEM -ESRISHAPE_1_ENCODING "$(ESRISHAPE_OUT_ENCODING_ESRISHAPE_1)" -GUI OPTIONAL ENCODING ESRISHAPE_OUT_ENCODING_ESRISHAPE_1 ANSI%SYSTEM%BIG5%EUC%HKBIG5%ISO%OEM%SJIS%UTF-8%CP437%CP708%CP720%CP737%CP775%CP850%CP852%CP855%CP857%CP860%CP861%CP862%CP863%CP864%CP865%CP866%CP869%CP874%CP932%CP936%CP950%CP1250%CP1251%CP1252%CP1253%CP1254%CP1255%CP1256%CP1257%CP1258%ISO8859-1%ISO8859-2%ISO8859-3%ISO8859-4%ISO8859-5%ISO8859-6%ISO8859-7%ISO8859-8%ISO8859-9%ISO-8859-11%ISO8859-13%ISO8859-15%WINDOWS-874 Character Encoding: -# ============================================================================ -# PR2557: Specifies whether or not the reader will generate spatial index files -DEFAULT_MACRO ESRISHAPE_OUT_WRITE_SPATIAL_INDEX_ESRISHAPE_1 No -ESRISHAPE_1_WRITE_SPATIAL_INDEX "$(ESRISHAPE_OUT_WRITE_SPATIAL_INDEX_ESRISHAPE_1)" -GUI CHECKBOX ESRISHAPE_OUT_WRITE_SPATIAL_INDEX_ESRISHAPE_1 Yes%No Write Spatial Index -DEFAULT_MACRO ESRISHAPE_OUT_COMPRESSED_SHAPE_FILE_ESRISHAPE_1 No -ESRISHAPE_1_COMPRESSED_SHAPE_FILE "$(ESRISHAPE_OUT_COMPRESSED_SHAPE_FILE_ESRISHAPE_1)" -GUI CHECKBOX ESRISHAPE_OUT_COMPRESSED_SHAPE_FILE_ESRISHAPE_1 Yes%No Create Compressed Shapefile (.shz) -GUI DISCLOSUREGROUP ESRISHAPE_OUT_DIMENSION_GROUP_ESRISHAPE_1 DIMENSION Dimension Settings -DEFAULT_MACRO ESRISHAPE_OUT_DIMENSION_ESRISHAPE_1 auto -ESRISHAPE_1_DIMENSION "$(ESRISHAPE_OUT_DIMENSION_ESRISHAPE_1)" -GUI DISCLOSUREGROUP ESRISHAPE_OUT_SHAPE_WRT_ADV_PARM_GROUP_ESRISHAPE_1 STRICT_COMPATIBILITY%PRESERVE_RING_VERTEX_ORDER Advanced -# ============================================================================ -# BUG31474: Add an Strict compatibility flag to prevent some apps from -# crashing when record lengths are too long. By default, we want to write -# compatible files going forward. 
-DEFAULT_MACRO ESRISHAPE_OUT_STRICT_COMPATIBILITY_ESRISHAPE_1 Yes -ESRISHAPE_1_STRICT_COMPATIBILITY "$(ESRISHAPE_OUT_STRICT_COMPATIBILITY_ESRISHAPE_1)" -GUI CHECKBOX ESRISHAPE_OUT_STRICT_COMPATIBILITY_ESRISHAPE_1 Yes%No Strict Compatibility -# ============================================================ -# BUG39095: Add an option to preserve input ring vertex order. -DEFAULT_MACRO ESRISHAPE_OUT_PRESERVE_RING_VERTEX_ORDER_ESRISHAPE_1 No -ESRISHAPE_1_PRESERVE_RING_VERTEX_ORDER "$(ESRISHAPE_OUT_PRESERVE_RING_VERTEX_ORDER_ESRISHAPE_1)" -GUI CHECKBOX ESRISHAPE_OUT_PRESERVE_RING_VERTEX_ORDER_ESRISHAPE_1 Yes%No Preserve Ring Vertex Order -# ============================================================================ -# Opt in for destination dataset type vs format type validation -DEFAULT_MACRO ESRISHAPE_OUT_DESTINATION_DATASETTYPE_VALIDATION_ESRISHAPE_1 Yes -ESRISHAPE_1_DESTINATION_DATASETTYPE_VALIDATION "$(ESRISHAPE_OUT_DESTINATION_DATASETTYPE_VALIDATION_ESRISHAPE_1)" -# ============================================================================ -# The following GUI line sets whether measure values in the shapefiles should -# be treated as elevations. -DEFAULT_MACRO _MEASURES_AS_Z_ESRISHAPE_1 no -GUI CHOICE _MEASURES_AS_Z_ESRISHAPE_1 yes%no Treat Measures as Elevation: -ESRISHAPE_1_MEASURES_AS_Z $(_MEASURES_AS_Z_ESRISHAPE_1) -ESRISHAPE_1_ENCODING $(ESRISHAPE_OUT_ENCODING_ESRISHAPE_1) -ESRISHAPE_1_DATASET "$(DestDataset_ESRISHAPE_1)" -#! END_DEST_HEADER -#! START_WB_HEADER -#! END_WB_HEADER - -#! END_HEADER - -LOG_FILENAME "$(FME_MF_DIR)sample_script_2.log" -LOG_APPEND NO -LOG_MAX_FEATURES 200 -LOG_MAX_RECORDED_FEATURES 200 -FME_REPROJECTION_ENGINE FME -FME_IMPLICIT_CSMAP_REPROJECTION_MODE Auto -FME_GEOMETRY_HANDLING Enhanced -FME_STROKE_MAX_DEVIATION 0 -LOG_FILTER_MASK -1 -DEFAULT_MACRO DATASET_KEYWORD_GEODATABASE_FILE_1 GEODATABASE_FILE_1 -DEFAULT_MACRO DATASET_KEYWORD_GEODATABASE_FILE_2 GEODATABASE_FILE_2 -DEFAULT_MACRO DATASET_KEYWORD_ESRISHAPE_1 ESRISHAPE_1 -# ------------------------------------------------------------------------- - -GEODATABASE_FILE_1_READER_META_ATTRIBUTES fme_feature_type - -# ------------------------------------------------------------------------- - -MULTI_READER_CONTINUE_ON_READER_FAILURE No - -# ------------------------------------------------------------------------- - -MACRO WORKSPACE_NAME sample_script_2 -MACRO FME_VIEWER_APP fmedatainspector -# ------------------------------------------------------------------------- - -FACTORY_DEF * RoutingFactory FACTORY_NAME "Router and Unexpected Input Remover" COMMAND_PARM_EVALUATION SINGLE_PASS MULTI_READER_KEYWORD GEODATABASE_FILE_1 INPUT FEATURE_TYPE * ROUTE GEODATABASE_FILE GEODATABASE_FILE_1::Products TO FME_GENERIC ::Products ALIAS_GEOMETRY MERGE_INPUT Yes OUTPUT ROUTED FEATURE_TYPE * -GEODATABASE_FILE_1_MERGE_DEF GEODATABASE_FILE_1::Products EXACT Products DEFLINE geodb_type,geodb_table,OBJECTID,integer,guid,char8000,fc,char8000 -# ------------------------------------------------------------------------- - -FACTORY_DEF * TeeFactory FACTORY_NAME "Products (GEODATABASE_FILE_1) Splitter" INPUT FEATURE_TYPE Products OUTPUT FEATURE_TYPE Products_GEODATABASE_FILE_1 -DEFAULT_MACRO WB_CURRENT_CONTEXT -# ------------------------------------------------------------------------- - -Tcl2 proc Creator_CoordSysRemover {} { global FME_CoordSys; set FME_CoordSys {}; } -MACRO Creator_XML NOT_ACTIVATED -MACRO Creator_CLASSIC NOT_ACTIVATED -MACRO Creator_2D3D 2D_GEOMETRY -MACRO Creator_COORDS -INCLUDE [ if { {Geometry Object} == 
{Geometry Object} } { puts {MACRO Creator_XML *} } ] -INCLUDE [ if { {Geometry Object} == {2D Coordinate List} } { puts {MACRO Creator_2D3D 2D_GEOMETRY}; puts {MACRO Creator_CLASSIC *} } ] -INCLUDE [ if { {Geometry Object} == {3D Coordinate List} } { puts {MACRO Creator_2D3D 3D_GEOMETRY}; puts {MACRO Creator_CLASSIC *} } ] -INCLUDE [ if { {Geometry Object} == {2D Min/Max Box} } { set comment { We need to turn the COORDS which are minX minY maxX maxY into a full polygon list of coordinates }; set splitCoords [split [string trim {}]]; if { [llength $splitCoords] > 4} { set trimmedCoords {}; foreach item $splitCoords { if { $item != {} } {lappend trimmedCoords $item} }; set splitCoords $trimmedCoords; }; if { [llength $splitCoords] != 4 } { error {Creator: Coordinate list is expected to be a space delimited list of four numbers as 'minx miny maxx maxy' - `' is invalid}; }; set minX [lindex $splitCoords 0]; set minY [lindex $splitCoords 1]; set maxX [lindex $splitCoords 2]; set maxY [lindex $splitCoords 3]; puts "MACRO Creator_COORDS $minX $minY $minX $maxY $maxX $maxY $maxX $minY $minX $minY"; puts {MACRO Creator_2D3D 2D_GEOMETRY}; puts {MACRO Creator_CLASSIC *} } ] -FACTORY_DEF $(Creator_XML) CreationFactory FACTORY_NAME Creator_XML_Creator CREATE_AT_END no OUTPUT FEATURE_TYPE _____CREATED______ @Geometry(FROM_ENCODED_STRING,?xmlversion=1.0encoding=US_ASCIIstandalone=no?geometrydimension=2nullgeometry) -FACTORY_DEF $(Creator_CLASSIC) CreationFactory FACTORY_NAME Creator_CLASSIC_Creator $(Creator_2D3D) $(Creator_COORDS) CREATE_AT_END no OUTPUT FEATURE_TYPE _____CREATED______ -FACTORY_DEF * TeeFactory FACTORY_NAME Creator_Cloner INPUT FEATURE_TYPE _____CREATED______ NUMBER_OF_COPIES 1 COPY_NUMBER_ATTRIBUTE "_creation_instance" OUTPUT FEATURE_TYPE Creator_CREATED @Tcl2(Creator_CoordSysRemover) @CoordSys() fme_feature_type Creator - -FACTORY_DEF * BranchingFactory FACTORY_NAME "Creator_CREATED Brancher -1 4" INPUT FEATURE_TYPE Creator_CREATED TARGET_FACTORY "$(WB_CURRENT_CONTEXT)_CREATOR_BRANCH_TARGET" OUTPUT PASSED FEATURE_TYPE * @RemoveAttributes("Creator_CREATED Brancher -1 4".BranchingFactory.Count) -# ------------------------------------------------------------------------- - -Tcl2 proc Creator_2_CoordSysRemover {} { global FME_CoordSys; set FME_CoordSys {}; } -MACRO Creator_2_XML NOT_ACTIVATED -MACRO Creator_2_CLASSIC NOT_ACTIVATED -MACRO Creator_2_2D3D 2D_GEOMETRY -MACRO Creator_2_COORDS -INCLUDE [ if { {Geometry Object} == {Geometry Object} } { puts {MACRO Creator_2_XML *} } ] -INCLUDE [ if { {Geometry Object} == {2D Coordinate List} } { puts {MACRO Creator_2_2D3D 2D_GEOMETRY}; puts {MACRO Creator_2_CLASSIC *} } ] -INCLUDE [ if { {Geometry Object} == {3D Coordinate List} } { puts {MACRO Creator_2_2D3D 3D_GEOMETRY}; puts {MACRO Creator_2_CLASSIC *} } ] -INCLUDE [ if { {Geometry Object} == {2D Min/Max Box} } { set comment { We need to turn the COORDS which are minX minY maxX maxY into a full polygon list of coordinates }; set splitCoords [split [string trim {}]]; if { [llength $splitCoords] > 4} { set trimmedCoords {}; foreach item $splitCoords { if { $item != {} } {lappend trimmedCoords $item} }; set splitCoords $trimmedCoords; }; if { [llength $splitCoords] != 4 } { error {Creator_2: Coordinate list is expected to be a space delimited list of four numbers as 'minx miny maxx maxy' - `' is invalid}; }; set minX [lindex $splitCoords 0]; set minY [lindex $splitCoords 1]; set maxX [lindex $splitCoords 2]; set maxY [lindex $splitCoords 3]; puts "MACRO Creator_2_COORDS $minX $minY $minX $maxY 
$maxX $maxY $maxX $minY $minX $minY"; puts {MACRO Creator_2_2D3D 2D_GEOMETRY}; puts {MACRO Creator_2_CLASSIC *} } ] -FACTORY_DEF $(Creator_2_XML) CreationFactory FACTORY_NAME Creator_2_XML_Creator CREATE_AT_END no OUTPUT FEATURE_TYPE _____CREATED______ @Geometry(FROM_ENCODED_STRING,?xmlversion=1.0encoding=US_ASCIIstandalone=no?geometrydimension=2nullgeometry) -FACTORY_DEF $(Creator_2_CLASSIC) CreationFactory FACTORY_NAME Creator_2_CLASSIC_Creator $(Creator_2_2D3D) $(Creator_2_COORDS) CREATE_AT_END no OUTPUT FEATURE_TYPE _____CREATED______ -FACTORY_DEF * TeeFactory FACTORY_NAME Creator_2_Cloner INPUT FEATURE_TYPE _____CREATED______ NUMBER_OF_COPIES 1 COPY_NUMBER_ATTRIBUTE "_creation_instance" OUTPUT FEATURE_TYPE Creator_2_CREATED @Tcl2(Creator_2_CoordSysRemover) @CoordSys() fme_feature_type Creator_2 - -FACTORY_DEF * BranchingFactory FACTORY_NAME "Creator_2_CREATED Brancher -1 37" INPUT FEATURE_TYPE Creator_2_CREATED TARGET_FACTORY "$(WB_CURRENT_CONTEXT)_CREATOR_BRANCH_TARGET" OUTPUT PASSED FEATURE_TYPE * @RemoveAttributes("Creator_2_CREATED Brancher -1 37".BranchingFactory.Count) -# ------------------------------------------------------------------------- -FACTORY_DEF * TeeFactory FACTORY_NAME "$(WB_CURRENT_CONTEXT)_CREATOR_BRANCH_TARGET" INPUT FEATURE_TYPE * OUTPUT FEATURE_TYPE * -# ------------------------------------------------------------------------- - -FACTORY_DEF * JSONQueryFactory FACTORY_NAME JSONFlattener INPUT FEATURE_TYPE Creator_2_CREATED MODE FLATTEN PREF_STRING "json_" JSON_DOCUMENT "@EvaluateExpression(FDIV,STRING_ENCODED,$(Parameters$encode),JSONFlattener)" EXPLODE_QUERY "json" RECURSIVE_FLATTEN YES ERROR_LIST_ATTR "_json_error" OUTPUT EXPLODED FEATURE_TYPE JSONFlattener_OUTPUT OUTPUT REJECTED FEATURE_TYPE JSONFlattener_ - -FACTORY_DEF * TeeFactory FACTORY_NAME "JSONFlattener Output Nuker" INPUT FEATURE_TYPE JSONFlattener_ -DEFAULT_MACRO _WB_BYPASS_TERMINATION No -FACTORY_DEF * TeeFactory FACTORY_NAME JSONFlattener_ INPUT FEATURE_TYPE JSONFlattener_ OUTPUT FAILED FEATURE_TYPE * @Abort(ENCODED, JSONFlatteneroutputaRejectedfeature.) 
-# ------------------------------------------------------------------------- - -DEFAULT_MACRO WB_CURRENT_CONTEXT -MACRO Parameter_format_VAR FORMAT -INCLUDE [ if {{Global} == {Local}} { puts {MACRO Parameter_format_VAR $(WB_CURRENT_CONTEXT)_FORMAT}; } ] -FACTORY_DEF * TeeFactory FACTORY_NAME Parameter_format INPUT FEATURE_TYPE JSONFlattener_OUTPUT OUTPUT FEATURE_TYPE Parameter_format_OUTPUT @GlobalVariable(fme_encoded,$(Parameter_format_VAR),"@EvaluateExpression(FDIV,STRING_ENCODED,Valuejson_FORMAT,Parameter_format)") - -# ------------------------------------------------------------------------- - -DEFAULT_MACRO WB_CURRENT_CONTEXT -MACRO Parameter_format_2_VAR SELECTION -INCLUDE [ if {{Global} == {Local}} { puts {MACRO Parameter_format_2_VAR $(WB_CURRENT_CONTEXT)_SELECTION}; } ] -FACTORY_DEF * TeeFactory FACTORY_NAME Parameter_format_2 INPUT FEATURE_TYPE Parameter_format_OUTPUT OUTPUT FEATURE_TYPE Parameter_format_2_OUTPUT @GlobalVariable(fme_encoded,$(Parameter_format_2_VAR),"@EvaluateExpression(FDIV,STRING_ENCODED,Valuejson_SELECTION,Parameter_format_2)") - -# ------------------------------------------------------------------------- - -DEFAULT_MACRO WB_CURRENT_CONTEXT -MACRO Parameter_projection_VAR PROJECTION_EPSG -INCLUDE [ if {{Global} == {Local}} { puts {MACRO Parameter_projection_VAR $(WB_CURRENT_CONTEXT)_PROJECTION_EPSG}; } ] -FACTORY_DEF * TeeFactory FACTORY_NAME Parameter_projection INPUT FEATURE_TYPE Parameter_format_2_OUTPUT OUTPUT FEATURE_TYPE Parameter_projection_OUTPUT @GlobalVariable(fme_encoded,$(Parameter_projection_VAR),"@EvaluateExpression(FDIV,STRING_ENCODED,FME_CONDITIONAL:DEFAULT_VALUE'AbortNovalidprojectiondefined'BOOL_OP;OR;COMPOSITE_TEST;;TEST Valuejson_PROJECTION = SWITZERLAND'EPSG:21781'BOOL_OP;OR;COMPOSITE_TEST;;TEST Valuejson_PROJECTION = SWITZERLAND95'EPSG:2056'FME_NUM_CONDITIONS3___,Parameter_projection)") - -# ------------------------------------------------------------------------- - -DEFAULT_MACRO WB_CURRENT_CONTEXT -MACRO Parameter_projection_2_VAR PROJECTION_NOM -INCLUDE [ if {{Global} == {Local}} { puts {MACRO Parameter_projection_2_VAR $(WB_CURRENT_CONTEXT)_PROJECTION_NOM}; } ] -FACTORY_DEF * TeeFactory FACTORY_NAME Parameter_projection_2 INPUT FEATURE_TYPE Parameter_projection_OUTPUT OUTPUT FEATURE_TYPE Parameter_projection_2_OUTPUT @GlobalVariable(fme_encoded,$(Parameter_projection_2_VAR),"@EvaluateExpression(FDIV,STRING_ENCODED,FME_CONDITIONAL:DEFAULT_VALUE'AbortNovalidprojectiondefined'BOOL_OP;OR;COMPOSITE_TEST;;TEST Valuejson_PROJECTION = SWITZERLAND'MN03'BOOL_OP;OR;COMPOSITE_TEST;;TEST Valuejson_PROJECTION = SWITZERLAND95'MN95'FME_NUM_CONDITIONS3___,Parameter_projection_2)") - -FACTORY_DEF * TeeFactory FACTORY_NAME "Parameter_projection_2 OUTPUT Output Nuker" INPUT FEATURE_TYPE Parameter_projection_2_OUTPUT -# ------------------------------------------------------------------------- - -# First determine which function we are going to be using -- we can do -# this statically at parse time for efficiency sake - -MACRO GeometryReplacer_SetTempAttr -MACRO GeometryReplacer_Function -MACRO GeometryReplacer_RemoveTempAttr -MACRO GeometryReplacer_RemoveSourceAttr -INCLUDE [ set sourceText {@EvaluateExpression(FDIV,STRING_ENCODED,$(Perimeter$encode),GeometryReplacer)}; set decodedText [FME_DecodeText "$sourceText"]; if ![regexp {@EvaluateExpression\([^)]*STRING_ENCODED,@Value\(([^()]*)\),[^()]*\)} "$decodedText" dummy geomSrcAttr] { puts "MACRO GeometryReplacer_SetTempAttr @SupplyAttributes(ENCODED,__GeometryReplacerTemp__,\"$sourceText\")"; puts "MACRO 
GeometryReplacer_RemoveTempAttr @RemoveAttributes(__GeometryReplacerTemp__)"; set geomSrcAttr __GeometryReplacerTemp__; } elseif {{Yes} == {Yes} } { puts "MACRO GeometryReplacer_RemoveSourceAttr @RemoveAttributes(\"$geomSrcAttr\")"; }; if {{wkt} == {fmebinary} } { puts "MACRO GeometryReplacer_Function @Geometry(FROM_ATTRIBUTE_BINARY,\"$geomSrcAttr\")"; } elseif {{wkt} == {fmehex} } { puts "MACRO GeometryReplacer_Function @Geometry(FROM_ATTRIBUTE_BINARY_HEX,\"$geomSrcAttr\")"; } elseif {{wkt} == {fmexml} } { puts "MACRO GeometryReplacer_Function @Geometry(FROM_ATTRIBUTE,\"$geomSrcAttr\")"; } elseif {{wkt} == {fmexmlencoded} } { puts "MACRO GeometryReplacer_Function @Geometry(FROM_ENCODED_STRING,&\"$geomSrcAttr\")"; } elseif {{wkt} == {polyline} } { puts "MACRO GeometryReplacer_Function @Geometry(FROM_ATTRIBUTE_POLYLINE,\"$geomSrcAttr\")"; } elseif {{wkt} == {wkt} } { puts "MACRO GeometryReplacer_Function @OGCGeometry(from_attribute,wkt,\"$geomSrcAttr\",FLOAT_64)"; } elseif {{wkt} == {wkb} || {wkt} == {wkbhex} } { puts "MACRO GeometryReplacer_Function @OGCGeometry(from_attribute,wkt,\"$geomSrcAttr\")"; } elseif {{wkt} == {GEOJSON} || {wkt} == {ESRIJSON} } { puts "MACRO GeometryReplacer_Function @JSONGeometry(FROM_ATTRIBUTE,wkt,\"$geomSrcAttr\")"; } elseif {{wkt} == {GEORSS} } { puts "MACRO GeometryReplacer_Function @GeoRSSGeometry(FROM_ATTRIBUTE,\"$geomSrcAttr\")"; } elseif {{wkt} == {KML} } { puts "MACRO GeometryReplacer_Function @KMLGeometry(FROM_ATTRIBUTE,\"$geomSrcAttr\")"; } elseif {{wkt} == {GML} } { puts "MACRO GeometryReplacer_Function @GMLGeometry(FROM_ATTRIBUTE,\"$geomSrcAttr\",)"; } elseif {{wkt} == {geohash} } { puts "MACRO GeometryReplacer_Function @GeoHash(FROM_ATTRIBUTE,\"$geomSrcAttr\",)"; } elseif {{wkt} == {ogeosms} || {wkt} == {geosms} || {wkt} == {geo} } { puts "MACRO GeometryReplacer_Function @OGCGeometry(from_attribute,wkt,\"$geomSrcAttr\")"; } elseif {{wkt} == {mgrs}} { puts "MACRO GeometryReplacer_Function @MGRS(FROM_MGRS, , ,\"@EvaluateExpression(FDIV,STRING_ENCODED,$(Perimeter$encode),GeometryReplacer)\")" } elseif {{wkt} == {mssql} } { puts "MACRO GeometryReplacer_Function @SerializeGeometry(from_attribute,wkt,\"$geomSrcAttr\",)"; } elseif {{wkt} == {iso6709} } { puts "MACRO GeometryReplacer_Function @GeographicPoint(FROM_ATTRIBUTE,\"$geomSrcAttr\")"; }; ] -FACTORY_DEF * TeeFactory FACTORY_NAME GeometryReplacer INPUT FEATURE_TYPE Creator_CREATED OUTPUT FEATURE_TYPE ___TOREJECTOR___ @RenameAttributes(FME_STRICT,___fme_rejection_code___,fme_rejection_code) $(GeometryReplacer_SetTempAttr) $(GeometryReplacer_Function) $(GeometryReplacer_RemoveTempAttr) -FACTORY_DEF * TestFactory FACTORY_NAME GeometryReplacer_Rejector INPUT FEATURE_TYPE ___TOREJECTOR___ TEST @Value(fme_rejection_code) != "" OUTPUT FAILED FEATURE_TYPE GeometryReplacer_OUTPUT @RenameAttributes(FME_STRICT,fme_rejection_code,___fme_rejection_code___) $(GeometryReplacer_RemoveSourceAttr) - -DEFAULT_MACRO _WB_BYPASS_TERMINATION No -FACTORY_DEF * TeeFactory FACTORY_NAME GeometryReplacer_ INPUT FEATURE_TYPE GeometryReplacer_ OUTPUT FAILED FEATURE_TYPE * @Abort(ENCODED, GeometryReplaceroutputaRejectedfeature.) 
-# ------------------------------------------------------------------------- - -# Wipe out the source setting if it was untouched from the default setting - -DEFAULT_MACRO Reprojection_WGS84_MN95_SOURCE "EPSG:4326" -INCLUDE [if { {EPSG:4326} == {Read from feature} } { puts {MACRO Reprojection_WGS84_MN95_SOURCE} } ] -FACTORY_DEF * TeeFactory FACTORY_NAME Reprojection_WGS84_MN95 INPUT FEATURE_TYPE GeometryReplacer_OUTPUT OUTPUT FEATURE_TYPE Reprojection_WGS84_MN95_REPROJECTED @Reproject($(Reprojection_WGS84_MN95_SOURCE),"EPSG:2056",NearestNeighbor,PreserveCells,Reprojection_WGS84_MN95,"COORD_SYS_WARNING") - -# ------------------------------------------------------------------------- - -FACTORY_DEF * TestFactory FACTORY_NAME Tester INPUT FEATURE_TYPE Products_GEODATABASE_FILE_1 TEST @EvaluateExpression(FDIV,STRING_ENCODED,Valueguid,Tester) = @EvaluateExpression(FDIV,STRING_ENCODED,$(Product$encode),Tester) ENCODED BOOLEAN_OPERATOR OR OUTPUT PASSED FEATURE_TYPE Tester_PASSED -# ------------------------------------------------------------------------- - -FACTORY_DEF * TeeFactory FACTORY_NAME ListBuilder_fme_type_remover INPUT FEATURE_TYPE Tester_PASSED OUTPUT FEATURE_TYPE ListBuilder_no_fme_type @RemoveAttributes(fme_type,fme_geometry) -FACTORY_DEF * ListFactory FACTORY_NAME ListBuilder INPUT FEATURE_TYPE ListBuilder_no_fme_type LIST_NAME "list_fc{}" OUTPUT LIST FEATURE_TYPE ListBuilder_OUTPUT - -# ------------------------------------------------------------------------- - -Tcl2 set ListConcatenator__separator [FME_DecodeText {}]; regsub -all \"{}\" [FME_DecodeText {list_fc.fc}] \"{*}\" ListConcatenator__listPattern; -Tcl2 proc ListConcatenator__Concatenate {} { upvar \#0 ListConcatenator__separator separator ListConcatenator__listPattern listPattern; set allAttrs [lsort -dictionary [FME_AttributeNames]]; set keepEmptyParts [string equal {Yes} {No}]; set result {}; foreach attrName $allAttrs { if {[string match $listPattern $attrName]} { set attrValue [FME_GetAttribute $attrName]; if {$keepEmptyParts || $attrValue != {}} { lappend result $attrValue; }; }; }; FME_SetAttribute {"feature_type_to_read"} [join $result $separator]; } -FACTORY_DEF * TeeFactory FACTORY_NAME ListConcatenator INPUT FEATURE_TYPE ListBuilder_OUTPUT OUTPUT FEATURE_TYPE ListConcatenator_OUTPUT @Tcl2(ListConcatenator__Concatenate) - -# ------------------------------------------------------------------------- -MACRO FeatureReader_OUTPUT_PORTS_ENCODED -MACRO FeatureReader_DIRECTIVES ALIAS_MODE,NONE,CHECK_SIMPLE_GEOM,no,USE_SEARCH_ENVELOPE,NO,SPLIT_MULTI_PART_ANNOS,no,IGNORE_NETWORK_INFO,yes,RESOLVE_SUBTYPE_NAMES,yes,SPLIT_COMPLEX_ANNOS,no,TRANSLATE_SPATIAL_DATA_ONLY,no,CLIP_TO_ENVELOPE,NO,MERGE_FEAT_LINKED_ANNOS,no,FEATURE_READ_MODE,Features,SEARCH_METHOD,GEODB_INTERSECTS,IGNORE_RELATIONSHIP_INFO,yes,QUERY_FEATURE_TYPES_FOR_MERGE_FILTERS,Yes,RESOLVE_DOMAINS,no,SEARCH_ORDER,SPATIAL_FIRST,TABLELIST,Products,CACHE_MULTIPATCH_TEXTURES,yes,SPLIT_COMPLEX_EDGES,no - -# Values come in encoded and come out decoded. 
-Tcl2 proc FeatureReader_findInListElseDefault { valueEncoded defaultValue } { if { [lsearch [split {$(FeatureReader_OUTPUT_PORTS_ENCODED)}] $valueEncoded] >= 0} { return FeatureReader_[FME_DecodeText $valueEncoded] } else { return FeatureReader_$defaultValue } } -# Always provide an INTERACTION, otherwise the factory defaults to ENVELOPE_INTERSECTS -INCLUDE [if { ( {NONE} == {} ) || ( {($INTERACT_OPTIONS)} == {} ) } { puts {MACRO FCTQUERY_INTERACTION_LINE FCTQUERY_INTERACTION NONE}; } else { puts {MACRO FCTQUERY_INTERACTION_LINE FCTQUERY_INTERACTION "NONE"}; } ] -# Consolidate the attribute merge options to what the factory expects -DEFAULT_MACRO FeatureReader_COMBINE_ATTRS -INCLUDE [ if { {RESULT_ONLY} == {MERGE} } { puts "MACRO FeatureReader_COMBINE_ATTRS "; } else { puts "MACRO FeatureReader_COMBINE_ATTRS RESULT_ONLY"; }; ] -FACTORY_DEF * QueryFactory FACTORY_NAME FeatureReader INPUT FEATURE_TYPE ListConcatenator_OUTPUT $(FCTQUERY_INTERACTION_LINE) COMBINE_ATTRIBUTES $(FeatureReader_COMBINE_ATTRS) QUERYFCT_ATTRIBUTE_PREFIX QUERYFCT_TABLE_SEPARATOR SPACE COMBINE_GEOMETRY RESULT_ONLY ENABLE_CACHE NO READER_TYPE GEODATABASE_FILE READER_DATASET "D:TempFME_testGDB_INPUTASIT-VD.gdb" QUERYFCT_IDS "@EvaluateExpression(FDIV,STRING_ENCODED,Valuefeature_type_to_read,FeatureReader)" READER_DIRECTIVES META_MACROS,SourceEND_SQL0SourceGEODATABASE_FILE_EXPOSE_FORMAT_ATTRSSourceALIAS_MODENONESourceCHECK_SIMPLE_GEOMnoSourceUSE_SEARCH_ENVELOPENOSourceEXPOSE_ATTRS_GROUPSourceSPLIT_MULTI_PART_ANNOSnoSourceIGNORE_NETWORK_INFOyesSourceRESOLVE_SUBTYPE_NAMESyesSourceSPLIT_COMPLEX_ANNOSnoSourceTRANSLATE_SPATIAL_DATA_ONLYnoSourceCLIP_TO_ENVELOPENOSourceMERGE_FEAT_LINKED_ANNOSnoSourceFEATURE_READ_MODEFeaturesSourceSEARCH_METHODGEODB_INTERSECTSSourceIGNORE_RELATIONSHIP_INFOyesSourceQUERY_FEATURE_TYPES_FOR_MERGE_FILTERSYesSourceFME_CONNECTION_GROUPSourceGEODB_SHARED_RDR_ADV_PARM_GROUPSourceRESOLVE_DOMAINSnoSourceBEGIN_SQL0SourceSEARCH_ORDERSPATIAL_FIRSTSourceSEARCH_FEATURESourceCACHE_MULTIPATCH_TEXTURESyesSourceSPLIT_COMPLEX_EDGESnoSourceWHERE,METAFILE,GEODATABASE_FILE QUERYFCT_OUTPUT "BASED_ON_CONNECTIONS" CONTINUE_ON_READER_ERROR YES READER_PARAMS_WWJD $(FeatureReader_DIRECTIVES) OUTPUT RESULT FEATURE_TYPE * @SupplyAttributes(fme_feature_type,@FeatureType()) @Tcl2("set FME_FeatureType [FeatureReader_findInListElseDefault [FME_EncodeText $FME_FeatureType] {}]") @Transform(GEODATABASE_FILE,FME_GENERIC,ALIAS_GEOMETRY) OUTPUT SCHEMA FEATURE_TYPE FeatureReader_ - -FACTORY_DEF * TeeFactory FACTORY_NAME "FeatureReader Splitter" INPUT FEATURE_TYPE FeatureReader_ OUTPUT FEATURE_TYPE FeatureReader__0_DYpm1DTmZVo= OUTPUT FEATURE_TYPE FeatureReader__1_O2b4R8WeWNw= -DEFAULT_MACRO _WB_BYPASS_TERMINATION No -FACTORY_DEF * TeeFactory FACTORY_NAME FeatureReader_ INPUT FEATURE_TYPE FeatureReader_ OUTPUT FAILED FEATURE_TYPE * @Abort(ENCODED, FeatureReaderoutputaRejectedfeature.) 
-# ------------------------------------------------------------------------- - -FACTORY_DEF * SpatialFilterFactory FACTORY_NAME SpatialFilter INPUT BASE FEATURE_TYPE Reprojection_WGS84_MN95_REPROJECTED INPUT CANDIDATE FEATURE_TYPE FeatureReader_ PREDICATE "INTERSECTS" USE_BOUNDING_BOX NO MULTIPLE_BASES YES MERGE_BASE_ATTR YES ATTR_ACCUM_MODE "HANDLE_CONFLICT" ATTR_CONFLICT_RES "CANDIDATE_IF_CONFLICT" BASE_ATTR_PREFIX "" PREDICATE_ATTR "_predicate" BOOLEAN_OPERATOR OR REJECT_INVALID_GEOM Yes REJECT_INVALID_PREDICATES Yes CUSTOM_MULTI_HANDLING Yes DIMENSION 2 CURVE_BOUNDARY_RULE ENDPOINTS_MOD2 OUTPUT PASSED FEATURE_TYPE SpatialFilter_PASSED - -DEFAULT_MACRO _WB_BYPASS_TERMINATION No -FACTORY_DEF * TeeFactory FACTORY_NAME SpatialFilter_ INPUT FEATURE_TYPE SpatialFilter_ OUTPUT FAILED FEATURE_TYPE * @Abort(ENCODED, SpatialFilteroutputaRejectedfeature.) -# ------------------------------------------------------------------------- - -DEFAULT_MACRO WB_CURRENT_CONTEXT -MACRO VariableRetriever_VAR PROJECTION_EPSG -INCLUDE [if {{Global} == {Local}} { puts {MACRO VariableRetriever_VAR $(WB_CURRENT_CONTEXT)_PROJECTION_EPSG}; }; -FACTORY_DEF * TeeFactory FACTORY_NAME VariableRetriever INPUT FEATURE_TYPE SpatialFilter_PASSED OUTPUT FEATURE_TYPE VariableRetriever_OUTPUT @GlobalVariable(fme_encoded,$(VariableRetriever_VAR),fme_result_attribute,PROJECTION_EPSG) - -# ------------------------------------------------------------------------- - -DEFAULT_MACRO WB_CURRENT_CONTEXT -MACRO VariableRetriever_3_VAR PROJECTION_NOM -INCLUDE [if {{Global} == {Local}} { puts {MACRO VariableRetriever_3_VAR $(WB_CURRENT_CONTEXT)_PROJECTION_NOM}; }; -FACTORY_DEF * TeeFactory FACTORY_NAME VariableRetriever_3 INPUT FEATURE_TYPE VariableRetriever_OUTPUT OUTPUT FEATURE_TYPE VariableRetriever_3_OUTPUT @GlobalVariable(fme_encoded,$(VariableRetriever_3_VAR),fme_result_attribute,PROJECTION_NOM) - -# ------------------------------------------------------------------------- - -DEFAULT_MACRO WB_CURRENT_CONTEXT -MACRO VariableRetriever_2_VAR FORMAT -INCLUDE [if {{Global} == {Local}} { puts {MACRO VariableRetriever_2_VAR $(WB_CURRENT_CONTEXT)_FORMAT}; }; -FACTORY_DEF * TeeFactory FACTORY_NAME VariableRetriever_2 INPUT FEATURE_TYPE VariableRetriever_3_OUTPUT OUTPUT FEATURE_TYPE VariableRetriever_2_OUTPUT @GlobalVariable(fme_encoded,$(VariableRetriever_2_VAR),fme_result_attribute,FORMAT) - -# ------------------------------------------------------------------------- - -# Wipe out the source setting if it was untouched from the default setting - -DEFAULT_MACRO Reprojector_SOURCE "Read from feature" -INCLUDE [if { {Read from feature} == {Read from feature} } { puts {MACRO Reprojector_SOURCE} } ] -FACTORY_DEF * TeeFactory FACTORY_NAME Reprojector INPUT FEATURE_TYPE VariableRetriever_2_OUTPUT OUTPUT FEATURE_TYPE Reprojector_REPROJECTED @Reproject($(Reprojector_SOURCE),"@EvaluateExpression(FDIV,STRING,ValuePROJECTION_EPSG,Reprojector)",NearestNeighbor,PreserveCells,Reprojector,"COORD_SYS_WARNING") - -# ------------------------------------------------------------------------- - -FACTORY_DEF * TeeFactory FACTORY_NAME AttributeFilter INPUT FEATURE_TYPE Reprojector_REPROJECTED OUTPUT FEATURE_TYPE AttributeFilter_FILTER - -FACTORY_DEF * AttributeFilterFactory FACTORY_NAME AttributeFilter_Filter COMMAND_PARM_EVALUATION SINGLE_PASS INPUT FEATURE_TYPE AttributeFilter_FILTER FILTER_ATTRIBUTE FORMAT FILTER_VALUES EMPTY MISSING NULL UNFILTERED GDB SHP OUTPUT EMPTY FEATURE_TYPE AttributeFilter_ OUTPUT MISSING FEATURE_TYPE AttributeFilter_ OUTPUT NULL 
FEATURE_TYPE AttributeFilter_ OUTPUT UNFILTERED FEATURE_TYPE AttributeFilter_ OUTPUT GDB FEATURE_TYPE AttributeFilter_GDB OUTPUT SHP FEATURE_TYPE AttributeFilter_SHP -FACTORY_DEF * TeeFactory FACTORY_NAME "AttributeFilter Output Nuker" INPUT FEATURE_TYPE AttributeFilter_ -FACTORY_DEF * TeeFactory FACTORY_NAME "AttributeFilter Output Nuker" INPUT FEATURE_TYPE AttributeFilter_ -FACTORY_DEF * TeeFactory FACTORY_NAME "AttributeFilter Output Nuker" INPUT FEATURE_TYPE AttributeFilter_ -FACTORY_DEF * TeeFactory FACTORY_NAME "AttributeFilter Output Nuker" INPUT FEATURE_TYPE AttributeFilter_ -# ------------------------------------------------------------------------- - -FACTORY_DEF * RoutingFactory FACTORY_NAME "Destination Feature Type Routing Correlator" COMMAND_PARM_EVALUATION SINGLE_PASS INPUT FEATURE_TYPE * ROUTE FME_GENERIC FeatureReader__1_O2b4R8WeWNw= TO GEODATABASE_FILE __GO_TO_FINAL_OUTPUT_ROUTER__ multi_writer_id,0,SupplyAttributesENCODEDfme_template_feature_typeFeatureClass1,SupplyAttributesENCODED__wb_out_feat_type__FeatureClass1,EvaluateExpressionATTR_CREATE_EXPR__wb_out_feat_type__atValueopenparenfme_feature_typecloseparen_atValueopenparenPROJECTION_NOMcloseparenFeatureClass1 GEOMETRY ROUTE FME_GENERIC AttributeFilter_GDB TO GEODATABASE_FILE __GO_TO_FINAL_OUTPUT_ROUTER__ multi_writer_id,0,SupplyAttributesENCODEDfme_template_feature_typeFeatureClass1,SupplyAttributesENCODED__wb_out_feat_type__FeatureClass1,EvaluateExpressionATTR_CREATE_EXPR__wb_out_feat_type__atValueopenparenfme_feature_typecloseparen_atValueopenparenPROJECTION_NOMcloseparenFeatureClass1 GEOMETRY ROUTE FME_GENERIC FeatureReader__0_DYpm1DTmZVo= TO ESRISHAPE __GO_TO_FINAL_OUTPUT_ROUTER__ multi_writer_id,1,SupplyAttributesENCODEDfme_template_feature_typeShapefile1,SupplyAttributesENCODED__wb_out_feat_type__Shapefile1,EvaluateExpressionATTR_CREATE_EXPR__wb_out_feat_type__atValueopenparenfme_feature_typecloseparen_atValueopenparenPROJECTION_NOMcloseparenShapefile1 GEOMETRY ROUTE FME_GENERIC AttributeFilter_SHP TO ESRISHAPE __GO_TO_FINAL_OUTPUT_ROUTER__ multi_writer_id,1,SupplyAttributesENCODEDfme_template_feature_typeShapefile1,SupplyAttributesENCODED__wb_out_feat_type__Shapefile1,EvaluateExpressionATTR_CREATE_EXPR__wb_out_feat_type__atValueopenparenfme_feature_typecloseparen_atValueopenparenPROJECTION_NOMcloseparenShapefile1 GEOMETRY FEATURE_TYPE_ATTRIBUTE __wb_out_feat_type__ OUTPUT ROUTED FEATURE_TYPE * OUTPUT NOT_ROUTED FEATURE_TYPE __nuke_me__ @Tcl2("FME_StatMessage 818059 [FME_GetAttribute fme_template_feature_type] 818060 818061 fme_warn") -# ------------------------------------------------------------------------- - -FACTORY_DEF * TeeFactory FACTORY_NAME "Final Output Nuker" INPUT FEATURE_TYPE __nuke_me__ - -# ------------------------------------------------------------------------- -GEODATABASE_FILE_2_DEF_TEMPLATE FeatureClass1 geodb_type {FME_GEN_GEOMETRY} GEODB_DROP_TABLE NO GEODB_TRUNCATE_TABLE NO GEODB_OBJECT_ID_NAME OBJECTID GEODB_OBJECT_ID_ALIAS OBJECTID GEODB_SHAPE_NAME SHAPE GEODB_SHAPE_ALIAS SHAPE GEODB_CONFIG_KEYWORD DEFAULTS GEODB_FEATURE_DATASET "" GEODB_GRID{1} "" GEODB_AVG_NUM_POINTS "" GEODB_HAS_Z_VALUES "" GEODB_HAS_MEASURES "" GEODB_ANNO_REFERENCE_SCALE "" GEODB_XYSCALE "" GEODB_MEASURES_SCALE "" GEODB_YORIGIN "" GEODB_XORIGIN "" GEODB_ZSCALE "" GEODB_ZORIGIN "" GEODB_MEASURES_ORIGIN "" fme_schema_feature_first Yes fme_schema_name_expression @EvaluateExpression(FDIV,STRING,Valuefme_feature_type,FeatureClass1) -# ------------------------------------------------------------------------- 
-ESRISHAPE_1_DEF_TEMPLATE Shapefile1 SHAPE_GEOMETRY {FME_GEN_GEOMETRY} shape_dimension auto fme_schema_feature_first Yes fme_schema_name_expression @EvaluateExpression(FDIV,STRING,Valuefme_feature_type,Shapefile1) diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 00000000..5daa5156 --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,74 @@ +site_name: Extract Documentation +site_url: https://asit-asso.github.io/extract/ +theme: + name: material + palette: + primary: custom + + font: + text: Asap + code: JetBrains Mono + + logo: assets/asit_extract_blanc.png + favicon: assets/extract_favicon64.png + + features: + - navigation.sections + - navigation.tabs + - navigation.tabs.sticky + - search.suggest + - toc.integrate + - navigation.path + - content.code.copy + + icon: + repo: fontawesome/brands/github + +repo_url: https://github.com/asit-asso/extract +repo_name: asit-asso/extract + +markdown_extensions: + - attr_list + - pymdownx.emoji: + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg + - admonition + - pymdownx.details + - pymdownx.superfences + - tables + - pymdownx.magiclink + +extra_css: + - stylesheets/extra.css + +nav: + - index.md + - Getting Started: + - getting-started/install.md + - getting-started/configure.md + - getting-started/network-integration.md + - getting-started/customize.md + - getting-started/cybersecurity-win.md + - getting-started/cybersecurity-linux.md + - features/user-guide.md + - features/admin-guide.md + - Dev guide: + - features/development.md + - features/architecture.md + - How-To: + - how-to/extract-viageo.md + - how-to/fme-form.md + - how-to/fme-flow.md + - how-to/python.md + - how-to/qgis-server-atlas.md + - Miscellaneous: + - misc/viageo-test.md + +plugins: + - search + - exclude-search: + exclude: + - mitigations/* + - extract-connector-sample/* + - extract-task-sample/* + - changelog/*