diff --git a/.gitignore b/.gitignore index cbd4fe33eaa9..84f282d0af71 100644 --- a/.gitignore +++ b/.gitignore @@ -64,11 +64,7 @@ web/src/main/webapp/META-INF/MANIFEST.MF web/src/main/webapp/WEB-INF/data/0* web/src/main/webapp/WEB-INF/data/config/encryptor.properties web/src/main/webapp/WEB-INF/data/config/index/records.json -web/src/main/webapp/WEB-INF/data/config/schema_plugins/*/schematron/schematron*.xsl -web/src/main/webapp/WEB-INF/data/config/schema_plugins/csw-record -web/src/main/webapp/WEB-INF/data/config/schema_plugins/dublin-core -web/src/main/webapp/WEB-INF/data/config/schema_plugins/iso19* -web/src/main/webapp/WEB-INF/data/config/schema_plugins/schemaplugin-uri-catalog.xml +web/src/main/webapp/WEB-INF/data/config/schema_plugins/* web/src/main/webapp/WEB-INF/data/config/schemaplugin-uri-catalog.xml web/src/main/webapp/WEB-INF/data/data/backup web/src/main/webapp/WEB-INF/data/data/metadata_data diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 000000000000..1cdaa3768cfb --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,88 @@ +# This CITATION.cff file was generated with cffinit. +# Visit https://bit.ly/cffinit to generate yours today! + +cff-version: 1.2.0 +title: GeoNetwork opensource +message: >- + If you use this software, please cite it using the + metadata from this file. 
+type: software +authors: + - given-names: François + family-names: Prunayre + affiliation: Titellus + - given-names: Jose + family-names: García + affiliation: GeoCat BV + - given-names: Jeroen + family-names: Ticheler + affiliation: GeoCat BV + orcid: 'https://orcid.org/0009-0003-3896-0437' + email: jeroen.ticheler@geocat.net + - given-names: Florent + family-names: Gravin + affiliation: CamptoCamp + - given-names: Simon + family-names: Pigot + affiliation: CSIRO Australia + - name: GeoCat BV + address: Veenderweg 13 + city: Bennekom + country: NL + post-code: 6721 WD + tel: +31 (0) 318 416 664 + website: 'https://www.geocat.net/' + email: info@geocat.net + - name: Titellus + address: 321 Route de la Mollière + city: Saint Pierre de Genebroz + country: FR + post-code: 73360 + website: 'https://titellus.net/' + email: fx.prunayre@titellus.net + - name: CamptoCamp + address: QG Center Rte de la Chaux 4 + city: Bussigny + country: CH + post-code: 1030 + tel: +41 (21) 619 10 10 + website: 'https://camptocamp.com/' + email: info@camptocamp.com + - name: Open Source Geospatial Foundation - OSGeo + address: '9450 SW Gemini Dr. #42523' + location: Beaverton + region: Oregon + post-code: '97008' + country: US + email: info@osgeo.org + website: 'https://www.osgeo.org/' +repository-code: 'http://github.com/geonetwork/core-geonetwork' +url: 'https://geonetwork-opensource.org' +repository-artifact: >- + https://sourceforge.net/projects/geonetwork/files/GeoNetwork_opensource/ +abstract: >- + GeoNetwork is a catalog application to manage spatial and + non-spatial resources. It is compliant with critical + international standards from ISO, OGC and INSPIRE. It + provides powerful metadata editing and search functions as + well as an interactive web map viewer. 
+keywords: + - catalog + - gis + - sdi + - spatial data infrastructure + - dataspace + - search + - open data + - standards + - spatial + - CSW + - OGCAPI Records + - DCAT + - GeoDCAT-AP + - Catalog Service + - OGC + - open geospatial consortium + - osgeo + - open source geospatial foundation +license: GPL-2.0 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 1bff3df03f5d..3e51f82e852a 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,7 +2,11 @@ Thank you for contributing to GeoNetwork! -* Free-software: GeoNetwork is free-software, using the [GNU GENERAL PUBLIC LICENSE](LICENSE.md). Contributions provided by you, or your employer, are required to be compatible with this free-software license. +* Free-software: GeoNetwork is free-software, using the [GNU GENERAL PUBLIC LICENSE](LICENSE.md). +* Contributions provided by you, or your employer, are required to be compatible with this free-software license. You will therefor be asked to sign the [Contributor License Agreement](https://cla-assistant.io/geonetwork/) when you are contributing to the repositories. This process is automatically enabled when you create your first pull request via https://cla-assistant.io/. + +[![CLA assistant](https://cla-assistant.io/readme/badge/geonetwork/geonetwork)](https://cla-assistant.io/geonetwork/geonetwork) + * Pull-request: GeoNetwork uses a pull-request workflow to review and accept changes. Pull-requests must be submitted against the *main* branch first, and may be back ported as required. 
# Pull requests diff --git a/README.md b/README.md index 65f57590c6a1..f6a6527ff5ec 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,6 @@ # GeoNetwork opensource -## Build Health - -[![Build Status](https://github.com/geonetwork/core-geonetwork/actions/workflows/linux.yml/badge.svg?branch=main)](https://github.com/geonetwork/core-geonetwork/actions/workflows/linux.yml?query=branch%3Amain) +[![Build Status](https://github.com/geonetwork/core-geonetwork/actions/workflows/linux.yml/badge.svg?branch=main)](https://github.com/geonetwork/core-geonetwork/actions/workflows/linux.yml?query=branch%3Amain) [![OpenSSF Best Practices](https://www.bestpractices.dev/projects/8626/badge)](https://www.bestpractices.dev/projects/8626) [![CLA assistant](https://cla-assistant.io/readme/badge/geonetwork/geonetwork)](https://cla-assistant.io/geonetwork/geonetwork) ## Features @@ -29,7 +27,4 @@ Developer documentation located in ``README.md`` files in the code-base: * General documentation for the project as a whole is in this [README.md](README.md) * [Software Development Documentation](/software_development/) provides instructions for setting up a development environment, building GeoNetwork, compiling user documentation, and making a releases. 
-* Module specific documentation can be found in each module: - -## Open Source Security Foundation (OpenSSF) best practices status -[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/8626/badge)](https://www.bestpractices.dev/projects/8626) +* Module specific documentation can be found in each module diff --git a/add-schema.sh b/add-schema.sh index 2a2684285302..4f1ecc8c92d3 100755 --- a/add-schema.sh +++ b/add-schema.sh @@ -83,7 +83,7 @@ then ${insertLine} a\\ \ \\ \ org.geonetwork-opensource.schemas\\ -\ schema-${schema}\\ +\ gn-schema-${schema}\\ \ ${gnSchemasVersion}\\ \ SED_SCRIPT @@ -103,7 +103,7 @@ SED_SCRIPT \ \\ \ \\ \ org.geonetwork-opensource.schemas\\ -\ schema-${schema}\\ +\ gn-schema-${schema}\\ \ ${gnSchemasVersion}\\ \ \\ \ \\ @@ -121,7 +121,7 @@ SED_SCRIPT \ \\ \ \\ \ org.geonetwork-opensource.schemas\\ -\ schema-${schema}\\ +\ gn-schema-${schema}\\ \ zip\\ \ false\\ \ \$\{schema-plugins.dir\}\\ @@ -138,7 +138,7 @@ SED_SCRIPT fi # Add schema resources in service/pom.xml with test scope for unit tests -line=$(grep -n "schema-${schema}" services/pom.xml | cut -d: -f1) +line=$(grep -n "gn-schema-${schema}" services/pom.xml | cut -d: -f1) if [ ! $line ] then @@ -154,7 +154,7 @@ then ${finalLine} a\\ \ \\ \ ${projectGroupId}\\ -\ schema-${schema}\\ +\ gn-schema-${schema}\\ \ ${gnSchemasVersion}\\ \ test\\ \ diff --git a/auditable/README.md b/auditable/README.md new file mode 100644 index 000000000000..41f2b36d10c5 --- /dev/null +++ b/auditable/README.md @@ -0,0 +1,14 @@ +# Auditable Module + +The auditable module contains the classes that allow auditing changes in user information using [Hibernate Envers](https://hibernate.org/orm/envers/). + +Support for new auditable entities can be added, for example to audit changes in group information. For users auditing: + +- Entity with the information to audit: [UserAuditable](../domain/src/main/java/org/fao/geonet/domain/auditable/UserAuditable.java). 
+- Related JPA repository: [UserAuditableRepository](../domain/src/main/java/org/fao/geonet/repository/UserAuditableRepository.java). +- The auditable service: [UserAuditableService](src/main/java/org/fao/geonet/auditable/UserAuditableService.java). +- The users API updated to use the auditable service: [UserApi](../services/src/main/java/org/fao/geonet/api/users/UsersApi.java). + + + + diff --git a/auditable/pom.xml b/auditable/pom.xml new file mode 100644 index 000000000000..4d475ecd0db3 --- /dev/null +++ b/auditable/pom.xml @@ -0,0 +1,51 @@ + + + 4.0.0 + + org.geonetwork-opensource + geonetwork + 4.4.7-SNAPSHOT + + + + + + gn-auditable + jar + GeoNetwork auditable objects + + + + General Public License (GPL) + http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt + repo + + + + + + + ${project.groupId} + gn-domain + ${project.version} + + + + ${project.groupId} + gn-core + ${project.version} + + + + org.springframework + spring-core + + + + + ${basedir}/.. + + + diff --git a/auditable/src/main/java/org/fao/geonet/auditable/BaseAuditableService.java b/auditable/src/main/java/org/fao/geonet/auditable/BaseAuditableService.java new file mode 100644 index 000000000000..9268195da1fa --- /dev/null +++ b/auditable/src/main/java/org/fao/geonet/auditable/BaseAuditableService.java @@ -0,0 +1,180 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ +package org.fao.geonet.auditable; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.MapDifference; +import com.google.common.collect.Maps; +import org.fao.geonet.auditable.model.RevisionFieldChange; +import org.fao.geonet.auditable.model.RevisionInfo; +import org.fao.geonet.domain.auditable.AuditableEntity; +import org.fao.geonet.kernel.setting.SettingManager; +import org.fao.geonet.kernel.setting.Settings; +import org.fao.geonet.repository.BaseAuditableRepository; +import org.springframework.data.history.Revision; +import org.springframework.data.history.Revisions; +import org.springframework.util.StringUtils; + +import java.util.*; + +public abstract class BaseAuditableService { + protected static final String LINE_SEPARATOR = System.lineSeparator(); + + protected BaseAuditableRepository repository; + protected SettingManager settingManager; + + public abstract String getEntityType(); + + public void auditSave(U auditableEntity) { + if (!isAuditableEnabled()) return; + + repository.save(auditableEntity); + } + + public void auditDelete(U auditableEntity) { + if (!isAuditableEnabled()) return; + + repository.delete(auditableEntity); + } + + public String getEntityHistoryAsString(Integer entityIdentifier, ResourceBundle messages) { + if (!isAuditableEnabled()) return ""; + + Revisions revisions = repository.findRevisions(entityIdentifier); + + return retrieveRevisionHistoryAsString(revisions, messages); + } + + public List getEntityHistory(Integer entityIdentifier) { + if (!isAuditableEnabled()) return new ArrayList<>(); + + Revisions 
revisions = repository.findRevisions(entityIdentifier); + + return retrieveRevisionHistory(revisions); + } + + + protected String retrieveRevisionHistoryAsString(Revisions revisions, ResourceBundle messages) { + List revisionInfoList = retrieveRevisionHistory(revisions); + + List diffs = new ArrayList<>(); + + revisionInfoList.stream().forEach(revision -> { + List revisionChanges = new ArrayList<>(); + revisionChanges.add(revision.getValue()); + + revision.getChanges().forEach(change -> { + boolean oldValueIsDefined = StringUtils.hasLength(change.getOldValue()); + boolean newValueIsDefined = StringUtils.hasLength(change.getNewValue()); + + if (oldValueIsDefined && newValueIsDefined) { + revisionChanges.add(String.format(messages.getString("audit.revision.field.updated"), + change.getName(), change.getOldValue(), change.getNewValue())); + } else if (!oldValueIsDefined && newValueIsDefined) { + revisionChanges.add(String.format(messages.getString("audit.revision.field.set"), + change.getName(), change.getNewValue())); + } else if (oldValueIsDefined && !newValueIsDefined) { + revisionChanges.add(String.format(messages.getString("audit.revision.field.unset"), change.getName())); + } + }); + + String revisionInfo = String.format(messages.getString("audit.revision"), + revision.getUser(), + revision.getDate(), + String.join(LINE_SEPARATOR, revisionChanges)); + + diffs.add(revisionInfo); + + }); + + return String.join(LINE_SEPARATOR, diffs); + } + + protected List retrieveRevisionHistory(Revisions revisions) { + String idFieldName = "id"; + List> revisionList = revisions.toList(); + int numRevisions = revisions.toList().size(); + + List revisionInfoList = new ArrayList<>(); + + if (numRevisions > 0) { + Revision initialRevision = revisionList.get(0); + AuditableEntity initialRevisionEntity = (AuditableEntity) initialRevision.getEntity(); + + // Initial revision + ObjectMapper objectMapper = new ObjectMapper(); + Map revisionMap = 
objectMapper.convertValue(initialRevision.getEntity(), Map.class); + // Remove empty values and id + revisionMap.values().removeAll(Arrays.asList("", null)); + revisionMap.remove(idFieldName); + + RevisionInfo initialRevisionInfo = new RevisionInfo( + initialRevision.getMetadata().getRequiredRevisionNumber(), + initialRevisionEntity.getCreatedBy(), + initialRevisionEntity.getCreatedDate(), revisionMap.toString()); + + revisionInfoList.add(initialRevisionInfo); + + int i = 0; + while (i + 1 < numRevisions) { + Revision revision1 = revisionList.get(i); + Revision revision2 = revisionList.get(i + 1); + + Map revision1Map = objectMapper.convertValue(revision1.getEntity(), Map.class); + revision1Map.remove(idFieldName); + Map revision2Map = objectMapper.convertValue(revision2.getEntity(), Map.class); + revision2Map.remove(idFieldName); + + MapDifference diff = Maps.difference(revision1Map, revision2Map); + + revision2Map.values().removeAll(Arrays.asList("", null)); + + final RevisionInfo revisionInfo = new RevisionInfo( + revision2.getMetadata().getRequiredRevisionNumber(), + ((AuditableEntity) revision2.getEntity()).getLastModifiedBy(), + ((AuditableEntity) revision2.getEntity()).getLastModifiedDate(), + revision2Map.toString()); + + diff.entriesDiffering().forEach((key, entry) -> { + String oldValueAsString = (entry.leftValue() != null) ? entry.leftValue().toString() : ""; + String newValueAsString = (entry.rightValue() != null) ? 
entry.rightValue().toString() : ""; + + RevisionFieldChange revisionFieldChange = new RevisionFieldChange(key, oldValueAsString, newValueAsString); + + revisionInfo.addChange(revisionFieldChange); + }); + + + revisionInfoList.add(revisionInfo); + i++; + } + } + + Collections.reverse(revisionInfoList); + return revisionInfoList; + } + + protected boolean isAuditableEnabled() { + return settingManager.getValueAsBool(Settings.SYSTEM_AUDITABLE_ENABLE, false); + } +} diff --git a/auditable/src/main/java/org/fao/geonet/auditable/UserAuditableService.java b/auditable/src/main/java/org/fao/geonet/auditable/UserAuditableService.java new file mode 100644 index 000000000000..2d00b8243601 --- /dev/null +++ b/auditable/src/main/java/org/fao/geonet/auditable/UserAuditableService.java @@ -0,0 +1,45 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ + +package org.fao.geonet.auditable; + +import org.fao.geonet.domain.auditable.UserAuditable; +import org.fao.geonet.kernel.setting.SettingManager; +import org.fao.geonet.repository.UserAuditableRepository; +import org.springframework.stereotype.Service; + +@Service +public class UserAuditableService extends BaseAuditableService { + + public static final String ENTITY_TYPE = "user"; + + public UserAuditableService(SettingManager settingManager, UserAuditableRepository repository) { + this.settingManager = settingManager; + this.repository = repository; + } + + @Override + public String getEntityType() { + return ENTITY_TYPE; + } +} diff --git a/auditable/src/main/java/org/fao/geonet/auditable/model/RevisionFieldChange.java b/auditable/src/main/java/org/fao/geonet/auditable/model/RevisionFieldChange.java new file mode 100644 index 000000000000..a55bfe5cea0e --- /dev/null +++ b/auditable/src/main/java/org/fao/geonet/auditable/model/RevisionFieldChange.java @@ -0,0 +1,51 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ +package org.fao.geonet.auditable.model; + +/** + * This class represents a change in an entity field. It stores the field name, + * the previous and the new value. + */ +public class RevisionFieldChange { + private final String name; + private final String oldValue; + private final String newValue; + + public RevisionFieldChange(String name, String oldValue, String newValue) { + this.name = name; + this.oldValue = oldValue; + this.newValue = newValue; + } + + public String getName() { + return name; + } + + public String getOldValue() { + return oldValue; + } + + public String getNewValue() { + return newValue; + } +} diff --git a/auditable/src/main/java/org/fao/geonet/auditable/model/RevisionInfo.java b/auditable/src/main/java/org/fao/geonet/auditable/model/RevisionInfo.java new file mode 100644 index 000000000000..3441a6e8c23c --- /dev/null +++ b/auditable/src/main/java/org/fao/geonet/auditable/model/RevisionInfo.java @@ -0,0 +1,72 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ +package org.fao.geonet.auditable.model; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Date; +import java.util.List; +import org.fao.geonet.domain.ISODate; + +public class RevisionInfo { + private final int revisionNumber; + private final String user; + private final String date; + private final String value; + private final List changes; + + public RevisionInfo(int revisionNumber, String user, Date date, String value) { + this.revisionNumber = revisionNumber; + this.user = user; + this.date = new ISODate(date.getTime()).toString(); + this.value = value; + this.changes = new ArrayList<>(); + } + + public int getRevisionNumber() { + return revisionNumber; + } + + public String getUser() { + return user; + } + + public String getDate() { + return date; + } + + public String getValue() { + return value; + } + + /** + * @return an unmodifiable view of the list of changes. 
+ */ + public List getChanges() { + return Collections.unmodifiableList(changes); + } + + public void addChange(RevisionFieldChange change) { + changes.add(change); + } +} diff --git a/auditable/src/main/resources/config-spring-geonetwork.xml b/auditable/src/main/resources/config-spring-geonetwork.xml new file mode 100644 index 000000000000..61411f180ac0 --- /dev/null +++ b/auditable/src/main/resources/config-spring-geonetwork.xml @@ -0,0 +1,34 @@ + + + + + + + + diff --git a/auditable/src/test/java/org/fao/geonet/auditable/model/UserAuditableTest.java b/auditable/src/test/java/org/fao/geonet/auditable/model/UserAuditableTest.java new file mode 100644 index 000000000000..0b6a47b8393a --- /dev/null +++ b/auditable/src/test/java/org/fao/geonet/auditable/model/UserAuditableTest.java @@ -0,0 +1,89 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ +package org.fao.geonet.auditable.model; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import org.fao.geonet.domain.Group; +import org.fao.geonet.domain.Profile; +import org.fao.geonet.domain.User; +import org.fao.geonet.domain.UserGroup; +import org.fao.geonet.domain.auditable.UserAuditable; +import org.junit.Test; +import org.springframework.util.StringUtils; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +public class UserAuditableTest { + + @Test + public void testBuildUserAuditable() { + Group group = new Group().setId(1).setName("sample"); + Group group2 = new Group().setId(2).setName("sampleGroup2"); + + + User user = new User() + .setId(1) + .setName("name") + .setSurname("surname") + .setUsername("username") + .setEnabled(true) + .setEmailAddresses(new HashSet<>(List.of("test@mail.com"))) + .setProfile(Profile.Reviewer); + + + List userGroupList = new ArrayList<>(); + UserGroup userGroup1 = new UserGroup() + .setGroup(group) + .setUser(user) + .setProfile(Profile.Editor); + + UserGroup userGroup2 = new UserGroup() + .setGroup(group2) + .setUser(user) + .setProfile(Profile.Reviewer); + + userGroupList.add(userGroup1); + userGroupList.add(userGroup2); + + UserAuditable userAuditable = UserAuditable.build(user, userGroupList); + + assertEquals(user.getId(), userAuditable.getId()); + assertEquals(user.isEnabled(), userAuditable.isEnabled()); + assertEquals(user.getName(), userAuditable.getName()); + assertEquals(user.getSurname(), userAuditable.getSurname()); + assertEquals(user.getUsername(), userAuditable.getUsername()); + assertEquals(user.getEmailAddresses().toArray()[0], userAuditable.getEmailAddress()); + assertEquals(user.getProfile().toString(), userAuditable.getProfile()); + assertFalse(StringUtils.hasLength(userAuditable.getGroupsRegisteredUser())); + 
assertTrue(userAuditable.getGroupsEditor().contains(group.getName())); + assertTrue(userAuditable.getGroupsReviewer().contains(group2.getName())); + assertFalse(StringUtils.hasLength(userAuditable.getGroupsUserAdmin())); + + assertEquals(group.getName(), userAuditable.getGroupsEditor()); + assertEquals(group2.getName(), userAuditable.getGroupsReviewer()); + } +} diff --git a/cachingxslt/pom.xml b/cachingxslt/pom.xml index 5e9618424966..897ff9707542 100644 --- a/cachingxslt/pom.xml +++ b/cachingxslt/pom.xml @@ -31,7 +31,7 @@ org.geonetwork-opensource geonetwork - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT diff --git a/common/pom.xml b/common/pom.xml index 631e5df8dad7..2c2e9083ba98 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -31,7 +31,7 @@ org.geonetwork-opensource geonetwork - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT diff --git a/common/src/main/java/org/fao/geonet/utils/XmlRequest.java b/common/src/main/java/org/fao/geonet/utils/XmlRequest.java index 7b6a3b69c598..cba8608a5567 100644 --- a/common/src/main/java/org/fao/geonet/utils/XmlRequest.java +++ b/common/src/main/java/org/fao/geonet/utils/XmlRequest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -124,13 +124,13 @@ protected final Element executeAndReadResponse(HttpRequestBase httpMethod) throw " -- Response Code: " + httpResponse.getRawStatusCode()); } - byte[] data = null; + byte[] data; try { data = IOUtils.toByteArray(httpResponse.getBody()); return Xml.loadStream(new ByteArrayInputStream(data)); } catch (JDOMException e) { - throw new BadXmlResponseEx("Response: '" + new String(data, "UTF8") + "' (from URI " + httpMethod.getURI() + ")"); + throw new BadXmlResponseEx("Invalid XML document from URI: " + httpMethod.getURI()); } finally { httpMethod.releaseConnection(); diff --git 
a/core/pom.xml b/core/pom.xml index 30f84917cfc5..9600887d5928 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 @@ -35,6 +35,13 @@ GeoNetwork core + + + org.geoserver.community.jwt-headers + jwt-headers-util + 2.27-SNAPSHOT + + net.objecthunter exp4j @@ -304,6 +311,10 @@ org.geotools gt-geojson + + com.jayway.jsonpath + json-path + org.locationtech.jts jts-core @@ -565,6 +576,11 @@ xmlunit-core test + + org.springframework.data + spring-data-envers + test + com.fasterxml.jackson.datatype jackson-datatype-hibernate5 diff --git a/core/src/main/java/jeeves/component/ProfileManager.java b/core/src/main/java/jeeves/component/ProfileManager.java index 3e93896722b7..25ec6e179fd7 100644 --- a/core/src/main/java/jeeves/component/ProfileManager.java +++ b/core/src/main/java/jeeves/component/ProfileManager.java @@ -65,11 +65,11 @@ public class ProfileManager { */ public static Profile getLowestProfile(String[] profiles) { Profile lowestProfile = null; - int numberOfProfilesExtended = Profile.Administrator.getAll().size(); + int numberOfProfilesExtended = Profile.Administrator.getProfileAndAllChildren().size(); for (String profileName : profiles) { Profile p = Profile.valueOf(profileName); - Set currentProfileSet = p.getAll(); + Set currentProfileSet = p.getProfileAndAllChildren(); if (currentProfileSet.size() < numberOfProfilesExtended) { lowestProfile = p; numberOfProfilesExtended = currentProfileSet.size(); @@ -89,7 +89,7 @@ public static Profile getHighestProfile(Profile[] profiles) { int numberOfProfilesExtended = 0; for (Profile profile : profiles) { - Set all = profile.getAll(); + Set all = profile.getProfileAndAllChildren(); if (all.size() > numberOfProfilesExtended) { highestProfile = profile; numberOfProfilesExtended = all.size(); diff --git a/core/src/main/java/org/fao/geonet/api/exception/GeonetMaxUploadSizeExceededException.java 
b/core/src/main/java/org/fao/geonet/api/exception/GeonetMaxUploadSizeExceededException.java new file mode 100644 index 000000000000..46d94ef3aac1 --- /dev/null +++ b/core/src/main/java/org/fao/geonet/api/exception/GeonetMaxUploadSizeExceededException.java @@ -0,0 +1,81 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ + +package org.fao.geonet.api.exception; + +import java.util.Locale; + +import org.fao.geonet.exceptions.LocalizedException; + +public class GeonetMaxUploadSizeExceededException extends LocalizedException { + + public GeonetMaxUploadSizeExceededException() { + super(); + } + + public GeonetMaxUploadSizeExceededException(String message) { + super(message); + } + + public GeonetMaxUploadSizeExceededException(String message, Throwable cause) { + super(message, cause); + } + + public GeonetMaxUploadSizeExceededException(Throwable cause) { + super(cause); + } + + protected String getResourceBundleBeanQualifier() { + return "apiMessages"; + } + + @Override + public GeonetMaxUploadSizeExceededException withMessageKey(String messageKey) { + super.withMessageKey(messageKey); + return this; + } + + @Override + public GeonetMaxUploadSizeExceededException withMessageKey(String messageKey, Object[] messageKeyArgs) { + super.withMessageKey(messageKey, messageKeyArgs); + return this; + } + + @Override + public GeonetMaxUploadSizeExceededException withDescriptionKey(String descriptionKey) { + super.withDescriptionKey(descriptionKey); + return this; + } + + @Override + public GeonetMaxUploadSizeExceededException withDescriptionKey(String descriptionKey, Object[] descriptionKeyArgs) { + super.withDescriptionKey(descriptionKey, descriptionKeyArgs); + return this; + } + + @Override + public GeonetMaxUploadSizeExceededException withLocale(Locale locale) { + super.withLocale(locale); + return this; + } +} diff --git a/core/src/main/java/org/fao/geonet/api/exception/InputStreamLimitExceededException.java b/core/src/main/java/org/fao/geonet/api/exception/InputStreamLimitExceededException.java new file mode 100644 index 000000000000..d8be7ce9c483 --- /dev/null +++ b/core/src/main/java/org/fao/geonet/api/exception/InputStreamLimitExceededException.java @@ -0,0 +1,62 @@ +//============================================================================= +//=== 
Copyright (C) 2001-2025 Food and Agriculture Organization of the +//=== United Nations (FAO-UN), United Nations World Food Programme (WFP) +//=== and United Nations Environment Programme (UNEP) +//=== +//=== This library is free software; you can redistribute it and/or +//=== modify it under the terms of the GNU Lesser General Public +//=== License as published by the Free Software Foundation; either +//=== version 2.1 of the License, or (at your option) any later version. +//=== +//=== This library is distributed in the hope that it will be useful, +//=== but WITHOUT ANY WARRANTY; without even the implied warranty of +//=== MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +//=== Lesser General Public License for more details. +//=== +//=== You should have received a copy of the GNU Lesser General Public +//=== License along with this library; if not, write to the Free Software +//=== Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +//=== +//=== Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, +//=== Rome - Italy. email: geonetwork@osgeo.org +//============================================================================== + +package org.fao.geonet.api.exception; + +import org.springframework.web.multipart.MaxUploadSizeExceededException; + +/** + * Custom exception to be thrown when the size of a remote file to be uploaded to the store exceeds the maximum upload size. + */ +public class InputStreamLimitExceededException extends MaxUploadSizeExceededException { + private final long remoteFileSize; + + /** + * Create a new InputStreamLimitExceededException with an unknown remote file size. + * + * @param maxUploadSize the maximum upload size allowed + */ + public InputStreamLimitExceededException(long maxUploadSize) { + this(maxUploadSize, -1L); + } + + /** + * Create a new InputStreamLimitExceededException with a known remote file size. 
+ * + * @param maxUploadSize the maximum upload size allowed + * @param remoteFileSize the size of the remote file + */ + public InputStreamLimitExceededException(long maxUploadSize, long remoteFileSize) { + super(maxUploadSize); + this.remoteFileSize = remoteFileSize; + } + + /** + * Get the size of the remote file. + * + * @return the size of the remote file or -1 if the size is unknown + */ + public long getRemoteFileSize() { + return this.remoteFileSize; + } +} diff --git a/core/src/main/java/org/fao/geonet/api/records/attachments/AbstractStore.java b/core/src/main/java/org/fao/geonet/api/records/attachments/AbstractStore.java index c5291a59bbf3..d9385ee94185 100644 --- a/core/src/main/java/org/fao/geonet/api/records/attachments/AbstractStore.java +++ b/core/src/main/java/org/fao/geonet/api/records/attachments/AbstractStore.java @@ -1,6 +1,6 @@ /* * ============================================================================= - * === Copyright (C) 2019 Food and Agriculture Organization of the + * === Copyright (C) 2024 Food and Agriculture Organization of the * === United Nations (FAO-UN), United Nations World Food Programme (WFP) * === and United Nations Environment Programme (UNEP) * === @@ -28,6 +28,7 @@ import org.apache.commons.io.FilenameUtils; import org.fao.geonet.ApplicationContextHolder; import org.fao.geonet.api.exception.NotAllowedException; +import org.fao.geonet.api.exception.InputStreamLimitExceededException; import org.fao.geonet.api.exception.ResourceNotFoundException; import org.fao.geonet.domain.AbstractMetadata; import org.fao.geonet.domain.MetadataResource; @@ -35,21 +36,37 @@ import org.fao.geonet.kernel.AccessManager; import org.fao.geonet.kernel.datamanager.IMetadataUtils; import org.fao.geonet.repository.MetadataRepository; +import org.fao.geonet.util.LimitedInputStream; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Value; import 
org.springframework.context.ApplicationContext; +import org.springframework.http.ContentDisposition; +import org.springframework.http.HttpHeaders; import org.springframework.web.multipart.MultipartFile; import java.io.BufferedInputStream; +import java.io.IOException; import java.io.InputStream; +import java.net.HttpURLConnection; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; +import java.util.Base64; import java.util.List; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; public abstract class AbstractStore implements Store { + protected static final String RESOURCE_MANAGEMENT_EXTERNAL_PROPERTIES_SEPARATOR = ":"; + protected static final String RESOURCE_MANAGEMENT_EXTERNAL_PROPERTIES_ESCAPED_SEPARATOR = "\\:"; + private static final Logger log = LoggerFactory.getLogger(AbstractStore.class); + + @Value("${api.params.maxUploadSize}") + protected int maxUploadSize; + @Override public final List getResources(final ServiceContext context, final String metadataUuid, final Sort sort, final String filter) throws Exception { @@ -194,7 +211,38 @@ public final MetadataResource putResource(ServiceContext context, String metadat @Override public final MetadataResource putResource(ServiceContext context, String metadataUuid, URL fileUrl, MetadataResourceVisibility visibility, Boolean approved) throws Exception { - return putResource(context, metadataUuid, getFilenameFromUrl(fileUrl), fileUrl.openStream(), null, visibility, approved); + + // Open a connection to the URL + HttpURLConnection connection = (HttpURLConnection) fileUrl.openConnection(); + connection.setInstanceFollowRedirects(true); + connection.setRequestMethod("GET"); + + // Check if the response code is OK + int responseCode = connection.getResponseCode(); + if (responseCode != HttpURLConnection.HTTP_OK) { + throw new IOException("Unexpected response code: " + responseCode); + } + + // Extract 
filename from Content-Disposition header if present otherwise use the filename from the URL + String contentDisposition = connection.getHeaderField(HttpHeaders.CONTENT_DISPOSITION); + String filename = null; + if (contentDisposition != null) { + filename = ContentDisposition.parse(contentDisposition).getFilename(); + } + if (filename == null || filename.isEmpty()) { + filename = getFilenameFromUrl(fileUrl); + } + + // Check if the content length is within the allowed limit + long contentLength = connection.getContentLengthLong(); + if (contentLength > maxUploadSize) { + throw new InputStreamLimitExceededException(maxUploadSize, contentLength); + } + + // Upload the resource while ensuring the input stream does not exceed the maximum allowed size. + try (LimitedInputStream is = new LimitedInputStream(connection.getInputStream(), maxUploadSize)) { + return putResource(context, metadataUuid, filename, is, null, visibility, approved); + } } @Override @@ -279,4 +327,28 @@ public String toString() { } }; } + + private String escapeResourceManagementExternalProperties(String value) { + return value.replace(RESOURCE_MANAGEMENT_EXTERNAL_PROPERTIES_SEPARATOR, RESOURCE_MANAGEMENT_EXTERNAL_PROPERTIES_ESCAPED_SEPARATOR); + } + + /** + * Create an encoded base 64 object id contains the following fields to uniquely identify the resource + * The fields are separated by a colon ":" + * @param type to identify type of storage - document/folder + * @param visibility of the resource public/private + * @param metadataId internal metadata id + * @param version identifier which can be used to directly get this version. 
+ * @param resourceId or filename of the resource + * @return based 64 object id + */ + protected String getResourceManagementExternalPropertiesObjectId(final String type, final MetadataResourceVisibility visibility, final Integer metadataId, final String version, + final String resourceId) { + return Base64.getEncoder().encodeToString( + ((type + RESOURCE_MANAGEMENT_EXTERNAL_PROPERTIES_SEPARATOR + + escapeResourceManagementExternalProperties(visibility == null ? "" : visibility.toString().toLowerCase()) + RESOURCE_MANAGEMENT_EXTERNAL_PROPERTIES_SEPARATOR + + metadataId + RESOURCE_MANAGEMENT_EXTERNAL_PROPERTIES_SEPARATOR + + escapeResourceManagementExternalProperties(version == null ? "" : version) + RESOURCE_MANAGEMENT_EXTERNAL_PROPERTIES_SEPARATOR + + escapeResourceManagementExternalProperties(resourceId)).getBytes())); + } } diff --git a/core/src/main/java/org/fao/geonet/api/records/attachments/FilesystemStore.java b/core/src/main/java/org/fao/geonet/api/records/attachments/FilesystemStore.java index fb0577bc8bde..be110b8af042 100644 --- a/core/src/main/java/org/fao/geonet/api/records/attachments/FilesystemStore.java +++ b/core/src/main/java/org/fao/geonet/api/records/attachments/FilesystemStore.java @@ -26,6 +26,7 @@ package org.fao.geonet.api.records.attachments; import jeeves.server.context.ServiceContext; +import org.fao.geonet.api.exception.InputStreamLimitExceededException; import org.fao.geonet.api.exception.ResourceAlreadyExistException; import org.fao.geonet.api.exception.ResourceNotFoundException; import org.fao.geonet.constants.Geonet; @@ -202,7 +203,12 @@ public MetadataResource putResource(final ServiceContext context, final String m int metadataId = canEdit(context, metadataUuid, approved); checkResourceId(filename); Path filePath = getPath(context, metadataId, visibility, filename, approved); - Files.copy(is, filePath, StandardCopyOption.REPLACE_EXISTING); + try { + Files.copy(is, filePath, StandardCopyOption.REPLACE_EXISTING); + } catch 
(InputStreamLimitExceededException e) { + Files.deleteIfExists(filePath); + throw e; + } if (changeDate != null) { IO.touch(filePath, FileTime.from(changeDate.getTime(), TimeUnit.MILLISECONDS)); } @@ -234,20 +240,26 @@ public String delResources(ServiceContext context, int metadataId) throws Except try { Log.info(Geonet.RESOURCES, String.format("Deleting all files from metadataId '%d'", metadataId)); IO.deleteFileOrDirectory(metadataDir, true); - return String.format("Metadata '%s' directory removed.", metadataId); + Log.info(Geonet.RESOURCES, + String.format("Metadata '%d' directory removed.", metadataId)); + return String.format("Metadata '%d' directory removed.", metadataId); } catch (Exception e) { - return String.format("Unable to remove metadata '%s' directory.", metadataId); + return String.format("Unable to remove metadata '%d' directory.", metadataId); } } @Override public String delResource(ServiceContext context, String metadataUuid, String resourceId, Boolean approved) throws Exception { - canEdit(context, metadataUuid, approved); + int metadataId = canEdit(context, metadataUuid, approved); try (ResourceHolder filePath = getResource(context, metadataUuid, resourceId, approved)) { Files.deleteIfExists(filePath.getPath()); - return String.format("MetadataResource '%s' removed.", resourceId); + Log.info(Geonet.RESOURCES, + String.format("Resource '%s' removed for metadata %d (%s).", resourceId, metadataId, metadataUuid)); + return String.format("Metadata resource '%s' removed.", resourceId); } catch (IOException e) { + Log.warning(Geonet.RESOURCES, + String.format("Unable to remove resource '%s' for metadata %d (%s). 
%s", resourceId, metadataId, metadataUuid, e.getMessage())); return String.format("Unable to remove resource '%s'.", resourceId); } } @@ -255,12 +267,16 @@ public String delResource(ServiceContext context, String metadataUuid, String re @Override public String delResource(final ServiceContext context, final String metadataUuid, final MetadataResourceVisibility visibility, final String resourceId, Boolean approved) throws Exception { - canEdit(context, metadataUuid, approved); + int metadataId = canEdit(context, metadataUuid, approved); try (ResourceHolder filePath = getResource(context, metadataUuid, visibility, resourceId, approved)) { Files.deleteIfExists(filePath.getPath()); - return String.format("MetadataResource '%s' removed.", resourceId); + Log.info(Geonet.RESOURCES, + String.format("Resource '%s' removed for metadata %d (%s).", resourceId, metadataId, metadataUuid)); + return String.format("Metadata resource '%s' removed.", resourceId); } catch (IOException e) { + Log.warning(Geonet.RESOURCES, + String.format("Unable to remove resource '%s' for metadata %d (%s). %s", resourceId, metadataId, metadataUuid, e.getMessage())); return String.format("Unable to remove resource '%s'.", resourceId); } } diff --git a/core/src/main/java/org/fao/geonet/kernel/AccessManager.java b/core/src/main/java/org/fao/geonet/kernel/AccessManager.java index 4c8820b24883..3d9e77f356b7 100644 --- a/core/src/main/java/org/fao/geonet/kernel/AccessManager.java +++ b/core/src/main/java/org/fao/geonet/kernel/AccessManager.java @@ -509,6 +509,37 @@ private boolean hasEditingPermissionWithProfile(final ServiceContext context, fi } + /** + * Checks if the user has the specified profile or any profile with greater permissions within the group. + * + * @param context The service context containing the user's session. + * @param profile The profile to be verified. + * @param groupId The ID of the group in which the user's profile is to be verified. 
+ * @return true if the user has the specified profile (or greater) within the group; false otherwise. + */ + public boolean isProfileOrMoreOnGroup(final ServiceContext context, Profile profile, final int groupId) { + UserSession us = context.getUserSession(); + if (!isUserAuthenticated(us)) { + return false; + } + + // Grant access if the user is a global administrator + if (Profile.Administrator == us.getProfile()) { + return true; + } + + // Get the profile and all its parent profiles to consider higher-level permissions + Set acceptedProfiles = profile.getProfileAndAllParents(); + + // Build a specification to fetch any accepted profiles for the user in the specified group + Specification spec = Specification.where(UserGroupSpecs.hasUserId(us.getUserIdAsInt())) + .and(UserGroupSpecs.hasGroupId(groupId)) + .and(UserGroupSpecs.hasProfileIn(acceptedProfiles)); + List userGroups = userGroupRepository.findAll(spec); + + return !userGroups.isEmpty(); + } + public int getPrivilegeId(final String name) { final Operation op = operationRepository.findByName(name); if (op == null) { diff --git a/core/src/main/java/org/fao/geonet/kernel/EditLib.java b/core/src/main/java/org/fao/geonet/kernel/EditLib.java index 873b9c3bcdf9..ce3733fdc85b 100644 --- a/core/src/main/java/org/fao/geonet/kernel/EditLib.java +++ b/core/src/main/java/org/fao/geonet/kernel/EditLib.java @@ -3,7 +3,7 @@ //=== EditLib //=== //============================================================================= -//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -35,7 +35,6 @@ import java.io.StringReader; import java.util.*; import java.util.concurrent.ConcurrentHashMap; -import java.util.stream.Collectors; import java.util.stream.Stream; import 
org.apache.commons.collections.CollectionUtils; @@ -50,6 +49,7 @@ import org.fao.geonet.kernel.schema.MetadataAttribute; import org.fao.geonet.kernel.schema.MetadataSchema; import org.fao.geonet.kernel.schema.MetadataType; +import org.fao.geonet.kernel.schema.MultilingualSchemaPlugin; import org.fao.geonet.kernel.schema.SchemaPlugin; import org.fao.geonet.utils.Xml; import org.jaxen.JaxenException; @@ -223,6 +223,47 @@ public Element addElement(MetadataSchema mdSchema, Element el, String qname) thr return child; } + /** + * Creates an element with a name (qname) as child of an element (el). + * + * If the element to create is multilingual and the related metadata schemas requires to duplicate it, + * one child per metadata language is added. + * + * See {@link org.fao.geonet.kernel.schema.MultilingualSchemaPlugin#duplicateElementsForMultilingual()} + * + * @param mdSchema Metadata schema + * @param el Parent element to add the new element. + * @param qname Name of the new element to add. + * @param languages Languages to add to the new elements. + * @return List of child elements added. For non-multilingual, contains 1 element. 
+ * @throws Exception + */ + public List addElements(MetadataSchema mdSchema, Element el, + String qname, List languages) throws Exception { + + List result = new ArrayList<>(); + + if (mdSchema.getSchemaPlugin() instanceof MultilingualSchemaPlugin) { + MultilingualSchemaPlugin multilingualSchemaPlugin = (MultilingualSchemaPlugin) mdSchema.getSchemaPlugin(); + + if (!languages.isEmpty() && + multilingualSchemaPlugin.duplicateElementsForMultilingual() && + multilingualSchemaPlugin.isMultilingualElementType(mdSchema.getElementType(qname, el.getName()))) { + for(String language : languages) { + Element child = addElement(mdSchema, el, qname); + ((MultilingualSchemaPlugin) mdSchema.getSchemaPlugin()).addTranslationToElement(child, language, ""); + result.add(child); + } + return result; + } + } + + // If no multilingual management is required, process the single element. + result.add(addElement(mdSchema, el, qname)); + + return result; + } + /** * Adds XML fragment to the metadata record in the last element of the type of the element in * its parent. 
@@ -968,7 +1009,7 @@ public void clearVersion(String id) { //-------------------------------------------------------------------------- private List filterOnQname(List children, String qname) { - Vector result = new Vector(); + Vector result = new Vector<>(); for (Element child : children) { if (child.getQualifiedName().equals(qname)) { result.add(child); @@ -1166,7 +1207,7 @@ public List searchChildren(String chName, Element md, String schema) th // boolean hasContent = false; - Vector holder = new Vector(); + Vector holder = new Vector<>(); MetadataSchema mdSchema = scm.getSchema(schema); String chUQname = getUnqualifiedName(chName); @@ -1222,12 +1263,12 @@ public void expandElements(String schema, Element md) throws Exception { MetadataType thisType = mdSchema.getTypeInfo(typeName); if (thisType.hasContainers) { - Vector holder = new Vector(); + Vector holder = new Vector<>(); for (String chName: thisType.getAlElements()) { if (edit_CHOICE_GROUP_SEQUENCE_in(chName)) { List elems = searchChildren(chName, md, schema); - if (elems.size() > 0) { + if (!elems.isEmpty()) { holder.addAll(elems); } } else { @@ -1246,7 +1287,7 @@ public void expandElements(String schema, Element md) throws Exception { * For each container element - descend and collect children. 
*/ private Vector getContainerChildren(Element md) { - Vector result = new Vector(); + Vector result = new Vector<>(); @SuppressWarnings("unchecked") List chChilds = md.getChildren(); @@ -1268,7 +1309,7 @@ private Vector getContainerChildren(Element md) { public void contractElements(Element md) { //--- contract container children at each level in the XML tree - Vector children = new Vector(); + Vector children = new Vector<>(); @SuppressWarnings("unchecked") List childs = md.getContent(); for (Content obj : childs) { @@ -1276,9 +1317,9 @@ public void contractElements(Element md) { Element mdCh = (Element) obj; String mdName = mdCh.getName(); if (edit_CHOICE_GROUP_SEQUENCE_in(mdName)) { - if (mdCh.getChildren().size() > 0) { + if (!mdCh.getChildren().isEmpty()) { Vector chChilds = getContainerChildren(mdCh); - if (chChilds.size() > 0) { + if (!chChilds.isEmpty()) { children.addAll(chChilds); } } @@ -1525,7 +1566,7 @@ private void insertLast(Element md, String childName, String childNS, Element ch @SuppressWarnings("unchecked") List list = md.getChildren(); - List v = new ArrayList(); + List v = new ArrayList<>(); for (int i = 0; i < list.size(); i++) { Element el = list.get(i); @@ -1606,7 +1647,8 @@ public Element createElement(String schema, Element child, Element parent) throw MetadataSchema mds = scm.getSchema(schema); MetadataType mdt = getType(mds, parent); - int min = -1, max = -1; + int min = -1; + int max = -1; for (int i = 0; i < mdt.getElementCount(); i++) { if (childQName.equals(mdt.getElementAt(i))) { diff --git a/core/src/main/java/org/fao/geonet/kernel/GeonetworkDataDirectory.java b/core/src/main/java/org/fao/geonet/kernel/GeonetworkDataDirectory.java index 86a0cdca444b..cc5296232bd6 100644 --- a/core/src/main/java/org/fao/geonet/kernel/GeonetworkDataDirectory.java +++ b/core/src/main/java/org/fao/geonet/kernel/GeonetworkDataDirectory.java @@ -27,8 +27,11 @@ import jeeves.server.sources.http.JeevesServlet; import 
org.fao.geonet.ApplicationContextHolder; import org.fao.geonet.constants.Geonet; +import org.fao.geonet.exceptions.BadParameterEx; +import org.fao.geonet.utils.FilePathChecker; import org.fao.geonet.utils.IO; import org.fao.geonet.utils.Log; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationEvent; import org.springframework.context.ConfigurableApplicationContext; @@ -63,6 +66,9 @@ public class GeonetworkDataDirectory { */ public static final String GEONETWORK_BEAN_KEY = "GeonetworkDataDirectory"; + @Autowired + SchemaManager schemaManager; + private Path webappDir; private Path systemDataDir; private Path indexConfigDir; @@ -797,11 +803,18 @@ public Path getXsltConversion(String conversionId) { if (conversionId.startsWith(IMPORT_STYLESHEETS_SCHEMA_PREFIX)) { String[] pathToken = conversionId.split(":"); if (pathToken.length == 3) { + String schema = pathToken[1]; + if (!schemaManager.existsSchema(schema)) { + throw new BadParameterEx(String.format( + "Conversion not found. Schema '%s' is not registered in this catalog.", schema)); + } + FilePathChecker.verify(pathToken[2]); return this.getSchemaPluginsDir() .resolve(pathToken[1]) .resolve(pathToken[2] + ".xsl"); } } else { + FilePathChecker.verify(conversionId); return this.getWebappDir().resolve(Geonet.Path.IMPORT_STYLESHEETS). resolve(conversionId + ".xsl"); } diff --git a/core/src/main/java/org/fao/geonet/kernel/SchemaManager.java b/core/src/main/java/org/fao/geonet/kernel/SchemaManager.java index 4139d045ac53..18742c864948 100644 --- a/core/src/main/java/org/fao/geonet/kernel/SchemaManager.java +++ b/core/src/main/java/org/fao/geonet/kernel/SchemaManager.java @@ -76,6 +76,7 @@ import java.util.Map; import java.util.Set; import java.util.regex.Pattern; +import java.util.stream.Collectors; /** * Class that handles all functions relating to metadata schemas. 
This includes @@ -106,6 +107,7 @@ public class SchemaManager { private static int activeWriters = 0; private Map hmSchemas = new HashMap<>(); private Map hmSchemasTypenames = new HashMap<>(); + private Map cswOutputSchemas = new HashMap<>(); private String[] fnames = {"labels.xml", "codelists.xml", "strings.xml"}; private Path schemaPluginsDir; private Path schemaPluginsCat; @@ -958,6 +960,7 @@ private void addSchema(ApplicationContext applicationContext, Path schemaDir, El if (mds.getSchemaPlugin() != null && mds.getSchemaPlugin().getCswTypeNames() != null) { hmSchemasTypenames.putAll(mds.getSchemaPlugin().getCswTypeNames()); + cswOutputSchemas.putAll(mds.getSchemaPlugin().getOutputSchemas()); } // -- add cached xml files (schema codelists and label files) @@ -1925,17 +1928,17 @@ public Map getHmSchemasTypenames() { } /** - * Return the list of namespace URI of all typenames declared in all schema plugins. + * Return the list of outputSchema declared in all schema plugins. + */ + public Map getOutputSchemas() { + return cswOutputSchemas; + } + + /** + * Return the list of namespace URI of all outputSchema declared in all schema plugins. 
*/ public List getListOfOutputSchemaURI() { - Iterator iterator = hmSchemasTypenames.keySet().iterator(); - List listOfSchemaURI = new ArrayList<>(); - while (iterator.hasNext()) { - String typeLocalName = iterator.next(); - Namespace ns = hmSchemasTypenames.get(typeLocalName); - listOfSchemaURI.add(ns.getURI()); - } - return listOfSchemaURI; + return new ArrayList<>(cswOutputSchemas.values()); } /** diff --git a/core/src/main/java/org/fao/geonet/kernel/SelectionManager.java b/core/src/main/java/org/fao/geonet/kernel/SelectionManager.java index a42a9e982e99..230bf390ff6a 100644 --- a/core/src/main/java/org/fao/geonet/kernel/SelectionManager.java +++ b/core/src/main/java/org/fao/geonet/kernel/SelectionManager.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -51,9 +51,9 @@ * Manage objects selection for a user session. 
*/ public class SelectionManager { - public static final String SELECTION_METADATA = "metadata"; - public static final String SELECTION_BUCKET = "bucket"; + // Bucket name used in the search UI to store the selected the metadata + public static final String SELECTION_BUCKET = "s101"; // used to limit select all if get system setting maxrecords fails or contains value we can't parse public static final int DEFAULT_MAXHITS = 1000; public static final String ADD_ALL_SELECTED = "add-all"; @@ -61,20 +61,20 @@ public class SelectionManager { public static final String ADD_SELECTED = "add"; public static final String REMOVE_SELECTED = "remove"; public static final String CLEAR_ADD_SELECTED = "clear-add"; - private Hashtable> selections = null; + private Hashtable> selections; private SelectionManager() { - selections = new Hashtable>(0); + selections = new Hashtable<>(0); Set MDSelection = Collections - .synchronizedSet(new HashSet(0)); + .synchronizedSet(new HashSet<>(0)); selections.put(SELECTION_METADATA, MDSelection); } public Map getSelectionsAndSize() { return selections.entrySet().stream().collect(Collectors.toMap( - e -> e.getKey(), + Map.Entry::getKey, e -> e.getValue().size() )); } @@ -183,7 +183,7 @@ public int updateSelection(String type, // Get the selection manager or create it Set selection = this.getSelection(type); if (selection == null) { - selection = Collections.synchronizedSet(new HashSet()); + selection = Collections.synchronizedSet(new HashSet<>()); this.selections.put(type, selection); } @@ -192,30 +192,21 @@ public int updateSelection(String type, this.selectAll(type, context, session); else if (selected.equals(REMOVE_ALL_SELECTED)) this.close(type); - else if (selected.equals(ADD_SELECTED) && listOfIdentifiers.size() > 0) { + else if (selected.equals(ADD_SELECTED) && !listOfIdentifiers.isEmpty()) { // TODO ? Should we check that the element exist first ? 
- for (String paramid : listOfIdentifiers) { - selection.add(paramid); - } - } else if (selected.equals(REMOVE_SELECTED) && listOfIdentifiers.size() > 0) { + selection.addAll(listOfIdentifiers); + } else if (selected.equals(REMOVE_SELECTED) && !listOfIdentifiers.isEmpty()) { for (String paramid : listOfIdentifiers) { selection.remove(paramid); } - } else if (selected.equals(CLEAR_ADD_SELECTED) && listOfIdentifiers.size() > 0) { + } else if (selected.equals(CLEAR_ADD_SELECTED) && !listOfIdentifiers.isEmpty()) { this.close(type); - for (String paramid : listOfIdentifiers) { - selection.add(paramid); - } + selection.addAll(listOfIdentifiers); } } // Remove empty/null element from the selection - Iterator iter = selection.iterator(); - while (iter.hasNext()) { - Object element = iter.next(); - if (element == null) - iter.remove(); - } + selection.removeIf(Objects::isNull); return selection.size(); } @@ -241,14 +232,12 @@ public void selectAll(String type, ServiceContext context, UserSession session) if (StringUtils.isNotEmpty(type)) { JsonNode request = (JsonNode) session.getProperty(Geonet.Session.SEARCH_REQUEST + type); - if (request == null) { - return; - } else { + if (request != null) { final SearchResponse searchResponse; try { EsSearchManager searchManager = context.getBean(EsSearchManager.class); searchResponse = searchManager.query(request.get("query"), FIELDLIST_UUID, 0, maxhits); - List uuidList = new ArrayList(); + List uuidList = new ArrayList<>(); ObjectMapper objectMapper = new ObjectMapper(); for (Hit h : (List) searchResponse.hits().hits()) { uuidList.add((String) objectMapper.convertValue(h.source(), Map.class).get(Geonet.IndexFieldNames.UUID)); @@ -293,7 +282,7 @@ public Set getSelection(String type) { Set sel = selections.get(type); if (sel == null) { Set MDSelection = Collections - .synchronizedSet(new HashSet(0)); + .synchronizedSet(new HashSet<>(0)); selections.put(type, MDSelection); } return selections.get(type); diff --git 
a/core/src/main/java/org/fao/geonet/kernel/Thesaurus.java b/core/src/main/java/org/fao/geonet/kernel/Thesaurus.java index efaeaf60a890..a9f2d57230fa 100644 --- a/core/src/main/java/org/fao/geonet/kernel/Thesaurus.java +++ b/core/src/main/java/org/fao/geonet/kernel/Thesaurus.java @@ -121,7 +121,7 @@ public class Thesaurus { // map of lang -> dictionary of values // key is a dublinCore element (i.e. https://guides.library.ucsc.edu/c.php?g=618773&p=4306386) // see #retrieveDublinCore() for example - private Map> dublinCoreMultilingual = new Hashtable<>(); + private Map> dublinCoreMultilingual = new Hashtable<>(); private Cache THESAURUS_SEARCH_CACHE; @@ -133,14 +133,15 @@ protected Thesaurus() { } /** - * @param fname file name - * @param dname category/domain name of thesaurus + * @param fname file name + * @param dname category/domain name of thesaurus * @param thesaurusCacheMaxSize */ public Thesaurus(IsoLanguagesMapper isoLanguageMapper, String fname, String type, String dname, Path thesaurusFile, String siteUrl, int thesaurusCacheMaxSize) { this(isoLanguageMapper, fname, null, null, type, dname, thesaurusFile, siteUrl, false, thesaurusCacheMaxSize); } + public Thesaurus(IsoLanguagesMapper isoLanguageMapper, String fname, String tname, String tnamespace, String type, String dname, Path thesaurusFile, String siteUrl, boolean ignoreMissingError, int thesaurusCacheMaxSize) { this(isoLanguageMapper, fname, null, null, null, type, dname, thesaurusFile, siteUrl, false, thesaurusCacheMaxSize); } @@ -152,9 +153,9 @@ public Thesaurus(IsoLanguagesMapper isoLanguageMapper, String fname, super(); THESAURUS_SEARCH_CACHE = CacheBuilder.newBuilder() - .maximumSize(thesaurusCacheMaxSize) - .expireAfterAccess(25, TimeUnit.HOURS) - .build(); + .maximumSize(thesaurusCacheMaxSize) + .expireAfterAccess(25, TimeUnit.HOURS) + .build(); this.isoLanguageMapper = isoLanguageMapper; this.fname = fname; @@ -193,7 +194,6 @@ public Thesaurus(IsoLanguagesMapper isoLanguageMapper, String fname, 
} /** - * * @param fname * @param type * @param dname @@ -211,7 +211,7 @@ public Map getMultilingualTitles() { return Collections.unmodifiableMap(this.multilingualTitles); } - public Map> getDublinCoreMultilingual() { + public Map> getDublinCoreMultilingual() { return Collections.unmodifiableMap(this.dublinCoreMultilingual); } @@ -332,7 +332,7 @@ public synchronized Thesaurus initRepository() throws ConfigurationException, IO SailConfig syncSail = new SailConfig("org.openrdf.sesame.sailimpl.sync.SyncRdfSchemaRepository"); SailConfig memSail = new org.openrdf.sesame.sailimpl.memory.RdfSchemaRepositoryConfig(getFile().toString(), - RDFFormat.RDFXML); + RDFFormat.RDFXML); repConfig.addSail(syncSail); repConfig.addSail(memSail); repConfig.setWorldReadable(true); @@ -344,7 +344,7 @@ public synchronized Thesaurus initRepository() throws ConfigurationException, IO } public synchronized QueryResultsTable performRequest(String query) throws IOException, MalformedQueryException, - QueryEvaluationException, AccessDeniedException { + QueryEvaluationException, AccessDeniedException { if (Log.isDebugEnabled(Geonet.THESAURUS)) Log.debug(Geonet.THESAURUS, "Query : " + query); @@ -354,15 +354,15 @@ public synchronized QueryResultsTable performRequest(String query) throws IOExce public boolean hasConceptScheme(String uri) { String query = "SELECT conceptScheme" - + " FROM {conceptScheme} rdf:type {skos:ConceptScheme}" - + " WHERE conceptScheme = <" + uri + ">" - + " USING NAMESPACE skos = "; + + " FROM {conceptScheme} rdf:type {skos:ConceptScheme}" + + " WHERE conceptScheme = <" + uri + ">" + + " USING NAMESPACE skos = "; try { return performRequest(query).getRowCount() > 0; } catch (Exception e) { Log.error(Geonet.THESAURUS_MAN, - String.format("Error retrieving concept scheme for %s. Error is: %s", thesaurusFile, e.getMessage())); + String.format("Error retrieving concept scheme for %s. 
Error is: %s", thesaurusFile, e.getMessage())); throw new RuntimeException(e); } } @@ -370,8 +370,8 @@ public boolean hasConceptScheme(String uri) { public List getConceptSchemes() { String query = "SELECT conceptScheme" - + " FROM {conceptScheme} rdf:type {skos:ConceptScheme}" - + " USING NAMESPACE skos = "; + + " FROM {conceptScheme} rdf:type {skos:ConceptScheme}" + + " USING NAMESPACE skos = "; try { List ret = new ArrayList<>(); @@ -383,7 +383,7 @@ public List getConceptSchemes() { return ret; } catch (Exception e) { Log.error(Geonet.THESAURUS_MAN, String.format( - "Error retrieving concept schemes for %s. Error is: %s", thesaurusFile, e.getMessage())); + "Error retrieving concept schemes for %s. Error is: %s", thesaurusFile, e.getMessage())); return Collections.emptyList(); } } @@ -406,34 +406,28 @@ public synchronized URI addElement(KeywordBean keyword) throws IOException, Acce URI mySubject = myFactory.createURI(keyword.getUriCode()); URI skosClass = myFactory.createURI(SKOS_NAMESPACE, "Concept"); + URI rdfType = myFactory.createURI(org.openrdf.vocabulary.RDF.TYPE); + mySubject.addProperty(rdfType, skosClass); + URI predicatePrefLabel = myFactory - .createURI(SKOS_NAMESPACE, "prefLabel"); + .createURI(SKOS_NAMESPACE, "prefLabel"); URI predicateScopeNote = myFactory - .createURI(SKOS_NAMESPACE, "scopeNote"); - - URI predicateBoundedBy = myFactory.createURI(namespaceGml, "BoundedBy"); - URI predicateEnvelope = myFactory.createURI(namespaceGml, "Envelope"); - URI predicateSrsName = myFactory.createURI(namespaceGml, "srsName"); - URI srsNameURI = myFactory - .createURI("http://www.opengis.net/gml/srs/epsg.xml#epsg:4326"); - BNode gmlNode = myFactory.createBNode(); - URI predicateLowerCorner = myFactory.createURI(namespaceGml, - "lowerCorner"); - URI predicateUpperCorner = myFactory.createURI(namespaceGml, - "upperCorner"); - - Literal lowerCorner = myFactory.createLiteral(keyword.getCoordWest() + " " + keyword.getCoordSouth()); - Literal upperCorner = 
myFactory.createLiteral(keyword.getCoordEast() + " " + keyword.getCoordNorth()); + .createURI(SKOS_NAMESPACE, "scopeNote"); + + URI predicateInScheme = myFactory + .createURI(SKOS_NAMESPACE, "inScheme"); + myGraph.add(mySubject, + predicateInScheme, + myFactory.createURI(this.getDefaultNamespace())); - mySubject.addProperty(rdfType, skosClass); Set> values = keyword.getValues().entrySet(); for (Entry entry : values) { String language = toiso639_1_Lang(entry.getKey()); Value valueObj = myFactory.createLiteral(entry.getValue(), language); myGraph.add(mySubject, predicatePrefLabel, valueObj); - } + Set> definitions = keyword.getDefinitions().entrySet(); for (Entry entry : definitions) { String language = toiso639_1_Lang(entry.getKey()); @@ -441,12 +435,29 @@ public synchronized URI addElement(KeywordBean keyword) throws IOException, Acce myGraph.add(mySubject, predicateScopeNote, definitionObj); } - myGraph.add(mySubject, predicateBoundedBy, gmlNode); - gmlNode.addProperty(rdfType, predicateEnvelope); - myGraph.add(gmlNode, predicateLowerCorner, lowerCorner); - myGraph.add(gmlNode, predicateUpperCorner, upperCorner); - myGraph.add(gmlNode, predicateSrsName, srsNameURI); + if (!(keyword.getCoordEast() + keyword.getCoordNorth() + keyword.getCoordWest() + keyword.getCoordSouth()).trim().isEmpty()) { + URI predicateBoundedBy = myFactory.createURI(namespaceGml, "BoundedBy"); + URI predicateEnvelope = myFactory.createURI(namespaceGml, "Envelope"); + URI predicateSrsName = myFactory.createURI(namespaceGml, "srsName"); + URI srsNameURI = myFactory + .createURI("http://www.opengis.net/gml/srs/epsg.xml#epsg:4326"); + BNode gmlNode = myFactory.createBNode(); + URI predicateLowerCorner = myFactory.createURI(namespaceGml, + "lowerCorner"); + URI predicateUpperCorner = myFactory.createURI(namespaceGml, + "upperCorner"); + + Literal lowerCorner = myFactory.createLiteral(keyword.getCoordWest() + " " + keyword.getCoordSouth()); + Literal upperCorner = 
myFactory.createLiteral(keyword.getCoordEast() + " " + keyword.getCoordNorth()); + + myGraph.add(mySubject, predicateBoundedBy, gmlNode); + + gmlNode.addProperty(rdfType, predicateEnvelope); + myGraph.add(gmlNode, predicateLowerCorner, lowerCorner); + myGraph.add(gmlNode, predicateUpperCorner, upperCorner); + myGraph.add(gmlNode, predicateSrsName, srsNameURI); + } repository.addGraph(myGraph); return mySubject; @@ -485,7 +496,7 @@ public synchronized Thesaurus removeElement(String uri) throws AccessDeniedExcep } private Thesaurus removeElement(Graph myGraph, URI subject) - throws AccessDeniedException { + throws AccessDeniedException { StatementIterator iter = myGraph.getStatements(subject, null, null); while (iter.hasNext()) { AtomicReference st = new AtomicReference(iter.next()); @@ -504,8 +515,8 @@ private Thesaurus removeElement(Graph myGraph, URI subject) private String toiso639_1_Lang(String lang) { String defaultCode = getIsoLanguageMapper().iso639_2_to_iso639_1( - Geonet.DEFAULT_LANGUAGE, - Geonet.DEFAULT_LANGUAGE.substring(0, 2)); + Geonet.DEFAULT_LANGUAGE, + Geonet.DEFAULT_LANGUAGE.substring(0, 2)); return getIsoLanguageMapper().iso639_2_to_iso639_1(lang, defaultCode); } @@ -548,15 +559,14 @@ public synchronized URI updateElement(KeywordBean keyword, boolean replace) thro String language = toiso639_1_Lang(entry.getKey()); Value valueObj = myFactory.createLiteral(entry.getValue(), language); myGraph.add(subject, predicatePrefLabel, valueObj); - } + // add updated Definitions/Notes Set> definitions = keyword.getDefinitions().entrySet(); for (Entry entry : definitions) { String language = toiso639_1_Lang(entry.getKey()); Value definitionObj = myFactory.createLiteral(entry.getValue(), language); myGraph.add(subject, predicateScopeNote, definitionObj); - } // update bbox @@ -677,7 +687,7 @@ public synchronized Thesaurus updateCode(String namespace, String oldcode, Strin /** * Update concept code using its URI. 
This is recommended when concept identifier may not be * based on thesaurus namespace and does not contains #. - * + *

* eg. http://vocab.nerc.ac.uk/collection/P07/current/CFV13N44/ */ public synchronized Thesaurus updateCodeByURI(String olduri, String newuri) throws AccessDeniedException { @@ -729,13 +739,13 @@ public void createConceptScheme(String thesaurusTitle, Graph myGraph = new org.openrdf.model.impl.GraphImpl(); writeConceptScheme(myGraph, - thesaurusTitle, - multilingualTitles, - thesaurusDescription, - multilingualDescriptions, - identifier, - type, - namespace); + thesaurusTitle, + multilingualTitles, + thesaurusDescription, + multilingualDescriptions, + identifier, + type, + namespace); repository.addGraph(myGraph); } @@ -755,13 +765,13 @@ public void updateConceptScheme(String thesaurusTitle, removeElement(getConceptSchemes().get(0)); writeConceptScheme(myGraph, - thesaurusTitle, - multilingualTitles, - thesaurusDescription, - multilingualDescriptions, - identifier, - type, - namespace); + thesaurusTitle, + multilingualTitles, + thesaurusDescription, + multilingualDescriptions, + identifier, + type, + namespace); } public void writeConceptScheme(Graph myGraph, String thesaurusTitle, @@ -823,9 +833,6 @@ public void writeConceptScheme(Graph myGraph, String thesaurusTitle, } - - - private void addElement(String name, String value, Graph myGraph, ValueFactory myFactory, URI mySubject) { if (StringUtils.isNotEmpty(value)) { URI uri = myFactory.createURI(DC_NAMESPACE, name); @@ -861,22 +868,22 @@ private void addElement(String name, String value, Graph myGraph, ValueFactory m private void retrieveDublinCore(Element thesaurusEl) { List theNSs = getThesaurusNamespaces(); - Namespace xmlNS = Namespace.getNamespace("xml","http://www.w3.org/XML/1998/namespace"); + Namespace xmlNS = Namespace.getNamespace("xml", "http://www.w3.org/XML/1998/namespace"); try { List multiLingualTitles = (List) Xml.selectNodes(thesaurusEl, - "skos:ConceptScheme/dc:*[@xml:lang]|skos:ConceptScheme/dcterms:*[@xml:lang]", theNSs); + 
"skos:ConceptScheme/dc:*[@xml:lang]|skos:ConceptScheme/dcterms:*[@xml:lang]", theNSs); dublinCoreMultilingual.clear(); - for (Element el: multiLingualTitles) { + for (Element el : multiLingualTitles) { String lang = isoLanguageMapper.iso639_2_to_iso639_1(el.getAttribute("lang", xmlNS).getValue()); String value = el.getTextTrim(); String name = el.getName(); if (!dublinCoreMultilingual.containsKey(lang)) { - dublinCoreMultilingual.put(lang,new HashMap<>()); + dublinCoreMultilingual.put(lang, new HashMap<>()); } - dublinCoreMultilingual.get(lang).put(name,value); + dublinCoreMultilingual.get(lang).put(name, value); } } catch (Exception e) { - Log.warning(Geonet.THESAURUS,"error extracting multilingual dublin core items from thesaurus",e); + Log.warning(Geonet.THESAURUS, "error extracting multilingual dublin core items from thesaurus", e); } } @@ -896,14 +903,14 @@ private void retrieveDublinCore(Element thesaurusEl) { private void retrieveMultiLingualTitles(Element thesaurusEl) { try { String xpathTitles = "skos:ConceptScheme/dc:title[@xml:lang]" + - "|skos:ConceptScheme/dcterms:title[@xml:lang]" + - "|skos:ConceptScheme/rdfs:label[@xml:lang]" + - "|skos:ConceptScheme/skos:prefLabel[@xml:lang]" + - "|rdf:Description[rdf:type/@rdf:resource = 'http://www.w3.org/2004/02/skos/core#ConceptScheme']/dc:title[@xml:lang]"; + "|skos:ConceptScheme/dcterms:title[@xml:lang]" + + "|skos:ConceptScheme/rdfs:label[@xml:lang]" + + "|skos:ConceptScheme/skos:prefLabel[@xml:lang]" + + "|rdf:Description[rdf:type/@rdf:resource = 'http://www.w3.org/2004/02/skos/core#ConceptScheme']/dc:title[@xml:lang]"; multilingualTitles.clear(); multilingualTitles.putAll(retrieveMultilingualField(thesaurusEl, xpathTitles)); } catch (Exception e) { - Log.warning(Geonet.THESAURUS,"error extracting multilingual titles from thesaurus",e); + Log.warning(Geonet.THESAURUS, "error extracting multilingual titles from thesaurus", e); } } @@ -913,19 +920,19 @@ private void retrieveMultiLingualDescriptions(Element 
thesaurusEl) { multilingualDescriptions.clear(); multilingualDescriptions.putAll(retrieveMultilingualField(thesaurusEl, xpathDescriptions)); } catch (Exception e) { - Log.warning(Geonet.THESAURUS,"error extracting multilingual descriptions from thesaurus",e); + Log.warning(Geonet.THESAURUS, "error extracting multilingual descriptions from thesaurus", e); } } private Map retrieveMultilingualField(Element thesaurusEl, String xpath) throws JDOMException { List theNSs = getThesaurusNamespaces(); - Namespace xmlNS = Namespace.getNamespace("xml","http://www.w3.org/XML/1998/namespace"); + Namespace xmlNS = Namespace.getNamespace("xml", "http://www.w3.org/XML/1998/namespace"); Map multilingualValues = new HashMap<>(); List multilingualValuesEl = (List) Xml.selectNodes(thesaurusEl, - xpath, theNSs); - for (Element el: multilingualValuesEl) { + xpath, theNSs); + for (Element el : multilingualValuesEl) { String lang = isoLanguageMapper.iso639_2_to_iso639_1(el.getAttribute("lang", xmlNS).getValue()); String titleValue = el.getTextTrim(); multilingualValues.put(lang, titleValue); @@ -936,7 +943,7 @@ private Map retrieveMultilingualField(Element thesaurusEl, Strin /** * Retrieves the thesaurus information from rdf file. - * + *

* Used to set the thesaurusName and thesaurusDate for keywords. */ private void retrieveThesaurusInformation(Path thesaurusFile, String defaultTitle, boolean ignoreMissingError) { @@ -956,25 +963,25 @@ private void retrieveThesaurusInformation(Path thesaurusFile, String defaultTitl retrieveDublinCore(thesaurusEl); Element titleEl = Xml.selectElement(thesaurusEl, - "skos:ConceptScheme/dc:title|skos:ConceptScheme/dcterms:title" + - "|skos:ConceptScheme/rdfs:label|skos:ConceptScheme/skos:prefLabel" + - "|skos:Collection/dc:title|skos:Collection/dcterms:title" + - "|rdf:Description/dc:title|rdf:Description/dcterms:title", theNSs); + "skos:ConceptScheme/dc:title|skos:ConceptScheme/dcterms:title" + + "|skos:ConceptScheme/rdfs:label|skos:ConceptScheme/skos:prefLabel" + + "|skos:Collection/dc:title|skos:Collection/dcterms:title" + + "|rdf:Description/dc:title|rdf:Description/dcterms:title", theNSs); if (titleEl != null) { this.title = titleEl.getValue(); this.defaultNamespace = titleEl - .getParentElement() - .getAttributeValue("about", Namespace.getNamespace("rdf", RDF_NAMESPACE)); + .getParentElement() + .getAttributeValue("about", Namespace.getNamespace("rdf", RDF_NAMESPACE)); } else { this.title = defaultTitle; this.defaultNamespace = DEFAULT_THESAURUS_NAMESPACE; } Element descriptionEl = Xml.selectElement(thesaurusEl, - "skos:ConceptScheme/dc:description|skos:ConceptScheme/dcterms:description|" + - "skos:Collection/dc:description|skos:Collection/dcterms:description|" + - "rdf:Description/dc:description|rdf:Description/dcterms:description", theNSs); + "skos:ConceptScheme/dc:description|skos:ConceptScheme/dcterms:description|" + + "skos:Collection/dc:description|skos:Collection/dcterms:description|" + + "rdf:Description/dc:description|rdf:Description/dcterms:description", theNSs); this.description = descriptionEl != null ? 
descriptionEl.getValue() : ""; @@ -987,13 +994,13 @@ private void retrieveThesaurusInformation(Path thesaurusFile, String defaultTitl } Element issuedDateEl = Xml.selectElement(thesaurusEl, "skos:ConceptScheme/dcterms:issued", theNSs); - this.issuedDate = issuedDateEl==null? "": issuedDateEl.getText(); + this.issuedDate = issuedDateEl == null ? "" : issuedDateEl.getText(); Element modifiedDateEl = Xml.selectElement(thesaurusEl, "skos:ConceptScheme/dcterms:modified", theNSs); - this.modifiedDate = modifiedDateEl==null? "": modifiedDateEl.getText(); + this.modifiedDate = modifiedDateEl == null ? "" : modifiedDateEl.getText(); Element createdDateEl = Xml.selectElement(thesaurusEl, "skos:ConceptScheme/dcterms:created", theNSs); - this.createdDate = createdDateEl==null? "": createdDateEl.getText(); + this.createdDate = createdDateEl == null ? "" : createdDateEl.getText(); // Default date Element dateEl = Xml.selectElement(thesaurusEl, "skos:ConceptScheme/dcterms:issued|skos:Collection/dc:date", theNSs); @@ -1031,12 +1038,12 @@ private void retrieveThesaurusInformation(Path thesaurusFile, String defaultTitl if (Log.isDebugEnabled(Geonet.THESAURUS_MAN)) { Log.debug(Geonet.THESAURUS_MAN, String.format( - "Thesaurus information: %s (%s)", this.title, this.date)); + "Thesaurus information: %s (%s)", this.title, this.date)); } } catch (Exception ex) { if (!ignoreMissingError) Log.error(Geonet.THESAURUS_MAN, String.format( - "Error getting thesaurus info for %s. Error is: %s", thesaurusFile, ex.getMessage())); + "Error getting thesaurus info for %s. 
Error is: %s", thesaurusFile, ex.getMessage())); } } @@ -1064,12 +1071,11 @@ private Date parseThesaurusDate(Element dateEl) { StringBuffer errorMsg = new StringBuffer("Error parsing the thesaurus date value: "); errorMsg.append(dateVal); - boolean success = false; for (SimpleDateFormat df : dfList) { try { thesaurusDate = df.parse(dateVal); - success = true; + return thesaurusDate; } catch (Exception ex) { // Ignore the exception and try next format errorMsg.append("\n * with format: "); @@ -1079,11 +1085,9 @@ private Date parseThesaurusDate(Element dateEl) { } } // Report error if no success - if (!success) { - errorMsg.append("\nCheck thesaurus date in "); - errorMsg.append(this.fname); - Log.error(Geonet.THESAURUS_MAN, errorMsg.toString()); - } + errorMsg.append("\nCheck thesaurus date in "); + errorMsg.append(this.fname); + Log.error(Geonet.THESAURUS_MAN, errorMsg.toString()); return thesaurusDate; } @@ -1140,9 +1144,9 @@ public KeywordBean getKeyword(String uri, String... languages) { try { Query query = QueryBuilder - .keywordQueryBuilder(getIsoLanguageMapper(), languages) - .where(Wheres.ID(uri)) - .build(); + .keywordQueryBuilder(getIsoLanguageMapper(), languages) + .where(Wheres.ID(uri)) + .build(); keywords = query.execute(this); } catch (Exception e) { @@ -1168,9 +1172,9 @@ public List getTopConcepts(String... languages) { try { Query query = QueryBuilder - .keywordQueryBuilder(getIsoLanguageMapper(), languages) - .select(Selectors.TOPCONCEPTS, true) - .build(); + .keywordQueryBuilder(getIsoLanguageMapper(), languages) + .select(Selectors.TOPCONCEPTS, true) + .build(); keywords = query.execute(this); } catch (Exception e) { @@ -1238,9 +1242,9 @@ public boolean hasBroader(String uri) { */ public List getRelated(String uri, KeywordRelation request, String... 
languages) { Query query = QueryBuilder - .keywordQueryBuilder(getIsoLanguageMapper(), languages) - .select(Selectors.related(uri, request), true) - .build(); + .keywordQueryBuilder(getIsoLanguageMapper(), languages) + .select(Selectors.related(uri, request), true) + .build(); try { return query.execute(this); @@ -1275,9 +1279,9 @@ public boolean hasKeywordWithLabel(String label, String langCode) { */ public KeywordBean getKeywordWithLabel(String label, String langCode) { Query query = QueryBuilder - .keywordQueryBuilder(getIsoLanguageMapper(), langCode) - .where(Wheres.prefLabel(langCode, label)) - .build(); + .keywordQueryBuilder(getIsoLanguageMapper(), langCode) + .where(Wheres.prefLabel(langCode, label)) + .build(); List matchingKeywords; @@ -1307,7 +1311,7 @@ public Map getTitles(ApplicationContext context) throws JDOMExce return LangUtils.translate(context, getKey()); } - public List getKeywordHierarchy(String keywordLabel, String langCode) { + public List getKeywordHierarchy(String keywordLabel, String langCode) { String cacheKey = "getKeywordHierarchy" + keywordLabel + langCode; Object cacheValue = THESAURUS_SEARCH_CACHE.getIfPresent(cacheKey); if (cacheValue != null) { @@ -1315,26 +1319,26 @@ public List getKeywordHierarchy(String keywordLabel, String langCode) { } boolean isUri = keywordLabel.startsWith("http"); KeywordBean term = - isUri - ? this.getKeyword(keywordLabel, langCode) - : this.getKeywordWithLabel(keywordLabel, langCode); + isUri + ? this.getKeyword(keywordLabel, langCode) + : this.getKeywordWithLabel(keywordLabel, langCode); - List> result = this.classify(term, langCode); + List> result = this.classify(term, langCode); - List hierarchies = new ArrayList<>(); - for ( List hierachy : result) { + List hierarchies = new ArrayList<>(); + for (List hierachy : result) { String path = hierachy.stream() - .map(k -> isUri ? k.getUriCode() : k.getPreferredLabel(langCode)) - .collect(Collectors.joining("^")); + .map(k -> isUri ? 
k.getUriCode() : k.getPreferredLabel(langCode)) + .collect(Collectors.joining("^")); hierarchies.add(path); } THESAURUS_SEARCH_CACHE.put(cacheKey, hierarchies); return hierarchies; } - public List> classify(KeywordBean term, String langCode) { + public List> classify(KeywordBean term, String langCode) { - List> result = new ArrayList<>(); + List> result = new ArrayList<>(); if (this.hasBroader(term.getUriCode())) { result.addAll(classifyTermWithBroaderTerms(term, langCode)); } else { @@ -1343,16 +1347,16 @@ public List> classify(KeywordBean term, String langCode) return result; } - private List> classifyTermWithBroaderTerms(KeywordBean term, String langCode) { - List> result = new ArrayList<>(); - for (ArrayList stringToBroaderTerm : classifyBroaderTerms(term, langCode)) { + private List> classifyTermWithBroaderTerms(KeywordBean term, String langCode) { + List> result = new ArrayList<>(); + for (ArrayList stringToBroaderTerm : classifyBroaderTerms(term, langCode)) { stringToBroaderTerm.add(term); result.add(stringToBroaderTerm); } return result; } - private List> classifyBroaderTerms(KeywordBean term, String langCode) { + private List> classifyBroaderTerms(KeywordBean term, String langCode) { List> result = new ArrayList<>(); List narrowerList = this.getNarrower(term.getUriCode(), langCode); for (KeywordBean broaderTerm : this.getBroader(term.getUriCode(), langCode)) { @@ -1364,8 +1368,8 @@ private List> classifyBroaderTerms(KeywordBean term, Str return result; } - private ArrayList classifyTermWithNoBroaderTerms(KeywordBean term) { - ArrayList list = new ArrayList <>(); + private ArrayList classifyTermWithNoBroaderTerms(KeywordBean term) { + ArrayList list = new ArrayList<>(); list.add(term); return list; } diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataOperations.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataOperations.java index b2411bb2ca82..46dc76779737 100644 --- 
a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataOperations.java +++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataOperations.java @@ -24,6 +24,7 @@ package org.fao.geonet.kernel.datamanager; import java.util.Collection; +import java.util.List; import org.fao.geonet.domain.OperationAllowed; import org.fao.geonet.domain.ReservedOperation; @@ -52,6 +53,15 @@ public interface IMetadataOperations { */ void deleteMetadataOper(String metadataId, boolean skipAllReservedGroup) throws Exception; + /** + * Removes all operations stored for a metadata except for the operations of the groups in the exclude list. + * Used for preventing deletion of operations for reserved and restricted groups. + * + * @param metadataId Metadata identifier + * @param groupIdsToExclude List of group ids to exclude from deletion + */ + void deleteMetadataOper(String metadataId, List groupIdsToExclude); + /** * Adds a permission to a group. Metadata is not reindexed. */ diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataUtils.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataUtils.java index 63c3dadb5591..42b146682a22 100644 --- a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataUtils.java +++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataUtils.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2011 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -549,4 +549,40 @@ void setCreativeCommons(ServiceContext context, String id, String licenseurl, St * @param dest */ void replaceFiles(AbstractMetadata original, AbstractMetadata dest); + + /** +<<<<<<< HEAD + * Get the metadata after preforming a search and replace on it. 
+ * @param uuid The UUID of the metadata to search for. + * @param search The string to search for. + * @param replace The string to replace the search string with. + * @return The metadata with the search and replace applied. + */ + String selectOneWithSearchAndReplace(String uuid, String search, String replace); + + /** + * Get the metadata after preforming a regex search and replace on it. + * @param uuid The UUID of the metadata to search for. + * @param search The string to search for. + * @param replace The string to replace the search string with. + * @return The metadata with the search and replace applied. + */ + String selectOneWithRegexSearchAndReplaceWithFlags(String uuid, String search, String replace, String flags); + + /** + * Get the metadata after preforming a regex search and replace on it. + * @param uuid The UUID of the metadata to search for. + * @param search The string to search for. + * @param replace The string to replace the search string with. + * @return The metadata with the search and replace applied. + */ + String selectOneWithRegexSearchAndReplace(String uuid, String search, String replace); + + /** + * Checks if the metadata is available in the current portal. 
+ * + * @param id + * @return + */ + boolean isMetadataAvailableInPortal(int id); } diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataManager.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataManager.java index f2f159c029c4..7464a2677352 100644 --- a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataManager.java +++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataManager.java @@ -582,7 +582,11 @@ public AbstractMetadata insertMetadata(ServiceContext context, AbstractMetadata // Check if the schema is allowed by settings String mdImportSetting = settingManager.getValue(Settings.METADATA_IMPORT_RESTRICT); - if (mdImportSetting != null && !mdImportSetting.equals("")) { + if (mdImportSetting != null) { + // Remove spaces from the list so that "iso19115-3.2018, dublin-core" will also work + mdImportSetting = mdImportSetting.replace(" ", ""); + } + if (!StringUtils.isBlank(mdImportSetting)) { if (!newMetadata.getHarvestInfo().isHarvested() && !Arrays.asList(mdImportSetting.split(",")).contains(schema)) { throw new IllegalArgumentException("The system setting '" + Settings.METADATA_IMPORT_RESTRICT + "' doesn't allow to import " + schema diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataOperations.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataOperations.java index 47cfe3b53426..02a89fb23941 100644 --- a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataOperations.java +++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataOperations.java @@ -116,6 +116,23 @@ public void deleteMetadataOper(String metadataId, boolean skipAllReservedGroup) } } + /** + * Removes all operations stored for a metadata except for the operations of the groups in the exclude list. + * Used for preventing deletion of operations for reserved and restricted groups. 
+ * If groupIdsToExclude is null or empty, all operations are deleted. + * + * @param metadataId Metadata identifier + * @param groupIdsToExclude List of group ids to exclude from deletion + */ + @Override + public void deleteMetadataOper(String metadataId, List groupIdsToExclude) { + if (groupIdsToExclude == null || groupIdsToExclude.isEmpty()) { + opAllowedRepo.deleteAllByMetadataId(Integer.parseInt(metadataId)); + } else { + opAllowedRepo.deleteAllByMetadataIdExceptGroupId(Integer.parseInt(metadataId), groupIdsToExclude); + } + } + /** * Adds a permission to a group. Metadata is not reindexed. */ diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataUtils.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataUtils.java index ad4dde213b40..0c625fd186dd 100644 --- a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataUtils.java +++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataUtils.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2011 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -23,6 +23,9 @@ package org.fao.geonet.kernel.datamanager.base; +import co.elastic.clients.elasticsearch.core.search.TotalHits; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Optional; import jeeves.server.UserSession; import jeeves.server.context.ServiceContext; @@ -40,6 +43,7 @@ import org.fao.geonet.kernel.datamanager.*; import org.fao.geonet.kernel.schema.MetadataSchema; import org.fao.geonet.kernel.schema.SavedQuery; +import org.fao.geonet.kernel.search.EsFilterBuilder; import org.fao.geonet.kernel.search.EsSearchManager; import 
org.fao.geonet.kernel.search.IndexingMode; import org.fao.geonet.kernel.search.index.IndexingList; @@ -115,6 +119,9 @@ public class BaseMetadataUtils implements IMetadataUtils { protected IMetadataManager metadataManager; + @Autowired + private NodeInfo nodeInfo; + @Override public void setMetadataManager(IMetadataManager metadataManager) { this.metadataManager = metadataManager; @@ -483,7 +490,7 @@ String getMetadataId(@Nonnull String uuid) throws Exception { @Override public List findAllIdsBy(Specification specs) { try { - return metadataRepository.findIdsBy((Specification) specs); + return metadataRepository.findIdsBy(specs); } catch (ClassCastException t) { // Maybe it is not a Specification } @@ -1012,7 +1019,7 @@ public Page> findAllIdsAndChangeDates(Pageable pageable) @Override public Map findAllSourceInfo(Specification spec) { try { - return metadataRepository.findSourceInfo((Specification) spec); + return metadataRepository.findSourceInfo(spec); } catch (Throwable t) { // Maybe it is not a Specification } @@ -1028,4 +1035,52 @@ public void cloneFiles(AbstractMetadata original, AbstractMetadata dest) { public void replaceFiles(AbstractMetadata original, AbstractMetadata dest) { // Empty implementation for non-draft mode as not used } + + @Override + public String selectOneWithSearchAndReplace(String uuid, String search, String replace) { + return metadataRepository.selectOneWithSearchAndReplace(uuid, search, replace); + } + + @Override + public String selectOneWithRegexSearchAndReplaceWithFlags(String uuid, String search, String replace, String flags) { + return metadataRepository.selectOneWithRegexSearchAndReplaceWithFlags(uuid, search, replace, flags); + } + + @Override + public String selectOneWithRegexSearchAndReplace(String uuid, String search, String replace) { + return metadataRepository.selectOneWithRegexSearchAndReplace(uuid, search, replace); + } + + public boolean isMetadataAvailableInPortal(int id) { + // Check if the metadata is available in 
the portal + String elasticSearchQuery = "{ \"bool\": {\n" + + " \"must\": [\n" + + " {" + + " \"term\": {" + + " \"id\": {" + + " \"value\": \"%s\"" + + " }" + + " }" + + " } " + + " ]%s}}"; + + String portalFilter = " ,\"filter\":{\"query_string\":{\"query\":\"%s\"}}"; + + JsonNode esJsonQuery; + + try { + String filterQueryString = EsFilterBuilder.buildPortalFilter(nodeInfo); + String jsonQueryFilter = StringUtils.isNotEmpty(filterQueryString) ? String.format(portalFilter, filterQueryString): ""; + String jsonQuery = String.format(elasticSearchQuery, id, jsonQueryFilter); + + ObjectMapper objectMapper = new ObjectMapper(); + esJsonQuery = objectMapper.readTree(jsonQuery); + + TotalHits total = searchManager.query(esJsonQuery, new HashSet<>(), 0, 0).hits().total(); + + return (java.util.Optional.ofNullable(total).map(TotalHits::value).orElse(0L) > 0); + } catch (Exception e) { + throw new RuntimeException(e); + } + } } diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java index c5cc81ad1da2..39b163ded5b7 100644 --- a/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java +++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java @@ -677,6 +677,7 @@ private void cloneStoreFileUploadRequests(AbstractMetadata original, AbstractMet metadataFileUpload.setFileSize(mfu.getFileSize()); metadataFileUpload.setUploadDate(mfu.getUploadDate()); metadataFileUpload.setUserName(mfu.getUserName()); + metadataFileUpload.setDeletedDate(mfu.getDeletedDate()); repo.save(metadataFileUpload); } @@ -689,4 +690,31 @@ public void setListOfStatusCreatingDraft(Set listOfStatusCreatingDraft) public Set getListOfStatusCreatingDraft() { return listOfStatusToTriggerDraftCreation; } + + @Override + public String selectOneWithSearchAndReplace(String uuid, String search, String replace) { + String updatedXml = 
metadataDraftRepository.selectOneWithSearchAndReplace(uuid, search, replace); + if (updatedXml == null) { + updatedXml = super.selectOneWithSearchAndReplace(uuid, search, replace); + } + return updatedXml; + } + + @Override + public String selectOneWithRegexSearchAndReplaceWithFlags(String uuid, String search, String replace, String flags) { + String updatedXml = metadataDraftRepository.selectOneWithRegexSearchAndReplaceWithFlags(uuid, search, replace, flags); + if (updatedXml == null) { + updatedXml = super.selectOneWithRegexSearchAndReplaceWithFlags(uuid, search, replace, flags); + } + return updatedXml; + } + + @Override + public String selectOneWithRegexSearchAndReplace(String uuid, String search, String replace) { + String updatedXml = metadataDraftRepository.selectOneWithRegexSearchAndReplace(uuid, search, replace); + if (updatedXml == null) { + updatedXml = super.selectOneWithRegexSearchAndReplace(uuid, search, replace); + } + return updatedXml; + } } diff --git a/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java b/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java index cdb7a8bf8f73..8c0c0ca2b33f 100644 --- a/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java +++ b/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2023 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -48,6 +48,8 @@ import org.springframework.context.ApplicationContext; import java.util.*; +import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.fao.geonet.kernel.setting.Settings.SYSTEM_FEEDBACK_EMAIL; import static 
org.fao.geonet.util.LocalizedEmailComponent.ComponentType.*; @@ -330,22 +332,24 @@ protected void notify(List userToNotify, MetadataStatus status) throws Exc ); } - LocalizedEmail localizedEmail = new LocalizedEmail(false); - localizedEmail.addComponents(emailSubjectComponent, emailMessageComponent, emailSalutationComponent); - - String subject = localizedEmail.getParsedSubject(feedbackLocales); - for (User user : userToNotify) { + LocalizedEmail localizedEmail = new LocalizedEmail(false); + String userName = Joiner.on(" ").skipNulls().join(user.getName(), user.getSurname()); //If we have a userName add the salutation String message; if (StringUtils.isEmpty(userName)) { + localizedEmail.addComponents(emailSubjectComponent, emailMessageComponent); + message = localizedEmail.getParsedMessage(feedbackLocales); } else { + localizedEmail.addComponents(emailSubjectComponent, emailMessageComponent, emailSalutationComponent); + Map replacements = new HashMap<>(); replacements.put("{{userName}}", userName); message = localizedEmail.getParsedMessage(feedbackLocales, replacements); } + String subject = localizedEmail.getParsedSubject(feedbackLocales); sendEmail(user.getEmail(), subject, message); } } @@ -369,6 +373,25 @@ protected List getUserToNotify(MetadataStatus status) { return new ArrayList<>(); } + // If status is DRAFT and previous status is SUBMITTED, which means either: + // - a cancel working copy (from editor) --> should be notified the reviewer. + // - rejection (from reviewer) --> should be notified the editor. 
+ // and the notification level is recordUserAuthor or recordProfileReviewer, + // then adjust the notification level, depending on the user role + if ((status.getStatusValue().getId() == Integer.parseInt(StatusValue.Status.DRAFT)) && + (!StringUtils.isEmpty(status.getPreviousState()) && + (status.getPreviousState().equals(StatusValue.Status.SUBMITTED))) && + (notificationLevel.equals(StatusValueNotificationLevel.recordUserAuthor) || (notificationLevel.equals(StatusValueNotificationLevel.recordProfileReviewer)))) { + UserRepository userRepository = ApplicationContextHolder.get().getBean(UserRepository.class); + Optional user = userRepository.findById(status.getUserId()); + if (user.isPresent()) { + if (user.get().getProfile() == Profile.Editor) { + notificationLevel = StatusValueNotificationLevel.recordProfileReviewer; + } else { + notificationLevel = StatusValueNotificationLevel.recordUserAuthor; + } + } + } // TODO: Status does not provide batch update // So taking care of one record at a time. 
// Currently the code could notify a mix of reviewers @@ -430,7 +453,9 @@ public static List getUserToNotify(StatusValueNotificationLevel notificati } } } - return users; + + // Filter out users without email + return users.stream().filter(u -> StringUtils.isNotEmpty(u.getEmail())).collect(Collectors.toList()); } public static List getGroupToNotify(StatusValueNotificationLevel notificationLevel, List groupNames) { diff --git a/core/src/main/java/org/fao/geonet/kernel/search/EsSearchManager.java b/core/src/main/java/org/fao/geonet/kernel/search/EsSearchManager.java index bfd783bc5f4e..e85e79119f34 100644 --- a/core/src/main/java/org/fao/geonet/kernel/search/EsSearchManager.java +++ b/core/src/main/java/org/fao/geonet/kernel/search/EsSearchManager.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * Copyright (C) 2001-2025 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -29,8 +29,8 @@ import co.elastic.clients.elasticsearch.core.bulk.BulkOperation; import co.elastic.clients.elasticsearch.core.bulk.UpdateOperation; import co.elastic.clients.elasticsearch.core.search.Hit; -import co.elastic.clients.elasticsearch.indices.*; import co.elastic.clients.elasticsearch.indices.ExistsRequest; +import co.elastic.clients.elasticsearch.indices.*; import co.elastic.clients.transport.endpoints.BooleanResponse; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; @@ -73,8 +73,7 @@ import java.util.*; import static org.fao.geonet.constants.Geonet.IndexFieldNames.IS_TEMPLATE; -import static org.fao.geonet.kernel.search.IndexFields.INDEXING_ERROR_FIELD; -import static org.fao.geonet.kernel.search.IndexFields.INDEXING_ERROR_MSG; +import static org.fao.geonet.kernel.search.IndexFields.*; public class EsSearchManager implements ISearchManager { @@ -216,7 +215,6 @@ private void 
addMDFields(Element doc, Path schemaDir, doc.addContent(new Element(INDEXING_ERROR_FIELD).setText("true")); doc.addContent(createIndexingErrorMsgElement("indexingErrorMsg-indexingStyleSheetError", "error", Map.of("message", e.getMessage()))); - doc.addContent(new Element(IndexFields.DRAFT).setText("n")); } } @@ -225,7 +223,7 @@ private void addMoreFields(Element doc, Multimap fields) { fields.entries().forEach(e -> { Element newElement = new Element(e.getKey()) .setText(String.valueOf(e.getValue())); - if(objectFields.contains(e.getKey())) { + if (objectFields.contains(e.getKey())) { newElement.setAttribute("type", "object"); } doc.addContent(newElement); @@ -349,6 +347,7 @@ public BulkResponse updateFields(String id, Multimap fields, Set fields.asMap().forEach((e, v) -> fieldMap.put(e, v.toArray())); return updateFields(id, fieldMap, fieldsToRemove); } + public BulkResponse updateFields(String id, Map fieldMap, Set fieldsToRemove) throws IOException { fieldMap.put(Geonet.IndexFieldNames.INDEXING_DATE, new Date()); @@ -404,7 +403,7 @@ public void updateFieldsAsynch(String id, Map fields) { if (exception != null) { LOGGER.error("Failed to index {}", exception); } else { - LOGGER.info("Updated fields for document {}", id); + LOGGER.info("Updated fields for document {}", id); } }); } @@ -479,7 +478,7 @@ private void sendDocumentsToIndex() { } catch (Exception e) { LOGGER.error( "An error occurred while indexing {} documents in current indexing list. Error is {}.", - listOfDocumentsToIndex.size(), e.getMessage()); + listOfDocumentsToIndex.size(), e.getMessage()); } finally { // TODO: Trigger this async ? 
documents.keySet().forEach(uuid -> overviewFieldUpdater.process(uuid)); @@ -502,6 +501,7 @@ private void checkIndexResponse(BulkResponse bulkItemResponses, String id = ""; String uuid = ""; String isTemplate = ""; + String isDraft = ""; String failureDoc = documents.get(e.id()); try { @@ -510,13 +510,14 @@ private void checkIndexResponse(BulkResponse bulkItemResponses, id = node.get(IndexFields.DBID).asText(); uuid = node.get("uuid").asText(); isTemplate = node.get(IS_TEMPLATE).asText(); + isDraft = node.get(DRAFT).asText(); } catch (Exception ignoredException) { } docWithErrorInfo.put(IndexFields.DBID, id); docWithErrorInfo.put("uuid", uuid); docWithErrorInfo.put(IndexFields.RESOURCE_TITLE, resourceTitle); docWithErrorInfo.put(IS_TEMPLATE, isTemplate); - docWithErrorInfo.put(IndexFields.DRAFT, "n"); + docWithErrorInfo.put(IndexFields.DRAFT, isDraft); docWithErrorInfo.put(INDEXING_ERROR_FIELD, true); ArrayNode errors = docWithErrorInfo.putArray(INDEXING_ERROR_MSG); errors.add(createIndexingErrorMsgObject(e.error().reason(), "error", Map.of())); @@ -539,7 +540,7 @@ private void checkIndexResponse(BulkResponse bulkItemResponses, BulkResponse response = client.bulkRequest(defaultIndex, listErrorOfDocumentsToIndex); if (response.errors()) { LOGGER.error("Failed to save error documents {}.", - Arrays.toString(errorDocumentIds.toArray())); + Arrays.toString(errorDocumentIds.toArray())); } } } @@ -573,6 +574,7 @@ private void checkIndexResponse(BulkResponse bulkItemResponses, .add("status_text") .add("coordinateSystem") .add("identifier") + .add("maintenance") .add("responsibleParty") .add("mdLanguage") .add("otherLanguage") @@ -585,7 +587,7 @@ private void checkIndexResponse(BulkResponse bulkItemResponses, .add("MD_SecurityConstraintsUseLimitation") .add("MD_SecurityConstraintsUseLimitationObject") .add("overview") - .add("sourceDescription") + .add("sourceDescriptionObject") .add("MD_ConstraintsUseLimitation") .add("MD_ConstraintsUseLimitationObject") 
.add("resourceType") @@ -674,7 +676,7 @@ public ObjectNode documentToJson(Element xml) { mapper.readTree(node.getTextNormalize())); } catch (IOException e) { LOGGER.error("Parsing invalid JSON node {} for property {}. Error is: {}", - node.getTextNormalize(), propertyName, e.getMessage()); + node.getTextNormalize(), propertyName, e.getMessage()); } } else { arrayNode.add( @@ -693,7 +695,7 @@ public ObjectNode documentToJson(Element xml) { )); } catch (IOException e) { LOGGER.error("Parsing invalid JSON node {} for property {}. Error is: {}", - nodeElements.get(0).getTextNormalize(), propertyName, e.getMessage()); + nodeElements.get(0).getTextNormalize(), propertyName, e.getMessage()); } } else { doc.put(propertyName, @@ -706,7 +708,8 @@ public ObjectNode documentToJson(Element xml) { } - /** Field starting with _ not supported in Kibana + /** + * Field starting with _ not supported in Kibana * Those are usually GN internal fields */ private String getPropertyName(String name) { @@ -934,12 +937,12 @@ public boolean isIndexWritable(String indexName) throws IOException, Elasticsear String indexBlockRead = "index.blocks.read_only_allow_delete"; GetIndicesSettingsRequest request = GetIndicesSettingsRequest.of( - b -> b.index(indexName) - .name(indexBlockRead) + b -> b.index(indexName) + .name(indexBlockRead) ); GetIndicesSettingsResponse settings = this.client.getClient() - .indices().getSettings(request); + .indices().getSettings(request); IndexState indexState = settings.get(indexBlockRead); @@ -950,7 +953,7 @@ public boolean isIndexWritable(String indexName) throws IOException, Elasticsear /** * Make a JSON Object that properly represents an indexingErrorMsg, to be used in the index. 
* - * @param type either 'error' or 'warning' + * @param type either 'error' or 'warning' * @param string a string that is translatable (see, e.g., en-search.json) * @param values values that replace the placeholders in the `string` parameter * @return a json object that represents an indexingErrorMsg @@ -961,7 +964,7 @@ public ObjectNode createIndexingErrorMsgObject(String string, String type, Map valuesObject.put(k, String.valueOf(v))); + values.forEach((k, v) -> valuesObject.put(k, String.valueOf(v))); indexingErrorMsg.set("values", valuesObject); return indexingErrorMsg; } @@ -969,7 +972,7 @@ public ObjectNode createIndexingErrorMsgObject(String string, String type, Map1 Jwt-Header filters active at the same time + String filterId = java.util.UUID.randomUUID().toString(); + + + public JwtHeadersAuthFilter(JwtHeadersConfiguration jwtHeadersConfiguration) { + this.jwtHeadersConfiguration = jwtHeadersConfiguration; + } + + @Override + public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) + throws IOException, ServletException { + var existingAuth = SecurityContextHolder.getContext().getAuthentication(); + HttpServletRequest request = (HttpServletRequest) servletRequest; + + + var config = jwtHeadersConfiguration.getJwtConfiguration(); + + var user = JwtHeadersTrivialUser.create(config, request); + + //if request is already logged in by us (same filterId), but there aren't any Jwt-Headers attached + //then log them out. + if (user == null && existingAuth != null) { + if (existingAuth instanceof JwtHeadersUsernamePasswordAuthenticationToken + && ((JwtHeadersUsernamePasswordAuthenticationToken) existingAuth).authFilterId.equals(filterId)) { + //at this point, there isn't a JWT header, but there's an existing auth that was made by us (JWT header) + // in this case, we need to log-off. They have a JSESSION auth that is no longer valid. 
+ logout(request); + filterChain.doFilter(servletRequest, servletResponse); + return; + } + } + + + if (user == null) { + filterChain.doFilter(servletRequest, servletResponse); + return; // no valid user in header + } + + //we have a valid user in the headers + + //existing user is the same user as the request + if (existingAuth != null && existingAuth.getName().equals(user.getUsername())) { + filterChain.doFilter(servletRequest, servletResponse); + return; // abort early - no need to do an expensive login. Use the existing one. + } + + //existing user isnt the same user as the request + if (existingAuth != null && !existingAuth.getName().equals(user.getUsername())) { + //in this case there are two auth's - the existing one (likely from JSESSION) + //and one coming in from the JWT headers. In this case, we kill the other login + //and make a new one. + logout(request); + } + + var userDetails = jwtHeadersUserUtil.getUser(user, jwtHeadersConfiguration); + if (userDetails != null) { + UsernamePasswordAuthenticationToken auth = new JwtHeadersUsernamePasswordAuthenticationToken( + filterId, userDetails, null, userDetails.getAuthorities()); + auth.setDetails(userDetails); + SecurityContextHolder.getContext().setAuthentication(auth); + } + + filterChain.doFilter(servletRequest, servletResponse); + } + + /** + * handle a logout - clear out the security context, and invalidate the session + * + * @param request + * @throws ServletException + */ + public void logout(HttpServletRequest request) throws ServletException { + request.logout();//dont think this does anything in GN + SecurityContextHolder.getContext().setAuthentication(null); + request.getSession().invalidate(); + } + +} + + diff --git a/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersConfiguration.java b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersConfiguration.java new file mode 100644 index 000000000000..73d4fee4316e --- /dev/null +++ 
b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersConfiguration.java @@ -0,0 +1,112 @@ +/* + * Copyright (C) 2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.kernel.security.jwtheaders; + +import org.fao.geonet.kernel.security.SecurityProviderConfiguration; +import org.geoserver.security.jwtheaders.JwtConfiguration; + +/** + * configuration for the JWT Headers security filter. + * See GN documentation. + * This is based on GeoServer's JWT-Headers Module, so you can see there as well. + *

+ * This class handles the GN filter configuration details, and hands the actual configuration + * for the filter to the JwtConfiguration class. This class is also used in Geoserver. + */ +public class JwtHeadersConfiguration { + + + public SecurityProviderConfiguration.LoginType loginType = SecurityProviderConfiguration.LoginType.AUTOLOGIN; + /** + * true -> update the DB with the information from OIDC (don't allow user to edit profile in the UI) + * false -> don't update the DB (user must edit profile in UI). + */ + public boolean updateProfile = true; + /** + * true -> update the DB (user's group) with the information from OIDC (don't allow admin to edit user's groups in the UI) + * false -> don't update the DB (admin must edit groups in UI). + */ + public boolean updateGroup = true; + protected JwtConfiguration jwtConfiguration; + + //shared JwtHeadersSecurityConfig object + JwtHeadersSecurityConfig securityConfig; + + // getters/setters + + public JwtHeadersConfiguration(JwtHeadersSecurityConfig securityConfig) { + this.securityConfig = securityConfig; + jwtConfiguration = new JwtConfiguration(); + } + + public boolean isUpdateProfile() { + return securityConfig.isUpdateProfile(); + } + + public void setUpdateProfile(boolean updateProfile) { + securityConfig.setUpdateProfile(updateProfile); + } + + public boolean isUpdateGroup() { + return securityConfig.isUpdateGroup(); + } + + + //---- abstract class methods + + public void setUpdateGroup(boolean updateGroup) { + securityConfig.setUpdateGroup(updateGroup); + } + + public String getLoginType() { + return securityConfig.getLoginType(); + } + + + public String getSecurityProvider() { + return securityConfig.getSecurityProvider(); + } + + + public boolean isUserProfileUpdateEnabled() { + return securityConfig.isUserProfileUpdateEnabled(); + } + + //======================================================================== + + // @Override + public boolean isUserGroupUpdateEnabled() { + return 
securityConfig.isUserGroupUpdateEnabled(); + } + + public org.geoserver.security.jwtheaders.JwtConfiguration getJwtConfiguration() { + return jwtConfiguration; + } + + public void setJwtConfiguration( + org.geoserver.security.jwtheaders.JwtConfiguration jwtConfiguration) { + this.jwtConfiguration = jwtConfiguration; + } + +} diff --git a/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersSecurityConfig.java b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersSecurityConfig.java new file mode 100644 index 000000000000..3e311faaa3e2 --- /dev/null +++ b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersSecurityConfig.java @@ -0,0 +1,99 @@ +/* + * Copyright (C) 2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ +package org.fao.geonet.kernel.security.jwtheaders; + +import org.fao.geonet.kernel.security.SecurityProviderConfiguration; + +/** + * GeoNetwork only allows one SecurityProviderConfiguration bean. 
+ * In the jwt-headers-multi (2 auth filters) situation, we need to have a single SecurityProviderConfiguration. + * We, therefore, share a single one. + * This class is shared between all the JwtHeadersConfiguration objects. + */ +public class JwtHeadersSecurityConfig implements SecurityProviderConfiguration { + + + public SecurityProviderConfiguration.LoginType loginType = SecurityProviderConfiguration.LoginType.AUTOLOGIN; + /** + * true -> update the DB with the information from OIDC (don't allow user to edit profile in the UI) + * false -> don't update the DB (user must edit profile in UI). + */ + public boolean updateProfile = true; + /** + * true -> update the DB (user's group) with the information from OIDC (don't allow admin to edit user's groups in the UI) + * false -> don't update the DB (admin must edit groups in UI). + */ + public boolean updateGroup = true; + + + // getters/setters + + + public JwtHeadersSecurityConfig() { + + } + + public boolean isUpdateProfile() { + return updateProfile; + } + + public void setUpdateProfile(boolean updateProfile) { + this.updateProfile = updateProfile; + } + + public boolean isUpdateGroup() { + return updateGroup; + } + + + //---- abstract class methods + + public void setUpdateGroup(boolean updateGroup) { + this.updateGroup = updateGroup; + } + + //@Override + public String getLoginType() { + return loginType.toString(); + } + + // @Override + public String getSecurityProvider() { + return "JWT-HEADERS"; + } + + // @Override + public boolean isUserProfileUpdateEnabled() { + // If updating profile from the security provider then disable the profile updates in the interface + return !updateProfile; + } + + //======================================================================== + + // @Override + public boolean isUserGroupUpdateEnabled() { + // If updating group from the security provider then disable the group updates in the interface + return !updateGroup; + } + +} diff --git 
a/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersTrivialUser.java b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersTrivialUser.java new file mode 100644 index 000000000000..de22d9f5ca3a --- /dev/null +++ b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersTrivialUser.java @@ -0,0 +1,249 @@ +/* + * Copyright (C) 2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ + +package org.fao.geonet.kernel.security.jwtheaders; + +import org.fao.geonet.constants.Geonet; +import org.fao.geonet.domain.Profile; +import org.fao.geonet.utils.Log; +import org.geoserver.security.jwtheaders.JwtConfiguration; +import org.geoserver.security.jwtheaders.roles.JwtHeadersRolesExtractor; +import org.geoserver.security.jwtheaders.token.TokenValidator; +import org.geoserver.security.jwtheaders.username.JwtHeaderUserNameExtractor; +import org.springframework.util.StringUtils; + +import javax.servlet.http.HttpServletRequest; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * uses the GS library to process the headers. + * This returns a GN-compliant "user" (JwtHeadersTrivialUser) that + * has the header-derived username and roles (profile and profileGroups). + *

+ * Most of the code, here is for processing profileGroups (Map>). + */ +public class JwtHeadersTrivialUser { + + static String ROLE_GROUP_SEPARATOR = ":"; + static Profile MIN_PROFILE = Profile.RegisteredUser; + Map> profileGroups; + private String username; + + //---------------------- + private Profile profile; + + public JwtHeadersTrivialUser(String userName) { + setUsername(userName); + profileGroups = new HashMap<>(); + } + + public static JwtHeadersTrivialUser create(JwtConfiguration config, HttpServletRequest request) throws IOException { + if (request == null || config == null || config.getUserNameHeaderAttributeName() == null) { + Log.debug(Geonet.SECURITY, "JwtHeadersUser.create called with null args!"); + return null; // nothing to do + } + + var userNameHeader = request.getHeader(config.getUserNameHeaderAttributeName()); + if (userNameHeader == null) { + return null; // no username in request! + } + + //get the username from the headers (pay attention to config) + JwtHeaderUserNameExtractor userNameExtractor = new JwtHeaderUserNameExtractor(config); + var userName = userNameExtractor.extractUserName(userNameHeader); + + if (userName == null) { + return null; // no username + } + + var tokenValidator = new TokenValidator(config); + try { +// var accessToken = userNameHeader.replaceFirst("^Bearer", ""); +// accessToken = accessToken.replaceFirst("^bearer", ""); +// accessToken = accessToken.trim(); + tokenValidator.validate(userNameHeader); + } catch (Exception e) { + throw new IOException("JWT Token is invalid", e); + } + + //get roles from the headers (pay attention to config) + var result = new JwtHeadersTrivialUser(userName); + handleRoles(result, config, request); + + return result; + } + + /** + * @param user user to be modified + * @param config configuration (i.e. 
where to get the roles from and how to convert them) + * @param request header to get the roles from + */ + public static void handleRoles(JwtHeadersTrivialUser user, JwtConfiguration config, HttpServletRequest request) { + if (!config.getJwtHeaderRoleSource().equals("JSON") && !config.getJwtHeaderRoleSource().equals("JWT")) + return; // nothing to do - we aren't configured to handle roles extraction (get from GN DB). + + if (config.getRolesHeaderName() == null) + return; //misconfigured + + //get the header value and extract the set of roles in it (processed by the RoleConverter) + var rolesHeader = request.getHeader(config.getRolesHeaderName()); + JwtHeadersRolesExtractor rolesExtractor = new JwtHeadersRolesExtractor(config); + var roles = rolesExtractor.getRoles(rolesHeader); + + + updateUserWithRoles(user, roles); + } + + public static void updateUserWithRoles(JwtHeadersTrivialUser user, Collection roles) { + //need to convert the simple roles into profileGroups + // i.e. group1:Reviewer means user has "Reviewer" Profile for group "group1" + Map> profileGroups = extractProfileRoles(roles); + + //get the "max" profile (for User#Profile) + if (profileGroups != null && profileGroups.size() > 0) { + String profile = getMaxProfile(profileGroups).name(); + if (profile != null) { + user.profile = Profile.valueOf(profile); + } + } + else { + user.profile = Profile.RegisteredUser; + } + + //set the profileGroups + user.profileGroups = profileGroups; + } + + /** + * Get the profiles, and the list of groups for that profile, from the access token. + *

+ * i.e. ["Administrator","g2:Editor"] -> {"Administrator":[], "Editor":["g2"]} + * + * @param rolesInToken list of roles for the user (from headers + gone through the JWT Headers RoleConverter) + * @return map object with the profile and related groups. + */ + //from GN keycloak plugin + public static Map> extractProfileRoles(Collection rolesInToken) { + Map> profileGroups = new HashMap<>(); + + Set roleGroupList = new HashSet<>(); + + // Get role that are in the format of group:role format access + // Todo Reevaluate to see if this is how we want to get role groups. It may not be a good idea to place separator in group name and parse it this way. + for (String role : rolesInToken) { + if (role.contains(ROLE_GROUP_SEPARATOR)) { + Log.debug(Geonet.SECURITY, "Identified group:profile (" + role + ") from user token."); + roleGroupList.add(role); + } else { + // Only use the profiles we know of and don't add duplicates. + Profile p = Profile.findProfileIgnoreCase(role); + if (p != null && !profileGroups.containsKey(p)) { + profileGroups.put(p, new ArrayList<>()); + } + } + } + + + for (String rg : roleGroupList) { + String[] rg_role_groups = rg.split(ROLE_GROUP_SEPARATOR); + + if (rg_role_groups.length == 0 || StringUtils.isEmpty(rg_role_groups[0])) { + continue; + } + + Profile p = null; + if (rg_role_groups.length >= 1) { + p = Profile.findProfileIgnoreCase(rg_role_groups[1]); + } + // If we cannot find the profile then lets ignore this entry. 
+ if (p == null) { + continue; + } + + List groups; + if (profileGroups.containsKey(p)) { + groups = profileGroups.get(p); + } else { + groups = new ArrayList<>(); + } + if (rg_role_groups.length > 1) { + groups.add(rg_role_groups[0]); + } + profileGroups.put(p, groups); + } + + return profileGroups; + } + + //---------------------- + + public static Profile getMaxProfile(Map> profileGroups) { + Profile maxProfile = null; + + for (Profile p : profileGroups.keySet()) { + if (maxProfile == null) { + maxProfile = p; + } else if (maxProfile.compareTo(p) >= 0) { + maxProfile = p; + } + } + + // Fallback if no profile + if (maxProfile == null) { + maxProfile = MIN_PROFILE; + } + return maxProfile; + } + + public String getUsername() { + return username; + } + + public void setUsername(String username) { + this.username = username; + } + + public Map> getProfileGroups() { + return profileGroups; + } + + public void setProfileGroups(Map> profileGroups) { + this.profileGroups = profileGroups; + } + + public Profile getProfile() { + return profile; + } + + public void setProfile(Profile profile) { + this.profile = profile; + } + +} diff --git a/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersUserUtil.java b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersUserUtil.java new file mode 100644 index 000000000000..b5629c521834 --- /dev/null +++ b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersUserUtil.java @@ -0,0 +1,245 @@ +/* + * Copyright (C) 2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.kernel.security.jwtheaders; + +import org.fao.geonet.constants.Geonet; +import org.fao.geonet.domain.Group; +import org.fao.geonet.domain.Language; +import org.fao.geonet.domain.Profile; +import org.fao.geonet.domain.User; +import org.fao.geonet.domain.UserGroup; +import org.fao.geonet.kernel.security.GeonetworkAuthenticationProvider; +import org.fao.geonet.repository.GroupRepository; +import org.fao.geonet.repository.LanguageRepository; +import org.fao.geonet.repository.UserGroupRepository; +import org.fao.geonet.repository.UserRepository; +import org.fao.geonet.utils.Log; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.core.userdetails.UsernameNotFoundException; + +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * This class handles GeoNetwork related User (and Group/UserGroup) activities. + */ +public class JwtHeadersUserUtil { + + @Autowired + UserRepository userRepository; + + @Autowired + GroupRepository groupRepository; + + @Autowired + UserGroupRepository userGroupRepository; + + @Autowired + GeonetworkAuthenticationProvider authProvider; + + @Autowired + LanguageRepository languageRepository; + + /** + * Gets a user. + * 1. 
if the user currently exists in the GN DB: + * - user is retrieved from the GN DB + * - if the profile/profileGroup update is true then the DB is updated with info from `userFromHeaders` + * - otherwise, the header roles are ignored and profile/profileGroups are taken from the GN DB + *

+ * 2. if the user doesn't exist in the DB: + * - user is created and saved to the DB + * - if the profile/profileGroup update is true then the DB is updated with info from `userFromHeaders` + * - otherwise, the header roles are ignored and profile/profileGroups are taken from the GN DB + * - NOTE: in this case, the user will not have any profile/profileGroup - + * an admin will have to manually set them in GN GUI + * + * @param userFromHeaders This is user info supplied in the request headers + * @param configuration Configuration of the JWT Headers filter + * @return + */ + public User getUser(JwtHeadersTrivialUser userFromHeaders, JwtHeadersConfiguration configuration) { + try { + User userFromDb = (User) authProvider.loadUserByUsername(userFromHeaders.getUsername()); + injectRoles(userFromDb, userFromHeaders, configuration); + return userFromDb; + } catch (UsernameNotFoundException e) { + return createUser(userFromHeaders, configuration); + } + } + + /** + * given an existing user (both from GN DB and from the Request Headers), + * update roles (profile/profileGroups). + *

+ * isUpdateProfile/isUpdateGroup control if the DB is updated from the request Headers + * + * @param userFromDb + * @param userFromHeaders + * @param configuration + */ + public void injectRoles(User userFromDb, JwtHeadersTrivialUser userFromHeaders, JwtHeadersConfiguration configuration) { + if (configuration.isUpdateProfile()) { + userFromDb.setProfile(userFromHeaders.getProfile()); + userRepository.save(userFromDb); + Log.trace(Geonet.SECURITY, String.format("JwtHeaders: existing user (%s) with profile: '%s'", userFromDb.getUsername(), userFromHeaders.getProfile())); + } + if (configuration.isUpdateGroup()) { + var profileGroups = userFromHeaders.getProfileGroups(); + if (profileGroups != null) { + updateGroups(profileGroups, userFromDb); + if (!profileGroups.isEmpty()) { + Log.trace(Geonet.SECURITY, "JwtHeaders: existing user profile groups: "); + for (var group : profileGroups.entrySet()) { + Log.debug(Geonet.SECURITY, + String.format(" + Profile '%s' has groups: '%s'", + group.getKey(), + String.join(",", group.getValue()) + )); + } + } + } + } + + } + + /** + * creates a new user based on what was in the request headers. + *

+ * profile updating (in GN DB) is controlled by isUpdateProfile + * profileGroup updating (in GN DB) is controlled by isUpdateGroup + *

+ * cf. updateGroups for how the profile/profileGroups are updated + * + * @param userFromHeaders + * @param configuration + * @return + */ + public User createUser(JwtHeadersTrivialUser userFromHeaders, JwtHeadersConfiguration configuration) { + //create user + User user = new User(); + user.setUsername(userFromHeaders.getUsername()); + + // Add email + if (userFromHeaders.getUsername().contains("@")) { + user.getEmailAddresses().add(userFromHeaders.getUsername()); + // dave@example.com --> dave + user.setName(user.getUsername().substring(0, user.getUsername().indexOf("@"))); + } + + Log.debug(Geonet.SECURITY, "JwtHeaders: Creating new User in GN DB: " + user); + + if (configuration.isUpdateProfile()) { + user.setProfile(userFromHeaders.getProfile()); + Log.debug(Geonet.SECURITY, String.format("JwtHeaders: new user profile: '%s'", userFromHeaders.getProfile())); + } else { + user.setProfile(Profile.RegisteredUser);//default to registered user + } + + userRepository.save(user); + + + if (configuration.isUpdateGroup()) { + var profileGroups = userFromHeaders.getProfileGroups(); + if (profileGroups != null) { + updateGroups(profileGroups, user); + if (!profileGroups.isEmpty()) { + Log.debug(Geonet.SECURITY, "JwtHeaders: new user profile groups: "); + for (var group : profileGroups.entrySet()) { + Log.debug(Geonet.SECURITY, + String.format(" + Profile '%s' has groups: '%s'", + group.getKey(), + String.join(",", group.getValue()) + )); + } + } + } + } + + return user; + } + + + /** + * Update users group information in the database. + * + * @param profileGroups object containing the profile and related groups. + * @param user to apply the changes to. 
+ */ + //from keycloak + public void updateGroups(Map> profileGroups, User user) { + Set userGroups = new HashSet<>(); + + // Now we add the groups + for (Profile p : profileGroups.keySet()) { + List groups = profileGroups.get(p); + for (String rgGroup : groups) { + + Group group = groupRepository.findByName(rgGroup); + + if (group == null) { + group = new Group(); + group.setName(rgGroup); + + // Populate languages for the group + for (Language l : languageRepository.findAll()) { + group.getLabelTranslations().put(l.getId(), group.getName()); + } + + groupRepository.save(group); + } + + UserGroup usergroup = new UserGroup(); + usergroup.setGroup(group); + usergroup.setUser(user); + + Profile profile = p; + if (profile.equals(Profile.Administrator)) { + // As we are assigning to a group, it is UserAdmin instead + profile = Profile.UserAdmin; + } + usergroup.setProfile(profile); + + //Todo - It does not seem necessary to add the user to the editor profile + // since the reviewer is the parent of the editor + // Seems like the permission checks should be smart enough to know that if a user + // is a reviewer then they are also an editor. 
Need to test and fix if necessary + if (profile.equals(Profile.Reviewer)) { + UserGroup ug = new UserGroup(); + ug.setGroup(group); + ug.setUser(user); + ug.setProfile(Profile.Editor); + userGroups.add(ug); + } + + userGroups.add(usergroup); + } + } + + userGroupRepository.updateUserGroups(user.getId(), userGroups); + } + +} diff --git a/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersUsernamePasswordAuthenticationToken.java b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersUsernamePasswordAuthenticationToken.java new file mode 100644 index 000000000000..1e83d15e2bb1 --- /dev/null +++ b/core/src/main/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersUsernamePasswordAuthenticationToken.java @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ + +package org.fao.geonet.kernel.security.jwtheaders; + +import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; +import org.springframework.security.core.GrantedAuthority; + +import java.util.Collection; + +/** + * this class just allows us to tag an authentication as coming from JWT Headers (for detecting logout) + */ +public class JwtHeadersUsernamePasswordAuthenticationToken extends UsernamePasswordAuthenticationToken { + + //ID of the JwtHeaderAuthFilter that authenticated the user + String authFilterId; + + public JwtHeadersUsernamePasswordAuthenticationToken(String authFilterId, Object principal, Object credentials, Collection authorities) { + super(principal, credentials, authorities); + this.authFilterId = authFilterId; + } +} diff --git a/core/src/main/java/org/fao/geonet/kernel/setting/Settings.java b/core/src/main/java/org/fao/geonet/kernel/setting/Settings.java index a96fa1325853..c3c7a9e2aa05 100644 --- a/core/src/main/java/org/fao/geonet/kernel/setting/Settings.java +++ b/core/src/main/java/org/fao/geonet/kernel/setting/Settings.java @@ -148,6 +148,8 @@ public class Settings { public static final String METADATA_UNPUBLISH_USERPROFILE = "metadata/publication/profileUnpublishMetadata"; public static final String METADATA_BACKUPARCHIVE_ENABLE = "metadata/backuparchive/enable"; public static final String METADATA_VCS = "metadata/vcs/enable"; + + public static final String SYSTEM_AUDITABLE_ENABLE = "system/auditable/enable"; public static final String VIRTUAL_SETTINGS_SUFFIX_ISDEFINED = "IsDefined"; public static final String NODE = "node/id"; public static final String NODE_DEFAULT = "node/default"; diff --git a/core/src/main/java/org/fao/geonet/util/FileUtil.java b/core/src/main/java/org/fao/geonet/util/FileUtil.java index 483ebab84b06..4c54e2f4f9ae 100644 --- a/core/src/main/java/org/fao/geonet/util/FileUtil.java +++ b/core/src/main/java/org/fao/geonet/util/FileUtil.java @@ -80,4 +80,21 
@@ public static String readLastLines(File file, int lines) { } } } + + /** + * Similar to https://commons.apache.org/proper/commons-io/apidocs/org/apache/commons/io/FileUtils.html#byteCountToDisplaySize(long) + * however the format is returned in 2 decimal precision. + * + * @param bytes to be converted into human-readable format. + * @return human-readable formated bytes. + */ + public static String humanizeFileSize(long bytes) { + if (bytes == 0) return "0 Bytes"; + + String[] sizes = {"Bytes", "KB", "MB", "GB", "TB"}; + int i = (int) Math.floor(Math.log(bytes) / Math.log(1024)); // Determine the index for sizes + double humanizedSize = bytes / Math.pow(1024, i); + + return String.format("%.2f %s", humanizedSize, sizes[i]); + } } diff --git a/core/src/main/java/org/fao/geonet/util/LimitedInputStream.java b/core/src/main/java/org/fao/geonet/util/LimitedInputStream.java new file mode 100644 index 000000000000..582621f7f546 --- /dev/null +++ b/core/src/main/java/org/fao/geonet/util/LimitedInputStream.java @@ -0,0 +1,53 @@ +//============================================================================= +//=== Copyright (C) 2001-2025 Food and Agriculture Organization of the +//=== United Nations (FAO-UN), United Nations World Food Programme (WFP) +//=== and United Nations Environment Programme (UNEP) +//=== +//=== This library is free software; you can redistribute it and/or +//=== modify it under the terms of the GNU Lesser General Public +//=== License as published by the Free Software Foundation; either +//=== version 2.1 of the License, or (at your option) any later version. +//=== +//=== This library is distributed in the hope that it will be useful, +//=== but WITHOUT ANY WARRANTY; without even the implied warranty of +//=== MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +//=== Lesser General Public License for more details. 
+//=== +//=== You should have received a copy of the GNU Lesser General Public +//=== License along with this library; if not, write to the Free Software +//=== Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +//=== +//=== Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, +//=== Rome - Italy. email: geonetwork@osgeo.org +//============================================================================== + +package org.fao.geonet.util; + +import org.fao.geonet.api.exception.InputStreamLimitExceededException; + +import java.io.IOException; +import java.io.InputStream; + +/** + * Implementation of {@link org.apache.commons.fileupload.util.LimitedInputStream} that throws a + * {@link InputStreamLimitExceededException} when the configured limit is exceeded. + */ +public class LimitedInputStream extends org.apache.commons.fileupload.util.LimitedInputStream { + + + /** + * Creates a new instance. + * + * @param inputStream The input stream, which shall be limited. + * @param pSizeMax The limit; no more than this number of bytes + * shall be returned by the source stream. 
+ */ + public LimitedInputStream(InputStream inputStream, long pSizeMax) { + super(inputStream, pSizeMax); + } + + @Override + protected void raiseError(long pSizeMax, long pCount) throws IOException { + throw new InputStreamLimitExceededException(pSizeMax); + } +} diff --git a/core/src/main/java/org/fao/geonet/util/LogUtil.java b/core/src/main/java/org/fao/geonet/util/LogUtil.java index aa1d0d437f4d..93f3c023f7d8 100644 --- a/core/src/main/java/org/fao/geonet/util/LogUtil.java +++ b/core/src/main/java/org/fao/geonet/util/LogUtil.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -57,7 +57,7 @@ public static String initializeHarvesterLog(String type, String name) { // Filename safe representation of harvester name (using '_' as needed). final String harvesterName = name.replaceAll("\\W+", "_"); final String harvesterType = type.replaceAll("\\W+", "_"); - SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmm"); + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss"); String logfile = "harvester_" + harvesterType @@ -71,7 +71,7 @@ public static String initializeHarvesterLog(String type, String name) { } ThreadContext.put("harvest", harvesterName); - ThreadContext.putIfNull("logfile", logfile); + ThreadContext.put("logfile", logfile); ThreadContext.put("timeZone", timeZoneSetting); return logfile; diff --git a/core/src/main/java/org/fao/geonet/util/XslUtil.java b/core/src/main/java/org/fao/geonet/util/XslUtil.java index d6514ffd045a..067d3cc4b8e3 100644 --- a/core/src/main/java/org/fao/geonet/util/XslUtil.java +++ b/core/src/main/java/org/fao/geonet/util/XslUtil.java @@ -46,9 +46,9 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.DefaultHttpClient; import 
org.fao.geonet.ApplicationContextHolder; +import org.fao.geonet.Constants; import org.fao.geonet.SystemInfo; import org.fao.geonet.analytics.WebAnalyticsConfiguration; -import org.fao.geonet.api.records.attachments.FilesystemStore; import org.fao.geonet.api.records.attachments.FilesystemStoreResourceContainer; import org.fao.geonet.api.records.attachments.Store; import org.fao.geonet.constants.Geonet; @@ -113,6 +113,7 @@ import java.io.StringReader; import java.net.URL; import java.net.URLConnection; +import java.net.URLDecoder; import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.util.*; @@ -1246,11 +1247,11 @@ public static String buildDataUrl(String url, Integer size) { Matcher m = Pattern.compile(settingManager.getNodeURL() + "api/records/(.*)/attachments/(.*)$").matcher(url); BufferedImage image; if (m.find()) { - Store store = ApplicationContextHolder.get().getBean(FilesystemStore.class); + Store store = ApplicationContextHolder.get().getBean("filesystemStore", Store.class); try (Store.ResourceHolder file = store.getResourceInternal( - m.group(1), + URLDecoder.decode(m.group(1), Constants.ENCODING), MetadataResourceVisibility.PUBLIC, - m.group(2), true)) { + URLDecoder.decode(m.group(2), Constants.ENCODING), true)) { image = ImageIO.read(file.getPath().toFile()); } } else { @@ -1441,6 +1442,28 @@ public static String getThesaurusIdByTitle(String title) { return thesaurus == null ? "" : "geonetwork.thesaurus." + thesaurus.getKey(); } + /** + * Retrieve the thesaurus title using the thesaurus key. + * + * @param id the thesaurus key + * @return the thesaurus title or empty string if the thesaurus doesn't exist. + */ + public static String getThesaurusTitleByKey(String id) { + ApplicationContext applicationContext = ApplicationContextHolder.get(); + ThesaurusManager thesaurusManager = applicationContext.getBean(ThesaurusManager.class); + Thesaurus thesaurus = thesaurusManager.getThesaurusByName(id); + return thesaurus == null ? 
"" : thesaurus.getTitle(); + } + + + public static String getThesaurusUriByKey(String id) { + ApplicationContext applicationContext = ApplicationContextHolder.get(); + ThesaurusManager thesaurusManager = applicationContext.getBean(ThesaurusManager.class); + Thesaurus thesaurus = thesaurusManager.getThesaurusByName(id); + return thesaurus == null ? "" : thesaurus.getDefaultNamespace(); + } + + /** * Utility method to retrieve the name (label) for an iso language using it's code for a specific language. @@ -1581,7 +1604,11 @@ private static List buildRecordLink(List hits, String type) { public static String escapeForJson(String value) { return StringEscapeUtils.escapeJson(value); } - + + public static String escapeForEcmaScript(String value) { + return StringEscapeUtils.escapeEcmaScript(value); + } + public static String getWebAnalyticsService() { ApplicationContext applicationContext = ApplicationContextHolder.get(); WebAnalyticsConfiguration webAnalyticsConfiguration = applicationContext.getBean(WebAnalyticsConfiguration.class); diff --git a/core/src/main/java/org/fao/geonet/web/GeoNetworkStrictHttpFirewall.java b/core/src/main/java/org/fao/geonet/web/GeoNetworkStrictHttpFirewall.java new file mode 100644 index 000000000000..cdf34c45f181 --- /dev/null +++ b/core/src/main/java/org/fao/geonet/web/GeoNetworkStrictHttpFirewall.java @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.web; + +import org.springframework.security.web.firewall.StrictHttpFirewall; + +import java.util.regex.Pattern; + +import static java.nio.charset.StandardCharsets.ISO_8859_1; +import static java.nio.charset.StandardCharsets.UTF_8; + +/** + * Spring Security HttpFirewall that allows parsing UTF8 header values. + */ +public class GeoNetworkStrictHttpFirewall extends StrictHttpFirewall { + private static final Pattern ALLOWED_HEADER_VALUE_PATTERN = Pattern.compile("[\\p{IsAssigned}&&[^\\p{IsControl}]]*"); + + public GeoNetworkStrictHttpFirewall() { + super(); + + this.setAllowedHeaderValues(header -> { + String parsed = new String(header.getBytes(ISO_8859_1), UTF_8); + return ALLOWED_HEADER_VALUE_PATTERN.matcher(parsed).matches(); + }); + } +} diff --git a/core/src/test/java/jeeves/interfaces/ProfileTest.java b/core/src/test/java/jeeves/interfaces/ProfileTest.java index 0e360641f778..b52042cc58e3 100644 --- a/core/src/test/java/jeeves/interfaces/ProfileTest.java +++ b/core/src/test/java/jeeves/interfaces/ProfileTest.java @@ -34,17 +34,27 @@ public class ProfileTest { + @Test + public void testGetChildren() { + assertContainsAllExactly(Administrator.getChildren(), UserAdmin, Monitor); + assertContainsOnly(Reviewer, UserAdmin.getChildren()); + assertContainsOnly(Editor, Reviewer.getChildren()); + assertContainsOnly(RegisteredUser, Editor.getChildren()); + 
assertContainsOnly(Guest, RegisteredUser.getChildren()); + assertEquals(0, Monitor.getChildren().size()); + assertEquals(0, Guest.getChildren().size()); + } + @Test public void testGetParents() { - assertEquals(2, Administrator.getParents().size()); - assertTrue(Administrator.getParents().contains(UserAdmin)); - assertTrue(Administrator.getParents().contains(Monitor)); - assertContainsOnly(Reviewer, UserAdmin.getParents()); - assertContainsOnly(Editor, Reviewer.getParents()); - assertContainsOnly(RegisteredUser, Editor.getParents()); - assertContainsOnly(Guest, RegisteredUser.getParents()); - assertEquals(0, Monitor.getParents().size()); - assertEquals(0, Guest.getParents().size()); + assertEquals(0, Administrator.getParents().size()); + assertContainsOnly(Administrator, UserAdmin.getParents()); + assertContainsOnly(UserAdmin, Reviewer.getParents()); + assertContainsOnly(Reviewer, Editor.getParents()); + assertContainsOnly(Editor, RegisteredUser.getParents()); + assertContainsOnly(RegisteredUser, Guest.getParents()); + assertContainsOnly(Administrator, Monitor.getParents()); + } private void assertContainsOnly(Profile profile, Set parents) { @@ -53,12 +63,25 @@ private void assertContainsOnly(Profile profile, Set parents) { } @Test - public void testGetAll() { - assertContainsAllExactly(Administrator.getAll(), Administrator, UserAdmin, Reviewer, Editor, RegisteredUser, Guest, Monitor); - assertContainsAllExactly(UserAdmin.getAll(), UserAdmin, Reviewer, Editor, RegisteredUser, Guest); - assertContainsAllExactly(Reviewer.getAll(), Reviewer, Editor, RegisteredUser, Guest); - assertContainsAllExactly(Editor.getAll(), Editor, RegisteredUser, Guest); - assertContainsAllExactly(Editor.getAll(), Editor, RegisteredUser, Guest); + public void testGetProfileAndAllChildren() { + assertContainsAllExactly(Administrator.getProfileAndAllChildren(), Administrator, UserAdmin, Reviewer, Editor, RegisteredUser, Guest, Monitor); + 
assertContainsAllExactly(UserAdmin.getProfileAndAllChildren(), UserAdmin, Reviewer, Editor, RegisteredUser, Guest); + assertContainsAllExactly(Reviewer.getProfileAndAllChildren(), Reviewer, Editor, RegisteredUser, Guest); + assertContainsAllExactly(Editor.getProfileAndAllChildren(), Editor, RegisteredUser, Guest); + assertContainsAllExactly(RegisteredUser.getProfileAndAllChildren(), RegisteredUser, Guest); + assertContainsAllExactly(Guest.getProfileAndAllChildren(), Guest); + assertContainsAllExactly(Monitor.getProfileAndAllChildren(), Monitor); + } + + @Test + public void testGetProfileAndAllParents() { + assertContainsAllExactly(Administrator.getProfileAndAllParents(), Administrator); + assertContainsAllExactly(UserAdmin.getProfileAndAllParents(), UserAdmin, Administrator); + assertContainsAllExactly(Reviewer.getProfileAndAllParents(), Reviewer, UserAdmin, Administrator); + assertContainsAllExactly(Editor.getProfileAndAllParents(), Editor, Reviewer, UserAdmin, Administrator); + assertContainsAllExactly(RegisteredUser.getProfileAndAllParents(), RegisteredUser, Editor, Reviewer, UserAdmin, Administrator); + assertContainsAllExactly(Guest.getProfileAndAllParents(), Guest, RegisteredUser, Editor, Reviewer, UserAdmin, Administrator); + assertContainsAllExactly(Monitor.getProfileAndAllParents(), Monitor, Administrator); } private void assertContainsAllExactly(Set all, Profile... 
profiles) { diff --git a/core/src/test/java/org/fao/geonet/kernel/AbstractGeonetworkDataDirectoryTest.java b/core/src/test/java/org/fao/geonet/kernel/AbstractGeonetworkDataDirectoryTest.java index 7f7f4b26b4a0..63624516b091 100644 --- a/core/src/test/java/org/fao/geonet/kernel/AbstractGeonetworkDataDirectoryTest.java +++ b/core/src/test/java/org/fao/geonet/kernel/AbstractGeonetworkDataDirectoryTest.java @@ -26,6 +26,8 @@ import jeeves.server.ServiceConfig; import org.fao.geonet.AbstractCoreIntegrationTest; +import org.fao.geonet.constants.Geonet; +import org.fao.geonet.exceptions.BadParameterEx; import org.jdom.Element; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; @@ -34,7 +36,7 @@ import java.nio.file.Path; import java.util.ArrayList; -import static org.junit.Assert.assertEquals; +import static org.junit.Assert.*; /** * Abstract class for GeonetworkDataDirectory tests where the data directory layout is a default @@ -76,6 +78,29 @@ public void testInit() throws Exception { assertSystemDirSubFolders(expectedDataDir); } + @Test + public void testGetXsltConversion() { + Path xsltConversion = dataDirectory.getXsltConversion("conversion"); + assertEquals(dataDirectory.getWebappDir().resolve(Geonet.Path.IMPORT_STYLESHEETS).resolve("conversion.xsl"), xsltConversion); + try { + dataDirectory.getXsltConversion("../conversion"); + } catch (BadParameterEx e) { + assertEquals("../conversion is not a valid value for: Invalid character found in path.", e.getMessage()); + } + + xsltConversion = dataDirectory.getXsltConversion("schema:iso19115-3.2018:convert/fromISO19115-3.2014"); + assertNotNull(xsltConversion); + try { + dataDirectory.getXsltConversion("schema:notExistingSchema:convert/fromISO19115-3.2014"); + } catch (BadParameterEx e) { + assertEquals("Conversion not found. 
Schema 'notExistingSchema' is not registered in this catalog.", e.getMessage()); + } + try { + dataDirectory.getXsltConversion("schema:iso19115-3.2018:../../custom/path"); + } catch (BadParameterEx e) { + assertEquals("../../custom/path is not a valid value for: Invalid character found in path.", e.getMessage()); + } + } private void assertSystemDirSubFolders(Path expectedDataDir) { final Path expectedConfigDir = expectedDataDir.resolve("config"); assertEquals(expectedConfigDir, dataDirectory.getConfigDir()); diff --git a/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersConfigurationTest.java b/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersConfigurationTest.java new file mode 100644 index 000000000000..56068bad3a39 --- /dev/null +++ b/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersConfigurationTest.java @@ -0,0 +1,63 @@ +/* + * Copyright (C) 2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ + +package org.fao.geonet.kernel.security.jwtheaders; + +import org.junit.Assert; +import org.junit.Test; + +/** + * very simple tests for JwtHeadersConfiguration for the GN-only portions. + */ +public class JwtHeadersConfigurationTest { + + //Very very simple test to ensure that setters/getters are working correctly + @Test + public void testGetSet() { + var config = JwtHeadersIntegrationTest.getBasicConfig(); + + //CONST + Assert.assertEquals("autologin", config.getLoginType()); + Assert.assertEquals("JWT-HEADERS", config.getSecurityProvider()); + + config.setUpdateGroup(false); + Assert.assertEquals(false, config.isUpdateGroup()); + Assert.assertEquals(false, !config.isUserGroupUpdateEnabled()); + config.setUpdateGroup(true); + Assert.assertEquals(true, config.isUpdateGroup()); + Assert.assertEquals(true, !config.isUserGroupUpdateEnabled()); + + + config.setUpdateProfile(false); + Assert.assertEquals(false, config.isUpdateProfile()); + Assert.assertEquals(false, !config.isUserProfileUpdateEnabled()); + config.setUpdateProfile(true); + Assert.assertEquals(true, config.isUpdateProfile()); + Assert.assertEquals(true, !config.isUserProfileUpdateEnabled()); + + + Assert.assertEquals(config.jwtConfiguration, config.getJwtConfiguration()); + config.setJwtConfiguration(null); + Assert.assertNull(config.getJwtConfiguration()); + } +} diff --git a/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersIntegrationTest.java b/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersIntegrationTest.java new file mode 100644 index 000000000000..f133167989ca --- /dev/null +++ b/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersIntegrationTest.java @@ -0,0 +1,281 @@ +/* + * Copyright (C) 2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you 
can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.kernel.security.jwtheaders; + +import org.fao.geonet.domain.User; +import org.geoserver.security.jwtheaders.JwtConfiguration; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.security.core.context.SecurityContextHolder; + +import javax.servlet.FilterChain; +import javax.servlet.ServletException; +import javax.servlet.ServletResponse; +import java.io.IOException; + +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verify; + +/** + * Basic integration tests for the filter. + *

+ * We are mocking all the other interactions and directly calling JwtHeadersAuthFilter#doFilter + * and validating the results. + */ +public class JwtHeadersIntegrationTest { + + + //JWT example + public static String JWT = "eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICItWEdld190TnFwaWRrYTl2QXNJel82WEQtdnJmZDVyMlNWTWkwcWMyR1lNIn0.eyJleHAiOjE3MDcxNTMxNDYsImlhdCI6MTcwNzE1Mjg0NiwiYXV0aF90aW1lIjoxNzA3MTUyNjQ1LCJqdGkiOiJlMzhjY2ZmYy0zMWNjLTQ0NmEtYmU1Yy04MjliNDE0NTkyZmQiLCJpc3MiOiJodHRwczovL2xvZ2luLWxpdmUtZGV2Lmdlb2NhdC5saXZlL3JlYWxtcy9kYXZlLXRlc3QyIiwiYXVkIjoiYWNjb3VudCIsInN1YiI6ImVhMzNlM2NjLWYwZTEtNDIxOC04OWNiLThkNDhjMjdlZWUzZCIsInR5cCI6IkJlYXJlciIsImF6cCI6ImxpdmUta2V5MiIsIm5vbmNlIjoiQldzc2M3cTBKZ0tHZC1OdFc1QlFhVlROMkhSa25LQmVIY0ZMTHZ5OXpYSSIsInNlc3Npb25fc3RhdGUiOiIxY2FiZmU1NC1lOWU0LTRjMmMtODQwNy03NTZiMjczZmFmZmIiLCJhY3IiOiIwIiwicmVhbG1fYWNjZXNzIjp7InJvbGVzIjpbImRlZmF1bHQtcm9sZXMtZGF2ZS10ZXN0MiIsIm9mZmxpbmVfYWNjZXNzIiwidW1hX2F1dGhvcml6YXRpb24iXX0sInJlc291cmNlX2FjY2VzcyI6eyJsaXZlLWtleTIiOnsicm9sZXMiOlsiR2Vvc2VydmVyQWRtaW5pc3RyYXRvciJdfSwiYWNjb3VudCI6eyJyb2xlcyI6WyJtYW5hZ2UtYWNjb3VudCIsIm1hbmFnZS1hY2NvdW50LWxpbmtzIiwidmlldy1wcm9maWxlIl19fSwic2NvcGUiOiJvcGVuaWQgcGhvbmUgb2ZmbGluZV9hY2Nlc3MgbWljcm9wcm9maWxlLWp3dCBwcm9maWxlIGFkZHJlc3MgZW1haWwiLCJzaWQiOiIxY2FiZmU1NC1lOWU0LTRjMmMtODQwNy03NTZiMjczZmFmZmIiLCJ1cG4iOiJkYXZpZC5ibGFzYnlAZ2VvY2F0Lm5ldCIsImVtYWlsX3ZlcmlmaWVkIjpmYWxzZSwiYWRkcmVzcyI6e30sIm5hbWUiOiJkYXZpZCBibGFzYnkiLCJncm91cHMiOlsiZGVmYXVsdC1yb2xlcy1kYXZlLXRlc3QyIiwib2ZmbGluZV9hY2Nlc3MiLCJ1bWFfYXV0aG9yaXphdGlvbiJdLCJwcmVmZXJyZWRfdXNlcm5hbWUiOiJkYXZpZC5ibGFzYnlAZ2VvY2F0Lm5ldCIsImdpdmVuX25hbWUiOiJkYXZpZCIsImZhbWlseV9uYW1lIjoiYmxhc2J5IiwiZW1haWwiOiJkYXZpZC5ibGFzYnlAZ2VvY2F0Lm5ldCJ9.fHzXd7oISnqWb09ah9wikfP2UOBeiOA3vd_aDg3Bw-xcfv9aD3CWhAK5FUDPYSPyj4whAcknZbUgUzcm0qkaI8V_aS65F3Fug4jt4nC9YPL4zMSJ5an4Dp6jlQ3OQhrKFn4FwaoW61ndMmScsZZWEQyj6gzHnn5cknqySB26tVydT6q57iTO7KQFcXRdbXd6GWIoFGS-ud9XzxQMUdNfYmsDD7e6hoWhe9PJD9Zq4KT6JN13hUU4Dos-Z5SBHjRa6ieHoOe9gqkjKyA1jT1NU42Nqr-mTV-ql22nA
oXuplpvOYc5-09-KDDzSDuVKFwLCNMN3ZyRF1wWuydJeU-gOQ"; + JwtHeadersConfiguration config; + FilterChain filterChain; + ServletResponse response; + JwtHeadersUserUtil jwtHeadersUserUtil; + User user; + User user2; + + /** + * standard configuration for testing JSON + */ + public static JwtHeadersConfiguration getBasicConfig() { + JwtHeadersConfiguration config = new JwtHeadersConfiguration(new JwtHeadersSecurityConfig()); + var jwtheadersConfiguration = config.getJwtConfiguration(); + jwtheadersConfiguration.setUserNameHeaderAttributeName("OIDC_id_token_payload"); + + jwtheadersConfiguration.setUserNameFormatChoice(JwtConfiguration.UserNameHeaderFormat.JSON); + jwtheadersConfiguration.setUserNameJsonPath("preferred_username"); + + + jwtheadersConfiguration.setRolesJsonPath("resource_access.live-key2.roles"); + jwtheadersConfiguration.setRolesHeaderName("OIDC_id_token_payload"); + jwtheadersConfiguration.setJwtHeaderRoleSource("JSON"); + + jwtheadersConfiguration.setRoleConverterString("GeonetworkAdministrator=ADMINISTRATOR"); + jwtheadersConfiguration.setOnlyExternalListedRoles(false); + + jwtheadersConfiguration.setValidateToken(false); + + jwtheadersConfiguration.setValidateTokenAgainstURL(true); + jwtheadersConfiguration.setValidateTokenAgainstURLEndpoint(""); + jwtheadersConfiguration.setValidateSubjectWithEndpoint(true); + + jwtheadersConfiguration.setValidateTokenAudience(true); + jwtheadersConfiguration.setValidateTokenAudienceClaimName(""); + jwtheadersConfiguration.setValidateTokenAudienceClaimValue(""); + + jwtheadersConfiguration.setValidateTokenSignature(true); + jwtheadersConfiguration.setValidateTokenSignatureURL(""); + + return config; + } + + /** + * standard configuration for testing JWT + */ + public static JwtHeadersConfiguration getBasicConfigJWT() { + JwtHeadersConfiguration config = new JwtHeadersConfiguration(new JwtHeadersSecurityConfig()); + var jwtheadersConfiguration = config.getJwtConfiguration(); + 
jwtheadersConfiguration.setUserNameHeaderAttributeName("TOKEN"); + + jwtheadersConfiguration.setUserNameFormatChoice(JwtConfiguration.UserNameHeaderFormat.JWT); + jwtheadersConfiguration.setUserNameJsonPath("preferred_username"); + + + jwtheadersConfiguration.setRolesJsonPath("resource_access.live-key2.roles"); + jwtheadersConfiguration.setRolesHeaderName("TOKEN"); + jwtheadersConfiguration.setJwtHeaderRoleSource("JWT"); + + jwtheadersConfiguration.setRoleConverterString("GeoserverAdministrator=ADMINISTRATOR"); + jwtheadersConfiguration.setOnlyExternalListedRoles(false); + + jwtheadersConfiguration.setValidateToken(false); + + jwtheadersConfiguration.setValidateTokenAgainstURL(true); + jwtheadersConfiguration.setValidateTokenAgainstURLEndpoint(""); + jwtheadersConfiguration.setValidateSubjectWithEndpoint(true); + + jwtheadersConfiguration.setValidateTokenAudience(true); + jwtheadersConfiguration.setValidateTokenAudienceClaimName(""); + jwtheadersConfiguration.setValidateTokenAudienceClaimValue(""); + + jwtheadersConfiguration.setValidateTokenSignature(true); + jwtheadersConfiguration.setValidateTokenSignatureURL(""); + + return config; + } + + @Before + public void setUp() throws Exception { + + SecurityContextHolder.getContext().setAuthentication(null); + + + config = getBasicConfig(); + + filterChain = Mockito.mock(FilterChain.class); + response = Mockito.mock(ServletResponse.class); + + jwtHeadersUserUtil = Mockito.mock(JwtHeadersUserUtil.class); + + user = new User(); + user.setUsername("testcase-user@geocat.net"); + + user2 = new User(); + user2.setUsername("testcase-user2222@geocat.net"); + } + + /** + * trivial integration test - user arrives at site with header (gets access). 
+ */ + @Test + public void testTrivialLogin() throws ServletException, IOException { + doReturn(user) + .when(jwtHeadersUserUtil).getUser(any(), any()); + + var request = new MockHttpServletRequest(); + + request.addHeader("oidc_id_token_payload", "{\"preferred_username\":\"david.blasby2@geocat.net\",\"resource_access\":{\"live-key2\":{\"roles\":[\"GeonetworkAdministrator\",\"group1:Reviewer\"]}}}"); + + JwtHeadersAuthFilter filter = new JwtHeadersAuthFilter(config); + filter.jwtHeadersUserUtil = jwtHeadersUserUtil; + filter = spy(filter); + + //this should login the user + filter.doFilter(request, response, filterChain); + + //this validate login + var auth = SecurityContextHolder.getContext().getAuthentication(); + Assert.assertNotNull(auth); + Assert.assertTrue(auth instanceof JwtHeadersUsernamePasswordAuthenticationToken); + var principle = (User) auth.getPrincipal(); + Assert.assertEquals(user.getUsername(), principle.getUsername()); + + //logout() should not have been called + verify(filter, never()).logout(any()); + } + + /** + * integration test - + * 1. user arrives at site with header (gets access). + * 2. user then makes request (without headers) - should get logged out (i.e. 
not auth + logout() called) + */ + @Test + public void testLoginLogout() throws ServletException, IOException { + doReturn(user) + .when(jwtHeadersUserUtil).getUser(any(), any()); + + var request = new MockHttpServletRequest(); + + + JwtHeadersAuthFilter filter = new JwtHeadersAuthFilter(config); + filter = spy(filter); + filter.jwtHeadersUserUtil = jwtHeadersUserUtil; + + //logged in + request.addHeader("oidc_id_token_payload", "{\"preferred_username\":\"david.blasby2@geocat.net\",\"resource_access\":{\"live-key2\":{\"roles\":[\"GeonetworkAdministrator\",\"group1:Reviewer\"]}}}"); + + //user should be logged in + filter.doFilter(request, response, filterChain); + + //validate login + var auth = SecurityContextHolder.getContext().getAuthentication(); + Assert.assertNotNull(auth); + Assert.assertTrue(auth instanceof JwtHeadersUsernamePasswordAuthenticationToken); + var principle = (User) auth.getPrincipal(); + Assert.assertEquals(user.getUsername(), principle.getUsername()); + verify(filter, never()).logout(any()); //logout() should not have been called + + //logout + request = new MockHttpServletRequest(); + filter.doFilter(request, response, filterChain); + + //no longer an auth + auth = SecurityContextHolder.getContext().getAuthentication(); + Assert.assertNull(auth); + verify(filter).logout(any()); //logout was called + } + + /** + * integration test - + * 1. user1 arrives at site with header (gets access). + * 2. switch to user2 then makes request (with headers) + * - user1 should get logged out (i.e. not auth + logout() called) + * - user2 gets logged in + *

+ * In general, this shouldn't happen, but could happen: + * 1. logon as low-rights user + * 2. -- do stuff --- + * 3. need high privileges, so change to higher-rights user + * 4. -- do stuff --- + */ + @Test + public void testLoginDifferentLogin() throws ServletException, IOException { + doReturn(user) + .when(jwtHeadersUserUtil).getUser(any(), any()); + + var request = new MockHttpServletRequest(); + + + JwtHeadersAuthFilter filter = new JwtHeadersAuthFilter(config); + filter = spy(filter); + filter.jwtHeadersUserUtil = jwtHeadersUserUtil; + + + //logged in + request.addHeader("oidc_id_token_payload", "{\"preferred_username\":\"david.blasby2@geocat.net\",\"resource_access\":{\"live-key2\":{\"roles\":[\"GeonetworkAdministrator\",\"group1:Reviewer\"]}}}"); + + filter.doFilter(request, response, filterChain); + + //validate user logged in + var auth = SecurityContextHolder.getContext().getAuthentication(); + Assert.assertNotNull(auth); + Assert.assertTrue(auth instanceof JwtHeadersUsernamePasswordAuthenticationToken); + var principle = (User) auth.getPrincipal(); + Assert.assertEquals(user.getUsername(), principle.getUsername()); + verify(filter, never()).logout(any()); //logout() should not have been called + + //login new user (user2) + request = new MockHttpServletRequest(); + request.addHeader("oidc_id_token_payload", "{\"preferred_username\":\"david.blasby2@geocat.net\",\"resource_access\":{\"live-key2\":{\"roles\":[\"GeonetworkAdministrator\",\"group1:Reviewer\"]}}}"); + doReturn(user2) + .when(jwtHeadersUserUtil).getUser(any(), any()); + + filter.doFilter(request, response, filterChain); + + //validate that the correct user is logged in + auth = SecurityContextHolder.getContext().getAuthentication(); + Assert.assertNotNull(auth); + Assert.assertTrue(auth instanceof JwtHeadersUsernamePasswordAuthenticationToken); + principle = (User) auth.getPrincipal(); + Assert.assertEquals(user2.getUsername(), principle.getUsername()); + verify(filter).logout(any()); 
//logout must be called + } + +} diff --git a/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersTrivialUserTest.java b/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersTrivialUserTest.java new file mode 100644 index 000000000000..82f2171ca0d7 --- /dev/null +++ b/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersTrivialUserTest.java @@ -0,0 +1,220 @@ +/* + * Copyright (C) 2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.kernel.security.jwtheaders; + +import org.fao.geonet.domain.Profile; +import org.junit.Assert; +import org.junit.Test; +import org.springframework.mock.web.MockHttpServletRequest; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Tests that the JwtHeadersTrivialUser is working. 
+ */ +public class JwtHeadersTrivialUserTest { + + + /** + * test #maxProfile + * Should give the highest profile in the profileGroups + */ + @Test + public void testMaxProfile() { + Map> profileGroups = new HashMap<>(); + + //no profileGroups -> JwtHeadersTrivialUser.MIN_PROFILE + var maxProfile = JwtHeadersTrivialUser.getMaxProfile(profileGroups); + Assert.assertEquals(JwtHeadersTrivialUser.MIN_PROFILE, maxProfile); + + + //admin -> admin + profileGroups = new HashMap<>(); + profileGroups.put(Profile.Administrator, new ArrayList<>()); + maxProfile = JwtHeadersTrivialUser.getMaxProfile(profileGroups); + Assert.assertEquals(Profile.Administrator, maxProfile); + + //Reviewer -> Reviewer + profileGroups = new HashMap<>(); + profileGroups.put(Profile.Reviewer, new ArrayList<>()); + maxProfile = JwtHeadersTrivialUser.getMaxProfile(profileGroups); + Assert.assertEquals(Profile.Reviewer, maxProfile); + + //Editor -> Editor + profileGroups = new HashMap<>(); + profileGroups.put(Profile.Editor, new ArrayList<>()); + maxProfile = JwtHeadersTrivialUser.getMaxProfile(profileGroups); + Assert.assertEquals(Profile.Editor, maxProfile); + + + //Editor,Reviewer -> Reviewer + profileGroups = new HashMap<>(); + profileGroups.put(Profile.Editor, new ArrayList<>()); + profileGroups.put(Profile.Reviewer, new ArrayList<>()); + maxProfile = JwtHeadersTrivialUser.getMaxProfile(profileGroups); + Assert.assertEquals(Profile.Reviewer, maxProfile); + } + + + /** + * tests that the extraction of ProfileRoles is correct + */ + @Test + public void testExtractProfileRoles() { + + //no roles -> no profileGroups + List processedRolesFromHeaders = Arrays.asList(); + var profileGroups = JwtHeadersTrivialUser.extractProfileRoles(processedRolesFromHeaders); + Assert.assertEquals(0, profileGroups.size()); + + // "Administrator" -> "Administrator":[] + processedRolesFromHeaders = Arrays.asList("Administrator"); + profileGroups = JwtHeadersTrivialUser.extractProfileRoles(processedRolesFromHeaders); + 
Assert.assertEquals(1, profileGroups.size()); + Assert.assertTrue(profileGroups.containsKey(Profile.Administrator)); + Assert.assertEquals(0, profileGroups.get(Profile.Administrator).size()); + + // "g1:Reviewer" -> "Reviewer":["g1"] + processedRolesFromHeaders = Arrays.asList("g1:Reviewer"); + profileGroups = JwtHeadersTrivialUser.extractProfileRoles(processedRolesFromHeaders); + Assert.assertEquals(1, profileGroups.size()); + Assert.assertTrue(profileGroups.containsKey(Profile.Reviewer)); + Assert.assertEquals(1, profileGroups.get(Profile.Reviewer).size()); + Assert.assertEquals("g1", profileGroups.get(Profile.Reviewer).get(0)); + + // "g1:Reviewer","g2:Reviewer" -> "Reviewer":["g1",g2] + processedRolesFromHeaders = Arrays.asList("g1:Reviewer", "g2:Reviewer"); + profileGroups = JwtHeadersTrivialUser.extractProfileRoles(processedRolesFromHeaders); + Assert.assertEquals(1, profileGroups.size()); + Assert.assertTrue(profileGroups.containsKey(Profile.Reviewer)); + Assert.assertEquals(2, profileGroups.get(Profile.Reviewer).size()); + Assert.assertTrue(profileGroups.get(Profile.Reviewer).contains("g1")); + Assert.assertTrue(profileGroups.get(Profile.Reviewer).contains("g2")); + + // "g1:Reviewer","g2:Editor" -> "Reviewer":["g1"], "Editor":["g2"] + processedRolesFromHeaders = Arrays.asList("g1:Reviewer", "g2:Editor"); + profileGroups = JwtHeadersTrivialUser.extractProfileRoles(processedRolesFromHeaders); + Assert.assertEquals(2, profileGroups.size()); + Assert.assertTrue(profileGroups.containsKey(Profile.Reviewer)); + Assert.assertTrue(profileGroups.containsKey(Profile.Editor)); + Assert.assertEquals(1, profileGroups.get(Profile.Reviewer).size()); + Assert.assertEquals(1, profileGroups.get(Profile.Editor).size()); + Assert.assertTrue(profileGroups.get(Profile.Reviewer).contains("g1")); + Assert.assertTrue(profileGroups.get(Profile.Editor).contains("g2")); + + // "Administrator","g2:Editor" -> "Administrator":[], "Editor":["g2"] + processedRolesFromHeaders = 
Arrays.asList("Administrator", "g2:Editor"); + profileGroups = JwtHeadersTrivialUser.extractProfileRoles(processedRolesFromHeaders); + Assert.assertEquals(2, profileGroups.size()); + Assert.assertTrue(profileGroups.containsKey(Profile.Administrator)); + Assert.assertTrue(profileGroups.containsKey(Profile.Editor)); + Assert.assertEquals(0, profileGroups.get(Profile.Administrator).size()); + Assert.assertEquals(1, profileGroups.get(Profile.Editor).size()); + Assert.assertTrue(profileGroups.get(Profile.Editor).contains("g2")); + } + + /** + * Method #UpdateUserWithRoles relies on the above methods, so we don't test this too much + * The method just updates the user (Profile & ProfileGroups), so we test that here. + */ + @Test + public void testUpdateUserWithRoles() { + // "Administrator","g2:Editor" -> "Administrator":[], "Editor":["g2"] AND Profile=Administrator + var processedRolesFromHeaders = Arrays.asList("Administrator", "g2:Editor"); + var user = new JwtHeadersTrivialUser("testcaseUser"); + JwtHeadersTrivialUser.updateUserWithRoles(user, processedRolesFromHeaders); + + Assert.assertEquals(Profile.Administrator, user.getProfile()); + var profileGroups = user.getProfileGroups(); + Assert.assertEquals(2, profileGroups.size()); + Assert.assertTrue(profileGroups.containsKey(Profile.Administrator)); + Assert.assertTrue(profileGroups.containsKey(Profile.Editor)); + Assert.assertEquals(0, profileGroups.get(Profile.Administrator).size()); + Assert.assertEquals(1, profileGroups.get(Profile.Editor).size()); + Assert.assertTrue(profileGroups.get(Profile.Editor).contains("g2")); + } + + + /*** + * Method #handleRoles mostly relies on methods tested above and is mostly about extracting the correct headers from the request + */ + @Test + public void testHandleRolesJson() { + var config = JwtHeadersIntegrationTest.getBasicConfig(); + var user = new JwtHeadersTrivialUser("testCaseUser"); + var request = new MockHttpServletRequest(); + request.addHeader("oidc_id_token_payload", 
"{\"preferred_username\":\"david.blasby2@geocat.net\",\"resource_access\":{\"live-key2\":{\"roles\":[\"GeonetworkAdministrator\",\"group1:Reviewer\"]}}}"); + + JwtHeadersTrivialUser.handleRoles(user, config.getJwtConfiguration(), request); + + Assert.assertEquals(Profile.Administrator, user.getProfile()); + var profileGroups = user.getProfileGroups(); + Assert.assertEquals(2, profileGroups.size()); + Assert.assertTrue(profileGroups.containsKey(Profile.Administrator)); + Assert.assertTrue(profileGroups.containsKey(Profile.Reviewer)); + Assert.assertEquals(0, profileGroups.get(Profile.Administrator).size()); + Assert.assertEquals(1, profileGroups.get(Profile.Reviewer).size()); + Assert.assertTrue(profileGroups.get(Profile.Reviewer).contains("group1")); + } + + @Test + public void testHandleRolesJWT() { + var config = JwtHeadersIntegrationTest.getBasicConfigJWT(); + var user = new JwtHeadersTrivialUser("testCaseUser"); + var request = new MockHttpServletRequest(); + request.addHeader("TOKEN", JwtHeadersIntegrationTest.JWT); + + JwtHeadersTrivialUser.handleRoles(user, config.getJwtConfiguration(), request); + + Assert.assertEquals(Profile.Administrator, user.getProfile()); + var profileGroups = user.getProfileGroups(); + Assert.assertEquals(1, profileGroups.size()); + Assert.assertTrue(profileGroups.containsKey(Profile.Administrator)); + Assert.assertEquals(0, profileGroups.get(Profile.Administrator).size()); + } + + /** + * this is dependent on the above methods, so this is just a quick test + */ + @Test + public void testCreate() throws Exception { + var config = JwtHeadersIntegrationTest.getBasicConfigJWT(); + var request = new MockHttpServletRequest(); + request.addHeader("TOKEN", JwtHeadersIntegrationTest.JWT); + + var user = JwtHeadersTrivialUser.create(config.getJwtConfiguration(), request); + + Assert.assertEquals("david.blasby@geocat.net", user.getUsername()); + + Assert.assertEquals(Profile.Administrator, user.getProfile()); + var profileGroups = 
user.getProfileGroups(); + Assert.assertEquals(1, profileGroups.size()); + Assert.assertTrue(profileGroups.containsKey(Profile.Administrator)); + Assert.assertEquals(0, profileGroups.get(Profile.Administrator).size()); + } +} diff --git a/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersUserUtilTest.java b/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersUserUtilTest.java new file mode 100644 index 000000000000..482ef5549f6b --- /dev/null +++ b/core/src/test/java/org/fao/geonet/kernel/security/jwtheaders/JwtHeadersUserUtilTest.java @@ -0,0 +1,365 @@ +/* + * Copyright (C) 2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ + +package org.fao.geonet.kernel.security.jwtheaders; + +import org.fao.geonet.domain.Group; +import org.fao.geonet.domain.Profile; +import org.fao.geonet.domain.User; +import org.fao.geonet.domain.UserGroup; +import org.fao.geonet.kernel.security.GeonetworkAuthenticationProvider; +import org.fao.geonet.repository.GroupRepository; +import org.fao.geonet.repository.LanguageRepository; +import org.fao.geonet.repository.UserGroupRepository; +import org.fao.geonet.repository.UserRepository; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; +import org.springframework.security.core.userdetails.UsernameNotFoundException; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * tests that JwtHeadersUserUtil works. + * + * Because JwtHeadersUserUtil uses the Group/User/UserGroup repositories, this uses a lot + * of Mockito to setup different scenarios. + * + * The main scenarios we are testing; + * 1. user is correctly saved (if new) or loaded (if existing) + * 2. user's profile and profileGroups are correctly updated (or not updated), depending on the + * filter's configuration. 
+ */ +public class JwtHeadersUserUtilTest { + + JwtHeadersUserUtil jwtHeadersUserUtil; //spy()-ed + + @Before + public void setUp() throws Exception { + jwtHeadersUserUtil = new JwtHeadersUserUtil(); + jwtHeadersUserUtil = spy(jwtHeadersUserUtil); + + jwtHeadersUserUtil.userRepository = Mockito.mock(UserRepository.class); + jwtHeadersUserUtil.groupRepository = Mockito.mock(GroupRepository.class); + jwtHeadersUserUtil.userGroupRepository = Mockito.mock(UserGroupRepository.class); + jwtHeadersUserUtil.authProvider = Mockito.mock(GeonetworkAuthenticationProvider.class); + jwtHeadersUserUtil.languageRepository = Mockito.mock(LanguageRepository.class); + } + + + /** + * we have the config setup so it doesn't get any write access from the database + * + no user in DB + * + new user created + */ + @Test + public void testSimplestCase() { + doThrow(new UsernameNotFoundException("")) + .when(jwtHeadersUserUtil.authProvider).loadUserByUsername(any()); + + JwtHeadersConfiguration basicConfig = JwtHeadersIntegrationTest.getBasicConfig(); + basicConfig.setUpdateGroup(false); + basicConfig.setUpdateProfile(false); + + var trivialUser = new JwtHeadersTrivialUser("testcaseUser@example.com"); + trivialUser = spy(trivialUser); + + User userDetails = (User) jwtHeadersUserUtil.getUser(trivialUser, basicConfig); + + Assert.assertEquals("testcaseUser@example.com", userDetails.getUsername()); + Assert.assertEquals("testcaseUser", userDetails.getName()); + + //verify helper methods called + verify(jwtHeadersUserUtil.authProvider).loadUserByUsername("testcaseUser@example.com"); + verify(jwtHeadersUserUtil).createUser(trivialUser, basicConfig); + + // these shouldn't ever be looked at + verify(jwtHeadersUserUtil, never()).updateGroups(any(), any()); + verify(trivialUser, never()).getProfile(); + verify(trivialUser, never()).getProfileGroups(); + + //db should not have been saved to + verify(jwtHeadersUserUtil.groupRepository, never()).save(any()); + 
verify(jwtHeadersUserUtil.userGroupRepository, never()).save(any()); + verify(jwtHeadersUserUtil.languageRepository, never()).save(any()); + + //user was saved + verify(jwtHeadersUserUtil.userRepository).save(userDetails); + } + + /** + * we have the config setup so it doesn't get any write access from the database + * + user IS in DB + */ + @Test + public void testSimplestCaseAlreadyExists() { + User user = new User(); + user.setUsername("testcaseUser@example.com"); + user.setName("testcaseUser"); + user.setId(666); + + doReturn(user) + .when(jwtHeadersUserUtil.authProvider).loadUserByUsername("testcaseUser@example.com"); + + JwtHeadersConfiguration basicConfig = JwtHeadersIntegrationTest.getBasicConfig(); + basicConfig.setUpdateGroup(false); + basicConfig.setUpdateProfile(false); + + var trivialUser = new JwtHeadersTrivialUser("testcaseUser@example.com"); + trivialUser = spy(trivialUser); + + User userDetails = (User) jwtHeadersUserUtil.getUser(trivialUser, basicConfig); + + Assert.assertEquals("testcaseUser@example.com", userDetails.getUsername()); + Assert.assertEquals("testcaseUser", userDetails.getName()); + + //verify helper methods called + verify(jwtHeadersUserUtil.authProvider).loadUserByUsername("testcaseUser@example.com"); + + + // these shouldn't ever be looked at + verify(jwtHeadersUserUtil, never()).createUser(trivialUser, basicConfig); + verify(jwtHeadersUserUtil, never()).updateGroups(any(), any()); + verify(trivialUser, never()).getProfile(); + verify(trivialUser, never()).getProfileGroups(); + + //db should not have been saved to + verify(jwtHeadersUserUtil.groupRepository, never()).save(any()); + verify(jwtHeadersUserUtil.userGroupRepository, never()).save(any()); + verify(jwtHeadersUserUtil.languageRepository, never()).save(any()); + + //user wasn't saved (no modification) + verify(jwtHeadersUserUtil.userRepository, never()).save(userDetails); + } + + + /** + * we have the config setup so it writes user data to DB + * + no user in DB + * + new 
user created + * + validate that profile is set + * + validate that user groups (in db) are updated + */ + @Test + public void testNewUserWithGroups() { + doThrow(new UsernameNotFoundException("")) + .when(jwtHeadersUserUtil.authProvider).loadUserByUsername(any()); + + //make sure that the group ID is set when saved. GN uses the ID in Set<> operations, so we must SET it. + when(jwtHeadersUserUtil.groupRepository.save(any())).thenAnswer(new Answer() { + @Override + public Group answer(InvocationOnMock invocation) throws Throwable { + ((Group) invocation.getArguments()[0]).setId(new Random().nextInt()); + return ((Group) invocation.getArguments()[0]); + } + }); + + JwtHeadersConfiguration basicConfig = JwtHeadersIntegrationTest.getBasicConfig(); + basicConfig.setUpdateGroup(true); + basicConfig.setUpdateProfile(true); + + var trivialUser = new JwtHeadersTrivialUser("testcaseUser@example.com"); + trivialUser.setProfile(Profile.Administrator); + + Map> profileGroups = new HashMap<>(); + profileGroups.put(Profile.Reviewer, Arrays.asList("group1", "group2")); + trivialUser.setProfileGroups(profileGroups); + + trivialUser = spy(trivialUser); + + User userDetails = (User) jwtHeadersUserUtil.getUser(trivialUser, basicConfig); + + Assert.assertEquals("testcaseUser@example.com", userDetails.getUsername()); + Assert.assertEquals("testcaseUser", userDetails.getName()); + + //verify helper methods called + verify(jwtHeadersUserUtil.authProvider).loadUserByUsername("testcaseUser@example.com"); + verify(jwtHeadersUserUtil).createUser(trivialUser, basicConfig); + + //user should be saved with the Profile (admin) + verify(jwtHeadersUserUtil.userRepository).save(userDetails); //user was saved + Assert.assertEquals(Profile.Administrator, userDetails.getProfile()); + + + //update groups method was called + verify(jwtHeadersUserUtil).updateGroups(profileGroups, userDetails); + + //group1 and group2 saved to db + //attempted to find them in DB + 
verify(jwtHeadersUserUtil.groupRepository).findByName("group1"); + verify(jwtHeadersUserUtil.groupRepository).findByName("group2"); + + //saved + ArgumentCaptor groupsCaptor = ArgumentCaptor.forClass(Group.class); + verify(jwtHeadersUserUtil.groupRepository, times(2)).save(groupsCaptor.capture()); + + Assert.assertEquals("group1", groupsCaptor.getAllValues().get(0).getName()); + Assert.assertEquals("group2", groupsCaptor.getAllValues().get(1).getName()); + + + //user connected to group and role + ArgumentCaptor setUserGroupCaptor = ArgumentCaptor.forClass(Set.class); + + verify(jwtHeadersUserUtil.userGroupRepository).updateUserGroups(eq(userDetails.getId()), setUserGroupCaptor.capture()); + Assert.assertEquals(1, setUserGroupCaptor.getAllValues().size()); + List userGroups = (List) setUserGroupCaptor.getAllValues().get(0).stream().collect(Collectors.toList()); + Collections.sort(userGroups, + (o1, o2) -> ((o1).getGroup().getName() + "-" + o1.getProfile()).compareTo((o2).getGroup().getName() + "-" + o2.getProfile())); + Assert.assertEquals(4, userGroups.size()); + + Assert.assertEquals(Profile.Editor, userGroups.get(0).getProfile()); + Assert.assertEquals(userDetails, userGroups.get(0).getUser()); + Assert.assertEquals("group1", userGroups.get(0).getGroup().getName()); + + Assert.assertEquals(Profile.Reviewer, userGroups.get(1).getProfile()); + Assert.assertEquals(userDetails, userGroups.get(1).getUser()); + Assert.assertEquals("group1", userGroups.get(1).getGroup().getName()); + + Assert.assertEquals(Profile.Editor, userGroups.get(2).getProfile()); + Assert.assertEquals(userDetails, userGroups.get(2).getUser()); + Assert.assertEquals("group2", userGroups.get(2).getGroup().getName()); + + Assert.assertEquals(Profile.Reviewer, userGroups.get(3).getProfile()); + Assert.assertEquals(userDetails, userGroups.get(3).getUser()); + Assert.assertEquals("group2", userGroups.get(3).getGroup().getName()); + } + + + /** + * we have the config setup so it writes user data to DB + 
* + user IS in DB + * + validate that profile is set + * + validate that user groups (in db) are updated + */ + @Test + public void testOldUserWithGroups() { + User user = new User(); + user.setUsername("testcaseUser@example.com"); + user.setName("testcaseUser"); + user.setId(666); + + doReturn(user) + .when(jwtHeadersUserUtil.authProvider).loadUserByUsername("testcaseUser@example.com"); + + + //make sure that the group ID is set when saved. GN uses the ID in Set<> operations, so we must SET it. + when(jwtHeadersUserUtil.groupRepository.save(any())).thenAnswer(new Answer() { + @Override + public Group answer(InvocationOnMock invocation) throws Throwable { + ((Group) invocation.getArguments()[0]).setId(new Random().nextInt()); + return ((Group) invocation.getArguments()[0]); + } + }); + + JwtHeadersConfiguration basicConfig = JwtHeadersIntegrationTest.getBasicConfig(); + basicConfig.setUpdateGroup(true); + basicConfig.setUpdateProfile(true); + + var trivialUser = new JwtHeadersTrivialUser("testcaseUser@example.com"); + trivialUser.setProfile(Profile.Administrator); + + Map> profileGroups = new HashMap<>(); + profileGroups.put(Profile.Reviewer, Arrays.asList("group1", "group2")); + trivialUser.setProfileGroups(profileGroups); + + trivialUser = spy(trivialUser); + + User userDetails = (User) jwtHeadersUserUtil.getUser(trivialUser, basicConfig); + + Assert.assertEquals("testcaseUser@example.com", userDetails.getUsername()); + Assert.assertEquals("testcaseUser", userDetails.getName()); + + //verify helper methods called + verify(jwtHeadersUserUtil.authProvider).loadUserByUsername("testcaseUser@example.com"); + verify(jwtHeadersUserUtil, never()).createUser(trivialUser, basicConfig); + + //user should be saved with the Profile (admin) + verify(jwtHeadersUserUtil.userRepository).save(userDetails); //user was saved + Assert.assertEquals(Profile.Administrator, userDetails.getProfile()); + + + //update groups method was called + 
verify(jwtHeadersUserUtil).updateGroups(profileGroups, userDetails); + + //group1 and group2 saved to db + //attempted to find them in DB + verify(jwtHeadersUserUtil.groupRepository).findByName("group1"); + verify(jwtHeadersUserUtil.groupRepository).findByName("group2"); + + //saved + ArgumentCaptor groupsCaptor = ArgumentCaptor.forClass(Group.class); + verify(jwtHeadersUserUtil.groupRepository, times(2)).save(groupsCaptor.capture()); + + Assert.assertEquals("group1", groupsCaptor.getAllValues().get(0).getName()); + Assert.assertEquals("group2", groupsCaptor.getAllValues().get(1).getName()); + + //user connected to group and role + ArgumentCaptor setUserGroupCaptor = ArgumentCaptor.forClass(Set.class); + + verify(jwtHeadersUserUtil.userGroupRepository).updateUserGroups(eq(userDetails.getId()), setUserGroupCaptor.capture()); + Assert.assertEquals(1, setUserGroupCaptor.getAllValues().size()); + List userGroups = (List) setUserGroupCaptor.getAllValues().get(0).stream().collect(Collectors.toList()); + Collections.sort(userGroups, + (o1, o2) -> ((o1).getGroup().getName() + "-" + o1.getProfile()).compareTo((o2).getGroup().getName() + "-" + o2.getProfile())); + Assert.assertEquals(4, userGroups.size()); + + Assert.assertEquals(Profile.Editor, userGroups.get(0).getProfile()); + Assert.assertEquals(userDetails, userGroups.get(0).getUser()); + Assert.assertEquals("group1", userGroups.get(0).getGroup().getName()); + + Assert.assertEquals(Profile.Reviewer, userGroups.get(1).getProfile()); + Assert.assertEquals(userDetails, userGroups.get(1).getUser()); + Assert.assertEquals("group1", userGroups.get(1).getGroup().getName()); + + Assert.assertEquals(Profile.Editor, userGroups.get(2).getProfile()); + Assert.assertEquals(userDetails, userGroups.get(2).getUser()); + Assert.assertEquals("group2", userGroups.get(2).getGroup().getName()); + + Assert.assertEquals(Profile.Reviewer, userGroups.get(3).getProfile()); + Assert.assertEquals(userDetails, userGroups.get(3).getUser()); + 
Assert.assertEquals("group2", userGroups.get(3).getGroup().getName()); + } + + +} diff --git a/core/src/test/java/org/fao/geonet/kernel/security/openidconnect/OIDCRoleProcessorTest.java b/core/src/test/java/org/fao/geonet/kernel/security/openidconnect/OIDCRoleProcessorTest.java index c20a2f89aa8e..d02d02d3440d 100644 --- a/core/src/test/java/org/fao/geonet/kernel/security/openidconnect/OIDCRoleProcessorTest.java +++ b/core/src/test/java/org/fao/geonet/kernel/security/openidconnect/OIDCRoleProcessorTest.java @@ -44,32 +44,6 @@ */ public class OIDCRoleProcessorTest { -// @Test -// public void t1() throws Exception { -// String a = "oaLLT9hkcSj2tGfZsjbu7Xz1Krs0qEicXPmEsJKOBQHauZ_kRM1HdEkgOJbUznUspE6xOuOSXjlzErqBxXAu4SCvcvVOCYG2v9G3-uIrLF5dstD0sYHBo1VomtKxzF90Vslrkn6rNQgUGIWgvuQTxm1uRklYFPEcTIRw0LnYknzJ06GC9ljKR617wABVrZNkBuDgQKj37qcyxoaxIGdxEcmVFZXJyrxDgdXh9owRmZn6LIJlGjZ9m59emfuwnBnsIQG7DirJwe9SXrLXnexRQWqyzCdkYaOqkpKrsjuxUj2-MHX31FqsdpJJsOAvYXGOYBKJRjhGrGdONVrZdUdTBQ"; -// String b = "sfsXMXWuO-dniLaIELa3Pyqz9Y_rWff_AVrCAnFSdPHa8__Pmkbt_yq-6Z3u1o4gjRpKWnrjxIh8zDn1Z1RS26nkKcNg5xfWxR2K8CPbSbY8gMrp_4pZn7tgrEmoLMkwfgYaVC-4MiFEo1P2gd9mCdgIICaNeYkG1bIPTnaqquTM5KfT971MpuOVOdM1ysiejdcNDvEb7v284PYZkw2imwqiBY3FR0sVG7jgKUotFvhd7TR5WsA20GS_6ZIkUUlLUbG_rXWGl0YjZLS_Uf4q8Hbo7u-7MaFn8B69F6YaFdDlXm_A0SpedVFWQFGzMsp43_6vEzjfrFDJVAYkwb6xUQ"; -// String c = "yr3v1uETrFfT17zvOiy01w8nO-1t67cmiZLZxq2ISDdte9dw-IxCR7lPV2wezczIRgcWmYgFnsk2j6m10H4tKzcqZM0JJ_NigY29pFimxlL7_qXMB1PorFJdlAKvp5SgjSTwLrXjkr1AqWwbpzG2yZUNN3GE8GvmTeo4yweQbNCd-yO_Zpozx0J34wHBEMuaw-ZfCUk7mdKKsg-EcE4Zv0Xgl9wP2MpKPx0V8gLazxe6UQ9ShzNuruSOncpLYJN_oQ4aKf5ptOp1rsfDY2IK9frtmRTKOdQ-MEmSdjGL_88IQcvCs7jqVz53XKoXRlXB8tMIGOcg-ICer6yxe2itIQ"; -// String d = 
"spvQcXWqYrMcvcqQmfSMYnbUC8U03YctnXyLIBe148OzhBrgdAOmPfMfJi_tUW8L9svVGpk5qG6dN0n669cRHKqU52GnG0tlyYXmzFC1hzHVgQz9ehve4tlJ7uw936XIUOAOxx3X20zdpx7gm4zHx4j2ZBlXskAj6U3adpHQNuwUE6kmngJWR-deWlEigMpRsvUVQ2O5h0-RSq8Wr_x7ud3K6GTtrzARamz9uk2IXatKYdnj5Jrk2jLY6nWt-GtxlA_l9XwIrOl6Sqa_pOGIpS01JKdxKvpBC9VdS8oXB-7P5qLksmv7tq-SbbiOec0cvU7WP7vURv104V4FiI_qoQ"; -// String e = "wEMMJtj9yMQd8QS6Vnm538K5GN1Pr_I31_LUl9-OCYu-9_DrDvPGjViQK9kOiCjBfyqoAL-pBecn9-XXaS-C4xZTn1ZRw--GELabuo0u-U6r3TKj42xFDEP-_R5RpOGshoC95lrKiU5teuhn4fBM3XfR2GB0dVMcpzN3h4-0OMvBK__Zr9tkQCU_KzXTbNCjyA7ybtbr83NF9k3KjpTyOyY2S-qvFbY-AoqMhL9Rp8r2HBj_vrsr6RX6GeiSxxjbEzDFA2VIcSKbSHvbNBEeW2KjLXkz6QG2LjKz5XsYLp6kv_-k9lPQBy_V7Ci4ZkhAN-6j1S1Kcq58aLbp0wDNKQ"; -// String f = "1n7-nWSLeuWQzBRlYSbS8RjvWvkQeD7QL9fOWaGXbW73VNGH0YipZisPClFv6GzwfWECTWQp19WFe_lASka5-KEWkQVzCbEMaaafOIs7hC61P5cGgw7dhuW4s7f6ZYGZEzQ4F5rHE-YNRbvD51qirPNzKHk3nji1wrh0YtbPPIf--NbI98bCwLLh9avedOmqESzWOGECEMXv8LSM-B9SKg_4QuBtyBwwIakTuqo84swTBM5w8PdhpWZZDtPgH87Wz-_WjWvk99AjXl7l8pWPQJiKNujt_ck3NDFpzaLEppodhUsID0ptRA008eCU6l8T-ux19wZmb_yBnHcV3pFWhQ"; -// String g = "01re9a2BUTtNtdFzLNI-QEHW8XhDiDMDbGMkxHRIYXH41zBccsXwH9vMi0HuxXHpXOzwtUYKwl93ZR37tp6lpvwlU1HePNmZpJ9D-XAvU73x03YKoZEdaFB39VsVyLih3fuPv6DPE2qT-TNE3X5YdIWOGFrcMkcXLsjO-BCq4qcSdBH2lBgEQUuD6nqreLZsg-gPzSDhjVScIUZGiD8M2sKxADiIHo5KlaZIyu32t8JkavP9jM7ItSAjzig1W2yvVQzUQZA-xZqJo2jxB3g_fygdPUHK6UN-_cqkrfxn2-VWH1wMhlm90SpxTMD4HoYOViz1ggH8GCX2aBiX5OzQ6Q"; -// -// String t = 
"eyJ0eXAiOiJKV1QiLCJub25jZSI6ImM4bDlGQm9KYkhoM2RkRmp1dkpqYWZCeUxFT1VwYmNOeFJEb21oeUVkRDAiLCJhbGciOiJSUzI1NiIsIng1dCI6ImpTMVhvMU9XRGpfNTJ2YndHTmd2UU8yVnpNYyIsImtpZCI6ImpTMVhvMU9XRGpfNTJ2YndHTmd2UU8yVnpNYyJ9.eyJhdWQiOiIwMDAwMDAwMy0wMDAwLTAwMDAtYzAwMC0wMDAwMDAwMDAwMDAiLCJpc3MiOiJodHRwczovL3N0cy53aW5kb3dzLm5ldC84N2Y5MTQ5NC1jMGRjLTQ5M2UtODNjMy05MjI2YzExMTg1MGEvIiwiaWF0IjoxNjU1ODMyNzcyLCJuYmYiOjE2NTU4MzI3NzIsImV4cCI6MTY1NTgzNzE3MCwiYWNjdCI6MCwiYWNyIjoiMSIsImFpbyI6IkFWUUFxLzhUQUFBQStPZEhCUnIxbytUYWNtUkFySXhDSFJFMWxXOFhpRUMwRG5MTE1ZV3VPb3duOUhjWUlFVmRycEJ5T3B4ZUVFMHVEMWlnc3ZmNXZXTHhFY1JScVdWaVBOa1ZWaFhBNGZwQkZJWE5kOUxTSTN3PSIsImFtciI6WyJwd2QiLCJtZmEiXSwiYXBwX2Rpc3BsYXluYW1lIjoiZm9ya2V5Y2xvYWsiLCJhcHBpZCI6ImI5ZThkMDVhLTA4YjYtNDhhNS04MWM4LTk1OTBhMGY1NTBmMyIsImFwcGlkYWNyIjoiMSIsImZhbWlseV9uYW1lIjoiYmxhc2J5IiwiZ2l2ZW5fbmFtZSI6ImRhdmlkIiwiaWR0eXAiOiJ1c2VyIiwiaXBhZGRyIjoiOTYuNTQuODguNjkiLCJuYW1lIjoiZGF2aWQgYmxhc2J5Iiwib2lkIjoiNmFjNjgyYjYtNjA0OC00ZWI2LWI0Y2EtMjUzOGUzM2NjMDlhIiwicGxhdGYiOiI1IiwicHVpZCI6IjEwMDMyMDAxQTUxMUU2QzEiLCJyaCI6IjAuQVY4QWxCVDVoOXpBUGttRHc1SW13UkdGQ2dNQUFBQUFBQUFBd0FBQUFBQUFBQUJmQUhZLiIsInNjcCI6Ikdyb3VwTWVtYmVyLlJlYWQuQWxsIG9wZW5pZCBVc2VyLlJlYWQgcHJvZmlsZSBlbWFpbCIsInNpZ25pbl9zdGF0ZSI6WyJrbXNpIl0sInN1YiI6Im40N3NkYUJDU3FHNnF4NjRISnBvcWpvbHZkV3N5cW5tUmhCLWYydEJfN0EiLCJ0ZW5hbnRfcmVnaW9uX3Njb3BlIjoiRVUiLCJ0aWQiOiI4N2Y5MTQ5NC1jMGRjLTQ5M2UtODNjMy05MjI2YzExMTg1MGEiLCJ1bmlxdWVfbmFtZSI6ImRhdmlkLmJsYXNieUBnZW9jYXQubmV0IiwidXBuIjoiZGF2aWQuYmxhc2J5QGdlb2NhdC5uZXQiLCJ1dGkiOiJsZ21FaEstdmUwT1Rzc25UOWZOaUFBIiwidmVyIjoiMS4wIiwid2lkcyI6WyI2MmU5MDM5NC02OWY1LTQyMzctOTE5MC0wMTIxNzcxNDVlMTAiLCJiNzlmYmY0ZC0zZWY5LTQ2ODktODE0My03NmIxOTRlODU1MDkiXSwieG1zX3N0Ijp7InN1YiI6IjNvenJ4UUVRd3pwNXIwc1J0WjhOUGtZdkoxeGQyVy1EOGc4eXZjbV9FSW8ifSwieG1zX3RjZHQiOjE1ODczNzQxNjF9.Y2G-Sgg1HAYLrZbIpVfbZ8hTJvHUFL3TJ1ZkWL_1t8NT3QdgVoQekl2OqcnHaRWJK8-0mX-hTgmkBCJ2hadp62w0DmmHr2U_5iCH7nrG4qjp5SEtsn-Q_XQU5HxJ787Aog98CXjr4I0tWDhXBEn8uGIoWIXv7b4uHNIfFa3R7LuMj74WMb9uJu1fGrN5GTM88Gd1riA6sjDpGlK145lou2xtJ54AmVc3e6OSl
iLXTdglX2-zoCNOkGroc8kJ3BBoaT0YcH6HXhHVNOoa_8WRAxw7L0Fio3l1gpnqTWFD7Tskoden4QEEFNkZM9ISsU3ZP0KvrrEpHDTbsGutvlu5HA"; -//// JwtDecoderFactory factory = new JwtDecoderFactory(); -//// JwtDecoder decoder = factory.createJwtDecoder(g,"AQAB"); -//// decoder.decode(t); -// -//// JwtDecoder decoder2 = NimbusJwtDecoder -//// .withJwkSetUri("https://login.microsoftonline.com/87f91494-c0dc-493e-83c3-9226c111850a/discovery/keys") -//// .build(); -//// -//// decoder2.decode(t); -// -// JWT jj = JWTParser.parse(t); -// ((SignedJWT jj = JWTParser.parse(t); -// ((SignedJWT) jj).getPayload().toJSONObject()JWT) jj).getPayload().toJSONObject() -// -// } //creates the OIDCRoleProcessor public OIDCRoleProcessor getOIDCRoleProcessor() { diff --git a/core/src/test/resources/WEB-INF/config.properties b/core/src/test/resources/WEB-INF/config.properties index 4bb53114f981..e59ce7556608 100644 --- a/core/src/test/resources/WEB-INF/config.properties +++ b/core/src/test/resources/WEB-INF/config.properties @@ -20,5 +20,7 @@ es.index.checker.interval=0/5 * * * * ? thesaurus.cache.maxsize=400000 +api.params.maxUploadSize=100000000 + language.default=eng language.forceDefault=false diff --git a/core/src/test/resources/org/fao/geonet/api/Messages.properties b/core/src/test/resources/org/fao/geonet/api/Messages.properties index 33b48c12df88..8c09a2d1398a 100644 --- a/core/src/test/resources/org/fao/geonet/api/Messages.properties +++ b/core/src/test/resources/org/fao/geonet/api/Messages.properties @@ -1,5 +1,5 @@ # -# Copyright (C) 2001-2016 Food and Agriculture Organization of the +# Copyright (C) 2001-2024 Food and Agriculture Organization of the # United Nations (FAO-UN), United Nations World Food Programme (WFP) # and United Nations Environment Programme (UNEP) # @@ -20,7 +20,6 @@ # Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, # Rome - Italy. email: geonetwork@osgeo.org # - mail_error=Failed to send email. 
mail_config_test_subject=%s / Test / Mail configuration mail_config_test_message=Test message from %s\n\ @@ -54,8 +53,7 @@ user_password_changed='%s' password was updated. user_password_notchanged=A problem occurred trying to change '%s' password. Contact the helpdesk. user_password_invalid_changekey='%s' is an invalid change key for '%s'. Change keys are only valid for one day. user_registered=User '%s' registered. -user_with_that_email_found=A user with this email or username already exists. -user_with_that_username_found=A user with this email or username already exists. +user_with_that_email_username_found=A user with this email or username already exists. register_email_admin_subject=%s / New account for %s as %s register_email_admin_message=Dear Admin,\n\ Newly registered user %s has requested %s access for %s.\n\ @@ -177,6 +175,9 @@ api.exception.resourceAlreadyExists=Resource already exists api.exception.resourceAlreadyExists.description=Resource already exists. api.exception.unsatisfiedRequestParameter=Unsatisfied request parameter api.exception.unsatisfiedRequestParameter.description=Unsatisfied request parameter. +exception.maxUploadSizeExceeded=Maximum upload size of {0} exceeded. +exception.maxUploadSizeExceeded.description=The request was rejected because its size ({0}) exceeds the configured maximum ({1}). +exception.maxUploadSizeExceededUnknownSize.description=The request was rejected because its size exceeds the configured maximum ({0}). exception.resourceNotFound.metadata=Metadata not found exception.resourceNotFound.metadata.description=Metadata with UUID ''{0}'' not found. 
exception.resourceNotFound.resource=Metadata resource ''{0}'' not found @@ -211,6 +212,10 @@ exception.doi.serverErrorDelete=Error deleting DOI exception.doi.serverErrorDelete.description=Error deleting DOI: {0} exception.doi.serverErrorUnregister=Error unregistering DOI exception.doi.serverErrorUnregister.description=Error unregistering DOI: {0} +exception.doi.serverCanNotHandleRecord=DOI server can not handle the metadata +exception.doi.serverCanNotHandleRecord.description=DOI server ''{0}'' can not handle the metadata with UUID ''{1}'' +exception.doi.configurationMissing=DOI server configuration is not complete +exception.doi.configurationMissing.description=DOI server configuration is not complete. Check the DOI server configuration to complete it exception.doi.notSupportedOperationError=Operation not supported exception.doi.notSupportedOperationError.description={0} api.metadata.import.importedWithId=Metadata imported with ID '%s' @@ -241,3 +246,9 @@ api.metadata.status.errorGetStatusNotAllowed=Only the owner of the metadata can api.metadata.status.errorSetStatusNotAllowed=Only the owner of the metadata can set the status of this record. User is not the owner of the metadata. feedback_subject_userFeedback=User feedback + +audit.revision=Updated by %s on %s:\n\ +%s +audit.revision.field.set=- Field '%s' set to '%s' +audit.revision.field.unset=- Field '%s' unset +audit.revision.field.updated=- Field '%s' changed from '%s' to '%s' diff --git a/core/src/test/resources/org/fao/geonet/api/Messages_fre.properties b/core/src/test/resources/org/fao/geonet/api/Messages_fre.properties index 178db7cba12f..db0fc9f14e16 100644 --- a/core/src/test/resources/org/fao/geonet/api/Messages_fre.properties +++ b/core/src/test/resources/org/fao/geonet/api/Messages_fre.properties @@ -20,7 +20,6 @@ # Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, # Rome - Italy. email: geonetwork@osgeo.org # - mail_error=Erreur lors de l'envoi du mail. 
mail_config_test_subject=%s / Test / Configuration serveur de mail mail_config_test_message=Message de test de %s\n\ @@ -53,8 +52,8 @@ user_password_sent=Si l''utilisateur existe, vous recevrez un courriel contenant user_password_changed=Le mot de passe de %s a \u00E9t\u00E9 mis \u00E0 jour. user_password_notchanged=\u00C9chec lors du changement de mot de passe de %s. Contactez le support. user_password_invalid_changekey=%s est une cl\u00E9 invalide pour %s. Les cl\u00E9s ne sont valides que pendant une journ\u00E9e. -user_with_that_email_found=Un utilisateur avec cette adresse email ou ce nom d''utilisateur existe d\u00E9j\u00E0. -user_with_that_username_found=Un utilisateur avec cette adresse email ou ce nom d''utilisateur existe d\u00E9j\u00E0. +user_registered=Utilisateur '%s' enregistr\u00E9. +user_with_that_email_username_found=Un utilisateur avec cette adresse email ou ce nom d''utilisateur existe d\u00E9j\u00E0. register_email_admin_subject=%s / Cr\u00E9ation de compte pour %s en tant que %s register_email_admin_message=Cher administrateur,\n\ L'utilisateur %s vient de demander une cr\u00E9ation de compte pour %s.\n\ @@ -171,6 +170,9 @@ api.exception.resourceAlreadyExists=La ressource existe d\u00E9j\u00E0 api.exception.resourceAlreadyExists.description=La ressource existe d\u00E9j\u00E0. api.exception.unsatisfiedRequestParameter=Param\u00E8tre de demande non satisfait api.exception.unsatisfiedRequestParameter.description=Param\u00E8tre de demande non satisfait. +exception.maxUploadSizeExceeded=La taille maximale du t\u00E9l\u00E9chargement de {0} a \u00E9t\u00E9 exc\u00E9d\u00E9e. +exception.maxUploadSizeExceeded.description=La demande a \u00E9t\u00E9 refus\u00E9e car sa taille ({0}) exc\u00E8de le maximum configur\u00E9 ({1}). +exception.maxUploadSizeExceededUnknownSize.description=La demande a \u00E9t\u00E9 refus\u00E9e car sa taille exc\u00E8de le maximum configur\u00E9 ({0}). 
exception.resourceNotFound.metadata=Fiches introuvables exception.resourceNotFound.metadata.description=La fiche ''{0}'' est introuvable. exception.resourceNotFound.resource=Ressource ''{0}'' introuvable @@ -203,6 +205,10 @@ exception.doi.serverErrorDelete=Erreur lors de la suppression du DOI exception.doi.serverErrorDelete.description=Erreur lors de la suppression du DOI : {0} exception.doi.serverErrorUnregister=Erreur lors de la d\u00E9sinscription du DOI exception.doi.serverErrorUnregister.description=Erreur lors de la d\u00E9sinscription du DOI {0} +exception.doi.serverCanNotHandleRecord=DOI server can not handle the metadata +exception.doi.serverCanNotHandleRecord.description=DOI server ''{0}'' can not handle the metadata with UUID ''{1}'' +exception.doi.configurationMissing=DOI server configuration is not complete +exception.doi.configurationMissing.description=DOI server configuration is not complete. Check the DOI server configuration to complete it exception.doi.notSupportedOperationError=Op\u00E9ration non prise en charge exception.doi.notSupportedOperationError.description={0} api.metadata.import.importedWithId=Fiche import\u00E9e avec l'ID '%s' @@ -233,3 +239,9 @@ api.metadata.status.errorGetStatusNotAllowed=Seul le propri\u00E9taire des m\u00 api.metadata.status.errorSetStatusNotAllowed=Seul le propri\u00E9taire des m\u00E9tadonn\u00E9es peut d\u00E9finir le statut de cet enregistrement. 
L'utilisateur n'est pas le propri\u00E9taire des m\u00E9tadonn\u00E9es feedback_subject_userFeedback=Commentaire de l'utilisateur + +audit.revision=Mise \u00E0 jour par %s le %s:\n\ +%s +audit.revision.field.set=- Champ '%s' d\u00E9fini \u00E0 '%s' +audit.revision.field.unset=- Champ '%s' d\u00E9sactiv\u00E9 +audit.revision.field.updated=- Champ '%s' modifi\u00E9 de '%s' \u00E0 '%s' diff --git a/core/src/test/resources/org/fao/geonet/kernel/security/ldap/LDAPUserDetailsContextMapperWithProfileSearchEnhancedTest-context.xml b/core/src/test/resources/org/fao/geonet/kernel/security/ldap/LDAPUserDetailsContextMapperWithProfileSearchEnhancedTest-context.xml index bd68693764a6..7ab3055d502b 100644 --- a/core/src/test/resources/org/fao/geonet/kernel/security/ldap/LDAPUserDetailsContextMapperWithProfileSearchEnhancedTest-context.xml +++ b/core/src/test/resources/org/fao/geonet/kernel/security/ldap/LDAPUserDetailsContextMapperWithProfileSearchEnhancedTest-context.xml @@ -138,7 +138,8 @@ + transaction-manager-ref="transactionManager" + factory-class="org.springframework.data.envers.repository.support.EnversRevisionRepositoryFactoryBean"/> @@ -211,5 +212,8 @@ + + + diff --git a/csw-server/pom.xml b/csw-server/pom.xml index 4f625facbe75..f6a27aadb3c5 100644 --- a/csw-server/pom.xml +++ b/csw-server/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/csw-server/src/main/java/org/fao/geonet/component/csw/GetCapabilities.java b/csw-server/src/main/java/org/fao/geonet/component/csw/GetCapabilities.java index 50f0a4202001..72f0dc19c439 100644 --- a/csw-server/src/main/java/org/fao/geonet/component/csw/GetCapabilities.java +++ b/csw-server/src/main/java/org/fao/geonet/component/csw/GetCapabilities.java @@ -65,6 +65,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; import static org.fao.geonet.kernel.setting.SettingManager.isPortRequired; @@ -529,6 +530,8 @@ 
private void setOperationsParameters(Element capabilities) { */ private void populateTypeNameAndOutputSchema(Element op) { Map typenames = _schemaManager.getHmSchemasTypenames(); + List outputSchemas = _schemaManager.getOutputSchemas().values().stream().sorted().collect(Collectors.toList()); + List operations = op.getChildren("Parameter", Csw.NAMESPACE_OWS); for (Element operation : operations) { if ("typeNames".equals(operation.getAttributeValue("name"))) { @@ -541,12 +544,10 @@ private void populateTypeNameAndOutputSchema(Element op) { .setText(typename)); } } else if ("outputSchema".equals(operation.getAttributeValue("name"))) { - for (Map.Entry entry : typenames.entrySet()) { - Namespace ns = entry.getValue(); - operation.addNamespaceDeclaration(ns); + outputSchemas.forEach(uri -> operation.addContent(new Element("Value", Csw.NAMESPACE_OWS) - .setText(ns.getURI())); - } + .setText(uri)) + ); } } } diff --git a/csw-server/src/main/java/org/fao/geonet/csw/common/OutputSchema.java b/csw-server/src/main/java/org/fao/geonet/csw/common/OutputSchema.java index 9b156b715410..c6d65519c47b 100644 --- a/csw-server/src/main/java/org/fao/geonet/csw/common/OutputSchema.java +++ b/csw-server/src/main/java/org/fao/geonet/csw/common/OutputSchema.java @@ -78,16 +78,16 @@ public static String parse(String schema, SchemaManager schemaManager) throws In if (schema.equals("csw:IsoRecord")) return "gmd"; if (schema.equals("own")) return "own"; - Map typenames = schemaManager.getHmSchemasTypenames(); - for (Map.Entry entry : typenames.entrySet()) { - Namespace ns = entry.getValue(); - if (schema.equals(ns.getURI())) { - return ns.getPrefix(); + Map typenames = schemaManager.getOutputSchemas(); + for (Map.Entry entry : typenames.entrySet()) { + String ns = entry.getValue(); + if (schema.equals(ns)) { + return entry.getKey(); } } throw new InvalidParameterValueEx("outputSchema", - String.format("'%s' schema is not valid. 
Supported values are %s", + String.format("'%s' output schema is not valid. Supported values are %s", schema, schemaManager.getListOfOutputSchemaURI().toString())); } diff --git a/csw-server/src/main/java/org/fao/geonet/csw/common/util/Xml.java b/csw-server/src/main/java/org/fao/geonet/csw/common/util/Xml.java index 51bdeffe7937..c5ab2c8053a3 100644 --- a/csw-server/src/main/java/org/fao/geonet/csw/common/util/Xml.java +++ b/csw-server/src/main/java/org/fao/geonet/csw/common/util/Xml.java @@ -125,22 +125,24 @@ public static Element applyElementSetName(ServiceContext context, SchemaManager ResultType resultType, String id, String displayLanguage) throws InvalidParameterValueEx { Path schemaDir = schemaManager.getSchemaCSWPresentDir(schema); Path styleSheet = schemaDir.resolve(outputSchema + "-" + elementSetName + ".xsl"); + Path styleSheetWithoutElementSet = schemaDir.resolve(outputSchema + ".xsl"); - if (!Files.exists(styleSheet)) { + if (!Files.exists(styleSheet) && !Files.exists(styleSheetWithoutElementSet)) { throw new InvalidParameterValueEx("OutputSchema", String.format( - "OutputSchema '%s' not supported for metadata with '%s' (%s).\nCorresponding XSL transformation '%s' does not exist for this schema.\nThe record will not be returned in response.", - outputSchema, id, schema, styleSheet.getFileName())); + "OutputSchema '%s' not supported for metadata with '%s' (%s).\nCorresponding XSL transformation '%s' (or '%s') does not exist for this schema.\nThe record will not be returned in response.", + outputSchema, id, schema, styleSheet.getFileName(), styleSheetWithoutElementSet.getFileName())); } else { Map params = new HashMap<>(); params.put("lang", displayLanguage); + Path xslFile = Files.exists(styleSheet) ? 
styleSheet : styleSheetWithoutElementSet; try { - result = org.fao.geonet.utils.Xml.transform(result, styleSheet, params); + result = org.fao.geonet.utils.Xml.transform(result, xslFile, params); } catch (Exception e) { String msg = String.format( - "Error occured while transforming metadata with id '%s' using '%s'.", - id, styleSheet.getFileName()); + "Error occurred while transforming metadata with id '%s' using '%s'.", + id, xslFile.getFileName()); context.error(msg); context.error(" (C) StackTrace:\n" + Util.getStackTrace(e)); throw new InvalidParameterValueEx("OutputSchema", msg); diff --git a/csw-server/src/main/java/org/fao/geonet/kernel/csw/services/getrecords/SearchController.java b/csw-server/src/main/java/org/fao/geonet/kernel/csw/services/getrecords/SearchController.java index 564280b4ce2f..7abea41d66ba 100644 --- a/csw-server/src/main/java/org/fao/geonet/kernel/csw/services/getrecords/SearchController.java +++ b/csw-server/src/main/java/org/fao/geonet/kernel/csw/services/getrecords/SearchController.java @@ -111,37 +111,9 @@ public Element retrieveMetadata(ServiceContext context, String id, ElementSetNam boolean checkMetadataAvailableInPortal) throws CatalogException { if (checkMetadataAvailableInPortal) { - // Check if the metadata is available in the portal - String elasticSearchQuery = "{ \"bool\": {\n" + - " \"must\": [\n" + - " {" + - " \"term\": {" + - " \"id\": {" + - " \"value\": \"%s\"" + - " }" + - " }" + - " } " + - " ]\n" + - " ,\"filter\":{\"query_string\":{\"query\":\"%s\"}}}}"; - - JsonNode esJsonQuery; - - try { - String filterQueryString = esFilterBuilder.build(context, "metadata", false, node); - String jsonQuery = String.format(elasticSearchQuery, id, filterQueryString); - - ObjectMapper objectMapper = new ObjectMapper(); - esJsonQuery = objectMapper.readTree(jsonQuery); - - TotalHits total = searchManager.query(esJsonQuery, new HashSet<>(), 0, 0).hits().total(); - - if (Optional.ofNullable(total).map(TotalHits::value).orElse(0L) == 0) 
{ - return null; - } - } catch (Exception e) { - throw new RuntimeException(e); + if (!metadataUtils.isMetadataAvailableInPortal(Integer.parseInt(id))) { + return null; } - } try { diff --git a/csw-server/src/main/java/org/fao/geonet/kernel/csw/services/getrecords/es/CswFilter2Es.java b/csw-server/src/main/java/org/fao/geonet/kernel/csw/services/getrecords/es/CswFilter2Es.java index 2122a8c4a10d..d77bfd28818b 100644 --- a/csw-server/src/main/java/org/fao/geonet/kernel/csw/services/getrecords/es/CswFilter2Es.java +++ b/csw-server/src/main/java/org/fao/geonet/kernel/csw/services/getrecords/es/CswFilter2Es.java @@ -29,6 +29,7 @@ import org.apache.commons.text.StringEscapeUtils; import org.fao.geonet.constants.Geonet; import org.fao.geonet.kernel.csw.services.getrecords.IFieldMapper; +import org.fao.geonet.utils.DateUtil; import org.fao.geonet.utils.Log; import org.geotools.api.filter.*; import org.geotools.api.filter.expression.Expression; @@ -338,7 +339,11 @@ public Object visitRange(BinaryComparisonOperator filter, String operator, Objec String dataPropertyValue = stack.pop(); String dataPropertyName = stack.pop(); - if (!NumberUtils.isNumber(dataPropertyValue)) { + boolean isDate = (DateUtil.parseBasicOrFullDateTime(dataPropertyValue) != null); + + if (isDate) { + dataPropertyValue = CswFilter2Es.quoteString(dataPropertyValue); + } else if (!NumberUtils.isNumber(dataPropertyValue)) { dataPropertyValue = StringEscapeUtils.escapeJson(CswFilter2Es.quoteString(dataPropertyValue)); } diff --git a/csw-server/src/test/java/org/fao/geonet/kernel/csw/services/getrecords/es/CswFilter2EsTest.java b/csw-server/src/test/java/org/fao/geonet/kernel/csw/services/getrecords/es/CswFilter2EsTest.java index f8a31dabbfbd..987706705637 100644 --- a/csw-server/src/test/java/org/fao/geonet/kernel/csw/services/getrecords/es/CswFilter2EsTest.java +++ b/csw-server/src/test/java/org/fao/geonet/kernel/csw/services/getrecords/es/CswFilter2EsTest.java @@ -381,4 +381,27 @@ void 
assertFilterEquals(JsonNode expected, String actual, String filterSpecVersi assertEquals(expected, MAPPER.readTree(new StringReader(result))); } + + + @Test + void testPropertyIsGreaterThanDateValue() throws IOException { + + // INPUT: + final String input = + " \n" + + " \n" + + " Modified\n" + + " 1910-02-05\n" + + " \n" + + " "; + + // EXPECTED: + final ObjectNode expected = EsJsonHelper.boolbdr(). // + must(array(range("Modified", "gt", "1910-02-05"))). // + filter(queryStringPart()). // + bld(); + + + assertFilterEquals(expected, input); + } } diff --git a/csw-server/src/test/java/org/fao/geonet/kernel/csw/services/getrecords/es/EsJsonHelper.java b/csw-server/src/test/java/org/fao/geonet/kernel/csw/services/getrecords/es/EsJsonHelper.java index 629247c88256..f727509808a6 100644 --- a/csw-server/src/test/java/org/fao/geonet/kernel/csw/services/getrecords/es/EsJsonHelper.java +++ b/csw-server/src/test/java/org/fao/geonet/kernel/csw/services/getrecords/es/EsJsonHelper.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -62,6 +62,35 @@ public static ObjectNode match(String property, String matchString) { return outer; } + + /** + * Returns a structure like + * + *

+     *  { "range":
+     *    {
+     *      "gt": "value"
+     *    }
+     * 
+ * + * @param property + * @param operator + * @param matchString + * @return + */ + public static ObjectNode range(String property, String operator, String matchString) { + final ObjectNode rangeOperatorObject = MAPPER.createObjectNode(); + rangeOperatorObject.put(operator, matchString); + + final ObjectNode rangeObject = MAPPER.createObjectNode(); + rangeObject.put(property, rangeOperatorObject); + + final ObjectNode outer = MAPPER.createObjectNode(); + outer.set("range", rangeObject); + return outer; + } + + private static ArrayNode bound(double x, double y) { final ArrayNode bound = MAPPER.createArrayNode(); bound.add(x); diff --git a/datastorages/cmis/pom.xml b/datastorages/cmis/pom.xml index 18533eb6b727..dffd357d170b 100644 --- a/datastorages/cmis/pom.xml +++ b/datastorages/cmis/pom.xml @@ -28,7 +28,7 @@ gn-datastorages org.geonetwork-opensource.datastorage - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/datastorages/cmis/src/main/java/org/fao/geonet/api/records/attachments/CMISStore.java b/datastorages/cmis/src/main/java/org/fao/geonet/api/records/attachments/CMISStore.java index de258b3a7115..5957cc4c3c82 100644 --- a/datastorages/cmis/src/main/java/org/fao/geonet/api/records/attachments/CMISStore.java +++ b/datastorages/cmis/src/main/java/org/fao/geonet/api/records/attachments/CMISStore.java @@ -1,6 +1,6 @@ /* * ============================================================================= - * === Copyright (C) 2001-2016 Food and Agriculture Organization of the + * === Copyright (C) 2001-2024 Food and Agriculture Organization of the * === United Nations (FAO-UN), United Nations World Food Programme (WFP) * === and United Nations Environment Programme (UNEP) * === @@ -190,7 +190,20 @@ public ResourceHolder getResource(final ServiceContext context, final String met @Override public ResourceHolder getResourceInternal(String metadataUuid, MetadataResourceVisibility visibility, String resourceId, Boolean approved) throws Exception { - throw new 
UnsupportedOperationException("CMISStore does not support getResourceInternal."); + int metadataId = getAndCheckMetadataId(metadataUuid, approved); + checkResourceId(resourceId); + + try { + ServiceContext context = ServiceContext.get(); + final CmisObject object = cmisConfiguration.getClient().getObjectByPath(getKey(context, metadataUuid, metadataId, visibility, resourceId)); + return new ResourceHolderImpl(object, createResourceDescription(context, metadataUuid, visibility, resourceId, + (Document) object, metadataId, approved)); + } catch (CmisObjectNotFoundException e) { + throw new ResourceNotFoundException( + String.format("Metadata resource '%s' not found for metadata '%s'", resourceId, metadataUuid)) + .withMessageKey("exception.resourceNotFound.resource", new String[]{resourceId}) + .withDescriptionKey("exception.resourceNotFound.resource.description", new String[]{resourceId, metadataUuid}); + } } protected String getKey(final ServiceContext context, String metadataUuid, int metadataId, MetadataResourceVisibility visibility, String resourceId) { @@ -424,13 +437,9 @@ public String delResource(final ServiceContext context, final String metadataUui for (MetadataResourceVisibility visibility : MetadataResourceVisibility.values()) { if (tryDelResource(context, metadataUuid, metadataId, visibility, resourceId)) { - Log.info(Geonet.RESOURCES, - String.format("MetadataResource '%s' removed.", resourceId)); - return String.format("MetadataResource '%s' removed.", resourceId); + return String.format("Metadata resource '%s' removed.", resourceId); } } - Log.info(Geonet.RESOURCES, - String.format("Unable to remove resource '%s'.", resourceId)); return String.format("Unable to remove resource '%s'.", resourceId); } @@ -439,12 +448,8 @@ public String delResource(final ServiceContext context, final String metadataUui final String resourceId, Boolean approved) throws Exception { int metadataId = canEdit(context, metadataUuid, approved); if (tryDelResource(context, 
metadataUuid, metadataId, visibility, resourceId)) { - Log.info(Geonet.RESOURCES, - String.format("MetadataResource '%s' removed.", resourceId)); - return String.format("MetadataResource '%s' removed.", resourceId); + return String.format("Metadata resource '%s' removed.", resourceId); } - Log.info(Geonet.RESOURCES, - String.format("Unable to remove resource '%s'.", resourceId)); return String.format("Unable to remove resource '%s'.", resourceId); } @@ -459,6 +464,8 @@ protected boolean tryDelResource(final ServiceContext context, final String meta try { final CmisObject object = cmisConfiguration.getClient().getObjectByPath(key, oc); object.delete(); + Log.info(Geonet.RESOURCES, + String.format("Resource '%s' removed for metadata %d (%s).", resourceId, metadataId, metadataUuid)); if (object instanceof Folder) { cmisUtils.invalidateFolderCacheItem(key); } @@ -467,6 +474,8 @@ protected boolean tryDelResource(final ServiceContext context, final String meta //CmisPermissionDeniedException when user does not have permissions. //CmisConstraintException when there is a lock on the file from a checkout. } catch (CmisObjectNotFoundException | CmisPermissionDeniedException | CmisConstraintException e) { + Log.info(Geonet.RESOURCES, + String.format("Unable to remove resource '%s' for metadata %d (%s). %s", resourceId, metadataId, metadataUuid, e.getMessage())); return false; } } @@ -627,8 +636,10 @@ private GeonetworkDataDirectory getDataDirectory(ServiceContext context) { /** * get external resource management for the supplied resource. * Replace the following + * {objectId} type:visibility:metadataId:version:resourceId in base64 encoding * {id} resource id - * {type:folder:document} // If the type is folder then type "folder" will be displayed else if document then "document" will be displayed + * {type:folder:document} // Custom return type based on type. 
If the type is folder then type "folder" will be displayed else if document then "document" will be displayed + * {type} // If the type is folder then type "folder" will be displayed else if document then "document" will be displayed * {uuid} metadatauuid * {metadataid} metadataid * {visibility} visibility @@ -657,16 +668,27 @@ protected MetadataResourceExternalManagementProperties getMetadataResourceExtern ) { String metadataResourceExternalManagementPropertiesUrl = cmisConfiguration.getExternalResourceManagementUrl(); if (!StringUtils.isEmpty(metadataResourceExternalManagementPropertiesUrl)) { + // {objectid} objectId // It will be the type:visibility:metadataId:version:resourceId in base64 + // i.e. folder::100::100 # Folder in resource 100 + // i.e. document:public:100:v1:sample.jpg # public document 100 version v1 name sample.jpg + if (metadataResourceExternalManagementPropertiesUrl.contains("{objectid}")) { + metadataResourceExternalManagementPropertiesUrl = metadataResourceExternalManagementPropertiesUrl.replaceAll("(\\{objectid\\})", + getResourceManagementExternalPropertiesObjectId((type == null ? "document" : (type instanceof Folder ? "folder" : "document")), visibility, metadataId, version, resourceId)); + } // {id} id if (metadataResourceExternalManagementPropertiesUrl.contains("{id}")) { metadataResourceExternalManagementPropertiesUrl = metadataResourceExternalManagementPropertiesUrl.replaceAll("(\\{id\\})", (resourceId==null?"":resourceId)); } - // {type:folder:document} // If the type is folder then type "folder" will be displayed else if document then "document" will be displayed + // {type:folder:document} // Custom return type based on type. 
If the type is folder then type "folder" will be displayed else if document then "document" will be displayed if (metadataResourceExternalManagementPropertiesUrl.contains("{type:")) { metadataResourceExternalManagementPropertiesUrl = metadataResourceExternalManagementPropertiesUrl.replaceAll("\\{type:([a-zA-Z0-9]*?):([a-zA-Z0-9]*?)\\}", (type==null?"":(type instanceof Folder?"$1":"$2"))); } - + // {type} // If the type is folder then type "folder" will be displayed else if document then "document" will be displayed + if (metadataResourceExternalManagementPropertiesUrl.contains("{type}")) { + metadataResourceExternalManagementPropertiesUrl = metadataResourceExternalManagementPropertiesUrl.replaceAll("(\\{type\\})", + (type == null ? "document" : (type instanceof Folder ? "folder" : "document"))); + } // {uuid} metadatauuid if (metadataResourceExternalManagementPropertiesUrl.contains("{uuid}")) { metadataResourceExternalManagementPropertiesUrl = metadataResourceExternalManagementPropertiesUrl.replaceAll("(\\{uuid\\})", (metadataUuid==null?"":metadataUuid)); @@ -757,14 +779,14 @@ public String toString() { } protected static class ResourceHolderImpl implements ResourceHolder { - private CmisObject cmisObject; + private final CmisObject cmisObject; private Path tempFolderPath; private Path path; private final MetadataResource metadataResource; public ResourceHolderImpl(final CmisObject cmisObject, MetadataResource metadataResource) throws IOException { // Preserve filename by putting the files into a temporary folder and using the same filename. 
- tempFolderPath = Files.createTempDirectory("gn-meta-res-" + String.valueOf(metadataResource.getMetadataId() + "-")); + tempFolderPath = Files.createTempDirectory("gn-meta-res-" + metadataResource.getMetadataId() + "-"); tempFolderPath.toFile().deleteOnExit(); path = tempFolderPath.resolve(getFilename(cmisObject.getName())); this.metadataResource = metadataResource; @@ -795,11 +817,5 @@ public void close() throws IOException { path=null; tempFolderPath = null; } - - @Override - protected void finalize() throws Throwable { - close(); - super.finalize(); - } } } diff --git a/datastorages/cmis/src/main/java/org/fao/geonet/resources/CMISConfiguration.java b/datastorages/cmis/src/main/java/org/fao/geonet/resources/CMISConfiguration.java index 257ef3246d6b..87b76ec0821b 100644 --- a/datastorages/cmis/src/main/java/org/fao/geonet/resources/CMISConfiguration.java +++ b/datastorages/cmis/src/main/java/org/fao/geonet/resources/CMISConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -59,26 +59,28 @@ public class CMISConfiguration { private Session client = null; - public final static Integer CMIS_MAX_ITEMS_PER_PAGE = 1000; - public final static String CMIS_FOLDER_DELIMITER = "/"; // Specs indicate that "/" is the folder delimiter/separator - not sure if other delimiter can be used?. 
- public final static String CMIS_SECONDARY_PROPERTY_SEPARATOR = "->"; - private final String CMIS_DEFAULT_WEBSERVICES_ACL_SERVICE = "/services/ACLService?wsdl"; - private final String CMIS_DEFAULT_WEBSERVICES_DISCOVERY_SERVICE = "/services/DiscoveryService?wsdl"; - private final String CMIS_DEFAULT_WEBSERVICES_MULTIFILING_SERVICE = "/services/MultiFilingService?wsdl"; - private final String CMIS_DEFAULT_WEBSERVICES_NAVIGATION_SERVICE = "/services/NavigationService?wsdl"; - private final String CMIS_DEFAULT_WEBSERVICES_OBJECT_SERVICE = "/services/ObjectService?wsdl"; - private final String CMIS_DEFAULT_WEBSERVICES_POLICY_SERVICE = "/services/PolicyService?wsdl"; - private final String CMIS_DEFAULT_WEBSERVICES_RELATIONSHIP_SERVICE = "/services/RelationshipService?wsdl"; - private final String CMIS_DEFAULT_WEBSERVICES_REPOSITORY_SERVICE = "/services/RepositoryService?wsdl"; - private final String CMIS_DEFAULT_WEBSERVICES_VERSIONING_SERVICE = "/services/VersioningService?wsdl"; - private final String CMIS_DEFAULT_WEBSERVICES_BASE_URL_SERVICE = "/cmis"; - private final String CMIS_DEFAULT_BROWSER_URL_SERVICE = "/browser"; - private final String CMIS_DEFAULT_ATOMPUB_URL_SERVICE = "/atom"; - - private final String CMIS_DEFAULT_EXTERNAL_RESOURCE_MANAGEMENT_WINDOW_PARAMETERS = "toolbar=0,width=600,height=600"; - private final Boolean CMIS_DEFAULT_EXTERNAL_RESOURCE_MANAGEMENT_MODAL_ENABLED = true; - private final Boolean CMIS_DEFAULT_EXTERNAL_RESOURCE_MANAGEMENT_FOLDER_ENABLED = true; - private final Boolean CMIS_DEFAULT_VERSIONING_ENABLED = false; + // DFO change to 100. Due to bug with open text cmis where if max is set to 1000, it will return 100 but if it is set to 100 it will return all records. 
+ // https://dev.azure.com/foc-poc/EDH-CDE/_workitems/edit/95878 + public static final Integer CMIS_MAX_ITEMS_PER_PAGE = 100; + public static final String CMIS_FOLDER_DELIMITER = "/"; // Specs indicate that "/" is the folder delimiter/separator - not sure if other delimiter can be used?. + public static final String CMIS_SECONDARY_PROPERTY_SEPARATOR = "->"; + private static final String CMIS_DEFAULT_WEBSERVICES_ACL_SERVICE = "/services/ACLService?wsdl"; + private static final String CMIS_DEFAULT_WEBSERVICES_DISCOVERY_SERVICE = "/services/DiscoveryService?wsdl"; + private static final String CMIS_DEFAULT_WEBSERVICES_MULTIFILING_SERVICE = "/services/MultiFilingService?wsdl"; + private static final String CMIS_DEFAULT_WEBSERVICES_NAVIGATION_SERVICE = "/services/NavigationService?wsdl"; + private static final String CMIS_DEFAULT_WEBSERVICES_OBJECT_SERVICE = "/services/ObjectService?wsdl"; + private static final String CMIS_DEFAULT_WEBSERVICES_POLICY_SERVICE = "/services/PolicyService?wsdl"; + private static final String CMIS_DEFAULT_WEBSERVICES_RELATIONSHIP_SERVICE = "/services/RelationshipService?wsdl"; + private static final String CMIS_DEFAULT_WEBSERVICES_REPOSITORY_SERVICE = "/services/RepositoryService?wsdl"; + private static final String CMIS_DEFAULT_WEBSERVICES_VERSIONING_SERVICE = "/services/VersioningService?wsdl"; + private static final String CMIS_DEFAULT_WEBSERVICES_BASE_URL_SERVICE = "/cmis"; + private static final String CMIS_DEFAULT_BROWSER_URL_SERVICE = "/browser"; + private static final String CMIS_DEFAULT_ATOMPUB_URL_SERVICE = "/atom"; + + private static final String CMIS_DEFAULT_EXTERNAL_RESOURCE_MANAGEMENT_WINDOW_PARAMETERS = "toolbar=0,width=600,height=600"; + private static final Boolean CMIS_DEFAULT_EXTERNAL_RESOURCE_MANAGEMENT_MODAL_ENABLED = true; + private static final Boolean CMIS_DEFAULT_EXTERNAL_RESOURCE_MANAGEMENT_FOLDER_ENABLED = true; + private static final Boolean CMIS_DEFAULT_VERSIONING_ENABLED = false; private String servicesBaseUrl; 
private String bindingType; @@ -111,7 +113,6 @@ public class CMISConfiguration { * Property name for validation status that is expected to be an integer with values of null, 0, 1, 2 * (See MetadataResourceExternalManagementProperties.ValidationStatus for code meaning) * Property name follows the same format as cmisMetadataUUIDPropertyName - * * If null then validation status will default to UNKNOWN. */ private String externalResourceManagementValidationStatusPropertyName; @@ -505,7 +506,6 @@ public void setExternalResourceManagementValidationStatusPropertyName(String ext String.format("Invalid format for property name %s property will not be used", externalResourceManagementValidationStatusPropertyName)); this.externalResourceManagementValidationStatusPropertyName = null; this.externalResourceManagementValidationStatusSecondaryProperty = false; - return; } else { this.externalResourceManagementValidationStatusSecondaryProperty = true; } @@ -514,7 +514,7 @@ public void setExternalResourceManagementValidationStatusPropertyName(String ext public MetadataResourceExternalManagementProperties.ValidationStatus getValidationStatusDefaultValue() { // We only need to set the default if there is a status property supplied, and it is not already set - if (this.defaultStatus == null && !org.springframework.util.StringUtils.isEmpty(getExternalResourceManagementValidationStatusPropertyName())) { + if (this.defaultStatus == null && org.springframework.util.StringUtils.hasLength(getExternalResourceManagementValidationStatusPropertyName())) { if (getExternalResourceManagementValidationStatusDefaultValue() != null) { // If a default property name does exist then use it this.defaultStatus = MetadataResourceExternalManagementProperties.ValidationStatus.valueOf(getExternalResourceManagementValidationStatusDefaultValue()); @@ -536,9 +536,8 @@ public void init() { } // default factory implementation - Map parameters = new HashMap(); + Map parameters = new HashMap<>(); - 
this.baseRepositoryPath = baseRepositoryPath; if (this.baseRepositoryPath == null) { this.baseRepositoryPath = ""; } @@ -609,7 +608,7 @@ public void init() { } } } else { - // Try to find the repository name for the id that we have specified.. + // Try to find the repository name for the id that we have specified. try { for (Repository repository : factory.getRepositories(parameters)) { if (repository.getId().equalsIgnoreCase(this.repositoryId)) { @@ -633,7 +632,7 @@ public void init() { repositoryUrl + "' using product '" + client.getRepositoryInfo().getProductName() + "' version '" + client.getRepositoryInfo().getProductVersion() + "'."); - // Check if we can parse the secondary parameters from human readable to secondary ids. + // Check if we can parse the secondary parameters from human-readable to secondary ids. parsedCmisMetadataUUIDPropertyName = parseSecondaryProperty(client, cmisMetadataUUIDPropertyName); parsedExternalResourceManagementValidationStatusPropertyName = parseSecondaryProperty(client, externalResourceManagementValidationStatusPropertyName); @@ -743,7 +742,7 @@ public boolean existExternalResourceManagementValidationStatusSecondaryProperty( } /** - * Generte a full url based on the supplied entered serviceurl and the default. + * Generate a full url based on the supplied entered serviceUrl and the default. * * @param baseUrl Base url * @param serviceUrl Supplied service url (This could start with / or http. 
If it starts with http then ignore baseUrl) diff --git a/datastorages/cmis/src/main/resources/config-store/config-cmis-overrides.properties b/datastorages/cmis/src/main/resources/config-store/config-cmis-overrides.properties index f0a62c1920af..4c154639ca52 100644 --- a/datastorages/cmis/src/main/resources/config-store/config-cmis-overrides.properties +++ b/datastorages/cmis/src/main/resources/config-store/config-cmis-overrides.properties @@ -11,8 +11,8 @@ cmis.external.resource.management.window.parameters=${CMIS_EXTERNAL_RESOURCE_MAN cmis.external.resource.management.modal.enabled=${CMIS_EXTERNAL_RESOURCE_MANAGEMENT_MODAL_ENABLED:#{null}} cmis.external.resource.management.folder.enabled=${CMIS_EXTERNAL_RESOURCE_MANAGEMENT_FOLDER_ENABLED:#{null}} cmis.external.resource.management.folder.root=${CMIS_EXTERNAL_RESOURCE_MANAGEMENT_FOLDER_ROOT:#{null}} -cmis.external.resource.status.property.name=${CMIS_EXTERNAL_RESOURCE_STATUS_PROPERTY_NAME:#{null}} -cmis.external.resource.management.status.default.value=${CMIS_EXTERNAL_RESOURCE_MANAGEMENT_STATUS_DEFAULT_VALUE:#{null}} +cmis.external.resource.management.validation.status.property.name=${CMIS_EXTERNAL_RESOURCE_MANAGEMENT_VALIDATION_STATUS_PROPERTY_NAME:#{null}} +cmis.external.resource.management.validation.status.default.value=${CMIS_EXTERNAL_RESOURCE_MANAGEMENT_VALIDATION_STATUS_DEFAULT_VALUE:#{null}} cmis.versioning.enabled=${CMIS_VERSIONING_ENABLED:#{null}} cmis.versioning.state=#{'${CMIS_VERSIONING_STATE:MAJOR}'.toUpperCase()} diff --git a/datastorages/cmis/src/main/resources/config-store/config-cmis.xml b/datastorages/cmis/src/main/resources/config-store/config-cmis.xml index 76abe73572c4..1c302788b5c9 100644 --- a/datastorages/cmis/src/main/resources/config-store/config-cmis.xml +++ b/datastorages/cmis/src/main/resources/config-store/config-cmis.xml @@ -1,6 +1,6 @@ + + + + geonetwork org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/datastorages/s3/pom.xml b/datastorages/s3/pom.xml 
index 861af49bf91c..7c348f271e29 100644 --- a/datastorages/s3/pom.xml +++ b/datastorages/s3/pom.xml @@ -28,7 +28,7 @@ gn-datastorages org.geonetwork-opensource.datastorage - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/datastorages/s3/src/main/java/org/fao/geonet/api/records/attachments/S3Store.java b/datastorages/s3/src/main/java/org/fao/geonet/api/records/attachments/S3Store.java index 27df07a45325..2114f8f5d204 100644 --- a/datastorages/s3/src/main/java/org/fao/geonet/api/records/attachments/S3Store.java +++ b/datastorages/s3/src/main/java/org/fao/geonet/api/records/attachments/S3Store.java @@ -193,9 +193,13 @@ public String delResources(final ServiceContext context, final int metadataId) t for (S3ObjectSummary object: objects.getObjectSummaries()) { s3.getClient().deleteObject(s3.getBucket(), object.getKey()); } - return String.format("Metadata '%s' directory removed.", metadataId); + Log.info(Geonet.RESOURCES, + String.format("Metadata '%d' directory removed.", metadataId)); + return String.format("Metadata '%d' directory removed.", metadataId); } catch (AmazonServiceException e) { - return String.format("Unable to remove metadata '%s' directory.", metadataId); + Log.warning(Geonet.RESOURCES, + String.format("Unable to remove metadata '%d' directory. 
%s", metadataId, e.getMessage())); + return String.format("Unable to remove metadata '%d' directory.", metadataId); } } @@ -206,7 +210,7 @@ public String delResource(final ServiceContext context, final String metadataUui for (MetadataResourceVisibility visibility: MetadataResourceVisibility.values()) { if (tryDelResource(metadataUuid, metadataId, visibility, resourceId)) { - return String.format("MetadataResource '%s' removed.", resourceId); + return String.format("Metadata resource '%s' removed.", resourceId); } } return String.format("Unable to remove resource '%s'.", resourceId); @@ -217,7 +221,7 @@ public String delResource(final ServiceContext context, final String metadataUui final String resourceId, Boolean approved) throws Exception { int metadataId = canEdit(context, metadataUuid, approved); if (tryDelResource(metadataUuid, metadataId, visibility, resourceId)) { - return String.format("MetadataResource '%s' removed.", resourceId); + return String.format("Metadata resource '%s' removed.", resourceId); } return String.format("Unable to remove resource '%s'.", resourceId); } @@ -227,8 +231,12 @@ private boolean tryDelResource(final String metadataUuid, final int metadataId, final String key = getKey(metadataUuid, metadataId, visibility, resourceId); if (s3.getClient().doesObjectExist(s3.getBucket(), key)) { s3.getClient().deleteObject(s3.getBucket(), key); + Log.info(Geonet.RESOURCES, + String.format("Resource '%s' removed for metadata %d (%s).", resourceId, metadataId, metadataUuid)); return true; } + Log.info(Geonet.RESOURCES, + String.format("Unable to remove resource '%s' for metadata %d (%s).", resourceId, metadataId, metadataUuid)); return false; } @@ -287,11 +295,5 @@ public void close() throws IOException { path = null; } } - - @Override - protected void finalize() throws Throwable { - close(); - super.finalize(); - } } } diff --git a/docs/changes/changes4.4.6-0.txt b/docs/changes/changes4.4.6-0.txt new file mode 100644 index 
000000000000..636b5eb919c9 --- /dev/null +++ b/docs/changes/changes4.4.6-0.txt @@ -0,0 +1,133 @@ +================================================================================ +=== +=== GeoNetwork 4.4.6-SNAPSHOT: List of changes +=== +================================================================================ +- Release / 4.4.6 / Changelog. (#8462) +- update PSC details in user guide +- Delete date not being copied causing duplicate (#8454) +- Standard / DCAT (and profiles) export (#7600) +- Update home page "browse by" to display facet as label if there is only one (#8426) +- Avoid duplicate validation message when trying to register a user with existing email +- Add bootstrap datepicker language files for supported UI languages +- Add better logging when resources are deleted to make it clear what metadata record the resource was deleted from. (#8430) +- Record view / More like this / Add filter option. +- Fix saving UI settings without changes +- Harvester / Simple URL / Fix multiple URL alignement +- Elasticsearch / Update to 8.14.3. (#8337) +- Remove empty filename condition (#8436) +- Elasticsearch / API / Allow ndjson for _msearch endpoint +- Improve administrator guide UI configuration documentation +- Harvester / Simple URL / ODS improvement +- Editor / Geopublication / Misc fix. (#8092) +- WebDav harvester / Add support for XSLT filter process (#8243) +- Editor / Associated resource / Remote document / Add content type +- Fixed description for getIdentifiers in IdentifierApi (#8422) +- Formatter / Datacite / Default resource type (#8407) +- Remove spaces from the list of schema list of metadata import restrictions so that "iso19115-3.2018, dublin-core" will also work. (#8408) +- Thesaurus / OWL format / Mobility theme hierarchy (#8393) +- Record view / Does not display thesaurus block if no keywords. 
+- Map / Save your map improvements (#8155) +- Editor / Table mode / Fix field using directive (#8261) +- Thesaurus / Add inScheme property in concept of local thesaurus +- Standard / ISO19115-3 / Only search for associated record with UUID +- CSW / Fix parsing date values for filters. Fixes #8034 +- Javascript / HTML formatting fixes related to Prettier +- Update external management url Add {objectId} property in external management url (base64 unique identifier for the record) Change external management type url property {type} so that it is fixed values so that same value can be used in {objectId} CMIS Fixed property names used for validation fields to be consistent with other names. Jcloud Updgade from jcloud 2.3.0 to jcloud 2.5.0 Add support for external management named properties similar to cmis Fix bug with deleting all resources as it was failing to identify folders correctly for azure blob. +- Harvester / ISO19115-3 / Better support missing metadata date info +- Metadata indexing / ISO19139 / ISO19115-3.2018 / Escape graphic overview file name for JSON (#8412) +- Fixed spurious whitespace for gn-comma-list (#8398) +- Metadata editor / Add required indicator support to the keyword selector directive and fix its display for the field duration directive +- Fix the width of the projection switcher (#8399) +- Metadata editor / validation report improvements (#8395) +- Don't capitalize the labels for the facet filter values (#8133) +- Support multiple DOI servers (#8098) +- Thesaurus / Date improvements. (#8392) +- GeoNetwork harvester - avoid double counting of updated metadata. (#8389) +- Fix harvester execution logs added to previous logs (#8387) +- Visual and UX changes for WFS previews (#8284) +- Metadata detail page - hide history types selector when tasks (DOI) and workflow are disabled +- Fix the overlapping filter settings and the customize options (#8316) +- ISO19139 / ISO19115.3 / Index resource date fields as defined in the metadata. 
+- Fix the schema artifact name in add schema script +- Update configuring-faceted-search.md +- Aggregations / Temporal range / Avoid browser autocomplete on calendar field +- OpenAPI / Operation returning no content should not advertised a schema. +- Indexing / DCAT multilingual support (#8377) +- Xsl utility / Add a function to retrieve thesaurus title with its key (#8378) +- GIT / .gitignore +- Map viewer / WMS GetFeatureInfo support for application/json info format (#8372) +- Add build profile for MacOS ARM +- Editor / Associated resource / DOI search. (#8363) +- Standard / ISO19115-3 / Label improvement. (#8364) +- Harvester / Simple URL / ODS / Improve mapping +- Don't add file content to the exception when requesting XML documents, if the content is not XML (#8360) +- Put the image name in the `alt` attribute in the thumbnail on the metadata page. (#8290) +- CSW Harvester / Avoid increment 2 metrics for a single metadata in certain conditions (#8069) +- iso19139 - Update thumbnail add/update and remove to support index update/removal (#8348) +- publish status not refreshing fix (#8344) +- Editor / Associated resource / Avoid empty label (#8339) +- Editor / DOI search / Improve label (#8338) +- API / Improve parameter check for XSL conversion. (#8201) +- Admin / Source / Improve dirty state (#8222) +- Standard / ISO19115-3 / Formatters / ISO19139 / Ignore mcc linkage for overview (#8225) +- Fix Clipboard copy/paste on Firefox - use ES5 (#8332) +- Indexing / Draft field MUST not be an array (#8242) +- Editor / Dublin core / Fix extent coordinates (#8258) +- Workflow / update notification level based on user profile when cancelling a submission (#8264) +- INSPIRE Atom harvester / process only public datasets by resource identifier +- Special characters in the cookie causing 400 bad requests from Spring Security. Fixes #8275 +- Do not try to request clipboard permissions +- Social links in metadata page doesn't have the metadata page permalink. 
Fixes #8322 +- Repository Citation.cff metadata for DOI registration with Zenodo (#8317) +- Modify record not found message to only link to signin if user is not logged in (#8312) +- Modify GnMdViewController to set recordIdentifierRequested using the getUuid function +- harvesting CSW: changed loglevel for invalid metadata to info (#8303) +- Standard / ISO19139 / i18n / Missing french translation (#8298) +- Index / Add maintenance details. +- Record view / Improve layout of table (eg. quality measures) +- Update batch PDF export to skip working copies (#8292) +- Standard / ISO19139 / Fix removal of online source when multiple transfer options block are used. (#8281) +- Fix a problem with recaptcha not shown sometimes (#8285) +- Zoom to map popup remains active on non-map pages. (#8267) +- Use UI language for metadata selection export to CSV / PDF. Fixes #7969 (#8262) +- Fixed issue with working copy not being returned from /api/records/{metadataUuid}/formatters/{formatterId:.+} (#8269) +- Fixed issue with working copy not being returned from getRecordAS api (#8265) +- Standard / ISO19115-3 / Formatters / ISO19139 / Fix scope code (#8224) +- Standard / ISO19115-3 / Formatter / Fix namespace declaration (#8223) +- Editor / Configuration / Improve deletion in forEach section (#8244) +- Fix infinite "Please wait" message on error (#8249) +- Broadcasting error when delete record (#8212) +- ISO19115-3.2018 / Remove duplicated fields for metadata identifier and uuid in CSV export (#8238) +- Standard / ISO19139 / Formatter / Do not display extent if none available (#8229) +- Fix wrong HTML self closing tags (#8232) +- Editor / Polygon not saved (#8230) +- Add info logs to make transaction of working copy merge more traceable (#8178) +- API / Client code generation / Avoid reserved word (#8214) +- Double translation can lead to infinite stack (#8209) +- Fix canViewRecord function so that it returned the workflow record. 
(#8152) +- Automatic formatting +- Association type / Consistent labels (#8077) +- Multilingual Emails (#8044) +- Add support for multilingual thesaurus titles in the index (#8154) +- Bump actions/setup-java from 4.1.0 to 4.2.1 (#7870) +- Fix presence of duplicated geonet elements on partial metadata updates +- Fix user application feedback (#7769) +- Update SECURITY.md (#8172) +- Register user / allow to configured allowed email domains (#8186) +- docs: fix image links in change-log(version-3.8.0.md and 4.0.0-alpha.1 (#7938) +- Elasticsearch / Update to 8.14.0. +- Bump org.apache.maven.plugins:maven-dependency-plugin +- Release script improvement +- Cleaning / Remove transifex converting tools +- Indexing / Lower severity of getIndexField +- Metadata extents API - fix service for metadata with working copy - test (#8197) +- Register user / allow to select the group where the user wants to register (#8176) +- Metadata extents API - fix service for metadata with working copy +- Bump com.jayway.jsonpath:json-path from 2.4.0 to 2.9.0 in /services +- Bump org.owasp.esapi:esapi from 2.4.0.0 to 2.5.4.0 +- Bump org.postgresql:postgresql from 42.6.0 to 42.7.3 +- Bump org.xmlunit:xmlunit-core from 2.1.1 to 2.10.0 +- Bump com.google.guava:guava from 30.0-jre to 33.2.1-jre +- Update en-admin.json +- Update version to 4.4.6-SNAPSHOT \ No newline at end of file diff --git a/docs/manual/docs/administrator-guide/configuring-the-catalog/img/feedback-email.png b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/feedback-email.png new file mode 100644 index 000000000000..5d377748e868 Binary files /dev/null and b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/feedback-email.png differ diff --git a/docs/manual/docs/administrator-guide/configuring-the-catalog/img/feedback-multilingual.png b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/feedback-multilingual.png new file mode 100644 index 000000000000..87044df119df Binary files 
/dev/null and b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/feedback-multilingual.png differ diff --git a/docs/manual/docs/administrator-guide/configuring-the-catalog/img/morelikethisconfig.png b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/morelikethisconfig.png new file mode 100644 index 000000000000..bc215c549b52 Binary files /dev/null and b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/morelikethisconfig.png differ diff --git a/docs/manual/docs/administrator-guide/configuring-the-catalog/img/ui-settings-searchpage.png b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/ui-settings-searchpage.png index 06bc3bff3125..c764f5d9244f 100644 Binary files a/docs/manual/docs/administrator-guide/configuring-the-catalog/img/ui-settings-searchpage.png and b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/ui-settings-searchpage.png differ diff --git a/docs/manual/docs/administrator-guide/configuring-the-catalog/system-configuration.md b/docs/manual/docs/administrator-guide/configuring-the-catalog/system-configuration.md index d56eb454ffa8..2bec134017e6 100644 --- a/docs/manual/docs/administrator-guide/configuring-the-catalog/system-configuration.md +++ b/docs/manual/docs/administrator-guide/configuring-the-catalog/system-configuration.md @@ -58,21 +58,41 @@ JVM proxy parameters may also be required to properly set the proxy for all remo ## Feedback {#system-config-feedback} -Email may be sent by the catalog. +Email notifications are sent by the catalog. -- you are using the User Self-registration system -- you are using the metadata status workflow (See [Life cycle](../../user-guide/workflow/life-cycle.md)) -- a file uploaded with a metadata record is downloaded and notify privilege is selected +- When using the User Self-registration system. +- When using the metadata status workflow (See [Life cycle](../../user-guide/workflow/life-cycle.md)). 
+- When a file uploaded with a metadata record is downloaded and notify privilege is selected. This section configure the mail server to use. - **Email** This is the administrator's email address used to send feedback. - **SMTP host** The mail server name or IP address to use for sending emails. - **SMTP port** The SMTP port. -- **Use SSL** Enable SSL mode +- **Use SSL** Enable Secure Sockets Layer (SSL) mode - **User name** Username if connection is required on the SMTP server - **Password** Username password if connection is required on the SMTP server +- **Use TLS** Enable use of Transport Layer Security (TLS) +![](img/feedback-email.png) + +Additional settings are available to respect user language preference: + +- **Language for system generated emails** The UI language will be used when sending notification emails by default. To override this behaviour and generate a multi-lingual notification email, list the languages to be used. + +- **Translation follows text** Provide an introduction phrase indicating a multi-lingual notification follows. + +![](img/feedback-multilingual.png) + +!!! note + + Email notifications for metadata publication are sent as `text/html` messages, this can be changed using ```WEB-INF/config.properties``` configuration: + + ```properties + # Configure the metadata publication notification mails to be sent as HTML (true) or TEXT (false) + metadata.publicationmail.format.html=true + ``` + ## Metadata search results Configuration settings in this group determine what the limits are on user interaction with the search results. 
diff --git a/docs/manual/docs/administrator-guide/configuring-the-catalog/user-interface-configuration.md b/docs/manual/docs/administrator-guide/configuring-the-catalog/user-interface-configuration.md index 1734724599f1..9f3072a76579 100644 --- a/docs/manual/docs/administrator-guide/configuring-the-catalog/user-interface-configuration.md +++ b/docs/manual/docs/administrator-guide/configuring-the-catalog/user-interface-configuration.md @@ -18,7 +18,7 @@ To add a new configuration, such as for a sub-portal (see [Portal configuration] Since the settings form is a long form, the `save` button is repeated at the base of the page. In either case, all settings are saved. -- **Filter settings**: This search box can be used to filter settings in the form, for example searching for "social" will show only the settings related to the Social Bar. +- **Filter settings**: This search box can be used to filter settings in the form, for example searching for "social" will show only the settings related to the Social bar. ![](img/ui-settings-filter.png) @@ -31,7 +31,7 @@ To add a new configuration, such as for a sub-portal (see [Portal configuration] ## Footer {#user-interface-config-footer} - **Footer**: Select this checkbox to determine whether the GeoNetwork footer is shown. If not set, no footer will be visible. -- **Social bar**: Select this check box to show the social bar (links to twitter, facebook, linkedin etc) in the footer. +- **Social bar**: Select this check box to show the social media bar in the footer. ![](img/ui-settings-footer.png) @@ -60,22 +60,23 @@ To add a new configuration, such as for a sub-portal (see [Portal configuration] - **Search application**: Select this check box to determine whether the search application is visible in the top toolbar. If not set, no link is shown. - **Application URL**: Define the URL for the search application. In the majority of cases this can be left as the default. 
- **Number of records per page**: Define the options to determine the number of records shown per page of results, and the default. -- **Type of facet**: Define the set of search facets should be visible in the search page. The default is `details` but `manager` can be used to show the facets more normally used on the editor page. -- **Default search**: Define a default filter for the search. +- **Facet configuration**: See [Configuring faceted search](../../customizing-application/configuring-faceted-search.md). The configuration is defined using JSON following the Elasticsearch API (See . ![](img/ui-settings-searchpage.png) -- **Facet field to display using tabs**: This option creates a tab for each configured facet above the search results. This can be used to further narrow down the search results. The list of facet names can be found at . For example, to include the Topic Category filter above the search results, the administrator would add `topicCat` as the facet field to display. +- **Facet field to display using tabs**: This option creates a tab for each configured facet above the search results. This can be used to further narrow down the search results. - **Filters**: Define additional search criteria added to all searches and again are used primarily for external applications and sub-portals. - -![](img/ui-settings-searchpage2.png) - - **Type of sort options**: Define the different ways by which a user can sort a set of search results. The **default sort by option** is shown below. Note that to search for example on `title` in alphabetical order it is necessary to set the order to `reverse`. 
- **List of templates for search results**: This section allows the administrator to configure templates for the layout of the search results. The default is `grid` whereas `list` is the default for the editor board. ![](img/ui-settings-searchpage3.png) + +- **Similar records** or **More like this**: Define the query used to search for similar records that are displayed at the bottom of the record view. + +![](img/morelikethisconfig.png) + + - **Default template used for search results**: Define the template page for the search. Generally this can be left as the default. - **List of formatter for record view**: Determine the formatter used to display the search results. See [Customizing metadata views](../../customizing-application/creating-custom-view.md) for information on creating a new formatter. To add an additional view, click the blue `+` button below the list and provide a name and a URL. @@ -135,30 +136,30 @@ You can configure each map with different layers and projections. - **Map Projection** This is the default projection of the map. Make sure the projection is defined in **Projections to display maps into** below. -![](img/ui-settings-mapprojection.png) + ![](img/ui-settings-mapprojection.png) -- **List of map projections to display bounding box coordinates in** This is used in the map when editing a record and defining the bounding box extent. Note that the coordinates will be stored in WGS84 regardless of the projection used to draw them. +- **List of map projections to display bounding box coordinates in** This is used in the map when editing a record and defining the bounding box extent. Make sure the listed projections are defined in **Projections to display maps into** below. Note that the coordinates will be stored in WGS84 regardless of the projection used to draw them. 
-![](img/ui-settings-mapprojectionslist.png) + ![](img/ui-settings-mapprojectionslist.png) - **Projections to display maps into** This is where the different projections available to the map are defined. All projections will be shown in the `Projection Switcher` tool of the map. -![](img/ui-settings-mapprojection2.png) + ![](img/ui-settings-mapprojection2.png) -In order to enable a new projection it must be defined here using the **proj4js** syntax, which can be found at . Additionally the default bounding box extent, maximum bounding box extent, and allowed resolutions (if required) can be defined. + In order to enable a new projection it must be defined here using the **proj4** syntax, which can be found for many EPSG-listed projections at, for example, . Additionally, the default bounding box extent, maximum bounding box extent and allowed resolutions (if required) can be defined. -Ensure that the coordinates inserted are in the correct units for and are local to the projection. A list of resolutions is only relevant if the main map layer has a XYZ source, which does not follow the common tiling pattern. + Ensure that the coordinates inserted are in the correct units for the projection and are local to the projection. A list of resolutions is only relevant if the main map layer has an XYZ source that does not follow the common tiling pattern. -Check that this configuration is valid by opening the map. + Check that this configuration is valid by opening the map. -![](img/ui-settings-mapprojection3.png) + ![](img/ui-settings-mapprojection3.png) -!!! info "Important" + !!! info "Important" If the configuration of a projection is incomplete or invalid, the map may fail to load. -If a projection is defined which is not supported by the source of the map layer, the map application will reproject map images at the client side. This may cause unexpected behaviour, such as rotated or distorted labels. 
+ If a projection is defined which is not supported by the source of the map layer, the map application will reproject map images at the client side. This may cause unexpected behaviour, such as rotated or distorted labels. - **Optional Map Viewer Tools** The checkboxes in this section define the tools available to the user in the right toolbar of the main map. Elements that are not checked are not visible. - **OGC Service to use as a graticule**: This is optional and allows the use of an external service to display the graticule on the map. @@ -215,7 +216,7 @@ This section defines the configuration for the map shown when editing a record. ## Record View - **Record view**: -- **Show Social bar**: If enabled the social bar (links to facebook, twitter etc) are enabled in record view. +- **Show Social bar**: If enabled, the social media bar is enabled in record view. ## Editor Application @@ -250,7 +251,7 @@ This section defines the configuration for the map shown when editing a record. ## JSON Configuration -This section shows the JSON configuration for the currently applied User Interface settings. From here, the json can be saved to a file (by copying and pasting). +This section shows the JSON configuration for the currently applied User Interface settings. From here, the JSON can be saved to a file (by copying and pasting). - **Test client configuration**: Click this button to test the configuration in a new browser tab. - **Reset configuration**: Click this button to reset the configuration back to the default. Note that this will revert any changes you have made in the above page. 
diff --git a/docs/manual/docs/administrator-guide/managing-users-and-groups/authentication-mode.md b/docs/manual/docs/administrator-guide/managing-users-and-groups/authentication-mode.md index efc095df787d..7026e9c804bd 100644 --- a/docs/manual/docs/administrator-guide/managing-users-and-groups/authentication-mode.md +++ b/docs/manual/docs/administrator-guide/managing-users-and-groups/authentication-mode.md @@ -6,6 +6,7 @@ By default the catalog uses the internal database for user management and authen - [Configuring LDAP - Hierarchy](authentication-mode.md#authentication-ldap-hierarchy) - [Configuring CAS](authentication-mode.md#authentication-cas) - [Configuring OAUTH2 OpenID Connect](authentication-mode.md#authentication-openid) +- [Configuring JWT/JSON Headers](authentication-mode.md#jwt-headers) - [Configuring Keycloak](authentication-mode.md#authentication-keycloak) - [Configuring Shibboleth](authentication-mode.md#authentication-shibboleth) @@ -818,6 +819,253 @@ sample:RegisteredUser A similar setup is described for geoserver in the [geoserver documentation](https://docs.geoserver.org/latest/en/user/community/keycloak/index.html). +## Configuring JWT/JSON Headers {#jwt-headers} + +The JWT Headers module provides a security module for header-based security. It is equivalent to GeoServer's JWT Headers Module (both GeoServer and GeoNetwork share a code library to make them equivalent). + +This module allows [JSON-based](https://en.wikipedia.org/wiki/JSON) headers (for username and roles) as well as [JWT-based](https://en.wikipedia.org/wiki/JSON_Web_Token) headers (for username and roles). It also allows for validating JWT-Based AccessTokens (i.e. via [OAUTH2](https://en.wikipedia.org/wiki/OAuth)/[OpenID Connect](https://en.wikipedia.org/wiki/OpenID#OpenID_Connect_(OIDC)). + + +If you are using something like [Apache's mod_auth_openidc](https://github.com/OpenIDC/mod_auth_openidc), then this module will allow you to: + +1. 
Get the username from an Apache-provided `OIDC_*` header (either as simple-strings or as a component of a JSON object). +2. Get the user's roles from an Apache-provided `OIDC_*` header (as a component of a JSON object). +3. The user's roles can also come from the GeoNetwork Database (managed by the administrator in the GeoNetwork GUI). + +If you are using [OAUTH2/OIDC Access Tokens](https://www.oauth.com/oauth2-servers/access-tokens/): + +1. Get the username from the attached JWT Access Token (via a path into the [Access Token's JSON Claims](https://auth0.com/docs/authenticate/login/oidc-conformant-authentication/oidc-adoption-access-tokens/)). +2. Get the user's roles from the JWT Access Token (via a path into the Token's JSON Claims). +3. Validate the Access Token + + * Validate its Signature + * Validate that it hasn't expired + * Validate the token against a token verifier URL ("userinfo_endpoint") and check that subjects match + * Validate components of the Access Token (like [aud (audience)](https://auth0.com/docs/secure/tokens/json-web-tokens/json-web-token-claims)) + +4. The user's roles can also come from the GeoNetwork Database (managed by the administrator in the GeoNetwork GUI). +5. You can also extract roles from the JWT Access Token (via a JSON path). + +### JWT Headers configuration + + +The JWT Headers module covers three main use cases: + +1. Simple Text, JSON, or JWT headers for the username +2. Verification of JWT Access Tokens +3. Getting roles from a JSON header or an attached JWT Access Token claim + +#### Configuration Options + +You must turn on JWT Header Support by setting the `GEONETWORK_SECURITY_TYPE` environment variable to `jwt-headers`. 
+ +``` +GEONETWORK_SECURITY_TYPE=jwt-headers +``` + +Please see these files for more detailed configuration: +* `config-security-jwt-header.xml` +* `config-security-jwt-header-overrides.properties` + +##### User Name Options + + +| Environment Variable | Meaning | +| ------------- | ------- | +|JWTHEADERS_UserNameHeaderFormat | The name of the HTTP header item that contains the user name. | +|JWTHEADERS_UserNameFormat| Format that the user name is in:
`STRING` - user name is the header's value.
`JSON` - The header is a JSON string. Use "JSON path" for where the user name is in the JSON.
`JWT` - The header is a JWT (base64) string. Use "JSON path" for where the user name is in the JWT claims. | +|JWTHEADERS_UserNameJsonPath | JSON path for the User Name. If the user name is in JSON or JWT format, this is the JSON path to the user's name.| + + + +If you are using [Apache's mod_auth_openidc](https://github.com/OpenIDC/mod_auth_openidc), then Apache will typically add: + +* an `OIDC_id_token_payload` header item (containing a JSON string of the ID token claims) +* an `OIDC_access_token` header item (containing a base64 JWT Access Token) +* optionally, a simple header item with individual claim values (i.e. `OIDC_access_token`) + +Here are some example values; + +STRING +``` +OIDC_preferred_username: david.blasby@geocat.net +``` + +JSON +``` +OIDC_id_token_payload: {"exp":1708555947,"iat":1708555647,"auth_time":1708555288,"jti":"42ee833e-89d3-4779-bd9d-06b979329c9f","iss":"http://localhost:7777/realms/dave-test2","aud":"live-key2","sub":"98cfe060-f980-4a05-8612-6c609219ffe9","typ":"ID","azp":"live-key2","nonce":"4PhqmZSJ355KBtJPbAP_PdwqiLnc7B1lA2SGpB0zXr4","session_state":"7712b364-339a-4053-ae0c-7d3adfca9005","at_hash":"2Tyw8q4ZMewuYrD38alCug","acr":"0","sid":"7712b364-339a-4053-ae0c-7d3adfca9005","upn":"david.blasby@geocat.net","resource_access":{"live-key2":{"roles":["GeonetworkAdministrator","GeoserverAdministrator"]}},"email_verified":false,"address":{},"name":"david blasby","groups":["default-roles-dave-test2","offline_access","uma_authorization"],"preferred_username":"david.blasby@geocat.net","given_name":"david","family_name":"blasby","email":"david.blasby@geocat.net"} +``` + +JWT +``` +OIDC_access_token: 
eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICItb0QyZXphcjF3ZHBUUmZCS0NqMFY4cm5ZVkJGQmxJLW5ldzFEREJCNTJrIn0.eyJleHAiOjE3MDg1NTU5NDcsImlhdCI6MTcwODU1NTY0NywiYXV0aF90aW1lIjoxNzA4NTU1Mjg4LCJqdGkiOiI0M2UyYjUwZS1hYjJkLTQ2OWQtYWJjOC01Nzc1YTY0MTMwNTkiLCJpc3MiOiJodHRwOi8vbG9jYWxob3N0Ojc3NzcvcmVhbG1zL2RhdmUtdGVzdDIiLCJhdWQiOiJhY2NvdW50Iiwic3ViIjoiOThjZmUwNjAtZjk4MC00YTA1LTg2MTItNmM2MDkyMTlmZmU5IiwidHlwIjoiQmVhcmVyIiwiYXpwIjoibGl2ZS1rZXkyIiwibm9uY2UiOiI0UGhxbVpTSjM1NUtCdEpQYkFQX1Bkd3FpTG5jN0IxbEEyU0dwQjB6WHI0Iiwic2Vzc2lvbl9zdGF0ZSI6Ijc3MTJiMzY0LTMzOWEtNDA1My1hZTBjLTdkM2FkZmNhOTAwNSIsImFjciI6IjAiLCJyZWFsbV9hY2Nlc3MiOnsicm9sZXMiOlsiZGVmYXVsdC1yb2xlcy1kYXZlLXRlc3QyIiwib2ZmbGluZV9hY2Nlc3MiLCJ1bWFfYXV0aG9yaXphdGlvbiJdfSwicmVzb3VyY2VfYWNjZXNzIjp7ImxpdmUta2V5MiI6eyJyb2xlcyI6WyJHZW9uZXR3b3JrQWRtaW5pc3RyYXRvciIsIkdlb3NlcnZlckFkbWluaXN0cmF0b3IiXX0sImFjY291bnQiOnsicm9sZXMiOlsibWFuYWdlLWFjY291bnQiLCJtYW5hZ2UtYWNjb3VudC1saW5rcyIsInZpZXctcHJvZmlsZSJdfX0sInNjb3BlIjoib3BlbmlkIHBob25lIG9mZmxpbmVfYWNjZXNzIG1pY3JvcHJvZmlsZS1qd3QgcHJvZmlsZSBhZGRyZXNzIGVtYWlsIiwic2lkIjoiNzcxMmIzNjQtMzM5YS00MDUzLWFlMGMtN2QzYWRmY2E5MDA1IiwidXBuIjoiZGF2aWQuYmxhc2J5QGdlb2NhdC5uZXQiLCJlbWFpbF92ZXJpZmllZCI6ZmFsc2UsImFkZHJlc3MiOnt9LCJuYW1lIjoiZGF2aWQgYmxhc2J5IiwiZ3JvdXBzIjpbImRlZmF1bHQtcm9sZXMtZGF2ZS10ZXN0MiIsIm9mZmxpbmVfYWNjZXNzIiwidW1hX2F1dGhvcml6YXRpb24iXSwicHJlZmVycmVkX3VzZXJuYW1lIjoiZGF2aWQuYmxhc2J5QGdlb2NhdC5uZXQiLCJnaXZlbl9uYW1lIjoiZGF2aWQiLCJmYW1pbHlfbmFtZSI6ImJsYXNieSIsImVtYWlsIjoiZGF2aWQuYmxhc2J5QGdlb2NhdC5uZXQifQ.Iq8YJ99s_HBd-gU2zaDqGbJadCE--7PlS2kRHaegYTil7WoNKfjfcH-K-59mHGzJm-V_SefE-iWG63z2c6ChddzhvG8I_O5vDNFoGlGOQFunZC379SqhqhCEdwscEUDkNA3iTTXvK9vn0muStDiv9OzpJ1zcpqYqsgxGbolGgLJgeuK8yNDH7kzDtoRzHiHw2rx4seeVpxUYAjyg_cCkEjRt3wzud7H3xlfQWRx75YfpJ0pnVphuXYR7Z8x9p6hCPtrBfDeriudm-wkwXtcV2LNlXrZ2zpKS_6Zdxzza2lN30q_6DQXHGo8EAIr8SiiQrxPQulNiX9r8XmQ917Ep0g +``` + + + +It is recommended to either use the `OIDC_id_token_payload` (JSON) or `OIDC_access_token` (JWT) header. 
+ +For `OIDC_id_token_payload`: + +* Request header attribute for User Name: `OIDC_id_token_payload` +* Format the Header value is in: `JSON` +* JSON path for the User Name: `preferred_username` + +For `OIDC_access_token`: + +* Request header attribute for User Name: `OIDC_access_token` +* Format the Header value is in: `JWT` +* JSON path for the User Name: `preferred_username` + + + +#### Role Source Options + + +You can use the standard role source options in GeoNetwork (`Request Header`, `User Group Service`, or `Role Service`). The JWT Headers module adds two more role sources - `Header Containing JSON String` and `Header containing JWT`. + + +| Environment Variable | Meaning | +| ------------- | ------- | +|JWTHEADERS_RolesHeaderName| Name of the header item the JSON or JWT is contained in| +| JWTHEADERS_JwtHeaderRoleSource |Which Role Source to use:
`JSON` - The header is a JSON string. Use "JSON path" for where the roles are in the JSON.
`JWT` - The header is a JWT (base64) string. Use "JSON path" for where the roles are in the JWT claims. | +| JWTHEADERS_RolesJsonPath| Path in the JSON object or JWT claims that contains the roles. This should either be a simple string (single role) or a list of strings.| + + + +Using the example `OIDC_id_token_payload` (JSON) or `OIDC_access_token` (JWT) shown above, the claims are: + + +``` + { + "exp": 1708555947, + "iat": 1708555647, + "auth_time": 1708555288, + "jti": "42ee833e-89d3-4779-bd9d-06b979329c9f", + "iss": "http://localhost:7777/realms/dave-test2", + "aud": "live-key2", + "sub": "98cfe060-f980-4a05-8612-6c609219ffe9", + "typ": "ID", + "azp": "live-key2", + "nonce": "4PhqmZSJ355KBtJPbAP_PdwqiLnc7B1lA2SGpB0zXr4", + "session_state": "7712b364-339a-4053-ae0c-7d3adfca9005", + "at_hash": "2Tyw8q4ZMewuYrD38alCug", + "acr": "0", + "sid": "7712b364-339a-4053-ae0c-7d3adfca9005", + "upn": "david.blasby@geocat.net", + "resource_access": + { + "live-key2": + { + "roles": + [ + "GeonetworkAdministrator", + "GeoserverAdministrator" + ] + } + }, + "email_verified": false, + "address": { }, + "name": "david blasby", + "groups": ["default-roles-dave-test2", "offline_access", "uma_authorization"], + "preferred_username": "david.blasby@geocat.net", + "given_name": "david", + "family_name": "blasby", + "email": "david.blasby@geocat.net" + } +``` + +In this JSON set of claims (mirrored in the JWT claims of the Access Token), and the two roles from the IDP are "GeonetworkAdministrator", and "GeoserverAdministrator". The JSON path to the roles is `resource_access.live-key2.roles`. + +#### Role Conversion + + +The JWT Headers module also allows for converting roles (from the external IDP) to the GeoNetwork internal role names. 
+ + +| Environment Variable | Meaning | +| ------------- |----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +|JWTHEADERS_RoleConverterString| Role Converter Map from External Roles to GeoNetwork Roles.
This is a ";" delimited map in the form of:
`ExternalRole1=GeoNetworkRole1;ExternalRole2=GeoNetworkRole2` | +|JWTHEADERS_OnlyExternalListedRoles | Only allow External Roles that are explicitly named above.
If true, external roles that are not mentioned in the conversion map will be ignored. If false, those external roles will be turned into GeoNetwork roles of the same name.
These roles should either be a Profile ("Administrator", "Reviewer", etc.) or group-based permissions ("GroupName:ProfileName") | + + +For example, a conversion map like `GeonetworkAdministrator=ADMINISTRATOR` will convert our IDP "GeonetworkAdministrator" to the "ADMINISTRATOR" Profile... + +In our example, the user has two roles "GeoserverAdministrator" and "GeonetworkAdministrator". If the "Only allow External Roles that are explicitly named above" is true, then GeoNetwork will only see the "ADMINISTRATOR" role. If false, it will see "ADMINISTRATOR" and "GeoserverAdministrator". In neither case will it see the converted "GeonetworkAdministrator" roles. + +##### Groups + +As with the OIDC and Keycloak providers, specify group permissions in the `:` format. + + +### JWT Validation + + +If you are using Apache's `mod_auth_openidc` module, then you do *not* have to do JWT validation - Apache will ensure they are valid when it attaches the headers to the request. + +However, if you are using robot access to GeoNetwork, you can attach an Access Token to the request header for access. + +``` +Authentication: Bearer `base64 JWT Access Token` +``` + +OR + +``` +Authentication: `base64 JWT Access Token` +``` + +You would then set up the user name to come from a JWT token in the `Authentication` header with a JSON path like `preferred_username`. + + + + +You can also extract roles from the Access Token in a similar manner - make sure your IDP embeds roles inside the Access Token. + +| Environment Variable | Meaning | +| ------------- | ------- | +|JWTHEADERS_ValidateToken |Validate JWT (Access Token).
If false, do not do any validation. | +| JWTHEADERS_ValidateTokenExpiry|Validate Token Expiry.
If true, validate the `exp` claim in the JWT and ensure it is in the future. This should always be true so you do not allow expired tokens. | +| JWTHEADERS_ValidateTokenSignature| Validate JWT (Access Token) Signature.
If true, validate the Token's Signature| +|JWTHEADERS_ValidateTokenSignatureURL | JSON Web Key Set URL (jwks_uri).
URL for a JWK Set. This is typically called `jwks_uri` in the OIDC metadata configuration. This will be downloaded and used to check the JWT's signature. This should always be true to ensure that the JWT has not been modified.| +|JWTHEADERS_ValidateTokenAgainstURL | Validate JWT (Access Token) Against Endpoint.
If true, validate the access token against an IDP's token verification URL.| +| JWTHEADERS_ValidateTokenAgainstURLEndpoint| URL (userinfo_endpoint).
IDP's token validation URL. This URL will be retrieved by adding the Access Token to the `Authentication: Bearer ` header. It should return an HTTP 200 status code if the token is valid. This is recommended by the OIDC specification.| +| JWTHEADERS_ValidateSubjectWithEndpoint|Also validate Subject.
If true, the `sub` claim of the Access Token and the "userinfo_endpoint" `sub` claim will be checked to ensure they are equal. This is recommended by the OIDC specification. | +|JWTHEADERS_ValidateTokenAudience | Validate JWT (Access Token) Audience.
If true, the audience of the Access Token is checked. This is recommended by the OIDC specification since this verifies that the Access Token is meant for us.| +|JWTHEADERS_ValidateTokenAudienceClaimName | Claim Name.
The name of the claim the audience is in (`aud`, `azp`, or `appid` claim) the Access Token.| +|JWTHEADERS_ValidateTokenAudienceClaimValue|Required Claim Value.
The value this claim must be (if the claim is a list of strings, then it must contain this value). | + + +#### Using Headers or GeoNetwork Database for Profiles & Profile Groups + +Inside `JwtHeaderSecurityConfig`, use these values to determine where Profile and ProfileGroups come from. + +| Property | Meaning | +| ------------- | ------- | +|updateProfile| true -> update the DB with the information from OIDC (don't allow user to edit profile in the UI)
false -> don't update the DB (user must edit profile in UI). | +|updateGroup| true -> update the DB (user's group) with the information from OIDC (don't allow admin to edit user's groups in the UI)
false -> don't update the DB (admin must edit groups in UI).| + +### Using JWT Headers for both OIDC and OAUTH2 (Simultaneously) + +Using the above configuration, you can configure JWT Headers for either OIDC-based browser access (i.e. with Apache `mod_auth_openidc`) ***or*** for OAUTH2 based Bearer Token access. However, you cannot do both at the same time. + +To configure JWT Headers to simultaneously provide OIDC and OAUTH2 access, you can use the `jwt-headers-multi` configuration. + +To use this, set the `GEONETWORK_SECURITY_TYPE` to `jwt-headers-multi` + +``` +GEONETWORK_SECURITY_TYPE=jwt-headers-multi +``` + +Please see these files for more detailed configuration: +* `config-security-jwt-header-multi.xml` +* `config-security-jwt-header-multi-overrides.properties` + +This creates two JWT Header authentication filters for GeoNetwork - one for OIDC based Browser access, and one for OAUTH2 based Robot access. + +You configure each of these independently using the same environment variables described above. +For the first filter, use the environment variables defined above (i.e. `JWTHEADERS_UserNameFormat`). For the second filter, add a `2` at the end of the environment variable (i.e. `JWTHEADERS_UserNameFormat2`). + ## Configuring EU Login {#authentication-ecas} EU Login is the central login mechanism of the European Commission. You can enable login against that central service in case your intended users have or can acquire an EU Login. diff --git a/docs/manual/docs/administrator-guide/managing-users-and-groups/creating-group.md b/docs/manual/docs/administrator-guide/managing-users-and-groups/creating-group.md index 857df1f970d0..3b6083cf49e3 100644 --- a/docs/manual/docs/administrator-guide/managing-users-and-groups/creating-group.md +++ b/docs/manual/docs/administrator-guide/managing-users-and-groups/creating-group.md @@ -4,9 +4,9 @@ The administrator can create new groups of users. 
User groups could correspond t To create new groups you should be logged on with an account that has administrative privileges. -1. Select the *Administration* button in the menu. On the Administration page, select *Group management*. +1. Select *Users and groups* from the *Admin console* drop down, then select *Manage groups*. -2. Select *Add a new group*. You may want to remove the *Sample* group; +2. Click *+New group*. You may want to remove the *Sample* group; 3. Fill out the details. The email address will be used to send feedback on data downloads when they occur for resources that are part of the Group. @@ -17,12 +17,33 @@ To create new groups you should be logged on with an account that has administra 4. Click *Save* -Access privileges can be set per metadata record. You can define privileges on a per Group basis. +## Access privileges -Privileges that can be set relate to visibility of the Metadata (*Publish*), data *Download*, *Interactive Map* access and display of the record in the *Featured* section of the home page. +Access privileges can be set on a per-metadata-record basis. Privileges define which actions are available to users in the group: -*Editing* defines the groups for which editors can edit the metadata record. +- **Publish**: Controls visibility of the metadata. +- **Download**: Grants access to data downloads. +- **Interactive Map**: Provides access to map tools. +- **Featured**: Displays the record in the *Featured* section on the home page. -*Notify* defines what Groups are notified when a file managed by GeoNetwork is downloaded. +Additional settings: +- **Editing**: Specifies which groups can edit the metadata record. +- **Notify**: Determines which groups are notified when a file managed by GeoNetwork is downloaded. -Below is an example of the privileges management table related to a dataset. 
+## Minimum user profile allowed to set privileges + +This setting allows administrators to control the minimum user profile required to assign privileges for a group. It provides enhanced control over who can manage sensitive privileges for users within the group. + +### Default setting + +By default, the **"Minimum User Profile Allowed to Set Privileges"** is set to **No Restrictions**. This means that any user with permission to manage privileges for a metadata record can assign privileges for users in this group. + +### Restricted setting + +When a specific profile is selected, only users with that profile or higher within the group can assign privileges. Users with lower profiles will have **read-only** access to privilege settings for this group. + +### Example usage + +If a group has **"Minimum User Profile Allowed to Set Privileges"** set to **Reviewer**: +- Only users with the **Reviewer** profile or higher (e.g., **Administrator**) can assign privileges for users in this group. +- Users with profiles below **Reviewer** (e.g., **Editor**) will see the group as **read-only** in the privileges interface. diff --git a/docs/manual/docs/administrator-guide/managing-users-and-groups/creating-user.md b/docs/manual/docs/administrator-guide/managing-users-and-groups/creating-user.md index 240fac6944b3..e1d35ba75eb4 100644 --- a/docs/manual/docs/administrator-guide/managing-users-and-groups/creating-user.md +++ b/docs/manual/docs/administrator-guide/managing-users-and-groups/creating-user.md @@ -3,8 +3,11 @@ To add a new user to the GeoNetwork system, please do the following: 1. Select the *Administration* button in the menu. On the Administration page, select *User management*. -2. Click the button *Add a new user*; -3. Provide the *information* required for the new user; -4. Assign the correct *profile* (see [Users, Groups and Roles](index.md#user_profiles)); -5. Assign the user to a *group* (see [Creating group](creating-group.md)); +2. 
Click the button *Add a new user*. +3. Provide the *information* required for the new user. +4. Assign the correct *profile* (see [Users, Groups and Roles](index.md#user_profiles)). +5. Assign the user to a *group* (see [Creating group](creating-group.md)). 6. Click *Save*. + +!!! note + Usernames are not case sensitive. The application does not allow to create different users with the same username in different cases. diff --git a/docs/manual/docs/administrator-guide/managing-users-and-groups/img/password-forgot.png b/docs/manual/docs/administrator-guide/managing-users-and-groups/img/password-forgot.png index d1bc512667df..bdccc9830b26 100644 Binary files a/docs/manual/docs/administrator-guide/managing-users-and-groups/img/password-forgot.png and b/docs/manual/docs/administrator-guide/managing-users-and-groups/img/password-forgot.png differ diff --git a/docs/manual/docs/administrator-guide/managing-users-and-groups/img/selfregistration-start.png b/docs/manual/docs/administrator-guide/managing-users-and-groups/img/selfregistration-start.png index 7e9a6f8084f5..1c617a5d007b 100644 Binary files a/docs/manual/docs/administrator-guide/managing-users-and-groups/img/selfregistration-start.png and b/docs/manual/docs/administrator-guide/managing-users-and-groups/img/selfregistration-start.png differ diff --git a/docs/manual/docs/administrator-guide/managing-users-and-groups/index.md b/docs/manual/docs/administrator-guide/managing-users-and-groups/index.md index aa0408ce3f4b..c35bb17f71bf 100644 --- a/docs/manual/docs/administrator-guide/managing-users-and-groups/index.md +++ b/docs/manual/docs/administrator-guide/managing-users-and-groups/index.md @@ -3,6 +3,7 @@ - [Creating group](creating-group.md) - [Creating user](creating-user.md) - [User Self-Registration](user-self-registration.md) +- [User reset password](user-reset-password.md) - [Authentication mode](authentication-mode.md) ## Default user {#user-defaults} diff --git 
a/docs/manual/docs/administrator-guide/managing-users-and-groups/user-reset-password.md b/docs/manual/docs/administrator-guide/managing-users-and-groups/user-reset-password.md new file mode 100644 index 000000000000..2eb887c85d54 --- /dev/null +++ b/docs/manual/docs/administrator-guide/managing-users-and-groups/user-reset-password.md @@ -0,0 +1,36 @@ +# User 'Forgot your password?' function {#user_forgot_password} + +!!! note + This function requires an email server configured. See [System configuration](../configuring-the-catalog/system-configuration.md#system-config-feedback). + +This function allows users who have forgotten their password to request a new one. Go to the sign in page to access the form: + +![](img/password-forgot.png) + +If a user takes this option they will receive an email inviting them to change their password as follows: + + You have requested to change your Greenhouse GeoNetwork Site password. + + You can change your password using the following link: + + http://localhost:8080/geonetwork/srv/en/password.change.form?username=dubya.shrub@greenhouse.gov&changeKey=635d6c84ddda782a9b6ca9dda0f568b011bb7733 + + This link is valid for today only. + + Greenhouse GeoNetwork Site + +The catalog has generated a changeKey from the forgotten password and the current date and emailed that to the user as part of a link to a change password form. + +If you want to change the content of this email, you should modify `xslt/service/account/password-forgotten-email.xsl`. + +When the user clicks on the link, a change password form is displayed in their browser and a new password can be entered. When that form is submitted, the changeKey is regenerated and checked with the changeKey supplied in the link, if they match then the password is changed to the new password supplied by the user. 
+ +The final step in this process is a verification email sent to the email address of the user confirming that a change of password has taken place: + + Your Greenhouse GeoNetwork Site password has been changed. + + If you did not change this password contact the Greenhouse GeoNetwork Site helpdesk + + The Greenhouse GeoNetwork Site team + +If you want to change the content of this email, you should modify `xslt/service/account/password-changed-email.xsl`. diff --git a/docs/manual/docs/administrator-guide/managing-users-and-groups/user-self-registration.md b/docs/manual/docs/administrator-guide/managing-users-and-groups/user-self-registration.md index fe3cb2d01426..aa7fdbb254b8 100644 --- a/docs/manual/docs/administrator-guide/managing-users-and-groups/user-self-registration.md +++ b/docs/manual/docs/administrator-guide/managing-users-and-groups/user-self-registration.md @@ -1,5 +1,9 @@ # User Self-Registration {#user_self_registration} +!!! note + This function requires an email server configured. See [System configuration](../configuring-the-catalog/system-configuration.md#system-config-feedback). + + To enable the self-registration functions, see [System configuration](../configuring-the-catalog/system-configuration.md). When self-registration is enabled, for users that are not logged in, an additional link is shown on the login page: ![](img/selfregistration-start.png) @@ -15,8 +19,8 @@ The fields in this form are self-explanatory except for the following: - the user will still be given the `Registered User` profile - an email will be sent to the Email address nominated in the Feedback section of the 'System Administration' menu, informing them of the request for a more privileged profile - **Requested group**: By default, self-registered users are not assigned to any group. 
If a group is selected: - - the user will still not be assigned to any group - - an email will be sent to the Email address nominated in the Feedback section of the 'System Administration' menu, informing them of the requested group. + - the user will still not be assigned to any group + - an email will be sent to the Email address nominated in the Feedback section of the 'System Administration' menu, informing them of the requested group. ## What happens when a user self-registers? @@ -72,39 +76,3 @@ If you want to change the content of this email, you should modify `xslt/service The Greenhouse GeoNetwork Site If you want to change the content of this email, you should modify `xslt/service/account/registration-prof-email.xsl`. - -## The 'Forgot your password?' function - -This function allows users who have forgotten their password to request a new one. Go to the sign in page to access the form: - -![](img/password-forgot.png) - -For security reasons, only users that have the `Registered User` profile can request a new password. - -If a user takes this option they will receive an email inviting them to change their password as follows: - - You have requested to change your Greenhouse GeoNetwork Site password. - - You can change your password using the following link: - - http://localhost:8080/geonetwork/srv/en/password.change.form?username=dubya.shrub@greenhouse.gov&changeKey=635d6c84ddda782a9b6ca9dda0f568b011bb7733 - - This link is valid for today only. - - Greenhouse GeoNetwork Site - -The catalog has generated a changeKey from the forgotten password and the current date and emailed that to the user as part of a link to a change password form. - -If you want to change the content of this email, you should modify `xslt/service/account/password-forgotten-email.xsl`. - -When the user clicks on the link, a change password form is displayed in their browser and a new password can be entered. 
When that form is submitted, the changeKey is regenerated and checked with the changeKey supplied in the link, if they match then the password is changed to the new password supplied by the user. - -The final step in this process is a verification email sent to the email address of the user confirming that a change of password has taken place: - - Your Greenhouse GeoNetwork Site password has been changed. - - If you did not change this password contact the Greenhouse GeoNetwork Site helpdesk - - The Greenhouse GeoNetwork Site team - -If you want to change the content of this email, you should modify `xslt/service/account/password-changed-email.xsl`. diff --git a/docs/manual/docs/annexes/standards/iso19115-3.2018.md b/docs/manual/docs/annexes/standards/iso19115-3.2018.md index baaabc077f1d..396aa2759884 100644 --- a/docs/manual/docs/annexes/standards/iso19115-3.2018.md +++ b/docs/manual/docs/annexes/standards/iso19115-3.2018.md @@ -6887,7 +6887,7 @@ Those values are defined in the standard but hidden when editing. | code | label | description | |------------------------------|--------------------------------|-------------| -| map staticMap interactiveMap | Map Static map Interactive map | | +| map map-static map-interactive | Map Static map Interactive map | | ### Scope description {#iso19115-3.2018-elem-mcc-MD_ScopeDescription-7995800501eaf72f941d8e81542f8e98} @@ -19811,7 +19811,7 @@ Those values are defined in the standard but hidden when editing. 
| code | label | description | |------------------------------|--------------------------------|-------------| -| map staticMap interactiveMap | Map Static map Interactive map | | +| map map-static map-interactive | Map Static map Interactive map | | ### Standard codelists Spatial Representation Type (mcc:MD_SpatialRepresentationTypeCode) {#iso19115-3.2018-cl-mcc-MD_SpatialRepresentationTypeCode} diff --git a/docs/manual/docs/annexes/standards/iso19139.md b/docs/manual/docs/annexes/standards/iso19139.md index ceb1ef656c99..53a1707aaf46 100644 --- a/docs/manual/docs/annexes/standards/iso19139.md +++ b/docs/manual/docs/annexes/standards/iso19139.md @@ -11963,7 +11963,7 @@ Those values are defined in the standard but hidden when editing. | code | label | description | |---------------------------------------------|------------------------------------------------|-------------| -| map staticMap interactiveMap featureCatalog | Map Static map Interactive map Feature catalog | | +| map map-static map-interactive featureCatalog | Map Static map Interactive map Feature catalog | | Displayed only if @@ -17436,7 +17436,7 @@ Those values are defined in the standard but hidden when editing. 
| code | label | description | |---------------------------------------------|------------------------------------------------|-------------| -| map staticMap interactiveMap featureCatalog | Map Static map Interactive map Feature catalog | | +| map map-static map-interactive featureCatalog | Map Static map Interactive map Feature catalog | | Displayed only if diff --git a/docs/manual/docs/api/img/dcat-in-download-menu.png b/docs/manual/docs/api/img/dcat-in-download-menu.png new file mode 100644 index 000000000000..5de86b80f02f Binary files /dev/null and b/docs/manual/docs/api/img/dcat-in-download-menu.png differ diff --git a/docs/manual/docs/api/rdf-dcat.md b/docs/manual/docs/api/rdf-dcat.md index bb2fa14fefc9..0d5eb499453f 100644 --- a/docs/manual/docs/api/rdf-dcat.md +++ b/docs/manual/docs/api/rdf-dcat.md @@ -1,24 +1,251 @@ -# RDF DCAT end point {#rdf-dcat} +# DCAT {#rdf-dcat} -!!! warning +The catalogue has the capability to convert ISO to DCAT format in various API endpoint. - Unavailable since version 4.0.0. - - There is no known sponsor or interested party for implementing RDF DCAT. - Interested parties may contact the project team for guidance and to express their intent. +## Supported DCAT profiles -The RDF DCAT end point provides a way of getting information about the catalog, the datasets and services, and links to distributed resources in a machine-readable format. The formats of the output are based on DCAT, an RDF vocabulary that is designed to facilitate interoperability between web-based data catalogs. 
+A base conversion is provided with complementary extensions for various profiles of DCAT: -Reference: +| Profile | Version | Description | URL | Conversion from | +|-----------------------------|---------|---------------------------------------------------------------------------------------------------------------------|------|------------------| +| W3C DCAT | 3 | Default W3C standard | https://www.w3.org/TR/vocab-dcat-3/ | ISO19115-3 | +| European DCAT-AP | 3.0.0 | DCAT profile for sharing information about Catalogues containing Datasets and Data Services descriptions in Europe | https://semiceu.github.io/DCAT-AP/releases/3.0.0/ | ISO19115-3 | +| European DCAT-AP-Mobility | 1.0.1 | mobilityDCAT-AP is a mobility-related extension of the DCAT-AP | https://mobilitydcat-ap.github.io/mobilityDCAT-AP/releases/ | ISO19115-3 | +| European DCAT-AP-HVD | 2.2.0 | DCAT-AP for a dataset that is subject to the requirements imposed by the High-Value Dataset implementing regulation | https://semiceu.github.io/DCAT-AP/releases/2.2.0-hvd/ | ISO19115-3 | +| European GeoDCAT-AP | 3.0.0 | | https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0/ | ISO19115-3 | +| European GeoDCAT-AP (SEMIC) | 3.0.0 | [XSLT conversion maintained by SEMIC](https://github.com/SEMICeu/iso-19139-to-dcat-ap/blob/main/iso-19139-to-dcat-ap.xsl) | https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0/ | ISO19139 | -* [Data Catalog Vocabulary (DCAT)](https://www.w3.org/TR/vocab-dcat-3/) -## Upgrading from GeoNetwork 3.0 Guidance +* The mapping is done from ISO19115-3 to DCAT* except for the SEMIC conversion which converts ISO19139 to GeoDCAT-AP. -RDF DCAT API is no longer available. +* When needed, an ISO19139 to or from ISO19115-3 conversion is applied (eg. a CSW request querying a catalog in ISO19115-3 using the SEMIC conversion). + +* DCAT output are not available for ISO19110 or Dublin core standards. 
+ +## Past implementation + +[The first implementation of DCAT output was done in 2012](https://trac.osgeo.org/geonetwork/wiki/proposals/DCATandRDFServices) and was targeting interaction with semantic service and semantic sitemap support. DCAT output was available using a service named `rdf.search`. This service was deprecated in version 4.0.0 in favor of producing DCAT output in the [Catalog Service for the Web (CSW)](csw.md) or using the formatters API. + + +## Usage in the formatters API + +Each DCAT format is available using a formatter, e.g. http://localhost:8080/geonetwork/srv/api/records/be44fe5a-65ca-4b70-9d29-ac5bf1f0ebc5/formatters/eu-dcat-ap + +To add the formatter in the record view download list, the user interface configuration can be updated: + +![image](img/dcat-in-download-menu.png) + + +User interface configuration: + +```json +{ + "mods": { + "search": { + "downloadFormatter": [ + { + "label": "exportMEF", + "url": "/formatters/zip?withRelated=false", + "class": "fa-file-zip-o" + }, + { + "label": "exportPDF", + "url": "/formatters/xsl-view?output=pdf&language=${lang}", + "class": "fa-file-pdf-o" + }, + { + "label": "exportXML", + "url": "/formatters/xml", + "class": "fa-file-code-o" + }, + { + "label": "W3C-DCAT", + "url": "/formatters/dcat" + }, + { + "label": "EU-DCAT-AP", + "url": "/formatters/eu-dcat-ap" + }, + { + "label": "EU-GEO-DCAT-AP", + "url": "/formatters/eu-geodcat-ap" + }, + { + "label": "EU-DCAT-AP-MOBILITY", + "url": "/formatters/eu-dcat-ap-mobility" + }, + { + "label": "EU-DCAT-AP-HVD", + "url": "/formatters/eu-dcat-ap-hvd" + } + ] +``` + + +## Usage in the CSW service + +All DCAT profiles are also accessible using the CSW protocol. 
+ +A `GetRecordById` operation can be used: http://localhost:8080/geonetwork/srv/eng/csw?SERVICE=CSW&VERSION=2.0.2&REQUEST=GetRecordById&ID=da165110-88fd-11da-a88f-000d939bc5d8&outputSchema=https://semiceu.github.io/DCAT-AP/releases/2.2.0-hvd/ and is equivalent to the API http://localhost:8080/geonetwork/srv/api/records/da165110-88fd-11da-a88f-000d939bc5d8/formatters/eu-dcat-ap-hvd?output=xml. + +A `GetRecords` operation can be used to retrieve a set of records: http://localhost:8080/geonetwork/srv/fre/csw?SERVICE=CSW&VERSION=2.0.2&REQUEST=GetRecords&outputSchema=http://data.europa.eu/930/&elementSetName=full&resultType=results&maxRecords=300 + +Use the `outputSchema` parameter to select the DCAT profile to use. The following values are supported: + + +| Profile | Output schema parameter | +|-----------------------------------------|-------------------------------------------------------| +| CSW | http://www.opengis.net/cat/csw/2.0.2 | +| ISO19115-3 | http://standards.iso.org/iso/19115/-3/mdb/2.0 | +| ISO19110 | http://www.isotc211.org/2005/gfc | +| ISO19139 | http://www.isotc211.org/2005/gmd | +| W3C DCAT | http://www.w3.org/ns/dcat#core | +| EU-DCAT-AP | http://data.europa.eu/r5r/ | +| EU-GeoDCAT-AP | http://data.europa.eu/930/ | +| EU-GeoDCAT-AP (SEMIC) | http://data.europa.eu/930/#semiceu | +| DCAT (past implementation - deprecated) | http://www.w3.org/ns/dcat# | +| EU-DCAT-AP-HVD | https://semiceu.github.io/DCAT-AP/releases/2.2.0-hvd/ | +| EU-DCAT-AP-Mobility | https://w3id.org/mobilitydcat-ap | + +When using GET request, it is recommended to encode URL characters in parameters (eg. `#` as `%23`) to avoid issues with the URL. + +Those values are listed in the `GetCapabilities` operation http://localhost:8080/geonetwork/srv/eng/csw?SERVICE=CSW&VERSION=2.0.2&REQUEST=GetCapabilities. 
+ +```xml + + http://www.opengis.net/cat/csw/2.0.2 + http://standards.iso.org/iso/19115/-3/mdb/2.0 + http://www.isotc211.org/2005/gfc + http://www.isotc211.org/2005/gmd + http://data.europa.eu/930/ + http://data.europa.eu/930/#semiceu + http://data.europa.eu/r5r/ + http://www.w3.org/ns/dcat# + http://www.w3.org/ns/dcat#core + https://semiceu.github.io/DCAT-AP/releases/2.2.0-hvd/ + https://w3id.org/mobilitydcat-ap +``` + +## Usage in OGC API Records + +For the time being, OGC API Records provides a simplified DCAT output (based on the index document). + +## DCAT validation + +The DCAT validation can be done using online validation tool: + +* https://www.itb.ec.europa.eu/shacl/dcat-ap/upload + +Depending on the target DCAT profile to use, it may be required to build proper ISO template and metadata record containing all required fields. Usually profiles are adding constraints for usage of specific vocabularies and fields (eg. [for High Value datasets, specific vocabularies are defined for categories, license, applicable legislations, ...](https://semiceu.github.io/DCAT-AP/releases/2.2.0-hvd/#controlled-vocabularies-to-be-used)). + + +## Mapping considerations + +### Items under discussion + + +The mapping is done from ISO19115-3 to DCAT. The mapping may not cover all usages and may be adapted. This can be done in the `iso19115-3.2018` schema plugin in the `formatter/dcat*` XSLT files. + +Some points under discussion are: + +#### Object vs Reference: + +* Should we use object or reference for some fields (eg. contact, organisation, ...)? +* What should be the reference URI? +* Where is defined the reference URI in ISO? + +eg. + +* for the CatalogRecord reference URI is the `metadataLinkage` or the `metadataIdentifier`. +* for the Resource reference URI is the first resource identifier or the CatalogRecord reference URI with `#resource` suffix. 
+* for an organisation, the URI will be the first value in the following sequence: + +```xml +(cit:partyIdentifier/*/mcc:code/*/text(), +cit:contactInfo/*/cit:onlineResource/*/cit:linkage/gco:CharacterString/text(), +cit:name/gcx:Anchor/@xlink:href, +@uuid)[1] +``` + +#### Distribution model in DCAT and ISO + +In DCAT, a number of properties from the dataset are also defined in the distribution elements. +In ISO, an option could be to use multiple transfer options element to create multiple distribution elements with more detailed information in DCAT (eg. transfer size). + +In the mapping, should we repeat all the information about the dataset? Should we recommend to use multiple transfer options element in ISO? + +#### No equivalent field in ISO + +eg. Where to store `spdx:checksum` in ISO? Could be considered as an online resource id attribute as the checksum uniquely identify the resource. + + +#### Associated resources + +Links between resources are not always bidirectional so using the associated API would allow to populate more relations. +This is also mitigated when the complete RDF graph of the catalogue is retrieved as it will provide relations from all records. + + +### EU DCAT AP High Value Datasets + +When encoding in ISO datasets in the context of DCAT HVD, consider encoding the following properties: + +* Add a keyword pointing to the legislation [http://data.europa.eu/eli/reg_impl/2023/138/oj](http://data.europa.eu/eli/reg_impl/2023/138/oj) +* Add at least one keyword for the `dcatap:hvdCategory` from the [High-value dataset categories vocabulary](https://op.europa.eu/en/web/eu-vocabularies/dataset/-/resource?uri=http://publications.europa.eu/resource/dataset/high-value-dataset-category) + +See [DCAT AP HVD specification](https://semiceu.github.io/DCAT-AP/releases/2.2.0-hvd/) for other requirements. 
+ +### EU DCAT mobility + +When encoding in ISO datasets in the context of DCAT Mobility, consider encoding the following properties: + +* `mobilitydcatap:mobilityTheme` (mandatory) is encoded as a keyword from the [mobility theme vocabulary](https://w3id.org/mobilitydcat-ap/mobility-theme) +* `mobilitydcatap:georeferencingMethod` (recommended) is encoded as a keyword from the [mobility georeferencing method vocabulary](https://w3id.org/mobilitydcat-ap/georeferencing-method/) +* `mobilitydcatap:networkCoverage` (recommended) is encoded as a keyword from the [mobility network coverage vocabulary](https://w3id.org/mobilitydcat-ap/network-coverage) +* `mobilitydcatap:transportMode` (recommended) is encoded as a keyword from the [mobility transport mode vocabulary](https://w3id.org/mobilitydcat-ap/transport-mode) + +See [DCAT AP Mobility specification](https://mobilitydcat-ap.github.io/mobilityDCAT-AP/releases/) for other requirements. + + +### SEMIC conversion compared to GeoNetwork conversion + +The main difference between the 2 conversions is that the GeoNetwork conversion **starts from ISO19115-3 instead of ISO19139** (to better support additional information provided in ISO19115-3 eg. date lifecycle, party identifiers and citation in data quality, feature catalogue, additional documentation, portrayal sections). **The conversion to GeoDCAT-AP is done as an extension of DCAT-AP which extends the core W3C DCAT** for easier customization and extension. This allows non EU countries to also use the base DCAT conversion. The conversion is less linear and easier to extend or customize. + +SEMIC conversion parameters `core`, `extended`, `include-deprecated` are not available in the GeoNetwork conversion which focuses on version 3 of GeoDCAT-AP. + +Some of the differences in the GeoNetwork conversion are: + +* CatalogRecord / `dct:identifier` is prefixed with the code space if defined. 
+* CatalogRecord / `dct:title` and `dct:description` are set at CatalogRecord level and at the Resource level +* Resource / First resource identifier is used for `dct:identifier` (MobilityDCAT restricts it to 0..1), then additional ones are encoded in `adms:identifier` +* Resource / `dct:spatial` is only encoded using a `dcat:bbox` in GeoJSON (instead of WKT and GML and GeoJson and the `locn:geometry` which was kept for backward compatibility with GeoDCAT-AP v1.*) +* Resource / `dct:temporal` is only encoded using a `dcat:startDate` and `dcat:endDate` (and do not add same information in `schemas:startDate` and `schemas:endDate` which was kept for backward compatibility with GeoDCAT-AP v1.*) +* Portrayal, specification, report online link are encoded using `foaf:page` instead of `foaf:landingPage` +* `prov:qualifiedAttribution` element are not created because `dcat:creator|publisher|contactPoint|..` already provide the same information. +* Keyword / When encoded with `Anchor`, `dcat:theme` encoded with only a reference in SEMIC conversion and using `skos:Concept` in the GeoNetwork conversion (see discussion point above) + +```xml + +vs + + + Données de base (autre) + + +``` + +Additional properties supported: + +* CatalogRecord / `dct:issued` is added if exists in the metadata (added in ISO19115-3) +* CatalogRecord / `dct:language` is added if exists +* CatalogRecord / `cnt:characterEncoding` is added if exists +* Resource / `graphicOverview` is encoded as `foaf:page` +* Resource / Associated resources + * Source dataset are encoded using `dct:source` + * Associated resource are encoded using `dct:relation` and subtypes (eg. `isPartOf`) +* Party identifier (added in ISO19115-3) are used for `rdf:about` attribute for individual or organization + +Technical differences: + +* `normalize-space` is not applied to `abstract` or `lineage` (which lose the line breaks and basic formatting) -1. 
We recommend migrating to use of [Catalog Service for the Web (CSW)](csw.md) API to query and explore data. -2. When downloading using `GetRecord` make use of the `application/rdf+xml; charset=UTF-8` output format. - - This will allow retrieving records in the same document format as previously provided by RDF DCAT api. diff --git a/docs/manual/docs/customizing-application/configuring-faceted-search.md b/docs/manual/docs/customizing-application/configuring-faceted-search.md index 7af888bdc0f9..15a35e166ecb 100644 --- a/docs/manual/docs/customizing-application/configuring-faceted-search.md +++ b/docs/manual/docs/customizing-application/configuring-faceted-search.md @@ -338,6 +338,23 @@ When using a generic field like `tag.default` and including only a subset of key }, ``` +To translate the label `IDP_TOPICS`, 2 options: + +* Use the translation API to add your custom translation in the database for the facet key `facet-IDP_TOPICS` (see the Admin console --> Settings --> Languages). +* Or declare a meta property `labels` in the facet configuration: + +``` js +"IDP_TOPICS": { + "terms": { + ... + "meta": { + "labels": { + "eng": "IDP topics", + "fre": "Thèmes IDP" + }, +``` + + ## Decorate aggregations {#configuring-facet-decorator} All aggregations can be decorated by an icon or an image in the home page or in other pages. 
The decorator is configured in the `meta` properties of the facet: @@ -467,7 +484,7 @@ A date range field: "resourceTemporalDateRange": { "gnBuildFilterForRange": { "field": "resourceTemporalDateRange", - "buckets": "2021 - 1970", + "buckets": 51, //"2021 - 1970", "dateFormat": "YYYY", "vegaDateFormat": "%Y", "from": "1970", diff --git a/docs/manual/docs/install-guide/installing-index.md b/docs/manual/docs/install-guide/installing-index.md index e3ea8950d685..870e764f3ed3 100644 --- a/docs/manual/docs/install-guide/installing-index.md +++ b/docs/manual/docs/install-guide/installing-index.md @@ -1,38 +1,43 @@ # Installing search platform -The GeoNetwork search engine is built on top of Elasticsearch. The platform is used to index records and also to analyze WFS data (See [Analyze and visualize data](../user-guide/analyzing/data.md) ). +The GeoNetwork search engine is built on top of Elasticsearch. The platform is used to index records and also to index WFS data (See [Analyze and visualize data](../user-guide/analyzing/data.md) ). GeoNetwork requires an [Elasticsearch](https://www.elastic.co/products/elasticsearch) instance to be installed next to the catalog. + ## Elasticsearch compatibility +Elasticsearch Java client version: 8.14.3 + | Elasticsearch Version | Compatibility | |-----------------------| ------------- | -| Elasticsearch 7.15.x | minimum | -| Elasticsearch 8.11.3 | tested | +| Elasticsearch 8.14.3 | recommended | +| Elasticsearch 8.14.x | minimum | + +Older version may be supported but are untested. ## Installation === "Manual installation" - 1. **Download:** Elasticsearch 8.x (`8.11.3` tested, minimum `7.15.x`) from and unzip the file. + 1. **Download:** Elasticsearch `8.14.3` from and unzip the file. 
``` shell - wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-8.11.3.tar.gz - tar xvfz elasticsearch-8.11.3.tar.gz + wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-8.14.3.tar.gz + tar xvfz elasticsearch-8.14.3.tar.gz ``` 2. **Start**: Manually start Elasticsearch using: ``` shell - elasticsearch-8.11.3/bin/elasticsearch + elasticsearch-8.14.3/bin/elasticsearch ``` 3. **Stop**: Manually stop Elasticsearch using: ``` shell - elasticsearch-8.11.3/bin/elasticsearch stop + elasticsearch-8.14.3/bin/elasticsearch stop ``` === "Install using Maven" diff --git a/docs/manual/docs/overview/authors.md b/docs/manual/docs/overview/authors.md index 60099cdf93ae..106fba51a028 100644 --- a/docs/manual/docs/overview/authors.md +++ b/docs/manual/docs/overview/authors.md @@ -9,17 +9,17 @@ In brief the committee votes on proposals on the geonetwork-dev mailinglist. Pro ### Members of the Project Steering Committee - Jeroen Ticheler (jeroen ticheler * geocat net) [GeoCat](https://www.geocat.net) - Chair -- Francois Prunayre [Titellus](https://titellus.net) - Simon Pigot [CSIRO](https://www.csiro.au) - Florent Gravin [CamptoCamp](https://camptocamp.com) - Jose Garcia [GeoCat](https://www.geocat.net) -- Jo Cook [Astun Technology](https://www.astuntechnology.com) - Paul van Genuchten [ISRIC](https://www.isric.org) ### Former members of the PSC +- Jo Cook [Astun Technology](https://www.astuntechnology.com) - Patrizia Monteduro (Patrizia Monteduro * fao org) [FAO-UN](https://www.fao.org) - Emanuele Tajariol (e tajariol * mclink it - GeoSolutions) +- Francois Prunayre - Jesse Eichar - Andrea Carboni (acarboni * crisalis-tech com - Independent consultant) - Archie Warnock (warnock * awcubed com) [A/WWW Enterprises](https://www.awcubed.com) diff --git a/docs/manual/docs/overview/change-log/history/index.md b/docs/manual/docs/overview/change-log/history/index.md index 298f544c4018..2401afe752f1 100644 --- 
a/docs/manual/docs/overview/change-log/history/index.md +++ b/docs/manual/docs/overview/change-log/history/index.md @@ -12,6 +12,8 @@ This series is under **active development** by our community, with new features, ### 4.4 +- [Version 4.4.6](../version-4.4.6.md) +- [Version 4.4.5](../version-4.4.5.md) - [Version 4.4.4](../version-4.4.4.md) - [Version 4.4.3](../version-4.4.3.md) - [Version 4.4.2](../version-4.4.2.md) @@ -26,6 +28,8 @@ This series is under **active use** by our community, with regular improvements, ### 4.2 +- [Version 4.2.11](../version-4.2.11.md) +- [Version 4.2.10](../version-4.2.10.md) - [Version 4.2.9](../version-4.2.9.md) - [Version 4.2.8](../version-4.2.8.md) - [Version 4.2.7](../version-4.2.7.md) diff --git a/docs/manual/docs/overview/change-log/index.md b/docs/manual/docs/overview/change-log/index.md index cfb550e923d1..5d0623fa2e65 100644 --- a/docs/manual/docs/overview/change-log/index.md +++ b/docs/manual/docs/overview/change-log/index.md @@ -3,5 +3,5 @@ Notable changes made to GeoNetwork opensource including new features, migration instructions, and bug fixes. - [Version 4.4.5](version-4.4.4.md) -- [Version 4.2.9](version-4.2.9.md) +- [Version 4.2.11](version-4.2.11.md) - [Release History](history/index.md) diff --git a/docs/manual/docs/overview/change-log/version-4.2.11.md b/docs/manual/docs/overview/change-log/version-4.2.11.md new file mode 100644 index 000000000000..88d7e7f702d8 --- /dev/null +++ b/docs/manual/docs/overview/change-log/version-4.2.11.md @@ -0,0 +1,18 @@ +# Version 4.2.11 {#version-4211} + +GeoNetwork 4.2.11 release is a minor release. 
+ +## List of changes + +Release highlights: + +- [Upgrade jQuery to version 3.7.1](https://github.com/geonetwork/core-geonetwork/pull/8105) +- [CSW / Fix parsing date values for filters](https://github.com/geonetwork/core-geonetwork/pull/8417) +- [Fix harvester execution logs added to previous logs](https://github.com/geonetwork/core-geonetwork/pull/8388) +- [Register user / allow to configured allowed email domains](https://github.com/geonetwork/core-geonetwork/pull/8207) +- [Register user / allow to select the group where the user wants to register](https://github.com/geonetwork/core-geonetwork/pull/8195) +- [WebDav harvester / Add support for XSLT filter process](https://github.com/geonetwork/core-geonetwork/pull/8423) +- [Update home page "browse by" to display facet as label if there is only one](https://github.com/geonetwork/core-geonetwork/pull/8449) +- [Use UI language for metadata selection export to CSV / PDF](https://github.com/geonetwork/core-geonetwork/pull/8274) + +and more \... see [4.2.11 issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A4.2.11+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?q=is%3Apr+milestone%3A4.2.11+is%3Aclosed) for full details. diff --git a/docs/manual/docs/overview/change-log/version-4.4.6.md b/docs/manual/docs/overview/change-log/version-4.4.6.md new file mode 100644 index 000000000000..1e9a6a2a2148 --- /dev/null +++ b/docs/manual/docs/overview/change-log/version-4.4.6.md @@ -0,0 +1,35 @@ +# Version 4.4.6 {#version-446} + +GeoNetwork 4.4.6 is a minor release. 
+ +After updating use **Admin Console > Tools** and use **Delete index and reindex**: + + +## List of changes + +Major changes: + +* [Add support for external management named properties in JCloud](https://github.com/geonetwork/core-geonetwork/pull/8357) + +* [Use UI language for metadata selection export to CSV / PDF. + ](https://github.com/geonetwork/core-geonetwork/pull/8262) + +* [WebDav harvester / Add support for XSLT filter process](https://github.com/geonetwork/core-geonetwork/pull/8243) + +* [Register user / allow to configured allowed email domains](https://github.com/geonetwork/core-geonetwork/pull/8186) + +* [Register user / allow to select the group where the user wants to register](https://github.com/geonetwork/core-geonetwork/pull/8176) + +* [Support multiple DOI servers](https://github.com/geonetwork/core-geonetwork/pull/8098) + +* [Standard / DCAT (and profiles) export ](https://github.com/geonetwork/core-geonetwork/pull/7600) + +and more \... see [4.4.6-0 issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A4.4.6+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?page=3&q=is%3Apr+milestone%3A4.4.6+is%3Aclosed) for full details. diff --git a/docs/manual/docs/user-guide/associating-resources/doi.md b/docs/manual/docs/user-guide/associating-resources/doi.md index cabc05ecba56..3f61601ebcaa 100644 --- a/docs/manual/docs/user-guide/associating-resources/doi.md +++ b/docs/manual/docs/user-guide/associating-resources/doi.md @@ -7,9 +7,21 @@ The catalogue support DOI creation using: - [DataCite API](https://support.datacite.org/docs/mds-api-guide). 
- EU publication office API -Configure the API access point in the `admin console --> settings`: +Configure the DOI API access point to publish the metadata in the `Admin console --> Settings --> Doi servers`: -![](img/doi-admin-console.png) +![](img/doi-create-server.png) + +Providing the following information: + +- `Name`: A descriptive name for the server. +- `Description`: (Optional) A verbose description of the server. +- `DataCite API endpoint`: The API url, usually https://mds.datacite.org or https://mds.test.datacite.org for testing. +- `DataCite username` / `DataCite password`: Credentials required to publish the DOI resources. +- `Landing page URL template`: The URL to use to register the DOI. A good default for GeoNetwork is http://localhost:8080/geonetwork/srv/resources/records/{{uuid}}. The landing page URL MUST contains the UUID of the record. +- `Final DOI URL prefix`: (Optional) Keep it empty to use the default https://doi.org prefix. Use https://mds.test.datacite.org/doi when using the test API. +- `DOI pattern`: Default is `{{uuid}}` but the DOI structure can be customized with database id and/or record group eg. `example-{{groupOwner}}-{{id}}`. +- `DataCite prefix`: Usually looks like `10.xxxx`. You will be allowed to register DOI names only under the prefixes that have been assigned to you. +- `Record groups`: (Optional) When creating a DOI, only DOI server(s) associated with the record group are proposed. If record group is not associated with any DOI servers, then DOI servers with no group are proposed. 
A record can be downloaded using the DataCite format from the API using: diff --git a/docs/manual/docs/user-guide/associating-resources/img/doi-create-server.png b/docs/manual/docs/user-guide/associating-resources/img/doi-create-server.png new file mode 100644 index 000000000000..efccf6030657 Binary files /dev/null and b/docs/manual/docs/user-guide/associating-resources/img/doi-create-server.png differ diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-csw.md b/docs/manual/docs/user-guide/harvesting/harvesting-csw.md index 614687eb4716..dc94a777d4a0 100644 --- a/docs/manual/docs/user-guide/harvesting/harvesting-csw.md +++ b/docs/manual/docs/user-guide/harvesting/harvesting-csw.md @@ -4,16 +4,38 @@ This harvester will connect to a remote CSW server and retrieve metadata records ## Adding a CSW harvester -The figure above shows the options available: - -- **Site** - Options about the remote site. - - *Name* - This is a short description of the remote site. It will be shown in the harvesting main page as the name for this instance of the CSW harvester. - - *Service URL* - The URL of the capabilities document of the CSW server to be harvested. eg. . This document is used to discover the location of the services to call to query and retrieve metadata. - - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing harvested metadata records in the search results. - - *Use account* - Account credentials for basic HTTP authentication on the CSW server. -- **Search criteria** - Using the Add button, you can add several search criteria. You can query only the fields recognised by the CSW protocol. -- **Options** - Scheduling options. -- **Options** - Specific harvesting options for this harvester. - - *Validate* - If checked, the metadata will be validated after retrieval. If the validation does not pass, the metadata will be skipped. 
+To create a CSW harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `CSW`:
+
+![](img/add-csw-harvester.png)
+
+Providing the following information:
+
+- **Identification**
+    - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester.
+    - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester.
+    - *User*: User who owns the harvested records.
+
+- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+
+- **Configure connection to OGC CSW 2.0.2**
+    - *Service URL*: The URL of the capabilities document of the CSW server to be harvested. eg. . This document is used to discover the location of the services to call to query and retrieve metadata.
+    - *Remote authentication*: If checked, the credentials for basic HTTP authentication on the CSW server should be provided.
+    - *Search filter*: (Optional) Define the search criteria below to restrict the records to harvest.
+    - *Search options*:
+        - *Sort by*: Defines the sort option used to retrieve the results. Sorting by 'identifier:A' means by UUID in alphabetical order. Any CSW queryables can be used in combination with A or D for setting the ordering.
+        - *Output Schema*: The metadata standard to request the metadata records from the CSW server.
+        - *Distributed search*: Enables the distributed search in the remote server (if the remote server supports it). When this option is enabled, the remote catalog cascades the search to the Federated CSW servers that it has configured.
+
+- **Configure response processing for CSW**
+    - *Action on UUID collision*: When a harvester finds the same UUID on a record collected by another method (another harvester, importer, dashboard editor, ...), should this record be skipped (default), overridden or generate a new UUID?
+    - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron).
+        - Accept all metadata without validation.
+        - Accept metadata that are XSD valid.
+        - Accept metadata that are XSD and schematron valid.
+    - *Check for duplicate resources based on the resource identifier*: If checked, ignores metadata with a resource identifier (`gmd:identificationInfo/*/gmd:citation/gmd:CI_Citation/gmd:identifier/*/gmd:code/gco:CharacterString`) that is assigned to another metadata record in the catalog. It only applies to records in ISO19139 or ISO profiles.
+    - *XPath filter*: (Optional) When a record is retrieved from the remote server, an XPath expression is checked to accept or discard the record.
+    - *XSL transformation to apply*: (Optional) The referenced XSL transform will be applied to each metadata record before it is added to GeoNetwork.
+    - *Batch edits*: (Optional) Allows updating harvested records, using XPath syntax. It can be used to add, replace or delete elements.
+    - *Category*: (Optional) A GeoNetwork category to assign to each metadata record.
+
 - **Privileges** - Assign privileges to harvested metadata.
-- **Categories** diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-filesystem.md b/docs/manual/docs/user-guide/harvesting/harvesting-filesystem.md index 5e0b6b3ab54a..900deeafc4cd 100644 --- a/docs/manual/docs/user-guide/harvesting/harvesting-filesystem.md +++ b/docs/manual/docs/user-guide/harvesting/harvesting-filesystem.md @@ -4,21 +4,35 @@ This harvester will harvest metadata as XML files from a filesystem available on ## Adding a Local File System harvester -The figure above shows the options available: - -- **Site** - Options about the remote site. - - *Name* - This is a short description of the filesystem harvester. It will be shown in the harvesting main page as the name for this instance of the Local Filesystem harvester. - - *Directory* - The path name of the directory containing the metadata (as XML files) to be harvested. - - *Recurse* - If checked and the *Directory* path contains other directories, then the harvester will traverse the entire file system tree in that directory and add all metadata files found. - - *Keep local if deleted at source* - If checked then metadata records that have already been harvested will be kept even if they have been deleted from the *Directory* specified. - - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing harvested metadata records in the search results. -- **Options** - Scheduling options. -- **Harvested Content** - Options that are applied to harvested content. - - *Apply this XSLT to harvested records* - Choose an XSLT here that will convert harvested records to a different format. - - *Validate* - If checked, the metadata will be validated after retrieval. If the validation does not pass, the metadata will be skipped. -- **Privileges** - Assign privileges to harvested metadata. 
-- **Categories** +To create a Local File System harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `Directory`: + +![](img/add-filesystem-harvester.png) + +Providing the following information: -!!! Notes +- **Identification** + - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester. + - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester. + - *User*: User who owns the harvested records. - - in order to be successfully harvested, metadata records retrieved from the file system must match a metadata schema in the local GeoNetwork instance +- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)). + +- **Configure connection to Directory** + - *Directory*: The path name of the directory containing the metadata (as XML files) to be harvested. The directory must be accessible by GeoNetwork. + - *Also search in subfolders*: If checked and the *Directory* path contains other directories, then the harvester will traverse the entire file system tree in that directory and add all metadata files found. + - *Script to run before harvesting* + - *Type of record* + +- **Configure response processing for filesystem** + - *Action on UUID collision*: When a harvester finds the same uuid on a record collected by another method (another harvester, importer, dashboard editor,...), should this record be skipped (default), overriden or generate a new UUID? 
+ - *Update catalog record only if file was updated* + - *Keep local even if deleted at source*: If checked then metadata records that have already been harvested will be kept even if they have been deleted from the *Directory* specified. + - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron). + - Accept all metadata without validation. + - Accept metadata that are XSD valid. + - Accept metadata that are XSD and schematron valid. + - *XSL transformation to apply*: (Optional) The referenced XSL transform will be applied to each metadata record before it is added to GeoNetwork. + - *Batch edits*: (Optional) Allows to update harvested records, using XPATH syntax. It can be used to add, replace or delete element. + - *Category*: (Optional) A GeoNetwork category to assign to each metadata record. + +- **Privileges** - Assign privileges to harvested metadata. diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork-2.md b/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork-2.md new file mode 100644 index 000000000000..de085a9bb9bb --- /dev/null +++ b/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork-2.md @@ -0,0 +1,9 @@ +# GeoNetwork 2.0 Harvester {#gn2_harvester} + +## Upgrading from GeoNetwork 2.0 Guidance + +GeoNetwork 2.1 introduced a new powerful harvesting engine which is not compatible with GeoNetwork version 2.0 based catalogues. + +* Harvesting metadata from a v2.0 server requires this harvesting type. +* Old 2.0 servers can still harvest from 2.1 servers +* Due to the fact that GeoNetwork 2.0 is no longer suitable for production use, this harvesting type is deprecated. 
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork.md b/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork.md index de085a9bb9bb..3c692b5e3ecd 100644 --- a/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork.md +++ b/docs/manual/docs/user-guide/harvesting/harvesting-geonetwork.md @@ -1,9 +1,43 @@ -# GeoNetwork 2.0 Harvester {#gn2_harvester} +# GeoNetwork 2.1-3.X Harvester -## Upgrading from GeoNetwork 2.0 Guidance +This harvester will connect to a remote GeoNetwork server that uses versions from 2.1-3.X and retrieve metadata records that match the query parameters. -GeoNetwork 2.1 introduced a new powerful harvesting engine which is not compatible with GeoNetwork version 2.0 based catalogues. +## Adding a GeoNetwork 2.1-3.X harvester -* Harvesting metadata from a v2.0 server requires this harvesting type. -* Old 2.0 servers can still harvest from 2.1 servers -* Due to the fact that GeoNetwork 2.0 is no longer suitable for production use, this harvesting type is deprecated. +To create a GeoNetwork 2.1-3.X harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `GeoNetwork (from 2.1 to 3.x)`: + +![](img/add-geonetwork-3-harvester.png) + +Providing the following information: + +- **Identification** + - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester. + - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester. + - *User*: User who owns the harvested records. + +- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)). 
+
+- **Configure connection to GeoNetwork (from 2.1 to 3.x)**
+    - *Catalog URL*:
+        - The remote URL of the GeoNetwork server from which metadata will be harvested. The URL should contain the catalog name, for example: http://www.fao.org/geonetwork.
+        - Additionally, the node name should be configured, usually the value `srv`.
+    - *Search filter*: (Optional) Define the filter to retrieve the remote metadata.
+    - *Catalog*: (Optional) Select the portal in the remote server to harvest.
+
+- **Configure response processing for GeoNetwork**
+    - *Action on UUID collision*: When a harvester finds the same UUID on a record collected by another method (another harvester, importer, dashboard editor, ...), should this record be skipped (default), overridden or generate a new UUID?
+    - *Remote authentication*: If checked, the credentials for basic HTTP authentication on the GeoNetwork server should be provided.
+    - *Use full MEF format*: If checked, uses the MEF format instead of XML to retrieve the remote metadata. Recommended for metadata with files.
+    - *Use change date for comparison*: If checked, uses the change date to detect changes on the remote server.
+    - *Set category if it exists locally*: If checked, uses the category set on the metadata in the remote server also locally (assuming it exists locally). Applies only when using the MEF format for the harvesting.
+    - *Category*: (Optional) A GeoNetwork category to assign to each metadata record.
+    - *XSL filter name to apply*: (Optional) The XSL filter is applied to each metadata record. The filter is a process which depends on the schema (see the `process` folder of the schemas).
+ + It could be composed of parameter which will be sent to XSL transformation using the following syntax: `anonymizer?protocol=MYLOCALNETWORK:FILEPATH&email=gis@organisation.org&thesaurus=MYORGONLYTHEASURUS` + + - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron). + - Accept all metadata without validation. + - Accept metadata that are XSD valid. + - Accept metadata that are XSD and schematron valid. + +- **Privileges** - Assign privileges to harvested metadata. diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-geoportal.md b/docs/manual/docs/user-guide/harvesting/harvesting-geoportal.md index e8887286ea32..ec16a07b9aef 100644 --- a/docs/manual/docs/user-guide/harvesting/harvesting-geoportal.md +++ b/docs/manual/docs/user-guide/harvesting/harvesting-geoportal.md @@ -4,24 +4,38 @@ This harvester will connect to a remote GeoPortal version 9.3.x or 10.x server a ## Adding a GeoPortal REST harvester -The figure above shows the options available: - -- **Site** - Options about the remote site. - - *Name* - This is a short description of the remote site. It will be shown in the harvesting main page as the name for this instance of the GeoPortal REST harvester. - - *Base URL* - The base URL of the GeoPortal server to be harvested. eg. . The harvester will add the additional path required to access the REST services on the GeoPortal server. - - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing harvested metadata records in the search results. -- **Search criteria** - Using the Add button, you can add several search criteria. You can query any field on the GeoPortal server using the Lucene query syntax described at . -- **Options** - Scheduling options. -- **Harvested Content** - Options that are applied to harvested content. 
- - *Apply this XSLT to harvested records* - Choose an XSLT here that will convert harvested records to a different format. See notes section below for typical usage. - - *Validate* - If checked, the metadata will be validated after retrieval. If the validation does not pass, the metadata will be skipped. +To create a GeoPortal REST harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `GeoPortal REST`: + +![](img/add-geoportalrest-harvester.png) + +Providing the following information: + +- **Identification** + - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester. + - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester. + - *User*: User who owns the harvested records. + +- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)). + +- **Configure connection to GeoPortal REST** + - *URL*: The base URL of the GeoPortal server to be harvested. eg. . The harvester will add the additional path required to access the REST services on the GeoPortal server. + - *Remote authentication*: If checked, should be provided the credentials for basic HTTP authentication on the server. + - *Search filter*: (Optional) You can query any field on the GeoPortal server using the Lucene query syntax described at . + +- **Configure response processing for geoPREST** + - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron). + - Accept all metadata without validation. + - Accept metadata that are XSD valid. + - Accept metadata that are XSD and schematron valid. 
+ - *XSL transformation to apply*: (Optional) The referenced XSL transform will be applied to each metadata record before it is added to GeoNetwork. + - **Privileges** - Assign privileges to harvested metadata. -- **Categories** + !!! Notes - - this harvester uses two REST services from the GeoPortal API: + - This harvester uses two REST services from the GeoPortal API: - `rest/find/document` with searchText parameter to return an RSS listing of metadata records that meet the search criteria (maximum 100000) - `rest/document` with id parameter from each result returned in the RSS listing - - this harvester has been tested with GeoPortal 9.3.x and 10.x. It can be used in preference to the CSW harvester if there are issues with the handling of the OGC standards etc. - - typically ISO19115 metadata produced by the Geoportal software will not have a 'gmd' prefix for the namespace `http://www.isotc211.org/2005/gmd`. GeoNetwork XSLTs will not have any trouble understanding this metadata but will not be able to map titles and codelists in the viewer/editor. To fix this problem, please select the ``Add-gmd-prefix`` XSLT for the *Apply this XSLT to harvested records* in the **Harvested Content** set of options described earlier + - This harvester has been tested with GeoPortal 9.3.x and 10.x. It can be used in preference to the CSW harvester if there are issues with the handling of the OGC standards etc. + - Typically ISO19115 metadata produced by the Geoportal software will not have a 'gmd' prefix for the namespace `http://www.isotc211.org/2005/gmd`. GeoNetwork XSLTs will not have any trouble understanding this metadata but will not be able to map titles and codelists in the viewer/editor. 
To fix this problem, please select the ``Add-gmd-prefix`` XSLT for the *Apply this XSLT to harvested records* in the **Harvested Content** set of options described earlier diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-oaipmh.md b/docs/manual/docs/user-guide/harvesting/harvesting-oaipmh.md index cf0463636343..6c528feb7e29 100644 --- a/docs/manual/docs/user-guide/harvesting/harvesting-oaipmh.md +++ b/docs/manual/docs/user-guide/harvesting/harvesting-oaipmh.md @@ -1,36 +1,49 @@ # OAIPMH Harvesting {#oaipmh_harvester} -This is a harvesting protocol that is widely used among libraries. GeoNetwork implements version 2.0 of the protocol. +This is a harvesting protocol that is widely used among libraries. GeoNetwork implements version 2.0 of the protocol. An OAI-PMH server implements a harvesting protocol that GeoNetwork, acting as a client, can use to harvest metadata. ## Adding an OAI-PMH harvester -An OAI-PMH server implements a harvesting protocol that GeoNetwork, acting as a client, can use to harvest metadata. +To create a OAI-PMH harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `OAI/PMH`: -Configuration options: +![](img/add-oaipmh-harvester.png) -- **Site** - Options describing the remote site. - - *Name* - This is a short description of the remote site. It will be shown in the harvesting main page as the name for this instance of the OAIPMH harvester. - - *URL* - The URL of the OAI-PMH server from which metadata will be harvested. - - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing search results. - - *Use account* - Account credentials for basic HTTP authentication on the OAIPMH server. -- **Search criteria** - This allows you to select metadata records for harvest based on certain criteria: - - *From* - You can provide a start date here. Any metadata whose last change date is equal to or greater than this date will be harvested. 
To add or edit a value for this field you need to use the icon alongside the text box. This field is optional so if you don't provide a start date the constraint is dropped. Use the icon to clear the field. - - *Until* - Functions in the same way as the *From* parameter but adds an end constraint to the last change date search. Any metadata whose last change data is less than or equal to this data will be harvested. - - *Set* - An OAI-PMH server classifies metadata into sets (like categories in GeoNetwork). You can request all metadata records that belong to a set (and any of its subsets) by specifying the name of that set here. - - *Prefix* - 'Prefix' means metadata format. The oai_dc prefix must be supported by all OAI-PMH compliant servers. - - You can use the Add button to add more than one Search Criteria set. Search Criteria sets can be removed by clicking on the small cross at the top left of the set. +Providing the following information: -!!! note +- **Identification** + - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester. + - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester. + - *User*: User who owns the harvested records. - the 'OAI provider sets' drop down next to the *Set* text box and the 'OAI provider prefixes' drop down next to the *Prefix* textbox are initially blank. After specifying the connection URL, you can press the **Retrieve Info** button, which will connect to the remote OAI-PMH server, retrieve all supported sets and prefixes and fill the drop downs with these values. Selecting a value from either of these drop downs will fill the appropriate text box with the selected value. +- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. 
If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)).
+- **Configure connection to OAI/PMH**
+    - *URL*: The URL of the OAI-PMH server from which metadata will be harvested.
+    - *Remote authentication*: If checked, the credentials for basic HTTP authentication on the OAI-PMH server should be provided.
+    - *Search filter*: (Optional) Define the search criteria below to restrict the records to harvest.
+        - *From*: You can provide a start date here. Any metadata whose last change date is equal to or greater than this date will be harvested. To add or edit a value for this field you need to use the icon alongside the text box. This field is optional so if you don't provide a start date the constraint is dropped. Use the icon to clear the field.
+        - *Until*: Functions in the same way as the *From* parameter but adds an end constraint to the last change date search. Any metadata whose last change date is less than or equal to this date will be harvested.
+        - *Set*: An OAI-PMH server classifies metadata into sets (like categories in GeoNetwork). You can request all metadata records that belong to a set (and any of its subsets) by specifying the name of that set here.
+        - *Prefix*: 'Prefix' means metadata format. The oai_dc prefix must be supported by all OAI-PMH compliant servers.
+
+    !!! note
+
+        The 'OAI provider sets' drop down next to the *Set* text box and the 'OAI provider prefixes' drop down next to the *Prefix* text box are initially blank. After specifying the connection URL, you can press the **Retrieve Info** button, which will connect to the remote OAI-PMH server, retrieve all supported sets and prefixes and fill the drop downs with these values. Selecting a value from either of these drop downs will fill the appropriate text box with the selected value.
+- **Configure response processing for oaipmh** + - *Action on UUID collision*: When a harvester finds the same uuid on a record collected by another method (another harvester, importer, dashboard editor,...), should this record be skipped (default), overriden or generate a new UUID? + - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron). + - Accept all metadata without validation. + - Accept metadata that are XSD valid. + - Accept metadata that are XSD and schematron valid. + - *XSL transformation to apply*: (Optional) The referenced XSL transform will be applied to each metadata record before it is added to GeoNetwork. + + - *Category*: (Optional) A GeoNetwork category to assign to each metadata record. + +- **Privileges** - Assign privileges to harvested metadata. -- **Options** - Scheduling Options. -- **Privileges** -- **Categories** !!! Notes - - if you request the oai_dc output format, GeoNetwork will convert it to Dublin Core format. - - when you edit a previously created OAIPMH harvester instance, both the *set* and *prefix* drop down lists will be empty. You have to press the retrieve info button again to connect to the remote server and retrieve set and prefix information. - - the id of the remote server must be a UUID. If not, metadata can be harvested but during hierarchical propagation id clashes could corrupt harvested metadata. + - If you request the oai_dc output format, GeoNetwork will convert it to Dublin Core format. + - When you edit a previously created OAIPMH harvester instance, both the *set* and *prefix* drop down lists will be empty. You have to press the retrieve info button again to connect to the remote server and retrieve set and prefix information. + - The id of the remote server must be a UUID. If not, metadata can be harvested but during hierarchical propagation id clashes could corrupt harvested metadata. 
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-ogcwxs.md b/docs/manual/docs/user-guide/harvesting/harvesting-ogcwxs.md index 52c88c134d48..70f45cf75d63 100644 --- a/docs/manual/docs/user-guide/harvesting/harvesting-ogcwxs.md +++ b/docs/manual/docs/user-guide/harvesting/harvesting-ogcwxs.md @@ -11,27 +11,46 @@ An OGC service implements a GetCapabilities operation that GeoNetwork, acting as ## Adding an OGC Service Harvester -Configuration options: - -- **Site** - - *Name* - The name of the catalogue and will be one of the search criteria. - - *Type* - The type of OGC service indicates if the harvester has to query for a specific kind of service. Supported type are WMS (1.0.0, 1.1.1, 1.3.0), WFS (1.0.0 and 1.1.0), WCS (1.0.0), WPS (0.4.0 and 1.0.0), CSW (2.0.2) and SOS (1.0.0). - - *Service URL* - The service URL is the URL of the service to contact (without parameters like "REQUEST=GetCapabilities", "VERSION=", \...). It has to be a valid URL like . - - *Metadata language* - Required field that will define the language of the metadata. It should be the language used by the OGC web service administrator. - - *ISO topic category* - Used to populate the topic category element in the metadata. It is recommended to choose one as the topic category is mandatory for the ISO19115/19139 standard if the hierarchical level is "datasets". - - *Type of import* - By default, the harvester produces one service metadata record. Check boxes in this group determine the other metadata that will be produced. - - *Create metadata for layer elements using GetCapabilities information*: Checking this option means that the harvester will loop over datasets served by the service as described in the GetCapabilities document. - - *Create metadata for layer elements using MetadataURL attributes*: Checkthis option means that the harvester will generate metadata from an XML document referenced in the MetadataUrl attribute of the dataset in the GetCapabilities document. 
If the document referred to by this attribute is not valid (eg. unknown schema, bad XML format), the GetCapabilities document is used as per the previous option. - - *Create thumbnails for WMS layers*: If harvesting from an OGC WMS, then checking this options means that thumbnails will be created during harvesting. - - *Target schema* - The metadata schema of the dataset metadata records that will be created by this harvester. - - *Icon* - The default icon displayed as attribution logo for metadata created by this harvester. -- **Options** - Scheduling Options. -- **Privileges** -- **Category for service** - Metadata for the harvested service is assigned to the category selected in this option (eg. "interactive resources"). -- **Category for datasets** - Metadata for the harvested datasets is assigned to the category selected in this option (eg. "datasets"). +To create a OGC Service harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `OGC Web Services`: + +![](img/add-ogcwebservices-harvester.png) + +Providing the following information: + +- **Identification** + - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester. + - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester. + - *User*: User who owns the harvested records. + +- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)). + +- **Configure connection to OGC Web Services** + - *Service URL*: The service URL is the URL of the service to contact (without parameters like "REQUEST=GetCapabilities", "VERSION=", \...). It has to be a valid URL like . 
+    - *Service type*: The type of OGC service indicates if the harvester has to query for a specific kind of service. Supported types are WMS (1.0.0, 1.1.1, 1.3.0), WFS (1.0.0 and 1.1.0), WCS (1.0.0), WPS (0.4.0 and 1.0.0), CSW (2.0.2) and SOS (1.0.0).
+    - *Remote authentication*: If checked, the credentials for basic HTTP authentication on the server should be provided.
+
+- **Configure response processing for ogcwxs**
+    - *Build service metadata record from a template*:
+        - *Category for service metadata*: (Optional) Metadata for the harvested service is assigned to the category selected in this option (eg. "interactive resources").
+        - *Create record for each layer only using GetCapabilities information*: Checking this option means that the harvester will loop over datasets served by the service as described in the GetCapabilities document.
+        - *Import record for each layer using MetadataURL attributes*: Checking this option means that the harvester will generate metadata from an XML document referenced in the MetadataUrl attribute of the dataset in the GetCapabilities document. If the document referred to by this attribute is not valid (eg. unknown schema, bad XML format), the GetCapabilities document is used as per the previous option.
+    - *Build dataset metadata records from a template*
+        - *Create thumbnail*: If checked, when harvesting from an OGC Web Map Service (WMS) that supports the WGS84 projection, thumbnails for the layers metadata will be created during harvesting.
+        - *Category for datasets*: Metadata for the harvested datasets is assigned to the category selected in this option (eg. "datasets").
+
+    - *ISO category*: (Optional) Used to populate the topic category element in the metadata. It is recommended to choose one as the topic category is mandatory for the ISO19115/19139 standard if the hierarchical level is "datasets".
+    - *Metadata language*: Required field that will define the language of the metadata.
It should be the language used by the OGC web service administrator. + - *Output schema*: The metadata schema of the dataset metadata records that will be created by this harvester. The value should be an XSLT process which is used by the harvester to convert the GetCapabilities document to metadata records from that schema. If in doubt, use the default value `iso19139`. + - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron). + - Accept all metadata without validation. + - Accept metadata that are XSD valid. + - Accept metadata that are XSD and schematron valid. + - *XSL transformation to apply*: (Optional) The referenced XSL transform will be applied to each metadata record before it is added to GeoNetwork. + + +- **Privileges** - Assign privileges to harvested metadata. + !!! Notes - - every time the harvester runs, it will remove previously harvested records and create new records. GeoNetwork will generate the uuid for all metadata (both service and datasets). The exception to this rule is dataset metadata created using the MetadataUrl tag is in the GetCapabilities document, in that case, the uuid of the remote XML document is used instead - - thumbnails can only be generated when harvesting an OGC Web Map Service (WMS). The WMS should support the WGS84 projection - - the chosen *Target schema* must have the support XSLTs which are used by the harvester to convert the GetCapabilities statement to metadata records from that schema. If in doubt, use iso19139. + - Every time the harvester runs, it will remove previously harvested records and create new records. GeoNetwork will generate the uuid for all metadata (both service and datasets). 
The exception to this rule is dataset metadata created using the MetadataUrl tag is in the GetCapabilities document, in that case, the uuid of the remote XML document is used instead diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-sde.md b/docs/manual/docs/user-guide/harvesting/harvesting-sde.md index 7f4f99cb913d..32cdd4df7805 100644 --- a/docs/manual/docs/user-guide/harvesting/harvesting-sde.md +++ b/docs/manual/docs/user-guide/harvesting/harvesting-sde.md @@ -1,55 +1,60 @@ # Harvesting an ARCSDE Node {#sde_harvester} -This is a harvesting protocol for metadata stored in an ArcSDE installation. +This is a harvesting protocol for metadata stored in an ArcSDE installation. The harvester identifies the ESRI metadata format: ESRI ISO, ESRI FGDC to apply the required xslts to transform metadata to ISO19139. ## Adding an ArcSDE harvester -The harvester identifies the ESRI metadata format: ESRI ISO, ESRI FGDC to apply the required xslts to transform metadata to ISO19139. Configuration options: +To create an ArcSDE harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `ArcSDE`: + +![](img/add-arcsde-harvester.png) + +Providing the following information: - **Identification** - - *Name* - This is a short description of the node. It will be shown in the harvesting main page. - - *Group* - User admin of this group and catalog administrator can manage this node. - - *Harvester user* - User that owns the harvested metadata. -- **Schedule** - Schedule configuration to execute the harvester. -- **Configuration for protocol ArcSDE** - - *Server* - ArcSde server IP address or name. - - *Port* - ArcSde service port (typically 5151) or ArcSde database port, depending on the connection type selected, see below the *Connection type* section. - - *Database name* - ArcSDE instance name (typically esri_sde). - - *ArcSde version* - ArcSde version to harvest. The data model used by ArcSde is different depending on the ArcSde version. 
+ - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester. + - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester. + - *User*: User who owns the harvested records. + +- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)). + +- **Configure connection to Database** + - *Server*: ArcSDE server IP address or name. + - *Port*: ArcSDE service port (typically 5151) or ArcSDE database port, depending on the connection type selected, see below the *Connection type* section. + - *Database name*: ArcSDE instance name (typically esri_sde). + - *ArcSDE version*: ArcSDE version to harvest. The data model used by ArcSDE is different depending on the ArcSDE version. - *Connection type* - - *ArcSde service* - Uses the ArcSde service to retrieve the metadata. + - *ArcSDE service*: Uses the ArcSDE service to retrieve the metadata. !!! note - Additional installation steps are required to use the ArcSDE harvester because it needs proprietary ESRI Java api jars to be installed. - - ArcSDE Java API libraries need to be installed by the user in GeoNetwork (folder INSTALL_DIR_GEONETWORK/WEB-INF/lib), as these are proprietary libraries not distributed with GeoNetwork. - - The following jars are required: - - - jpe_sdk.jar - - jsde_sdk.jar - - dummy-api-XXX.jar must be removed from INSTALL_DIR/web/geonetwork/WEB-INF/lib + Additional installation steps are required to use the ArcSDE harvester because it needs proprietary ESRI Java api jars to be installed.
+ ArcSDE Java API libraries need to be installed by the user in GeoNetwork (folder `INSTALL_DIR_GEONETWORK/WEB-INF/lib`), as these are proprietary libraries not distributed with GeoNetwork. - - *Database direct connection* - Uses a database connection (JDBC) to retrieve the metadata. With + The following jars are required: - !!! note + - jpe_sdk.jar + - jsde_sdk.jar - Database direct connection requires to copy JDBC drivers in INSTALL_DIR_GEONETWORK/WEB-INF/lib. + `dummy-api-XXX.jar` must be removed from `INSTALL_DIR/web/geonetwork/WEB-INF/lib`. + - *Database direct connection*: Uses a database connection (JDBC) to retrieve the metadata. + + !!! note + + Database direct connection requires to copy JDBC drivers in `INSTALL_DIR_GEONETWORK/WEB-INF/lib`. !!! note Postgres JDBC drivers are distributed with GeoNetwork, but not for Oracle or SqlServer. - - *Database type* - ArcSde database type: Oracle, Postgres, SqlServer. Only available if connection type is configured to *Database direct connection*. - - *Username* - Username to connect to ArcSDE server. - - *Password* - Password of the ArcSDE user. -- **Advanced options for protocol arcsde** - - *Validate records before import* - Defines the criteria to reject metadata that is invalid according to XSD and schematron rules. + - *Database type* - ArcSDE database type: Oracle, Postgres, SqlServer. Only available if connection type is configured to *Database direct connection*. + - *Remote authentication*: Credentials to connect to the ArcSDE server. + +- **Configure response processing for arcsde** + - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron). - Accept all metadata without validation. - Accept metadata that are XSD valid. - Accept metadata that are XSD and schematron valid. + - **Privileges** - Assign privileges to harvested metadata. 
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-simpleurl.md b/docs/manual/docs/user-guide/harvesting/harvesting-simpleurl.md index 775b4a9d1a93..e7243dc8421f 100644 --- a/docs/manual/docs/user-guide/harvesting/harvesting-simpleurl.md +++ b/docs/manual/docs/user-guide/harvesting/harvesting-simpleurl.md @@ -4,47 +4,72 @@ This harvester connects to a remote server via a simple URL to retrieve metadata ## Adding a simple URL harvester -- **Site** - Options about the remote site. +To create a Simple URL harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `Simple URL`: - - *Name* - This is a short description of the remote site. It will be shown in the harvesting main page as the name for this instance of the harvester. - - *Service URL* - The URL of the server to be harvested. This can include pagination params like `?start=0&rows=20` - - *loopElement* - Propery/element containing a list of the record entries. (Indicated as an absolute path from the document root.) eg. `/datasets` - - *numberOfRecordPath* : Property indicating the total count of record entries. (Indicated as an absolute path from the document root.) eg. `/nhits` - - *recordIdPath* : Property containing the record id. eg. `datasetid` - - *pageFromParam* : Property indicating the first record item on the current "page" eg. `start` - - *pageSizeParam* : Property indicating the number of records containned in the current "page" eg. `rows` - - *toISOConversion* : Name of the conversion schema to use, which must be available as XSL on the GN instance. eg. `OPENDATASOFT-to-ISO19115-3-2018` +![](img/add-simpleurl-harvester.png) - !!! note +Providing the following information: - GN looks for schemas by name in . These schemas might internally include schemas from other locations like . 
To indicate the `fromJsonOpenDataSoft` schema for example, from the latter location directly in the admin UI the following syntax can be used: `schema:iso19115-3.2018:convert/fromJsonOpenDataSoft`. +- **Identification** + - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester. + - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester. + - *User*: User who owns the harvested records. +- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)). - **Sample configuration for opendatasoft** +- **Configure connection to Simple URL** + - *URL* - The URL of the server to be harvested. This can include pagination params like `?start=0&rows=20` + - *Remote authentication*: If checked, the credentials for basic HTTP authentication on the server should be provided. + - *Element to loop on*: Property/element containing a list of the record entries. (Indicated as an absolute path from the document root.) eg. `/datasets` + - *Element for the UUID of each record* : Property containing the record id. eg. `datasetid` + - *Pagination parameters*: (optional). + - *Element for the number of records to collect*: Property indicating the total count of record entries. (Indicated as an absolute path from the document root.) eg. `/nhits` + - *From URL parameter*: Property indicating the first record item on the current "page" eg. `start` + - *Size URL parameter*: Property indicating the number of records contained in the current "page" eg.
`rows` + +- **Configure response processing for Simple URL** - - *loopElement* - `/datasets` - - *numberOfRecordPath* : `/nhits` - - *recordIdPath* : `datasetid` - - *pageFromParam* : `start` - - *pageSizeParam* : `rows` - - *toISOConversion* : `OPENDATASOFT-to-ISO19115-3-2018` + - *XSL transformation to apply*: Name of the conversion schema to use, which must be available as XSL on the GeoNetwork instance. eg. `OPENDATASOFT-to-ISO19115-3-2018` - **Sample configuration for ESRI** + !!! note - - *loopElement* - `/dataset` - - *numberOfRecordPath* : `/result/count` - - *recordIdPath* : `landingPage` - - *pageFromParam* : `start` - - *pageSizeParam* : `rows` - - *toISOConversion* : `ESRIDCAT-to-ISO19115-3-2018` + GN looks for schemas by name in . These schemas might internally include schemas from other locations like . To indicate the `fromJsonOpenDataSoft` schema for example, from the latter location directly in the admin UI the following syntax can be used: `schema:iso19115-3.2018:convert/fromJsonOpenDataSoft`. - **Sample configuration for DKAN** - - - *loopElement* - `/result/0` - - *numberOfRecordPath* : `/result/count` - - *recordIdPath* : `id` - - *pageFromParam* : `start` - - *pageSizeParam* : `rows` - - *toISOConversion* : `DKAN-to-ISO19115-3-2018` + - *Batch edits*: (Optional) Allows to update harvested records, using XPATH syntax. It can be used to add, replace or delete element. + - *Category*: (Optional) A GeoNetwork category to assign to each metadata record. + - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron). + - Accept all metadata without validation. + - Accept metadata that are XSD valid. + - Accept metadata that are XSD and schematron valid. - **Privileges** - Assign privileges to harvested metadata. 
+ + +## Sample configurations + +### Sample configuration for opendatasoft + +- *Element to loop on* - `/datasets` +- *Element for the number of records to collect* : `/nhits` +- *Element for the UUID of each record* : `datasetid` +- *From URL parameter* : `start` +- *Size URL parameter* : `rows` +- *XSL transformation to apply* : `OPENDATASOFT-to-ISO19115-3-2018` + +### Sample configuration for ESRI + +- *Element to loop on* - `/dataset` +- *Element for the number of records to collect* : `/result/count` +- *Element for the UUID of each record* : `landingPage` +- *From URL parameter* : `start` +- *Size URL parameter* : `rows` +- *XSL transformation to apply* : `ESRIDCAT-to-ISO19115-3-2018` + +### Sample configuration for DKAN + +- *Element to loop on* - `/result/0` +- *Element for the number of records to collect* : `/result/count` +- *Element for the UUID of each record* : `id` +- *From URL parameter* : `start` +- *Size URL parameter* : `rows` +- *XSL transformation to apply* : `DKAN-to-ISO19115-3-2018` diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-thredds.md b/docs/manual/docs/user-guide/harvesting/harvesting-thredds.md index 2c988d58e340..bb4716c75089 100644 --- a/docs/manual/docs/user-guide/harvesting/harvesting-thredds.md +++ b/docs/manual/docs/user-guide/harvesting/harvesting-thredds.md @@ -4,35 +4,33 @@ THREDDS catalogs describe inventories of datasets. They are organised in a hiera ## Adding a THREDDS Catalog Harvester -The available options are: - -- **Site** - - *Name* - This is a short description of the THREDDS catalog. It will be shown in the harvesting main page as the name of this THREDDS harvester instance. - - *Catalog URL* - The remote URL of the THREDDS Catalog from which metadata will be harvested. This must be the xml version of the catalog (i.e. ending with .xml). The harvester will crawl through all datasets and services defined in this catalog creating metadata for them as specified by the options described further below. 
- - *Metadata language* - Use this option to specify the language of the metadata to be harvested. - - *ISO topic category* - Use this option to specify the ISO topic category of service metadata. - - *Create ISO19119 metadata for all services in catalog* - Select this option to generate iso19119 metadata for services defined in the THREDDS catalog (eg. OpenDAP, OGC WCS, ftp) and for the THREDDS catalog itself. - - *Create metadata for Collection datasets* - Select this option to generate metadata for each collection dataset (THREDDS dataset containing other datasets). Creation of metadata can be customised using options that are displayed when this option is selected as described further below. - - *Create metadata for Atomic datasets* - Select this option to generate metadata for each atomic dataset (THREDDS dataset not containing other datasets -- for example cataloguing a netCDF dataset). Creation of metadata can be customised using options that are displayed when this option is selected as described further below. - - *Ignore harvesting attribute* - Select this option to harvest metadata for selected datasets regardless of the harvest attribute for the dataset in the THREDDS catalog. If this option is not selected, metadata will only be created for datasets that have a harvest attribute set to true. - - *Extract DIF metadata elements and create ISO metadata* - Select this option to generate ISO metadata for datasets in the THREDDS catalog that have DIF metadata elements. When this option is selected a list of schemas is shown that have a DIFToISO.xsl stylesheet available (see for example `GEONETWORK_DATA_DIR/config/schema_plugins/iso19139/convert/DIFToISO.xsl`). Metadata is generated by reading the DIF metadata items in the THREDDS into a DIF format metadata record and then converting that DIF record to ISO using the DIFToISO stylesheet. 
- - *Extract Unidata dataset discovery metadata using fragments* - Select this option when the metadata in your THREDDS or netCDF/ncml datasets follows Unidata dataset discovery conventions (see ). You will need to write your own stylesheets to extract this metadata as fragments and define a template to combine with the fragments. When this option is selected the following additional options will be shown: - - *Select schema for output metadata records* - choose the ISO metadata schema or profile for the harvested metadata records. Note: only the schemas that have THREDDS fragment stylesheets will be displayed in the list (see the next option for the location of these stylesheets). - - *Stylesheet to create metadata fragments* - Select a stylesheet to use to convert metadata for the dataset (THREDDS metadata and netCDF ncml where applicable) into metadata fragments. These stylesheets can be found in the directory convert/ThreddsToFragments in the schema directory eg. for iso19139 this would be `GEONETWORK_DATA_DIR/config/schema_plugins/iso19139/convert/ThreddsToFragments`. - - *Create subtemplates for fragments and XLink them into template* - Select this option to create a subtemplate (=metadata fragment stored in GeoNetwork catalog) for each metadata fragment generated. - - *Template to combine with fragments* - Select a template that will be filled in with the metadata fragments generated for each dataset. The generated metadata fragments are used to replace referenced elements in the templates with an xlink to a subtemplate if the *Create subtemplates* option is checked. If *Create subtemplates* is not checked, then the fragments are simply copied into the template metadata record. - - For Atomic Datasets , one additional option is provided *Harvest new or modified datasets only*. If this option is checked only datasets that have been modified or didn't exist when the harvester was last run will be harvested. 
- - *Create Thumbnails* - Select this option to create thumbnails for WMS layers in referenced WMS services - - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing search results. -- **Options** - Scheduling Options. -- **Privileges** -- **Category for Service** - Select the category to assign to the ISO19119 service records for the THREDDS services. -- **Category for Datasets** - Select the category to assign the generated metadata records (and any subtemplates) to. - -At the bottom of the page there are the following buttons: - -- **Back** - Go back to the main harvesting page. The harvesting definition is not added. -- **Save** - Saves this harvester definition creating a new harvesting instance. After the save operation has completed, the main harvesting page will be displayed. +To create a THREDDS Catalog harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `Thredds Catalog`: + +![](img/add-threddscatalog-harvester.png) + +Providing the following information: + +- **Identification** + - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester. + - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester. + - *User*: User who owns the harvested records. + +- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)). + +- **Configure connection to Thredds catalog** + - *Service URL*: The remote URL of the THREDDS Catalog from which metadata will be harvested. This must be the xml version of the catalog (i.e. ending with .xml). 
The harvester will crawl through all datasets and services defined in this catalog creating metadata for them as specified by the options described further below. + +- **Configure response processing for thredds** + - *Language*: Use this option to specify the language of the metadata to be harvested. + - *ISO19115 Topic category for output metadata records*: Use this option to specify the ISO topic category of service metadata. + - *Create ISO19119 metadata for all services in the thredds catalog*: Select this option to generate iso19119 metadata for services defined in the THREDDS catalog (eg. OpenDAP, OGC WCS, ftp) and for the THREDDS catalog itself. + - *Select schema for output metadata records*: The metadata standard to create the metadata. It should be a valid metadata schema installed in GeoNetwork, by default `iso19139`. + - *Dataset title*: (Optional) Title for the dataset. Default is catalog url. + - *Dataset abstract*: (Optional) Abstract for the dataset. Default is 'Thredds Dataset'. + - *Geonetwork category to assign to service metadata records* - Select the category to assign to the ISO19119 service records for the THREDDS services. + - *Geonetwork category to assign to dataset metadata records* - Select the category to assign the generated metadata records (and any subtemplates) to. + +- **Privileges** - Assign privileges to harvested metadata. ## More about harvesting THREDDS DIF metadata elements with the THREDDS Harvester diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-webdav.md index 4313483f627b..cdd6b12434ac 100644 --- a/docs/manual/docs/user-guide/harvesting/harvesting-webdav.md +++ b/docs/manual/docs/user-guide/harvesting/harvesting-webdav.md @@ -4,19 +4,35 @@ This harvesting type uses the WebDAV (Distributed Authoring and Versioning) prot ## Adding a WebDAV harvester -- **Site** - Options about the remote site.
- - *Subtype* - Select WebDAV or WAF according to the type of server being harvested. - - *Name* - This is a short description of the remote site. It will be shown in the harvesting main page as the name for this instance of the WebDAV harvester. - - *URL* - The remote URL from which metadata will be harvested. Each file found that ends with .xml is assumed to be a metadata record. - - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing search results. - - *Use account* - Account credentials for basic HTTP authentication on the WebDAV/WAF server. -- **Options** - Scheduling options. -- **Options** - Specific harvesting options for this harvester. - - *Validate* - If checked, the metadata will be validated after retrieval. If the validation does not pass, the metadata will be skipped. - - *Recurse* - When the harvesting engine will find folders, it will recursively descend into them. -- **Privileges** - Assign privileges to harvested metadata. -- **Categories** +To create a WebDAV harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `WebDAV / WAF`: + +![](img/add-webdav-harvester.png) + +Providing the following information: -!!! Notes +- **Identification** + - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester. + - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester. + - *User*: User who owns the harvested records. - - The same metadata could be harvested several times by different instances of the WebDAV harvester. This is not good practise because copies of the same metadata record will have a different UUID. +- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. 
If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)). + +- **Configure connection to WebDAV / WAF** + - *URL*: The remote URL from which metadata will be harvested. Each file found that has the extension `.xml` is assumed to be a metadata record. + - *Type of protocol*: Select WebDAV or WAF according to the type of server being harvested. + - *Remote authentication*: If checked, the credentials for basic HTTP authentication on the WebDAV/WAF server should be provided. + - *Also search in subfolders*: When the harvesting engine finds folders, it will recursively descend into them. + +- **Configure response processing for webdav** + - *Action on UUID collision*: When a harvester finds the same uuid on a record collected by another method (another harvester, importer, dashboard editor,...), should this record be skipped (default), overridden or generate a new UUID? + - *XSL filter name to apply*: (Optional) The XSL filter is applied to each metadata record. The filter is a process which depends on the schema (see the `process` folder of the schemas). + + It could be composed of parameters which will be sent to XSL transformation using the following syntax: `anonymizer?protocol=MYLOCALNETWORK:FILEPATH&email=gis@organisation.org&thesaurus=MYORGONLYTHEASURUS` + + - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron). + - Accept all metadata without validation. + - Accept metadata that are XSD valid. + - Accept metadata that are XSD and schematron valid. + - *Category*: (Optional) A GeoNetwork category to assign to each metadata record. + +- **Privileges** - Assign privileges to harvested metadata.
diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-wfs-features.md b/docs/manual/docs/user-guide/harvesting/harvesting-wfs-features.md index 16abfa13bb74..c198e5f59669 100644 --- a/docs/manual/docs/user-guide/harvesting/harvesting-wfs-features.md +++ b/docs/manual/docs/user-guide/harvesting/harvesting-wfs-features.md @@ -2,26 +2,43 @@ Metadata can be present in the tables of a relational databases, which are commonly used by many organisations. Putting an OGC Web Feature Service (WFS) over a relational database will allow metadata to be extracted via standard query mechanisms. This harvesting type allows the user to specify a GetFeature query and map information from the features to fragments of metadata that can be linked or copied into a template to create metadata records. +An OGC web feature service (WFS) implements a GetFeature query operation that returns data in the form of features (usually rows from related tables in a relational database). GeoNetwork, acting as a client, can read the GetFeature response and apply a user-supplied XSLT stylesheet to produce metadata fragments that can be linked or copied into a user-supplied template to build metadata records. + ## Adding an OGC WFS GetFeature Harvester -An OGC web feature service (WFS) implements a GetFeature query operation that returns data in the form of features (usually rows from related tables in a relational database). GeoNetwork, acting as a client, can read the GetFeature response and apply a user-supplied XSLT stylesheet to produce metadata fragments that can be linked or copied into a user-supplied template to build metadata records. +To create a OGC WFS GetFeature harvester go to `Admin console` > `Harvesting` and select `Harvest from` > `OGC WFS GetFeature`: + +![](img/add-wfsgetfeature-harvester.png) -The available options are: +Providing the following information: -- **Site** - - *Name* - This is a short description of the harvester. 
It will be shown in the harvesting main page as the name for this WFS GetFeature harvester. - - *Service URL* - The bare URL of the WFS service (no OGC params required) - - *Metadata language* - The language that will be used in the metadata records created by the harvester +- **Identification** + - *Node name and logo*: A unique name for the harvester and, optionally, a logo to assign to the harvester. + - *Group*: Group which owns the harvested records. Only the catalog administrator or users with the profile `UserAdmin` of this group can manage the harvester. + - *User*: User who owns the harvested records. + +- **Schedule**: Scheduling options to execute the harvester. If disabled, the harvester must be run manually from the harvester page. If enabled, a scheduling expression using cron syntax should be configured ([See examples](https://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/crontrigger)). + +- **Configure connection to OGC WFS GetFeature** + - *Service URL*: The bare URL of the WFS service (no OGC params required). + - *Remote authentication*: If checked, the credentials for basic HTTP authentication on the WFS server should be provided. - *OGC WFS GetFeature Query* - The OGC WFS GetFeature query used to extract features from the WFS. - - *Schema for output metadata records* - choose the metadata schema or profile for the harvested metadata records. Note: only the schemas that have WFS fragment stylesheets will be displayed in the list (see the next option for the location of these stylesheets). - - *Stylesheet to create fragments* - User-supplied stylesheet that transforms the GetFeature response to a metadata fragments document (see below for the format of that document). Stylesheets exist in the WFSToFragments directory which is in the convert directory of the selected output schema. eg. for the iso19139 schema, this directory is `GEONETWORK_DATA_DIR/config/schema_plugins/iso19139/convert/WFSToFragments`.
- - *Save large response to disk* - Check this box if you expect the WFS GetFeature response to be large (eg. greater than 10MB). If checked, the GetFeature response will be saved to disk in a temporary file. Each feature will then be extracted from the temporary file and used to create the fragments and metadata records. If not checked, the response will be held in RAM. - - *Create subtemplates* - Check this box if you want the harvested metadata fragments to be saved as subtemplates in the metadata catalog and xlink'd into the metadata template (see next option). If not checked, the fragments will be copied into the metadata template. - - *Template to use to build metadata using fragments* - Choose the metadata template that will be combined with the harvested metadata fragments to create metadata records. This is a standard GeoNetwork metadata template record. - - *Category for records built with linked fragments* - Choose the metadata template that will be combined with the harvested metadata fragments to create metadata records. This is a standard GeoNetwork metadata template record. -- **Options** -- **Privileges** -- **Category for subtemplates** - When fragments are saved to GeoNetwork as subtemplates they will be assigned to the category selected here. + +- **Configure response processing for wfsfeatures** + - *Language*: The language that will be used in the metadata records created by the harvester. + - *Metadata standard*: The metadata standard to create the metadata. It should be a valid metadata schema installed in GeoNetwork, by default `iso19139`. + - *Save large response to disk*: Check this box if you expect the WFS GetFeature response to be large (eg. greater than 10MB). If checked, the GetFeature response will be saved to disk in a temporary file. Each feature will then be extracted from the temporary file and used to create the fragments and metadata records. If not checked, the response will be held in RAM. 
+ - *Stylesheet to create fragments*: User-supplied stylesheet that transforms the GetFeature response to a metadata fragments document (see below for the format of that document). Stylesheets exist in the WFSToFragments directory which is in the convert directory of the selected output schema. eg. for the iso19139 schema, this directory is `GEONETWORK_DATA_DIR/config/schema_plugins/iso19139/convert/WFSToFragments`. + - *Create subtemplates*: Check this box if you want the harvested metadata fragments to be saved as subtemplates in the metadata catalog and xlink'd into the metadata template (see next option). If not checked, the fragments will be copied into the metadata template. + - *Select template to combine with fragments*: Choose the metadata template that will be combined with the harvested metadata fragments to create metadata records. This is a standard GeoNetwork metadata template record. + - *Category for directory entries*: (Optional) When fragments are saved to GeoNetwork as subtemplates they will be assigned to the category selected here. + - *Validate records before import*: Defines the criteria to reject metadata that is invalid according to XML structure (XSD) and validation rules (schematron). + - Accept all metadata without validation. + - Accept metadata that are XSD valid. + - Accept metadata that are XSD and schematron valid. + +- **Privileges** - Assign privileges to harvested metadata. + ## More about turning the GetFeature Response into metadata fragments diff --git a/docs/manual/docs/user-guide/harvesting/harvesting-z3950.md b/docs/manual/docs/user-guide/harvesting/harvesting-z3950.md deleted file mode 100644 index 47722c374644..000000000000 --- a/docs/manual/docs/user-guide/harvesting/harvesting-z3950.md +++ /dev/null @@ -1,90 +0,0 @@ -# Z3950 Harvesting {#z3950_harvester} - -Z3950 is a remote search and harvesting protocol that is commonly used to permit search and harvest of metadata. 
Although the protocol is often used for library catalogs, significant geospatial metadata catalogs can also be searched using Z3950 (eg. the metadata collections of the Australian Government agencies that participate in the Australian Spatial Data Directory - ASDD). This harvester allows the user to specify a Z3950 query and retrieve metadata records from one or more Z3950 servers. - -## Adding a Z3950 Harvester - -The available options are: - -- **Site** - - *Name* - A short description of this Z3950 harvester. It will be shown in the harvesting main page using this name. - - *Z3950 Server(s)* - These are the Z3950 servers that will be searched. You can select one or more of these servers. - - *Z3950 Query* - Specify the Z3950 query to use when searching the selected Z3950 servers. At present this field is known to support the Prefix Query Format (also known as Prefix Query Notation) which is described at this URL: . See below for more information and some simple examples. - - *Icon* - An icon to assign to harvested metadata. The icon will be used when showing search results. -- **Options** - Scheduling options. -- **Harvested Content** - - *Apply this XSLT to harvested records* - Choose an XSLT here that will convert harvested records to a different format. - - *Validate* - If checked, records that do not/cannot be validated will be rejected. -- **Privileges** -- **Categories** - -!!! note - - this harvester automatically creates a new Category named after each of the Z3950 servers that return records. Records that are returned by a server are assigned to the category named after that server. - - -## More about PQF Z3950 Queries - -PQF is a rather arcane query language. It is based around the idea of attributes and attribute sets. The most common attribute set used for geospatial metadata in Z3950 servers is the GEO attribute set (which is an extension of the BIB-1 and GILS attribute sets - see ). 
So all PQF queries to geospatial metadata Z3950 servers should start off with @attrset geo. - -The most useful attribute types in the GEO attribute set are as follows: - -| @attr number | Meaning | Description | -|---------------|------------|--------------------------------------------------| -| 1 | Use | What field to search | -| 2 | Relation | How to compare the term specified | -| 4 | Structure | What type is the term? eg. date, numeric, phrase | -| 5 | Truncation | How to truncate eg. right | - -In GeoNetwork the numeric values that can be specified for `@attr 1` map to the lucene index field names as follows: - -| @attr 1= | Lucene index field | ISO19139 element | -|----------------------|-------------------------------|-------------------------------------------------------------------------------------------------------------| -| 1016 | any | All text from all metadata elements | -| 4 | title, altTitle | gmd:identificationInfo//gmd:citation//gmd:title/gco:CharacterString | -| 62 | abstract | gmd:identificationInfo//gmd:abstract/gco:CharacterString | -| 1012 | _changeDate | Not a metadata element (maintained by GeoNetwork) | -| 30 | createDate | gmd:MD_Metadata/gmd:dateStamp/gco:Date | -| 31 | publicationDate | gmd:identificationInfo//gmd:citation//gmd:date/gmd:='publication' | -| 2072 | tempExtentBegin | gmd:identificationInfo//gmd:extent//gmd:temporalElement//gml:begin(Position) | -| 2073 | tempExtentEnd | gmd:identificationInfo//gmd:extent//gmd:temporalElement//gml:end(Position) | -| 2012 | fileId | gmd:MD_Metadata/gmd:fileIdentifier/* | -| 12 | identifier | gmd:identificationInfo//gmd:citation//gmd:identifier//gmd:code/* | -| 21,29,2002,3121,3122 | keyword | gmd:identificationInfo//gmd:keyword/* | -| 2060 | northBL,eastBL,southBL,westBL | gmd:identificationInfo//gmd:extent//gmd:EX_GeographicBoundingBox/gmd:westBoundLongitude*/gco:Decimal (etc) | - -Note that this is not a complete set of the mappings between Z3950 GEO attribute set and the GeoNetwork 
lucene index field names for ISO19139. Check out INSTALL_DIR/web/geonetwork/xml/search/z3950Server.xsl and INSTALL_DIR/web/geonetwork/xml/schemas/iso19139/index-fields.xsl for more details and annexe A of the GEO attribute set for Z3950 at for more details. - -Common values for the relation attribute (`@attr=2`): - -| @attr 2= | Description | -|-----------|--------------------------| -| 1 | Less than | -| 2 | Less than or equal to | -| 3 | Equals | -| 4 | Greater than or equal to | -| 5 | Greater than | -| 6 | Not equal to | -| 7 | Overlaps | -| 8 | Fully enclosed within | -| 9 | Encloses | -| 10 | Fully outside of | - -So a simple query to get all metadata records that have the word 'the' in any field would be: - -`@attrset geo @attr 1=1016 the` - -- `@attr 1=1016` means that we are doing a search on any field in the metadata record - -A more sophisticated search on a bounding box might be formulated as: - -`@attrset geo @attr 1=2060 @attr 4=201 @attr 2=7 "-36.8262 142.6465 -44.3848 151.2598` - -- `@attr 1=2060` means that we are doing a bounding box search -- `@attr 4=201` means that the query contains coordinate strings -- `@attr 2=7` means that we are searching for records whose bounding box overlaps the query box specified at the end of the query - -!!! Notes - - - Z3950 servers must be configured for GeoNetwork in `INSTALL_DIR/web/geonetwork/WEB-INF/classes/JZKitConfig.xml.tem` - - every time the harvester runs, it will remove previously harvested records and create new ones. 
diff --git a/docs/manual/docs/user-guide/harvesting/img/add-arcsde-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-arcsde-harvester.png new file mode 100644 index 000000000000..258c163bfdac Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-arcsde-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-csw-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-csw-harvester.png new file mode 100644 index 000000000000..e6e484359b92 Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-csw-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-filesystem-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-filesystem-harvester.png new file mode 100644 index 000000000000..0e0f0d66bfdc Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-filesystem-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-geonetwork-3-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-geonetwork-3-harvester.png new file mode 100644 index 000000000000..002459bae7d3 Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-geonetwork-3-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-geoportalrest-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-geoportalrest-harvester.png new file mode 100644 index 000000000000..31d60f997e76 Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-geoportalrest-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-harvester.png new file mode 100644 index 000000000000..5d50e1dce3ec Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-oaipmh-harvester.png 
b/docs/manual/docs/user-guide/harvesting/img/add-oaipmh-harvester.png new file mode 100644 index 000000000000..a6ad14e6a544 Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-oaipmh-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-ogcwebservices-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-ogcwebservices-harvester.png new file mode 100644 index 000000000000..2734781c718e Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-ogcwebservices-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-simpleurl-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-simpleurl-harvester.png new file mode 100644 index 000000000000..6f7af0255a95 Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-simpleurl-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-threddscatalog-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-threddscatalog-harvester.png new file mode 100644 index 000000000000..a326a4b7c790 Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-threddscatalog-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-webdav-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-webdav-harvester.png new file mode 100644 index 000000000000..4b36e089b8d3 Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-webdav-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/add-wfsgetfeature-harvester.png b/docs/manual/docs/user-guide/harvesting/img/add-wfsgetfeature-harvester.png new file mode 100644 index 000000000000..bd3646bc0cf3 Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/add-wfsgetfeature-harvester.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/harvester-history.png b/docs/manual/docs/user-guide/harvesting/img/harvester-history.png new 
file mode 100644 index 000000000000..f9064c1a8f3c Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/harvester-history.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/harvester-statistics.png b/docs/manual/docs/user-guide/harvesting/img/harvester-statistics.png new file mode 100644 index 000000000000..b311bb2ec8ee Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/harvester-statistics.png differ diff --git a/docs/manual/docs/user-guide/harvesting/img/harvesters.png b/docs/manual/docs/user-guide/harvesting/img/harvesters.png new file mode 100644 index 000000000000..bd008fdef7ca Binary files /dev/null and b/docs/manual/docs/user-guide/harvesting/img/harvesters.png differ diff --git a/docs/manual/docs/user-guide/harvesting/index.md b/docs/manual/docs/user-guide/harvesting/index.md index 46f52f782c55..abea85ff38c6 100644 --- a/docs/manual/docs/user-guide/harvesting/index.md +++ b/docs/manual/docs/user-guide/harvesting/index.md @@ -6,7 +6,8 @@ Harvesting is the process of ingesting metadata from remote sources and storing The following sources can be harvested: -- [GeoNetwork 2.0 Harvester](harvesting-geonetwork.md) +- [GeoNetwork 2.1-3.X Harvester](harvesting-geonetwork.md) +- [GeoNetwork 2.0 Harvester](harvesting-geonetwork-2.md) - [Harvesting CSW services](harvesting-csw.md) - [Harvesting OGC Services](harvesting-ogcwxs.md) - [Simple URL harvesting (opendata)](harvesting-simpleurl.md) @@ -17,7 +18,6 @@ The following sources can be harvested: - [GeoPortal REST Harvesting](harvesting-geoportal.md) - [THREDDS Harvesting](harvesting-thredds.md) - [WFS GetFeature Harvesting](harvesting-wfs-features.md) -- [Z3950 Harvesting](harvesting-z3950.md) ## Mechanism overview @@ -134,79 +134,45 @@ The script will add the certificate to the JVM keystore, if you run it as follow $ ./ssl_key_import.sh https_server_name 443 -## The main page +## Harvesting page -To access the harvesting main page you have to be logged in as 
an administrator. From the administration page, select the harvest shortcut. The harvesting main page will then be displayed. +To access the harvesting main page you have to be logged in with a profile `Administrator` or `UserAdmin`. From the `Admin console` menu, select the option `Harvesting`. -The page shows a list of the currently defined harvesters and a set of buttons for management functions. The meaning of each column in the list of harvesters is as follows: +The page shows a list of the currently defined harvesters with information about the status of the harvesters: -1. *Select* Check box to select one or more harvesters. The selected harvesters will be affected by the first row of buttons (activate, deactivate, run, remove). For example, if you select three harvesters and press the Remove button, they will all be removed. -2. *Name* This is the harvester name provided by the administrator. -3. *Type* The harvester type (eg. GeoNetwork, WebDAV etc\...). -4. *Status* An icon showing current status. See [Harvesting Status and Error Icons](index.md#admin_harvesting_status) for the different icons and status descriptions. -5. *Errors* An icon showing the result of the last harvesting run, which could have succeeded or not. See [Harvesting Status and Error Icons](index.md#admin_harvesting_status) for the different icons and error descriptions. Hovering the cursor over the icon will show detailed information about the last harvesting run. -6. *Run at* and *Every*: Scheduling of harvester runs. Essentially the time of the day + how many hours between repeats and on which days the harvester will run. -7. *Last run* The date, in ISO 8601 format, of the most recent harvesting run. -8. *Operation* A list of buttons/links to operations on a harvester. - - Selecting *Edit* will allow you to change the parameters for a harvester. - - Selecting *Clone* will allow you to create a clone of this harvester and start editing the details of the clone. 
- - Selecting *History* will allow you to view/change the harvesting history for a harvester - see [Harvest History](index.md#harvest_history). +![](img/harvesters.png) -At the bottom of the list of harvesters are two rows of buttons. The first row contains buttons that can operate on a selected set of harvesters. You can select the harvesters you want to operate on using the check box in the Select column and then press one of these buttons. When the button finishes its action, the check boxes are cleared. Here is the meaning of each button: +The following information is shown for each harvester: -1. *Activate* When a new harvester is created, the status is *inactive*. Use this button to make it *active* and start the harvester(s) according to the schedule it has/they have been configured to use. -2. *Deactivate* Stops the harvester(s). Note: this does not mean that currently running harvest(s) will be stopped. Instead, it means that the harvester(s) will not be scheduled to run again. -3. *Run* Start the selected harvesters immediately. This is useful for testing harvester setups. -4. *Remove* Remove all currently selected harvesters. A dialogue will ask the user to confirm the action. +- **Last run**: Date on which the harvester was last run. +- **Total**: It is the total number of metadata found remotely. Metadata with the same id are considered as one. +- **Updated**: Number of metadata that are present locally but needed to be updated because their last modification date was different from the remote one. +- **Unchanged**: Number of local metadata that have not been modified. Its remote last modification date has not changed. -The second row contains general purpose buttons. Here is the meaning of each button: +At the bottom of the harvester list there are the following buttons: -1. *Back* Simply returns to the main administration page. -2. *Add* This button creates a new harvester. -3. *Refresh* Refreshes the current list of harvesters from the server. 
This can be useful to see if the harvesting list has been altered by someone else or to get the status of any running harvesters. -4. *History* Show the harvesting history of all harvesters. See [Harvest History](index.md#harvest_history) for more details. +1. *Harvest from*: Allows you to select the type of harvester to create. +2. *Clone*: Creates a new harvester, using the information of an existing harvester. +3. *Refresh*: Refreshes the list of harvesters. -## Harvesting Status and Error Icons {#admin_harvesting_status} +### Adding new harvesters -## Harvesting result tips +To add a new harvester, click on the `Harvest from` button. A drop-down list with all available harvesting protocols will appear. -When a harvester runs and completes, a tool tip showing detailed information about the harvesting process is shown in the **Errors** column for the harvester. If the harvester succeeded then hovering the cursor over the tool tip will show a table, with some rows labelled as follows: +![](img/add-harvester.png) -- **Total** - This is the total number of metadata found remotely. Metadata with the same id are considered as one. -- **Added** - Number of metadata added to the system because they were not present locally. -- **Removed** - Number of metadata that have been removed locally because they are not present in the remote server anymore. -- **Updated** - Number of metadata that are present locally but that needed to be updated because their last change date was different from the remote one. -- **Unchanged** - Local metadata left unchanged. Their remote last change date did not change. -- **Unknown schema** - Number of skipped metadata because their format was not recognised by GeoNetwork. -- **Unretrievable** - Number of metadata that were ready to be retrieved from the remote server but for some reason there was an exception during the data transfer process. -- **Bad Format** - Number of skipped metadata because they did not have a valid XML representation. 
-- **Does not validate** - Number of metadata which did not validate against their schema. These metadata were harvested with success but skipped due to the validation process. Usually, there is an option to force validation: if you want to harvest these metadata anyway, simply turn/leave it off. -- **Thumbnails/Thumbnails failed** - Number of metadata thumbnail images added/that could not be added due to some failure. -- **Metadata URL attribute used** - Number of layers/featuretypes/coverages that had a metadata URL that could be used to link to a metadata record (OGC Service Harvester only). -- **Services added** - Number of ISO19119 service records created and added to the catalogue (for THREDDS catalog harvesting only). -- **Collections added** - Number of collection dataset records added to the catalogue (for THREDDS catalog harvesting only). -- **Atomics added** - Number of atomic dataset records added to the catalogue (for THREDDS catalog harvesting only). -- **Subtemplates added** - Number of subtemplates (= fragment visible in the catalog) added to the metadata catalog. -- **Subtemplates removed** - Number of subtemplates (= fragment visible in the catalog) removed from the metadata catalog. -- **Fragments w/Unknown schema** - Number of fragments which have an unknown metadata schema. -- **Fragments returned** - Number of fragments returned by the harvester. -- **Fragments matched** - Number of fragments that had identifiers that in the template used by the harvester. -- **Existing datasets** - Number of metadata records for datasets that existed when the THREDDS harvester was run. -- **Records built** - Number of records built by the harvester from the template and fragments. -- **Could not insert** - Number of records that the harvester could not insert into the catalog (usually because the record was already present eg. in the Z3950 harvester this can occur if the same record is harvested from different servers). 
+You can choose the type of harvesting you want to do. Supported harvesters and details on what to do next can be found in the following sections. -## Adding new harvesters +### Harvester History {#harvest_history} -The Add button in the main page allows you to add new harvesters. A drop down list is then shown with all the available harvester protocols. +Each time a harvester is run, a log file is generated of what was harvested and/or what went wrong (e.g., an exception report). To view the harvester history, select a harvester in the harvester list and select the `Harvester history` tab on the harvester page: -You can choose the type of harvest you intend to perform and press *Add* to begin the process of adding the harvester. The supported harvesters and details of what to do next are in the following sections: +![](img/harvester-history.png) -## Harvest History {#harvest_history} +Once the harvester history is displayed, it is possible to download the log file of the harvester run and delete the harvester history. -Each time a harvester is run, it generates a status report of what was harvested and/or what went wrong (eg. exception report). These reports are stored in a table in the database used by GeoNetwork. The entire harvesting history for all harvesters can be recalled using the History button on the Harvesting Management page. The harvest history for an individual harvester can also be recalled using the History link in the Operations for that harvester. +### Harvester records -Once the harvest history has been displayed it is possible to: +When a harvester is executed, you can see the list of harvested metadata and some statistics about the metadata. 
Select a harvester in the list of harvesters and select the `Metadata records` tab on the harvester page: -- expand the detail of any exceptions -- sort the history by harvest date (or in the case of the history of all harvesters, by harvester name) -- delete any history entry or the entire history +![](img/harvester-statistics.png) diff --git a/docs/manual/docs/user-guide/publishing/managing-privileges.md b/docs/manual/docs/user-guide/publishing/managing-privileges.md index 7670dbff69e9..2bb33525d010 100644 --- a/docs/manual/docs/user-guide/publishing/managing-privileges.md +++ b/docs/manual/docs/user-guide/publishing/managing-privileges.md @@ -16,11 +16,11 @@ Below is a brief description for each privilege to help you identify which ones **Publish**: Users in the specified group/s are able to view the metadata eg. if it matches search criteria entered by such a user. -**Download**: Users in the specified group/s are able to download the data. - **Interactive Map**: Users in the specified group/s are able to get an interactive map. The interactive map has to be created separately using a Web Map Server such as GeoServer, which is distributed with GeoNetwork. -**Featured**: When randomly selected by GeoNetwork, the metadata record can appear in the `Featured` section of the GeoNetwork home page. +**Download**: Users in the specified group/s are able to download the data. + +**Editing**: Users in the specified group/s are able to edit the metadata, if they have the *editor* profile. **Notify**: Users in the specified group receive notification if data attached to the metadata record is downloaded. @@ -57,12 +57,13 @@ Any user (logged in or not) can view the public metadata. An *administrator* can edit any metadata. -A *reviewer* can edit a metadata if: +A *reviewer* / *editor* can edit a metadata if: + +* They are the metadata owner. + +* The metadata has editing privilege in the group(s) where the user is a *reviewer* / *editor*. 
-- The metadata owner is member of one of the groups assigned to the reviewer. -- They are the metadata owner. -A *User Administrator* or an *Editor* can only edit metadata they created. # Setting Privileges diff --git a/docs/manual/docs/user-guide/workflow/life-cycle.md b/docs/manual/docs/user-guide/workflow/life-cycle.md index c1bf876157fa..70416aa45111 100644 --- a/docs/manual/docs/user-guide/workflow/life-cycle.md +++ b/docs/manual/docs/user-guide/workflow/life-cycle.md @@ -1,26 +1,29 @@ # Life cycle -## Record life cycle +Metadata records can have a lifecycle that typically goes through one or more states. This is an optional feature that +can be activated on demand (see [Activate the metadata workflow](#activate-workflow)). -Metadata records have a lifecycle that typically goes through one or more states. For example, when a record is: +For example, when a record is: - created and edited by an `Editor` it is in the `Draft` state. -- being reviewed by a `content reviewer`, or a review is requested brings the record to `Submitted` state. +- being reviewed by a `Content reviewer`, or a review is requested brings the record to `Submitted` state. - completed and corrected by the `Content Reviewer` it is in the `Approved` state. - superseded or replaced the state is `Retired`. -The catalog has (an extensible) set of states that a metadata record can have: +The catalog has a set of states that a metadata record can have: -- `Unknown` - this is the default state - nothing is known about the status of the metadata record. - `Draft` - the record is under construction or being edited. - `Submitted` - the record has been submitted for approval to a content review. - `Approved` - the content reviewer has reviewed and approved the metadata record. -- `Rejected` - the content reviewer has reviewed and rejected the metadata record. - `Retired` - the record has been retired. -Workflow can be enabled for the full catalogue, certain groups or on an individual record level. 
+When the metadata workflow is activated, the existing records are set in a special status `Unknown`. -In the last case, to enable workflow and change the status from `Unknown` to `Draft`, click the `enable workflow` button in the metadata view: +## Activate the metadata workflow {#activate-workflow} + +To enable the record life cycle, activate the metadata workflow. It can be activated for the full catalogue, certain groups, or on an individual record. + +In the case of activating for an individual record: enable workflow in a metadata, change the status from `Unknown` to `Draft`, and then click the `Enable workflow` button in the metadata view: ![](img/workflow-enable.png) @@ -28,10 +31,11 @@ In the last case, to enable workflow and change the status from `Unknown` to `Dr To use the workflow for metadata records created before enabling it, you must use the above option. +To enable workflow for the full catalogue or certain groups, check `Administration` --> `Settings` --> `Metadata Workflow`. In workflow mode, in case approved records are modified, you're working on a copy of the approved record. Changes on the record will not be visible to users outside your group until the modified record is approved again. -To enable workflow for the full catalogue or certain groups, check Administration --> Settings --> Metadata Workflow. In workflow mode, in case approved records are modified, you're working on a copy of the approved record. Changes on the record will not be visible to users outside your group until the modified record is approved again. +## Usage -When done editing you can submit a record for review by a content reviewer. The submit button is available on the `manage record` menu in the metadata view. A popup will open in which you can leave a message for the content reviewer. +When done editing you can submit a record for review by a content reviewer. The submit button is available on the `Manage record` menu in the metadata view. 
A popup will open in which you can leave a message for the content reviewer. ![](img/submit-for-review.png) diff --git a/docs/manual/mkdocs.yml b/docs/manual/mkdocs.yml index 73af7ac42b5b..cc3c19201166 100644 --- a/docs/manual/mkdocs.yml +++ b/docs/manual/mkdocs.yml @@ -149,10 +149,11 @@ nav: - overview/authors.md - 'Changelog': - overview/change-log/index.md - - overview/change-log/version-4.4.5.md - - overview/change-log/version-4.2.10.md + - overview/change-log/version-4.4.6.md + - overview/change-log/version-4.2.11.md - 'Release History': - overview/change-log/history/index.md + - overview/change-log/version-4.4.6.md - overview/change-log/version-4.4.5.md - overview/change-log/version-4.4.4.md - overview/change-log/version-4.4.3.md @@ -161,6 +162,7 @@ nav: - overview/change-log/version-4.4.0.md - 'Release History': - overview/change-log/history/index.md + - overview/change-log/version-4.2.11.md - overview/change-log/version-4.2.10.md - overview/change-log/version-4.2.9.md - overview/change-log/version-4.2.8.md @@ -294,6 +296,7 @@ nav: - user-guide/harvesting/harvesting-csw.md - user-guide/harvesting/harvesting-filesystem.md - user-guide/harvesting/harvesting-geonetwork.md + - user-guide/harvesting/harvesting-geonetwork-2.md - user-guide/harvesting/harvesting-geoportal.md - user-guide/harvesting/harvesting-oaipmh.md - user-guide/harvesting/harvesting-ogcwxs.md @@ -302,7 +305,6 @@ nav: - user-guide/harvesting/harvesting-thredds.md - user-guide/harvesting/harvesting-webdav.md - user-guide/harvesting/harvesting-wfs-features.md - - user-guide/harvesting/harvesting-z3950.md - user-guide/export/index.md - 'Administration': - administrator-guide/index.md @@ -321,6 +323,7 @@ nav: - administrator-guide/managing-users-and-groups/creating-group.md - administrator-guide/managing-users-and-groups/creating-user.md - administrator-guide/managing-users-and-groups/user-self-registration.md + - administrator-guide/managing-users-and-groups/user-reset-password.md - 'Classification 
Systems': - administrator-guide/managing-classification-systems/index.md - administrator-guide/managing-classification-systems/managing-categories.md diff --git a/docs/manual/pom.xml b/docs/manual/pom.xml index 4ac33875de14..a0d9a973a963 100644 --- a/docs/manual/pom.xml +++ b/docs/manual/pom.xml @@ -27,7 +27,7 @@ gn-docs org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 gn-guide diff --git a/docs/pom.xml b/docs/pom.xml index 6330049c7b09..0621811afc10 100644 --- a/docs/pom.xml +++ b/docs/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 gn-docs diff --git a/doi/pom.xml b/doi/pom.xml index 8f087e09ce3e..213485493053 100644 --- a/doi/pom.xml +++ b/doi/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/doi/src/main/java/org/fao/geonet/doi/client/DoiManager.java b/doi/src/main/java/org/fao/geonet/doi/client/DoiManager.java index c21d0f3c4a0f..012c710585e8 100644 --- a/doi/src/main/java/org/fao/geonet/doi/client/DoiManager.java +++ b/doi/src/main/java/org/fao/geonet/doi/client/DoiManager.java @@ -32,8 +32,8 @@ import org.fao.geonet.domain.*; import org.fao.geonet.kernel.AccessManager; import org.fao.geonet.kernel.ApplicableSchematron; -import org.fao.geonet.kernel.DataManager; import org.fao.geonet.kernel.SchematronValidator; +import org.fao.geonet.kernel.datamanager.base.BaseMetadataManager; import org.fao.geonet.kernel.datamanager.base.BaseMetadataSchemaUtils; import org.fao.geonet.kernel.datamanager.base.BaseMetadataUtils; import org.fao.geonet.kernel.schema.MetadataSchema; @@ -41,12 +41,10 @@ import org.fao.geonet.kernel.search.IndexingMode; import org.fao.geonet.kernel.setting.SettingManager; import org.fao.geonet.repository.SchematronRepository; -import org.fao.geonet.utils.Log; import org.fao.geonet.utils.Xml; import org.jdom.Element; import org.jdom.JDOMException; import org.jdom.Namespace; -import 
org.springframework.beans.factory.annotation.Autowired; import java.io.IOException; import java.nio.file.Files; @@ -60,11 +58,9 @@ /** * Class to register/unregister DOIs using the Datacite Metadata Store (MDS) API. + *

+ * See ... * - * See https://support.datacite.org/docs/mds-api-guide - * - * @author Jose García - * @author Francois Prunayre */ public class DoiManager { private static final String DOI_ADD_XSL_PROCESS = "process/doi-add.xsl"; @@ -75,112 +71,52 @@ public class DoiManager { public static final String DOI_DEFAULT_URL = "https://doi.org/"; public static final String DOI_DEFAULT_PATTERN = "{{uuid}}"; - private IDoiClient client; - private String doiPrefix; - private String doiPattern; - private String landingPageTemplate; - private boolean initialised = false; - private boolean isMedra = false; - - DataManager dm; - SettingManager sm; - BaseMetadataSchemaUtils schemaUtils; - - @Autowired - BaseMetadataUtils metadataUtils; - - @Autowired - SchematronValidator validator; - - @Autowired - DoiBuilder doiBuilder; + private final SettingManager sm; + private final BaseMetadataSchemaUtils schemaUtils; + private final BaseMetadataManager metadataManager; + private final BaseMetadataUtils metadataUtils; + private final SchematronValidator validator; + private final DoiBuilder doiBuilder; + private final SchematronRepository schematronRepository; + + + public DoiManager(final SettingManager sm, final BaseMetadataSchemaUtils schemaUtils, + final BaseMetadataManager metadataManager, final BaseMetadataUtils metadataUtils, + final SchematronValidator validator, final DoiBuilder doiBuilder, + final SchematronRepository schematronRepository) { + this.sm = sm; + this.schemaUtils = schemaUtils; + this.metadataManager = metadataManager; + this.metadataUtils = metadataUtils; + this.validator = validator; + this.doiBuilder = doiBuilder; + this.schematronRepository = schematronRepository; - @Autowired - SchematronRepository schematronRepository; - - - public DoiManager() { - sm = ApplicationContextHolder.get().getBean(SettingManager.class); - dm = ApplicationContextHolder.get().getBean(DataManager.class); - schemaUtils = 
ApplicationContextHolder.get().getBean(BaseMetadataSchemaUtils.class); - - loadConfig(); } - public boolean isInitialised() { - return initialised; + private IDoiClient createDoiClient(DoiServer doiServer) { + boolean isMedra = isMedraServer(doiServer); + return isMedra ? + new DoiMedraClient(doiServer.getUrl(), doiServer.getUsername(), doiServer.getPassword(), doiServer.getPublicUrl()) : + new DoiDataciteClient(doiServer.getUrl(), doiServer.getUsername(), doiServer.getPassword(), doiServer.getPublicUrl()); } - /** - * Check parameters and build the client. - * - */ - public void loadConfig() { - initialised = false; - if (sm != null) { - - String serverUrl = sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_DOIURL); - String doiPublicUrl = StringUtils.defaultIfEmpty( - sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_DOIPUBLICURL), - DOI_DEFAULT_URL); - String username = sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_DOIUSERNAME); - String password = sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_DOIPASSWORD); - - doiPrefix = sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_DOIKEY); - doiPattern = StringUtils.defaultIfEmpty( - sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_DOIPATTERN), - DOI_DEFAULT_PATTERN - ); - - landingPageTemplate = sm.getValue(DoiSettings.SETTING_PUBLICATION_DOI_LANDING_PAGE_TEMPLATE); - - final boolean emptyUrl = StringUtils.isEmpty(serverUrl); - final boolean emptyUsername = StringUtils.isEmpty(username); - final boolean emptyPassword = StringUtils.isEmpty(password); - final boolean emptyPrefix = StringUtils.isEmpty(doiPrefix); - if (emptyUrl || - emptyUsername || - emptyPassword || - emptyPrefix) { - StringBuilder report = new StringBuilder("DOI configuration is not complete. 
Check in System Configuration to fill the DOI configuration."); - if (emptyUrl) { - report.append("\n* URL MUST be set"); - } - if (emptyUsername) { - report.append("\n* Username MUST be set"); - } - if (emptyPassword) { - report.append("\n* Password MUST be set"); - } - if (emptyPrefix) { - report.append("\n* Prefix MUST be set"); - } - Log.warning(DoiSettings.LOGGER_NAME, - report.toString()); - } else { - Log.debug(DoiSettings.LOGGER_NAME, - "DOI configuration looks perfect."); - isMedra = serverUrl.contains(MEDRA_SEARCH_KEY); - this.client = - isMedra ? - new DoiMedraClient(serverUrl, username, password, doiPublicUrl) : - new DoiDataciteClient(serverUrl, username, password, doiPublicUrl); - initialised = true; - } - } - } + public String checkDoiUrl(DoiServer doiServer, AbstractMetadata metadata) throws DoiClientException { + checkInitialised(doiServer); + checkCanHandleMetadata(doiServer, metadata); - public String checkDoiUrl(AbstractMetadata metadata) { - return doiBuilder.create(doiPattern, doiPrefix, metadata); + return doiBuilder.create(doiServer.getPattern(), doiServer.getPrefix(), metadata); } - public Map check(ServiceContext serviceContext, AbstractMetadata metadata, Element dataciteMetadata) throws Exception { + public Map check(ServiceContext serviceContext, DoiServer doiServer, AbstractMetadata metadata, Element dataciteMetadata) throws Exception { Map conditions = new HashMap<>(); - checkInitialised(); + checkInitialised(doiServer); + checkCanHandleMetadata(doiServer, metadata); conditions.put(DoiConditions.API_CONFIGURED, true); - String doi = doiBuilder.create(doiPattern, doiPrefix, metadata); - checkPreConditions(metadata, doi); + IDoiClient doiClient = createDoiClient(doiServer); + String doi = doiBuilder.create(doiServer.getPattern(), doiServer.getPrefix(), metadata); + checkPreConditions(doiClient, metadata, doi); conditions.put(DoiConditions.RECORD_IS_PUBLIC, true); conditions.put(DoiConditions.STANDARD_SUPPORT, true); @@ -188,26 +124,26 @@ 
public Map check(ServiceContext serviceContext, AbstractMetadat // ** Convert to DataCite format Element dataciteFormatMetadata = dataciteMetadata == null ? - convertXmlToDataCiteFormat(metadata.getDataInfo().getSchemaId(), - metadata.getXmlData(false), doi) : dataciteMetadata; - checkPreConditionsOnDataCite(metadata, doi, dataciteFormatMetadata, serviceContext.getLanguage()); + convertXmlToDataCiteFormat(doiServer, metadata.getDataInfo().getSchemaId(), + metadata.getXmlData(false), doi) : dataciteMetadata; + checkPreConditionsOnDataCite(doiClient, metadata, doi, dataciteFormatMetadata, serviceContext.getLanguage()); conditions.put(DoiConditions.DATACITE_FORMAT_IS_VALID, true); return conditions; } - public Map register(ServiceContext context, AbstractMetadata metadata) throws Exception { + public Map register(ServiceContext context, DoiServer doiServer, AbstractMetadata metadata) throws Exception { Map doiInfo = new HashMap<>(3); // The new DOI for this record - String doi = doiBuilder.create(doiPattern, doiPrefix, metadata); + String doi = doiBuilder.create(doiServer.getPattern(), doiServer.getPrefix(), metadata); doiInfo.put("doi", doi); // The record in datacite format Element dataciteFormatMetadata = - convertXmlToDataCiteFormat(metadata.getDataInfo().getSchemaId(), - metadata.getXmlData(false), doi); + convertXmlToDataCiteFormat(doiServer, metadata.getDataInfo().getSchemaId(), + metadata.getXmlData(false), doi); try { - check(context, metadata, dataciteFormatMetadata); + check(context, doiServer, metadata, dataciteFormatMetadata); } catch (ResourceAlreadyExistException ignore) { // Update DOI doiInfo.put("update", "true"); @@ -215,7 +151,8 @@ public Map register(ServiceContext context, AbstractMetadata met throw e; } - createDoi(context, metadata, doiInfo, dataciteFormatMetadata); + IDoiClient doiClient = createDoiClient(doiServer); + createDoi(context, doiClient, doiServer, metadata, doiInfo, dataciteFormatMetadata); checkDoiCreation(metadata, doiInfo); 
return doiInfo; @@ -230,7 +167,7 @@ public Map register(ServiceContext context, AbstractMetadata met * @throws IOException * @throws JDOMException */ - private void checkPreConditions(AbstractMetadata metadata, String doi) throws DoiClientException, IOException, JDOMException, ResourceAlreadyExistException { + private void checkPreConditions(IDoiClient doiClient, AbstractMetadata metadata, String doi) throws DoiClientException, IOException, JDOMException, ResourceAlreadyExistException { // Record MUST be public AccessManager am = ApplicationContextHolder.get().getBean(AccessManager.class); boolean visibleToAll = false; @@ -239,11 +176,11 @@ private void checkPreConditions(AbstractMetadata metadata, String doi) throws Do } catch (Exception e) { throw new DoiClientException(String.format( "Failed to check if record '%s' is visible to all for DOI creation." + - " Error is %s.", + " Error is %s.", metadata.getUuid(), e.getMessage())) .withMessageKey("exception.doi.failedVisibilityCheck") .withDescriptionKey("exception.doi.failedVisibilityCheck.description", - new String[]{ metadata.getUuid(), e.getMessage() }); + new String[]{metadata.getUuid(), e.getMessage()}); } if (!visibleToAll) { @@ -251,7 +188,7 @@ private void checkPreConditions(AbstractMetadata metadata, String doi) throws Do "Record '%s' is not public and we cannot request a DOI for such a record. Publish this record first.", metadata.getUuid())) .withMessageKey("exception.doi.recordNotPublic") - .withDescriptionKey("exception.doi.recordNotPublic.description", new String[]{ metadata.getUuid() }); + .withDescriptionKey("exception.doi.recordNotPublic.description", new String[]{metadata.getUuid()}); } // Record MUST not contains a DOI @@ -259,7 +196,7 @@ private void checkPreConditions(AbstractMetadata metadata, String doi) throws Do String currentDoi = metadataUtils.getDoi(metadata.getUuid()); if (StringUtils.isNotEmpty(currentDoi)) { // Current doi does not match the one going to be inserted. 
This is odd - String newDoi = client.createPublicUrl(doi); + String newDoi = doiClient.createPublicUrl(doi); if (!currentDoi.equals(newDoi)) { throw new DoiClientException(String.format( "Record '%s' already contains a DOI %s which is not equal " + @@ -269,7 +206,7 @@ private void checkPreConditions(AbstractMetadata metadata, String doi) throws Do "an existing DOI.", metadata.getUuid(), currentDoi, currentDoi, newDoi)) .withMessageKey("exception.doi.resourcesContainsDoiNotEqual") - .withDescriptionKey("exception.doi.resourcesContainsDoiNotEqual.description", new String[]{ metadata.getUuid(), currentDoi, currentDoi, newDoi }); + .withDescriptionKey("exception.doi.resourcesContainsDoiNotEqual.description", new String[]{metadata.getUuid(), currentDoi, currentDoi, newDoi}); } throw new ResourceAlreadyExistException(String.format( @@ -279,7 +216,7 @@ private void checkPreConditions(AbstractMetadata metadata, String doi) throws Do metadata.getUuid(), currentDoi, currentDoi)) .withMessageKey("exception.doi.resourceContainsDoi") .withDescriptionKey("exception.doi.resourceContainsDoi.description", - new String[]{ metadata.getUuid(), currentDoi, currentDoi }); + new String[]{metadata.getUuid(), currentDoi, currentDoi}); } } catch (ResourceNotFoundException e) { final MetadataSchema schema = schemaUtils.getSchema(metadata.getDataInfo().getSchemaId()); @@ -299,24 +236,23 @@ private void checkPreConditions(AbstractMetadata metadata, String doi) throws Do schema.getName())) .withMessageKey("exception.doi.missingSavedquery") .withDescriptionKey("exception.doi.missingSavedquery.description", - new String[]{ metadata.getUuid(), schema.getName(), - SavedQuery.DOI_GET, e.getMessage(), - schema.getName() }); + new String[]{metadata.getUuid(), schema.getName(), + SavedQuery.DOI_GET, e.getMessage(), + schema.getName()}); } } /** * Check conditions on DataCite side. 
+ * * @param metadata * @param doi * @param dataciteMetadata * @param language */ - private void checkPreConditionsOnDataCite(AbstractMetadata metadata, String doi, Element dataciteMetadata, String language) throws DoiClientException, ResourceAlreadyExistException { + private void checkPreConditionsOnDataCite(IDoiClient doiClient, AbstractMetadata metadata, String doi, Element dataciteMetadata, String language) throws DoiClientException, ResourceAlreadyExistException { // * DataCite API is up an running ? - - try { List validations = new ArrayList<>(); List applicableSchematron = Lists.newArrayList(); @@ -341,7 +277,7 @@ private void checkPreConditionsOnDataCite(AbstractMetadata metadata, String doi, StringBuilder message = new StringBuilder(); if (!failures.isEmpty()) { message.append("

    "); - failures.forEach(f -> message.append("
  • ").append(((Element)f).getTextNormalize()).append("
  • ")); + failures.forEach(f -> message.append("
  • ").append(((Element) f).getTextNormalize()).append("
  • ")); message.append("
"); throw new DoiClientException(String.format( @@ -349,9 +285,9 @@ private void checkPreConditionsOnDataCite(AbstractMetadata metadata, String doi, metadata.getUuid(), failures.size(), message)) .withMessageKey("exception.doi.recordNotConformantMissingInfo") .withDescriptionKey("exception.doi.recordNotConformantMissingInfo.description", - new String[]{ metadata.getUuid(), String.valueOf(failures.size()), message.toString() }); + new String[]{metadata.getUuid(), String.valueOf(failures.size()), message.toString()}); } - } catch (IOException|JDOMException e) { + } catch (IOException | JDOMException e) { throw new DoiClientException(String.format( "Record '%s' is not conform with DataCite validation rules for mandatory fields. Error is: %s. " + "Required fields in DataCite are: identifier, creators, titles, publisher, publicationYear, resourceType. " + @@ -360,7 +296,7 @@ private void checkPreConditionsOnDataCite(AbstractMetadata metadata, String doi, metadata.getUuid(), e.getMessage(), sm.getNodeURL(), metadata.getUuid())) .withMessageKey("exception.doi.recordNotConformantMissingMandatory") .withDescriptionKey("exception.doi.recordNotConformantMissingMandatory.description", - new String[]{ metadata.getUuid(), e.getMessage(), sm.getNodeURL(), metadata.getUuid() }); + new String[]{metadata.getUuid(), e.getMessage(), sm.getNodeURL(), metadata.getUuid()}); } // XSD validation @@ -375,24 +311,24 @@ private void checkPreConditionsOnDataCite(AbstractMetadata metadata, String doi, metadata.getUuid(), e.getMessage(), sm.getNodeURL(), metadata.getUuid())) .withMessageKey("exception.doi.recordInvalid") .withDescriptionKey("exception.doi.recordInvalid.description", - new String[]{ metadata.getUuid(), e.getMessage(), sm.getNodeURL(), metadata.getUuid() }); + new String[]{metadata.getUuid(), e.getMessage(), sm.getNodeURL(), metadata.getUuid()}); } // * MDS / DOI does not exist already // curl -i --user username:password https://mds.test.datacite.org/doi/10.5072/GN // Return 404 - 
final String doiResponse = client.retrieveDoi(doi); + final String doiResponse = doiClient.retrieveDoi(doi); if (doiResponse != null) { throw new ResourceAlreadyExistException(String.format( "Record '%s' looks to be already published on DataCite with DOI '%s'. DOI on Datacite point to: %s. " + "If the DOI is not correct, remove it from the record and ask for a new one.", metadata.getUuid(), - client.createUrl("doi") + "/" + doi, + doiClient.createUrl("doi") + "/" + doi, doi, doi, doiResponse)) .withMessageKey("exception.doi.resourceAlreadyPublished") - .withDescriptionKey("exception.doi.resourceAlreadyPublished.description", new String[]{ metadata.getUuid(), - client.createUrl("doi") + "/" + doi, - doi, doi, doiResponse }); + .withDescriptionKey("exception.doi.resourceAlreadyPublished.description", new String[]{metadata.getUuid(), + doiClient.createUrl("doi") + "/" + doi, + doi, doi, doiResponse}); } // TODO: Could be relevant at some point to return states (draft/findable) @@ -404,10 +340,12 @@ private void checkPreConditionsOnDataCite(AbstractMetadata metadata, String doi, /** * Use the DataCite API to register the new DOI. 
+ * * @param context * @param metadata */ - private void createDoi(ServiceContext context, AbstractMetadata metadata, Map doiInfo, Element dataciteMetadata) throws Exception { + private void createDoi(ServiceContext context, IDoiClient doiClient, DoiServer doiServer, + AbstractMetadata metadata, Map doiInfo, Element dataciteMetadata) throws Exception { // * Now, let's create the DOI // picking a DOI name, @@ -418,29 +356,30 @@ private void createDoi(ServiceContext context, AbstractMetadata metadata, Map doi } - public void unregisterDoi(AbstractMetadata metadata, ServiceContext context) throws DoiClientException, ResourceNotFoundException { - checkInitialised(); + public void unregisterDoi(DoiServer doiServer, AbstractMetadata metadata, ServiceContext context) throws DoiClientException, ResourceNotFoundException { + checkInitialised(doiServer); + checkCanHandleMetadata(doiServer, metadata); - final String doi = doiBuilder.create(doiPattern, doiPrefix, metadata); - final String doiResponse = client.retrieveDoi(doi); + IDoiClient doiClient = createDoiClient(doiServer); + final String doi = doiBuilder.create(doiServer.getPattern(), doiServer.getPrefix(), metadata); + final String doiResponse = doiClient.retrieveDoi(doi); if (doiResponse == null) { throw new ResourceNotFoundException(String.format( "Record '%s' is not available on DataCite. 
DOI '%s' does not exist.", @@ -467,12 +408,12 @@ public void unregisterDoi(AbstractMetadata metadata, ServiceContext context) thr Element md = metadata.getXmlData(false); String doiUrl = metadataUtils.getDoi(metadata.getUuid()); - client.deleteDoiMetadata(doi); - client.deleteDoi(doi); + doiClient.deleteDoiMetadata(doi); + doiClient.deleteDoi(doi); Element recordWithoutDoi = removeDOIValue(doiUrl, metadata.getDataInfo().getSchemaId(), md); - dm.updateMetadata(context, metadata.getId() + "", recordWithoutDoi, false, true, + metadataManager.updateMetadata(context, metadata.getId() + "", recordWithoutDoi, false, true, context.getLanguage(), new ISODate().toString(), true, IndexingMode.full); } catch (Exception ex) { throw new DoiClientException(String.format( @@ -485,10 +426,9 @@ public void unregisterDoi(AbstractMetadata metadata, ServiceContext context) thr /** * Sets the DOI URL value in the metadata record using the process DOI_ADD_XSL_PROCESS. - * */ - public Element setDOIValue(String doi, String schema, Element md) throws Exception { - Path styleSheet = dm.getSchemaDir(schema).resolve(DOI_ADD_XSL_PROCESS); + public Element setDOIValue(IDoiClient doiClient, String doi, String schema, Element md) throws Exception { + Path styleSheet = schemaUtils.getSchemaDir(schema).resolve(DOI_ADD_XSL_PROCESS); boolean exists = Files.exists(styleSheet); if (!exists) { String message = String.format("To create a DOI, the schema has to defined how to insert a DOI in the record. The schema_plugins/%s/process/%s was not found. 
Create the XSL transformation.", @@ -501,7 +441,7 @@ public Element setDOIValue(String doi, String schema, Element md) throws Excepti .withDescriptionKey("exception.doi.serverErrorCreate.description", new String[]{message}); } - String doiPublicUrl = client.createPublicUrl(""); + String doiPublicUrl = doiClient.createPublicUrl(""); Map params = new HashMap<>(1); params.put("doi", doi); @@ -511,10 +451,9 @@ public Element setDOIValue(String doi, String schema, Element md) throws Excepti /** * Sets the DOI URL value in the metadata record using the process DOI_ADD_XSL_PROCESS. - * */ public Element removeDOIValue(String doi, String schema, Element md) throws Exception { - Path styleSheet = dm.getSchemaDir(schema).resolve(DOI_REMOVE_XSL_PROCESS); + Path styleSheet = schemaUtils.getSchemaDir(schema).resolve(DOI_REMOVE_XSL_PROCESS); boolean exists = Files.exists(styleSheet); if (!exists) { String message = String.format("To remove a DOI, the schema has to defined how to remove a DOI in the record. The schema_plugins/%s/process/%s was not found. Create the XSL transformation.", @@ -540,9 +479,9 @@ public Element removeDOIValue(String doi, String schema, Element md) throws Exce * @return The record converted into the DataCite format. * @throws Exception if there is no conversion available. */ - private Element convertXmlToDataCiteFormat(String schema, Element md, String doi) throws Exception { - final Path styleSheet = dm.getSchemaDir(schema).resolve( - isMedra ? DATACITE_MEDRA_XSL_CONVERSION_FILE : DATACITE_XSL_CONVERSION_FILE); + private Element convertXmlToDataCiteFormat(DoiServer doiServer, String schema, Element md, String doi) throws Exception { + final Path styleSheet = schemaUtils.getSchemaDir(schema).resolve( + isMedraServer(doiServer) ? 
DATACITE_MEDRA_XSL_CONVERSION_FILE : DATACITE_XSL_CONVERSION_FILE); final boolean exists = Files.exists(styleSheet); if (!exists) { String message = String.format("To create a DOI, the record needs to be converted to the DataCite format (https://schema.datacite.org/). You need to create a formatter for this in schema_plugins/%s/%s. If the standard is a profile of ISO19139, you can simply point to the ISO19139 formatter.", @@ -555,17 +494,53 @@ private Element convertXmlToDataCiteFormat(String schema, Element md, String doi .withDescriptionKey("exception.doi.serverErrorCreate.description", new String[]{message}); } - Map params = new HashMap<>(); + Map params = new HashMap<>(); params.put(DOI_ID_PARAMETER, doi); return Xml.transform(md, styleSheet, params); } - private void checkInitialised() throws DoiClientException { - if (!initialised) { - throw new DoiClientException("DOI configuration is not complete. Check System Configuration and set the DOI configuration.") + private void checkInitialised(DoiServer doiServer) throws DoiClientException { + final boolean emptyUrl = StringUtils.isEmpty(doiServer.getUrl()); + final boolean emptyUsername = StringUtils.isEmpty(doiServer.getUsername()); + final boolean emptyPassword = StringUtils.isEmpty(doiServer.getPassword()); + final boolean emptyPrefix = StringUtils.isEmpty(doiServer.getPrefix()); + + if (emptyUrl || + emptyUsername || + emptyPassword || + emptyPrefix) { + throw new DoiClientException("DOI server configuration is not complete. Check the DOI server configuration to complete it.") .withMessageKey("exception.doi.configurationMissing") .withDescriptionKey("exception.doi.configurationMissing.description", new String[]{}); + + } + } + + /** + * Checks if the DOI server can handle the metadata: + * - The DOI server is not publishing metadata for certain metadata group(s) or + * - it publishes metadata from the metadata group owner. + * + * @param doiServer The DOI server. + * @param metadata The metadata to process. 
+ * @throws DoiClientException + */ + private void checkCanHandleMetadata(DoiServer doiServer, AbstractMetadata metadata) throws DoiClientException { + if (!doiServer.getPublicationGroups().isEmpty()) { + Integer groupOwner = metadata.getSourceInfo().getGroupOwner(); + + if (doiServer.getPublicationGroups().stream().noneMatch(g -> g.getId() == groupOwner)) { + throw new DoiClientException( + String.format("DOI server '%s' can not handle the metadata with UUID '%s'.", + doiServer.getName(), metadata.getUuid())) + .withMessageKey("exception.doi.serverCanNotHandleRecord") + .withDescriptionKey("exception.doi.serverCanNotHandleRecord.description", new String[]{doiServer.getName(), metadata.getUuid()}); + } } + } + private boolean isMedraServer(DoiServer doiServer) { + return doiServer.getUrl().contains(MEDRA_SEARCH_KEY); + } } diff --git a/domain/pom.xml b/domain/pom.xml index 812778871e85..fceb67a0d28a 100644 --- a/domain/pom.xml +++ b/domain/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 @@ -90,6 +90,10 @@ org.springframework.data spring-data-jpa + + org.springframework.data + spring-data-envers + org.springframework spring-test diff --git a/domain/src/main/java/org/fao/geonet/auditable/UsernameAuditorAware.java b/domain/src/main/java/org/fao/geonet/auditable/UsernameAuditorAware.java new file mode 100644 index 000000000000..3f98f4ea5f29 --- /dev/null +++ b/domain/src/main/java/org/fao/geonet/auditable/UsernameAuditorAware.java @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ +package org.fao.geonet.auditable; + +import java.util.Optional; +import org.fao.geonet.domain.User; +import org.springframework.data.domain.AuditorAware; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.context.SecurityContextHolder; + +/** + * Extracts the current user used, to store the information in the auditable entities. + */ +public class UsernameAuditorAware implements AuditorAware { + + @Override + public Optional getCurrentAuditor() { + Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); + + if (authentication == null || !authentication.isAuthenticated()) { + return Optional.empty(); + } + + return Optional.of(((User) authentication.getPrincipal()).getUsername()); + } +} diff --git a/domain/src/main/java/org/fao/geonet/auditable/package-info.java b/domain/src/main/java/org/fao/geonet/auditable/package-info.java new file mode 100644 index 000000000000..48c47d3fabf8 --- /dev/null +++ b/domain/src/main/java/org/fao/geonet/auditable/package-info.java @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public 
License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ +@NonNullApi +package org.fao.geonet.auditable; + +import org.springframework.lang.NonNullApi; diff --git a/domain/src/main/java/org/fao/geonet/domain/DoiServer.java b/domain/src/main/java/org/fao/geonet/domain/DoiServer.java new file mode 100644 index 000000000000..90c93c31c6d4 --- /dev/null +++ b/domain/src/main/java/org/fao/geonet/domain/DoiServer.java @@ -0,0 +1,284 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.domain; + +import org.fao.geonet.entitylistener.DoiServerEntityListenerManager; +import org.hibernate.annotations.Type; + +import javax.persistence.*; +import java.util.HashSet; +import java.util.Set; + +@Entity +@Table(name = "Doiservers") +@Cacheable +@Access(AccessType.PROPERTY) +@EntityListeners(DoiServerEntityListenerManager.class) +@SequenceGenerator(name = DoiServer.ID_SEQ_NAME, initialValue = 100, allocationSize = 1) +public class DoiServer extends GeonetEntity { + static final String ID_SEQ_NAME = "doiserver_id_seq"; + + private int id; + private String name; + private String description; + private String url; + private String username; + private String password; + private String landingPageTemplate; + private String publicUrl; + private String pattern = "{{uuid}}"; + private String prefix; + private Set publicationGroups = new HashSet<>(); + + /** + * Get the id of the DOI server.

This is autogenerated and when a new DOI server is created + * the DOI server will be assigned a new value.

+ * + * @return the id of the DOI server. + */ + @Id + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = ID_SEQ_NAME) + @Column(nullable = false) + public int getId() { + return id; + } + + /** + * Set the id of the DOI server.

If you want to update an existing DOI server then you should + * set this id to the DOI server you want to update and set the other values to the desired + * values.

+ * + * @param id the id of the group. + * @return this DOI server object + */ + public DoiServer setId(int id) { + this.id = id; + return this; + } + + /** + * Get the basic/default name of the DOI server. This is non-translated and can be used to look + * up the DOI server like an id can.

This is a required property.

There is a max length + * to the name allowed. See the annotation for the length value.

+ * + * @return DOI server name + */ + @Column(nullable = false, length = 32) + public String getName() { + return name; + } + + /** + * Set the basic/default name of the DOI server. This is non-translated and can be used to look + * up the DOI server like an id can.

This is a required property.

There is a max length + * to the name allowed. See the annotation on {@link #getName()} for the length value.

+ */ + public DoiServer setName(String name) { + this.name = name; + return this; + } + + /** + * Get a description of the DOI server. + * + * @return the description. + */ + @Column(length = 255) + public String getDescription() { + return description; + } + + /** + * Set the DOI server description. + * + * @param description the description. + * @return this DOI server object. + */ + public DoiServer setDescription(String description) { + this.description = description; + return this; + } + + + /** + * Get the API URL for the DOI server. + * + * @return the DOI server API URL. + */ + @Column(nullable = false, length = 255) + public String getUrl() { + return url; + } + + /** + * Set the REST API configuration URL for the DOI server. + * + * @param url the server URL. + * @return this DOI server object. + */ + public DoiServer setUrl(String url) { + this.url = url; + return this; + } + + /** + * Get the username to use for connecting to the DOI server. + * + * @return the username. + */ + @Column(length = 128) + public String getUsername() { + return username; + } + + public DoiServer setUsername(String username) { + this.username = username; + return this; + } + + /** + * Get the password to use for connecting to the DOI server. + * + * @return the password. + */ + @Column(length = 128) + @Type(type="encryptedString") + public String getPassword() { + return password; + } + + public DoiServer setPassword(String password) { + this.password = password; + return this; + } + + /** + * Set the DOI landing page URL template. + * + * @param landingPageTemplate the landing page URL template. + * @return this DOI server object. + */ + public DoiServer setLandingPageTemplate(String landingPageTemplate) { + this.landingPageTemplate = landingPageTemplate; + return this; + } + + /** + * Get the DOI landing page URL template. + * + * @return the landing page URL template. 
+ */ + @Column(nullable = false, length = 255) + public String getLandingPageTemplate() { + return landingPageTemplate; + } + + /** + * Set the DOI URL prefix. + * + * @param publicUrl the URL prefix. + * @return this DOI server object. + */ + public DoiServer setPublicUrl(String publicUrl) { + this.publicUrl = publicUrl; + return this; + } + + /** + * Get the DOI URL prefix. + * + * @return the URL prefix. + */ + @Column(nullable = false, length = 255) + public String getPublicUrl() { + return publicUrl; + } + + /** + * Set the DOI identifier pattern. + * + * @param pattern the identifier pattern. + * @return this DOI server object. + */ + public DoiServer setPattern(String pattern) { + this.pattern = pattern; + return this; + } + + /** + * Get the DOI identifier pattern. + * + * @return the identifier pattern. + */ + @Column(nullable = false, length = 255) + public String getPattern() { + return pattern; + } + + + /** + * Set the DOI prefix. + * + * @param prefix the DOI prefix. + * @return this DOI server object. + */ + public DoiServer setPrefix(String prefix) { + this.prefix = prefix; + return this; + } + + /** + * Get the DOI prefix. + * + * @return the DOI prefix. + */ + @Column(nullable = false, length = 15) + public String getPrefix() { + return prefix; + } + + /** + * Sets the groups which metadata should be published to the DOI server. + * + * @param publicationGroups Publication groups. + * @return + */ + public void setPublicationGroups(Set publicationGroups) { + this.publicationGroups = publicationGroups; + } + + /** + * Get the groups which metadata is published to the DOI server. + * + * @return Publication groups. 
+ */ + @ManyToMany(fetch = FetchType.EAGER, cascade = CascadeType.PERSIST) + @JoinTable( + name = "doiservers_group", + joinColumns = @JoinColumn(name = "doiserver_id"), + inverseJoinColumns = @JoinColumn(name = "group_id")) + public Set getPublicationGroups() { + return publicationGroups; + } +} diff --git a/domain/src/main/java/org/fao/geonet/domain/Group.java b/domain/src/main/java/org/fao/geonet/domain/Group.java index 19fe92d6eb76..ccab64a875ff 100644 --- a/domain/src/main/java/org/fao/geonet/domain/Group.java +++ b/domain/src/main/java/org/fao/geonet/domain/Group.java @@ -41,6 +41,8 @@ import javax.persistence.ElementCollection; import javax.persistence.Entity; import javax.persistence.EntityListeners; +import javax.persistence.Enumerated; +import javax.persistence.EnumType; import javax.persistence.FetchType; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; @@ -84,6 +86,7 @@ public class Group extends Localized implements Serializable { private MetadataCategory defaultCategory; private List allowedCategories; private Boolean enableAllowedCategories; + private Profile minimumProfileForPrivileges; /** * Get the id of the group. @@ -348,4 +351,24 @@ public Group setEnableAllowedCategories(Boolean enableAllowedCategories) { this.enableAllowedCategories = enableAllowedCategories; return this; } + + /** + * Get the minimum profile required to update privileges for this group. + * + * @return {@link Profile} the minimum profile required to update privileges for this group. + */ + @Enumerated(EnumType.STRING) + public Profile getMinimumProfileForPrivileges() { + return minimumProfileForPrivileges; + } + + /** + * Set the minimum profile required to update privileges for this group. + * @param minimumProfileForPrivileges the minimum {@link Profile} required to update privileges for this group. + * @return this group entity object. 
+ */ + public Group setMinimumProfileForPrivileges(Profile minimumProfileForPrivileges) { + this.minimumProfileForPrivileges = minimumProfileForPrivileges; + return this; + } } diff --git a/domain/src/main/java/org/fao/geonet/domain/Profile.java b/domain/src/main/java/org/fao/geonet/domain/Profile.java index e3a24e71074e..b6e680d0ee07 100644 --- a/domain/src/main/java/org/fao/geonet/domain/Profile.java +++ b/domain/src/main/java/org/fao/geonet/domain/Profile.java @@ -73,31 +73,70 @@ public static Profile findProfileIgnoreCase(String profileName) { return null; } - public Set getParents() { - HashSet parents = new HashSet(); + /** + * Retrieves all direct child profiles of the current profile. + * Child profiles have fewer permissions than parents. + * + * @return A set containing profiles that have this profile as a parent. + */ + public Set getChildren() { + HashSet children = new HashSet(); for (Profile profile : values()) { if (profile.parents.contains(this)) { - parents.add(profile); + children.add(profile); } } + return children; + } + + /** + * Retrieves the direct parent profiles of the current profile. + * Parent profiles have more permissions than children. + * + * @return A set of profiles that are direct parents of this profile. + */ + public Set getParents() { return parents; } - public Set getAll() { - HashSet all = new HashSet(); - all.add(this); - for (Profile parent : getParents()) { - all.addAll(parent.getAll()); + /** + * Retrieves the profile and all of its children recursively. + * The returned set will include the profile itself. + * Child profiles have fewer permissions than parents. + * + * @return A {@link Set} containing the profile and all of its children. 
+ */ + public Set getProfileAndAllChildren() { + HashSet profiles = new HashSet(); + profiles.add(this); + for (Profile child : getChildren()) { + profiles.addAll(child.getProfileAndAllChildren()); } - return all; + return profiles; + } + + /** + * Retrieves the profile and all of its parents recursively. + * The returned set will include the profile itself. + * Parent profiles have more permissions than children. + * + * @return A {@link Set} containing the profile and all of its parents. + */ + public Set getProfileAndAllParents() { + Set profiles = new HashSet<>(); + profiles.add(this); + for (Profile parent : getParents()) { + profiles.addAll(parent.getProfileAndAllParents()); + } + return profiles; } public Element asElement() { Element elResult = new Element(PROFILES_ELEM_NAME); - for (Profile profile : getAll()) { + for (Profile profile : getProfileAndAllChildren()) { if (profile == Guest) continue; @@ -109,7 +148,7 @@ public Element asElement() { public Set getAllNames() { HashSet names = new HashSet(); - for (Profile p : getAll()) { + for (Profile p : getProfileAndAllChildren()) { names.add(p.name()); } return names; diff --git a/domain/src/main/java/org/fao/geonet/domain/StatusValue.java b/domain/src/main/java/org/fao/geonet/domain/StatusValue.java index 26d5bf4ccc92..47f36e604064 100644 --- a/domain/src/main/java/org/fao/geonet/domain/StatusValue.java +++ b/domain/src/main/java/org/fao/geonet/domain/StatusValue.java @@ -24,9 +24,11 @@ package org.fao.geonet.domain; import org.fao.geonet.entitylistener.StatusValueEntityListenerManager; +import org.springframework.http.MediaType; import javax.persistence.*; +import java.util.Arrays; import java.util.Map; /** @@ -223,20 +225,141 @@ public static final class Status { * List of predefined status part of the events. Those values are the default * one for GeoNetwork and may be modified in the database. 
*/ - public static final class Events { - public static final String RECORDCREATED = "50"; - public static final String RECORDUPDATED = "51"; - public static final String ATTACHMENTADDED = "52"; - public static final String ATTACHMENTDELETED = "53"; - public static final String RECORDOWNERCHANGE = "54"; - public static final String RECORDGROUPOWNERCHANGE = "55"; - public static final String RECORDPRIVILEGESCHANGE = "56"; - public static final String RECORDCATEGORYCHANGE = "57"; - public static final String RECORDVALIDATIONTRIGGERED = "58"; - public static final String RECORDSTATUSCHANGE = "59"; - public static final String RECORDPROCESSINGCHANGE = "60"; - public static final String RECORDDELETED = "61"; - public static final String RECORDIMPORTED = "62"; - public static final String RECORDRESTORED = "63"; + public enum Events { + RECORDCREATED(50, false, MediaType.APPLICATION_JSON, null), + RECORDUPDATED(51, true, MediaType.APPLICATION_XML, MediaType.APPLICATION_XML), + ATTACHMENTADDED(52, false, MediaType.TEXT_PLAIN, null), + ATTACHMENTDELETED(53, false, null, MediaType.TEXT_PLAIN), + RECORDOWNERCHANGE(54, false, MediaType.APPLICATION_JSON, MediaType.APPLICATION_JSON), + RECORDGROUPOWNERCHANGE(55, false, MediaType.APPLICATION_JSON, MediaType.APPLICATION_JSON), + RECORDPRIVILEGESCHANGE(56, false, MediaType.APPLICATION_JSON, MediaType.APPLICATION_JSON), + RECORDCATEGORYCHANGE(57, false, MediaType.APPLICATION_JSON, MediaType.APPLICATION_JSON), + RECORDVALIDATIONTRIGGERED(58, false, MediaType.TEXT_PLAIN, null), + RECORDSTATUSCHANGE(59, false, MediaType.APPLICATION_JSON, MediaType.APPLICATION_JSON), + RECORDPROCESSINGCHANGE(60, true, MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML), + RECORDDELETED(61, true, null, MediaType.APPLICATION_XML), + RECORDIMPORTED(62, false, MediaType.APPLICATION_JSON, null), + RECORDRESTORED(63, true, MediaType.APPLICATION_XML, MediaType.APPLICATION_XML); + + /** + * The id of the event. 
+ */ + private final Integer id; + /** + * The restore function currently supports these states + */ + private final boolean isSupportedRestoreStatus; + + /** + * Indicates mime type of the current state will be stored in. + * This is generally json or xml and if it is null then the state is not supported. + */ + private final MediaType currentStateFormat; + + /** + * Indicates mime type of the previous state will be stored in. + * This is generally json or xml and if it is null then the state is not supported. + */ + private final MediaType previousStateFormat; + + /** + * Constructor. + * + * @param id the id of the event. + * @param isSupportedRestoreStatus the restore function currently supports these states. + * @param currentStateFormat indicates mime type of the current state will be stored in. + * @param previousStateFormat indicates mime type of the previous state will be stored in. + */ + Events(Integer id, boolean isSupportedRestoreStatus, MediaType currentStateFormat, MediaType previousStateFormat) { + this.id = id; + this.isSupportedRestoreStatus = isSupportedRestoreStatus; + this.currentStateFormat = currentStateFormat; + this.previousStateFormat = previousStateFormat; + } + + /** + * Get the id of the event. + * + * @return the id of the event. + */ + public Integer getId() { + return id; + } + + /** + * Get the event from the id. + * + * @param id the id of the event. + * @return the event. + */ + public static Events fromId(Integer id) { + return Arrays.stream(values()) + .filter(event -> event.getId().equals(id)) + .findFirst() + .orElseThrow(() -> new IllegalArgumentException("No event found with id: " + id)); + } + + /** + * Get the code of the event. + * The code currently is the string representation of the id. + * + * @return the code of the event. + */ + public String getCode() { + return String.valueOf(id); + } + + /** + * Get the event from the code. + * + * @param code the code of the event. + * @return the event. 
+ */ + public static Events fromCode(String code) { + return Arrays.stream(values()) + .filter(event -> event.getCode().equals(code)) + .findFirst() + .orElseThrow(() -> new IllegalArgumentException("No event found with code: " + code)); + } + + /** + * Get the mime type of the current state will be stored in. + * + * @return the mime type of the current state will be stored in. + */ + public MediaType getCurrentStateFormat() { + return currentStateFormat; + } + + /** + * Get the mime type of the previous state will be stored in. + * + * @return the mime type of the previous state will be stored in. + */ + public MediaType getPreviousStateFormat() { + return previousStateFormat; + } + + /** + * Identify if the current status supports restoring the values. + * This is mostly for restoring xml metadata records. + * + * @return true if supported. + */ + public boolean isSupportedRestoreStatus() { + return isSupportedRestoreStatus; + } + + /** + * Get an array of the event that support restoring statuses. + * This is mostly for restoring xml metadata records. + * + * @return list of events with isSupportedRestoreStatus set to true. 
+ */ + public static Events[] getSupportedRestoreStatuses() { + return Arrays.stream(values()) + .filter(Events::isSupportedRestoreStatus) + .toArray(Events[]::new); + } } } diff --git a/domain/src/main/java/org/fao/geonet/domain/auditable/AuditableEntity.java b/domain/src/main/java/org/fao/geonet/domain/auditable/AuditableEntity.java new file mode 100644 index 000000000000..536f12b536d1 --- /dev/null +++ b/domain/src/main/java/org/fao/geonet/domain/auditable/AuditableEntity.java @@ -0,0 +1,100 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ +package org.fao.geonet.domain.auditable; + +import javax.persistence.Column; +import javax.persistence.EntityListeners; +import javax.persistence.MappedSuperclass; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Date; +import org.hibernate.envers.Audited; +import org.springframework.data.annotation.CreatedBy; +import org.springframework.data.annotation.CreatedDate; +import org.springframework.data.annotation.LastModifiedBy; +import org.springframework.data.annotation.LastModifiedDate; +import org.springframework.data.jpa.domain.support.AuditingEntityListener; + +/** + * Base class for auditable entities, providing fields for creation and last modification user / dates. + */ +@MappedSuperclass +@EntityListeners(AuditingEntityListener.class) +@Audited +public abstract class AuditableEntity { + @CreatedBy + @Column(name = "created_by") + @JsonIgnore + protected String createdBy; + + @LastModifiedBy + @Column(name = "last_modified_by", nullable = true, updatable = true) + @JsonIgnore + protected String lastModifiedBy; + + @LastModifiedDate + @Temporal(TemporalType.TIMESTAMP) + @Column(name = "last_modified_date", nullable = true, updatable = true) + @JsonIgnore + protected Date lastModifiedDate; + + @CreatedDate + @Column(name = "created_date") + @Temporal(TemporalType.TIMESTAMP) + @JsonIgnore + private Date createdDate; + + public String getCreatedBy() { + return createdBy; + } + + public void setCreatedBy(String createdBy) { + this.createdBy = createdBy; + } + + public Date getCreatedDate() { + return createdDate; + } + + public void setCreatedDate(Date createdDate) { + this.createdDate = createdDate; + } + + public String getLastModifiedBy() { + return lastModifiedBy; + } + + public void setLastModifiedBy(String lastModifiedBy) { + this.lastModifiedBy = lastModifiedBy; + } + + public Date getLastModifiedDate() { + return 
lastModifiedDate; + } + + public void setLastModifiedDate(Date lastModifiedDate) { + this.lastModifiedDate = lastModifiedDate; + } +} + diff --git a/domain/src/main/java/org/fao/geonet/domain/auditable/UserAuditable.java b/domain/src/main/java/org/fao/geonet/domain/auditable/UserAuditable.java new file mode 100644 index 000000000000..232b8ae3cbf8 --- /dev/null +++ b/domain/src/main/java/org/fao/geonet/domain/auditable/UserAuditable.java @@ -0,0 +1,275 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ +package org.fao.geonet.domain.auditable; + +import javax.annotation.Nonnull; +import javax.persistence.Access; +import javax.persistence.AccessType; +import javax.persistence.Entity; +import javax.persistence.Id; +import java.util.List; +import java.util.Set; +import java.util.TreeSet; +import org.apache.commons.lang.StringUtils; +import org.fao.geonet.domain.Address; +import org.fao.geonet.domain.User; +import org.fao.geonet.domain.UserGroup; +import org.hibernate.envers.Audited; + +/** + * An entity to audit the changes for user entities. + * + * @see org.fao.geonet.domain.User + */ +@Entity +@Access(AccessType.PROPERTY) +@Audited(withModifiedFlag = true) +public class UserAuditable extends AuditableEntity { + + private int id; + private String profile; + private String username; + private String name; + private String surname; + private String emailAddress; + private String organisation; + private String address; + private String city; + private String state; + private String zip; + private String country; + private String kind; + private String groupsRegisteredUser; + private String groupsEditor; + private String groupsReviewer; + private String groupsUserAdmin; + private boolean enabled; + + public static UserAuditable build(User user, List userGroups) { + UserAuditable userAuditable = new UserAuditable(); + + userAuditable.setId(user.getId()); + userAuditable.setUsername(user.getUsername()); + userAuditable.setName(user.getName()); + userAuditable.setSurname(user.getSurname()); + userAuditable.setEnabled(user.isEnabled()); + userAuditable.setKind(user.getKind()); + userAuditable.setOrganisation(user.getOrganisation()); + userAuditable.setProfile(user.getProfile().name()); + if (!user.getEmailAddresses().isEmpty()) { + // A user can have only 1 address defined in the UI. 
+ userAuditable.setEmailAddress((String) user.getEmailAddresses().toArray()[0]); + } + if (!user.getAddresses().isEmpty()) { + // A user can have only 1 address defined in the UI. + Address userAddress = (Address) user.getAddresses().toArray()[0]; + userAuditable.setAddress(userAddress.getAddress()); + userAuditable.setZip(userAddress.getZip()); + userAuditable.setState(userAddress.getState()); + userAuditable.setCity(userAddress.getCity()); + userAuditable.setCountry(userAddress.getCountry()); + } + userAuditable.setEnabled(user.isEnabled()); + + Set groupsRegisteredUserList = new TreeSet<>(); + Set groupsEditorList = new TreeSet<>(); + Set groupsReviewerList = new TreeSet<>(); + Set groupsUserAdminList = new TreeSet<>(); + + // Groups + if (userGroups != null) { + userGroups.forEach(userGroup -> { + switch (userGroup.getProfile()) { + case RegisteredUser: + groupsRegisteredUserList.add(userGroup.getGroup().getName()); + break; + case Editor: + groupsEditorList.add(userGroup.getGroup().getName()); + break; + case Reviewer: + groupsReviewerList.add(userGroup.getGroup().getName()); + break; + case UserAdmin: + groupsUserAdminList.add(userGroup.getGroup().getName()); + break; + default: + break; + } + }); + } + + + userAuditable.setGroupsRegisteredUser(StringUtils.join(groupsRegisteredUserList, ",")); + userAuditable.setGroupsEditor(StringUtils.join(groupsEditorList, ",")); + userAuditable.setGroupsReviewer(StringUtils.join(groupsReviewerList, ",")); + userAuditable.setGroupsUserAdmin(StringUtils.join(groupsUserAdminList, ",")); + + return userAuditable; + } + + @Id + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getProfile() { + return profile; + } + + public void setProfile(String profile) { + this.profile = profile; + } + + @Nonnull + public String getUsername() { + return username; + } + + public void setUsername(@Nonnull String username) { + this.username = username; + } + + public String getName() { 
+ return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getSurname() { + return surname; + } + + public void setSurname(String surname) { + this.surname = surname; + } + + public String getEmailAddress() { + return emailAddress; + } + + public void setEmailAddress(String emailAddresses) { + this.emailAddress = emailAddresses; + } + + public String getOrganisation() { + return organisation; + } + + public void setOrganisation(String organisation) { + this.organisation = organisation; + } + + public String getAddress() { + return address; + } + + public void setAddress(String address) { + this.address = address; + } + + public String getCity() { + return city; + } + + public void setCity(String city) { + this.city = city; + } + + public String getState() { + return state; + } + + public void setState(String state) { + this.state = state; + } + + public String getZip() { + return zip; + } + + public void setZip(String zip) { + this.zip = zip; + } + + public String getCountry() { + return country; + } + + public void setCountry(String country) { + this.country = country; + } + + public String getKind() { + return kind; + } + + public void setKind(String kind) { + this.kind = kind; + } + + public String getGroupsRegisteredUser() { + return groupsRegisteredUser; + } + + public void setGroupsRegisteredUser(String groupsRegisteredUser) { + this.groupsRegisteredUser = groupsRegisteredUser; + } + + public String getGroupsEditor() { + return groupsEditor; + } + + public void setGroupsEditor(String groupsEditor) { + this.groupsEditor = groupsEditor; + } + + public String getGroupsReviewer() { + return groupsReviewer; + } + + public void setGroupsReviewer(String groupsReviewer) { + this.groupsReviewer = groupsReviewer; + } + + public String getGroupsUserAdmin() { + return groupsUserAdmin; + } + + public void setGroupsUserAdmin(String groupsUserAdmin) { + this.groupsUserAdmin = groupsUserAdmin; + } + + public boolean isEnabled() { + 
return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } +} diff --git a/domain/src/main/java/org/fao/geonet/domain/page/Page.java b/domain/src/main/java/org/fao/geonet/domain/page/Page.java index fb3247a4308f..563e58663fa3 100644 --- a/domain/src/main/java/org/fao/geonet/domain/page/Page.java +++ b/domain/src/main/java/org/fao/geonet/domain/page/Page.java @@ -23,10 +23,13 @@ package org.fao.geonet.domain.page; import java.io.Serializable; +import java.util.LinkedHashSet; import java.util.List; +import java.util.Set; import javax.annotation.Nullable; import javax.persistence.Basic; +import javax.persistence.CascadeType; import javax.persistence.CollectionTable; import javax.persistence.Column; import javax.persistence.ElementCollection; @@ -35,10 +38,14 @@ import javax.persistence.EnumType; import javax.persistence.Enumerated; import javax.persistence.FetchType; +import javax.persistence.JoinColumn; +import javax.persistence.JoinTable; import javax.persistence.Lob; +import javax.persistence.ManyToMany; import javax.persistence.Table; import org.fao.geonet.domain.GeonetEntity; +import org.fao.geonet.domain.Group; import org.hibernate.annotations.Type; /** @@ -56,6 +63,7 @@ public class Page extends GeonetEntity implements Serializable { private PageFormat format; private List sections; private PageStatus status; + private Set groups = new LinkedHashSet<>(); private String label; private String icon; @@ -64,7 +72,7 @@ public Page() { } - public Page(PageIdentity pageIdentity, byte[] data, String link, PageFormat format, List sections, PageStatus status, String label, String icon) { + public Page(PageIdentity pageIdentity, byte[] data, String link, PageFormat format, List sections, PageStatus status, String label, String icon, Set groups) { super(); this.pageIdentity = pageIdentity; this.data = data; @@ -74,10 +82,11 @@ public Page(PageIdentity pageIdentity, byte[] data, String link, PageFormat form this.status = status; 
this.label = label; this.icon = icon; + this.groups = groups; } public enum PageStatus { - PUBLIC, PUBLIC_ONLY, PRIVATE, HIDDEN; + PUBLIC, PUBLIC_ONLY, GROUPS, PRIVATE, HIDDEN; } public enum PageFormat { @@ -146,6 +155,28 @@ public String getIcon() { return icon; } + /** + * Get all the page's groups. + * + * @return all the page's groups. + */ + @ManyToMany(fetch = FetchType.EAGER, cascade = {CascadeType.DETACH, CascadeType.PERSIST, CascadeType.REFRESH}) + @JoinTable(name = "spg_page_group", joinColumns = {@JoinColumn(name = "language"), @JoinColumn(name = "linktext")}, + inverseJoinColumns = {@JoinColumn(name = "groupid", referencedColumnName = "id", unique = false)}) + public Set getGroups() { + return groups; + } + + /** + * Set all the page's groups. + * + * @param groups all the page's groups. + * @return this group object + */ + public void setGroups(Set groups) { + this.groups = groups; + } + public void setPageIdentity(PageIdentity pageIdentity) { this.pageIdentity = pageIdentity; } diff --git a/domain/src/main/java/org/fao/geonet/entitylistener/DoiServerEntityListenerManager.java b/domain/src/main/java/org/fao/geonet/entitylistener/DoiServerEntityListenerManager.java new file mode 100644 index 000000000000..8d4af1bdf927 --- /dev/null +++ b/domain/src/main/java/org/fao/geonet/entitylistener/DoiServerEntityListenerManager.java @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.entitylistener; + +import org.fao.geonet.domain.DoiServer; + +import javax.persistence.*; + +public class DoiServerEntityListenerManager extends AbstractEntityListenerManager { + @PrePersist + public void prePresist(final DoiServer entity) { + handleEvent(PersistentEventType.PrePersist, entity); + } + + @PreRemove + public void preRemove(final DoiServer entity) { + handleEvent(PersistentEventType.PreRemove, entity); + } + + @PostPersist + public void postPersist(final DoiServer entity) { + handleEvent(PersistentEventType.PostPersist, entity); + } + + @PostRemove + public void postRemove(final DoiServer entity) { + handleEvent(PersistentEventType.PostRemove, entity); + } + + @PreUpdate + public void preUpdate(final DoiServer entity) { + handleEvent(PersistentEventType.PreUpdate, entity); + } + + @PostUpdate + public void postUpdate(final DoiServer entity) { + handleEvent(PersistentEventType.PostUpdate, entity); + } + + @PostLoad + public void postLoad(final DoiServer entity) { + handleEvent(PersistentEventType.PostLoad, entity); + } +} diff --git a/domain/src/main/java/org/fao/geonet/repository/BaseAuditableRepository.java b/domain/src/main/java/org/fao/geonet/repository/BaseAuditableRepository.java new file mode 100644 index 000000000000..0a7cb21747af --- /dev/null +++ b/domain/src/main/java/org/fao/geonet/repository/BaseAuditableRepository.java @@ -0,0 
+1,31 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ +package org.fao.geonet.repository; + +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.repository.NoRepositoryBean; +import org.springframework.data.repository.history.RevisionRepository; + +@NoRepositoryBean +public interface BaseAuditableRepository extends RevisionRepository, JpaRepository { +} diff --git a/domain/src/main/java/org/fao/geonet/repository/DoiServerRepository.java b/domain/src/main/java/org/fao/geonet/repository/DoiServerRepository.java new file mode 100644 index 000000000000..25ca32429ce0 --- /dev/null +++ b/domain/src/main/java/org/fao/geonet/repository/DoiServerRepository.java @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ + +package org.fao.geonet.repository; + +import org.fao.geonet.domain.DoiServer; +import org.springframework.data.jpa.repository.JpaSpecificationExecutor; + +import java.util.Optional; + +public interface DoiServerRepository extends + GeonetRepository, + JpaSpecificationExecutor { + + Optional findOneById(int id); +} diff --git a/domain/src/main/java/org/fao/geonet/repository/GroupRepository.java b/domain/src/main/java/org/fao/geonet/repository/GroupRepository.java index 617de58a3dc4..5faa3f84f127 100644 --- a/domain/src/main/java/org/fao/geonet/repository/GroupRepository.java +++ b/domain/src/main/java/org/fao/geonet/repository/GroupRepository.java @@ -52,6 +52,14 @@ public interface GroupRepository extends GeonetRepository, Group @Nullable Group findByEmail(@Nonnull String email); + /** + * Find all groups with a minimumProfileForPrivileges not equal to null. + * These groups are "restricted". + * + * @return a list of groups with a minimumProfileForPrivileges not equal to null + */ + @Nullable + List findByMinimumProfileForPrivilegesNotNull(); public @Nullable diff --git a/domain/src/main/java/org/fao/geonet/repository/MetadataDraftRepository.java b/domain/src/main/java/org/fao/geonet/repository/MetadataDraftRepository.java index b382a4dd876e..9e226a86a329 100644 --- a/domain/src/main/java/org/fao/geonet/repository/MetadataDraftRepository.java +++ b/domain/src/main/java/org/fao/geonet/repository/MetadataDraftRepository.java @@ -30,6 +30,8 @@ import org.fao.geonet.domain.MetadataDraft; import org.springframework.data.jpa.repository.JpaSpecificationExecutor; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; /** * Data Access object for the {@link MetadataDraft} entities. @@ -68,4 +70,51 @@ public interface MetadataDraftRepository */ @Nonnull List findAllByHarvestInfo_Uuid(@Nonnull String uuid); + + /** + * Get the metadata after performing a search and replace on it. 
+ * @param uuid The UUID of the metadata to search for. + * @param search The string to search for. + * @param replace The string to replace the search string with. + * @return The metadata with the search and replace applied. + */ + @Query(value = "SELECT replace(data, :search, :replace) FROM MetadataDraft m " + + "WHERE uuid = :uuid", + nativeQuery = true) + String selectOneWithSearchAndReplace( + @Param("uuid") String uuid, + @Param("search") String search, + @Param("replace") String replace); + + /** + * Get the metadata after performing a regex search and replace on it. + * @param uuid The UUID of the metadata to search for. + * @param search The string to search for. + * @param replace The string to replace the search string with. + * @return The metadata with the search and replace applied. + */ + @Query(value = "SELECT regexp_replace(data, :pattern, :replace) FROM MetadataDraft m " + + "WHERE uuid = :uuid", + nativeQuery = true) + String selectOneWithRegexSearchAndReplace( + @Param("uuid") String uuid, + @Param("pattern") String search, + @Param("replace") String replace); + + /** + * Get the metadata after performing a regex search and replace on it with regex flags. + * @param uuid The UUID of the metadata to search for. + * @param search The string to search for. + * @param replace The string to replace the search string with. + * @param flags The regex flags to use. + * @return The metadata with the search and replace applied. 
+ */ + @Query(value = "SELECT regexp_replace(data, :pattern, :replace, :flags) FROM MetadataDraft m " + + "WHERE uuid = :uuid", + nativeQuery = true) + String selectOneWithRegexSearchAndReplaceWithFlags( + @Param("uuid") String uuid, + @Param("pattern") String search, + @Param("replace") String replace, + @Param("flags") String flags); } diff --git a/domain/src/main/java/org/fao/geonet/repository/MetadataRepository.java b/domain/src/main/java/org/fao/geonet/repository/MetadataRepository.java index 3a8fefb49550..eba9c43f0ee3 100644 --- a/domain/src/main/java/org/fao/geonet/repository/MetadataRepository.java +++ b/domain/src/main/java/org/fao/geonet/repository/MetadataRepository.java @@ -76,8 +76,13 @@ public interface MetadataRepository extends GeonetRepository, @Nonnull List findAllByHarvestInfo_Uuid(@Nonnull String uuid); - - + /** + * Get the metadata after performing a search and replace on it. + * @param uuid The UUID of the metadata to search for. + * @param search The string to search for. + * @param replace The string to replace the search string with. + * @return The metadata with the search and replace applied. + */ @Query(value = "SELECT replace(data, :search, :replace) FROM metadata m " + "WHERE uuid = :uuid", nativeQuery = true) @@ -86,6 +91,13 @@ String selectOneWithSearchAndReplace( @Param("search") String search, @Param("replace") String replace); + /** + * Get the metadata after performing a regex search and replace on it. + * @param uuid The UUID of the metadata to search for. + * @param search The string to search for. + * @param replace The string to replace the search string with. + * @return The metadata with the search and replace applied. 
+ */ @Query(value = "SELECT regexp_replace(data, :pattern, :replace) FROM metadata m " + "WHERE uuid = :uuid", nativeQuery = true) @@ -94,6 +106,14 @@ String selectOneWithRegexSearchAndReplace( @Param("pattern") String search, @Param("replace") String replace); + /** + * Get the metadata after performing a regex search and replace on it with regex flags. + * @param uuid The UUID of the metadata to search for. + * @param search The string to search for. + * @param replace The string to replace the search string with. + * @param flags The regex flags to use. + * @return The metadata with the search and replace applied. + */ @Query(value = "SELECT regexp_replace(data, :pattern, :replace, :flags) FROM metadata m " + "WHERE uuid = :uuid", nativeQuery = true) diff --git a/domain/src/main/java/org/fao/geonet/repository/UserAuditableRepository.java b/domain/src/main/java/org/fao/geonet/repository/UserAuditableRepository.java new file mode 100644 index 000000000000..86ec4cc8f4ea --- /dev/null +++ b/domain/src/main/java/org/fao/geonet/repository/UserAuditableRepository.java @@ -0,0 +1,32 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ +package org.fao.geonet.repository; + +import org.fao.geonet.domain.auditable.UserAuditable; + +/** + * Data Access object for accessing {@link UserAuditable} entities. + */ +public interface UserAuditableRepository extends BaseAuditableRepository { + +} diff --git a/domain/src/main/java/org/fao/geonet/repository/UserRepository.java b/domain/src/main/java/org/fao/geonet/repository/UserRepository.java index feaf720afb6a..b5ac5138653d 100644 --- a/domain/src/main/java/org/fao/geonet/repository/UserRepository.java +++ b/domain/src/main/java/org/fao/geonet/repository/UserRepository.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -45,6 +45,10 @@ public interface UserRepository extends GeonetRepository, JpaSpec /** * Find all users identified by the provided username ignoring the case. + * + * Old versions allowed to create users with the same username with different case. + * New versions do not allow this. + * * @param username the username. * @return all users with username equals ignore case the provided username. 
*/ diff --git a/domain/src/main/java/org/fao/geonet/repository/UserRepositoryCustom.java b/domain/src/main/java/org/fao/geonet/repository/UserRepositoryCustom.java index 65e3162a22e0..21148980e141 100644 --- a/domain/src/main/java/org/fao/geonet/repository/UserRepositoryCustom.java +++ b/domain/src/main/java/org/fao/geonet/repository/UserRepositoryCustom.java @@ -61,7 +61,7 @@ public interface UserRepositoryCustom { */ @Nonnull List> findAllByGroupOwnerNameAndProfile(@Nonnull Collection metadataIds, - @Nullable Profile profil); + @Nullable Profile profile); /** * Find all the users that own at least one metadata element. diff --git a/domain/src/main/java/org/fao/geonet/repository/UserRepositoryCustomImpl.java b/domain/src/main/java/org/fao/geonet/repository/UserRepositoryCustomImpl.java index e5f1efa11662..4585548d9fef 100644 --- a/domain/src/main/java/org/fao/geonet/repository/UserRepositoryCustomImpl.java +++ b/domain/src/main/java/org/fao/geonet/repository/UserRepositoryCustomImpl.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -25,7 +25,6 @@ import org.fao.geonet.domain.*; import org.fao.geonet.utils.Log; -import org.springframework.data.domain.Sort; import org.springframework.data.jpa.domain.Specification; import javax.annotation.Nonnull; @@ -48,66 +47,83 @@ public class UserRepositoryCustomImpl implements UserRepositoryCustom { @PersistenceContext - private EntityManager _entityManager; + private EntityManager entityManager; @Override public User findOne(final String userId) { - return _entityManager.find(User.class, Integer.valueOf(userId)); + return entityManager.find(User.class, Integer.valueOf(userId)); } @Override - public User findOneByEmail(final String email) { - CriteriaBuilder cb = 
_entityManager.getCriteriaBuilder(); + public User findOneByEmail(@Nonnull final String email) { + CriteriaBuilder cb = entityManager.getCriteriaBuilder(); CriteriaQuery query = cb.createQuery(User.class); Root root = query.from(User.class); + Join joinedEmailAddresses = root.join(User_.emailAddresses); - query.where(cb.isMember(email, root.get(User_.emailAddresses))); - final List resultList = _entityManager.createQuery(query).getResultList(); + // Case-insensitive email search + query.where(cb.equal(cb.lower(joinedEmailAddresses), email.toLowerCase())); + query.orderBy(cb.asc(root.get(User_.username))); + final List resultList = entityManager.createQuery(query).getResultList(); if (resultList.isEmpty()) { return null; } if (resultList.size() > 1) { - Log.error(Constants.DOMAIN_LOG_MODULE, "The database is inconsistent. There are multiple users with the email address: " + - email); + Log.error(Constants.DOMAIN_LOG_MODULE, String.format("The database is inconsistent. There are multiple users with the email address: %s", + email)); } return resultList.get(0); } @Override - public User findOneByEmailAndSecurityAuthTypeIsNullOrEmpty(final String email) { - CriteriaBuilder cb = _entityManager.getCriteriaBuilder(); + public User findOneByEmailAndSecurityAuthTypeIsNullOrEmpty(@Nonnull final String email) { + CriteriaBuilder cb = entityManager.getCriteriaBuilder(); CriteriaQuery query = cb.createQuery(User.class); Root root = query.from(User.class); + Join joinedEmailAddresses = root.join(User_.emailAddresses); final Path authTypePath = root.get(User_.security).get(UserSecurity_.authType); query.where(cb.and( - cb.isMember(email, root.get(User_.emailAddresses)), - cb.or(cb.isNull(authTypePath), cb.equal(cb.trim(authTypePath), "")))); - List results = _entityManager.createQuery(query).getResultList(); + // Case-insensitive email search + cb.equal(cb.lower(joinedEmailAddresses), email.toLowerCase()), + cb.or(cb.isNull(authTypePath), cb.equal(cb.trim(authTypePath), ""))) + 
).orderBy(cb.asc(root.get(User_.username))); + List results = entityManager.createQuery(query).getResultList(); if (results.isEmpty()) { return null; } else { + if (results.size() > 1) { + Log.error(Constants.DOMAIN_LOG_MODULE, String.format("The database is inconsistent. There are multiple users with the email address: %s", + email)); + } return results.get(0); } } @Override - public User findOneByUsernameAndSecurityAuthTypeIsNullOrEmpty(final String username) { - CriteriaBuilder cb = _entityManager.getCriteriaBuilder(); + public User findOneByUsernameAndSecurityAuthTypeIsNullOrEmpty(@Nonnull final String username) { + CriteriaBuilder cb = entityManager.getCriteriaBuilder(); CriteriaQuery query = cb.createQuery(User.class); Root root = query.from(User.class); final Path authTypePath = root.get(User_.security).get(UserSecurity_.authType); final Path usernamePath = root.get(User_.username); - query.where(cb.and(cb.equal(usernamePath, username), cb.or(cb.isNull(authTypePath), cb.equal(cb.trim(authTypePath), "")))); - List results = _entityManager.createQuery(query).getResultList(); - + // Case-insensitive username search + query.where(cb.and( + cb.equal(cb.lower(usernamePath), username.toLowerCase()), + cb.or(cb.isNull(authTypePath), cb.equal(cb.trim(authTypePath), ""))) + ).orderBy(cb.asc(root.get(User_.username))); + List results = entityManager.createQuery(query).getResultList(); if (results.isEmpty()) { return null; } else { + if (results.size() > 1) { + Log.error(Constants.DOMAIN_LOG_MODULE, String.format("The database is inconsistent. 
There are multiple users with username: %s", + username)); + } return results.get(0); } } @@ -115,7 +131,7 @@ public User findOneByUsernameAndSecurityAuthTypeIsNullOrEmpty(final String usern @Nonnull @Override public List findDuplicatedUsernamesCaseInsensitive() { - CriteriaBuilder cb = _entityManager.getCriteriaBuilder(); + CriteriaBuilder cb = entityManager.getCriteriaBuilder(); CriteriaQuery query = cb.createQuery(String.class); Root userRoot = query.from(User.class); @@ -123,14 +139,14 @@ public List findDuplicatedUsernamesCaseInsensitive() { query.groupBy(cb.lower(userRoot.get(User_.username))); query.having(cb.gt(cb.count(userRoot), 1)); - return _entityManager.createQuery(query).getResultList(); + return entityManager.createQuery(query).getResultList(); } @Override @Nonnull public List> findAllByGroupOwnerNameAndProfile(@Nonnull final Collection metadataIds, @Nullable final Profile profile) { - List> results = new ArrayList>(); + List> results = new ArrayList<>(); results.addAll(findAllByGroupOwnerNameAndProfileInternal(metadataIds, profile, false)); results.addAll(findAllByGroupOwnerNameAndProfileInternal(metadataIds, profile, true)); @@ -139,8 +155,8 @@ public List> findAllByGroupOwnerNameAndProfile(@Nonnull fina } private List> findAllByGroupOwnerNameAndProfileInternal(@Nonnull final Collection metadataIds, - @Nullable final Profile profile, boolean draft) { - CriteriaBuilder cb = _entityManager.getCriteriaBuilder(); + @Nullable final Profile profile, boolean draft) { + CriteriaBuilder cb = entityManager.getCriteriaBuilder(); CriteriaQuery query = cb.createQuery(Tuple.class); Root userRoot = query.from(User.class); @@ -148,22 +164,20 @@ private List> findAllByGroupOwnerNameAndProfileInternal(@Non Predicate metadataPredicate; Predicate ownerPredicate; - Root metadataRoot = null; - Root metadataDraftRoot = null; if (!draft) { - metadataRoot = query.from(Metadata.class); + Root metadataRoot = query.from(Metadata.class); 
query.multiselect(metadataRoot.get(Metadata_.id), userRoot); metadataPredicate = metadataRoot.get(Metadata_.id).in(metadataIds); ownerPredicate = cb.equal(metadataRoot.get(Metadata_.sourceInfo).get(MetadataSourceInfo_.groupOwner), userGroupRoot.get(UserGroup_.id).get(UserGroupId_.groupId)); } else { - metadataDraftRoot = query.from(MetadataDraft.class); - query.multiselect(metadataDraftRoot.get(MetadataDraft_.id), userRoot); - metadataPredicate = metadataDraftRoot.get(Metadata_.id).in(metadataIds); + Root metadataRoot = query.from(MetadataDraft.class); + query.multiselect(metadataRoot.get(MetadataDraft_.id), userRoot); + metadataPredicate = metadataRoot.get(MetadataDraft_.id).in(metadataIds); - ownerPredicate = cb.equal(metadataDraftRoot.get(Metadata_.sourceInfo).get(MetadataSourceInfo_.groupOwner), + ownerPredicate = cb.equal(metadataRoot.get(MetadataDraft_.sourceInfo).get(MetadataSourceInfo_.groupOwner), userGroupRoot.get(UserGroup_.id).get(UserGroupId_.groupId)); } @@ -180,9 +194,9 @@ private List> findAllByGroupOwnerNameAndProfileInternal(@Non query.distinct(true); - List> results = new ArrayList>(); + List> results = new ArrayList<>(); - for (Tuple result : _entityManager.createQuery(query).getResultList()) { + for (Tuple result : entityManager.createQuery(query).getResultList()) { Integer mdId = (Integer) result.get(0); User user = (User) result.get(1); results.add(Pair.read(mdId, user)); @@ -193,7 +207,7 @@ private List> findAllByGroupOwnerNameAndProfileInternal(@Non @Nonnull @Override public List findAllUsersThatOwnMetadata() { - final CriteriaBuilder cb = _entityManager.getCriteriaBuilder(); + final CriteriaBuilder cb = entityManager.getCriteriaBuilder(); final CriteriaQuery query = cb.createQuery(User.class); final Root metadataRoot = query.from(Metadata.class); @@ -206,13 +220,13 @@ public List findAllUsersThatOwnMetadata() { query.where(ownerExpression); query.distinct(true); - return _entityManager.createQuery(query).getResultList(); + return 
entityManager.createQuery(query).getResultList(); } @Nonnull @Override public List findAllUsersInUserGroups(@Nonnull final Specification userGroupSpec) { - final CriteriaBuilder cb = _entityManager.getCriteriaBuilder(); + final CriteriaBuilder cb = entityManager.getCriteriaBuilder(); final CriteriaQuery query = cb.createQuery(User.class); final Root userGroupRoot = query.from(UserGroup.class); @@ -225,7 +239,7 @@ public List findAllUsersInUserGroups(@Nonnull final Specification { - + List findByPageIdentityLanguage(String language); + List findPageByStatus(Page.PageStatus status); + } diff --git a/domain/src/main/java/org/fao/geonet/repository/specification/UserGroupSpecs.java b/domain/src/main/java/org/fao/geonet/repository/specification/UserGroupSpecs.java index 5d6c5d265687..b07e3304dd43 100644 --- a/domain/src/main/java/org/fao/geonet/repository/specification/UserGroupSpecs.java +++ b/domain/src/main/java/org/fao/geonet/repository/specification/UserGroupSpecs.java @@ -38,6 +38,7 @@ import javax.persistence.criteria.Root; import java.util.HashSet; import java.util.List; +import java.util.Set; public final class UserGroupSpecs { @@ -84,6 +85,12 @@ public Predicate toPredicate(Root root, CriteriaQuery query, Crite }; } + /** + * Specification for retrieving all the userGroups with a given profile. + * + * @param profile The {@link Profile} to filter the userGroups. + * @return the query. + */ public static Specification hasProfile(final Profile profile) { return new Specification() { @Override @@ -95,6 +102,23 @@ public Predicate toPredicate(Root root, CriteriaQuery query, Crite }; } + /** + * Specification for retrieving all the userGroups with a {@link Profile} in a given set of profiles. + * + * @param profiles The {@link Set} of {@link Profile} to filter the userGroups. + * @return the query. 
+ */ + public static Specification hasProfileIn(final Set profiles) { + return new Specification() { + @Override + public Predicate toPredicate(Root root, CriteriaQuery query, CriteriaBuilder cb) { + Path profileIdAttributePath = root.get(UserGroup_.id).get(UserGroupId_.profile); + Predicate profileIdInPredicate = profileIdAttributePath.in(profiles); + return profileIdInPredicate; + } + }; + } + /** * Specification for retrieving all the userGroups of a given user with a given profile. * diff --git a/domain/src/main/resources/config-spring-geonetwork.xml b/domain/src/main/resources/config-spring-geonetwork.xml index 23f50b235b0a..f9f171e0802c 100644 --- a/domain/src/main/resources/config-spring-geonetwork.xml +++ b/domain/src/main/resources/config-spring-geonetwork.xml @@ -33,7 +33,8 @@ + transaction-manager-ref="transactionManager" + factory-class="org.springframework.data.envers.repository.support.EnversRevisionRepositoryFactoryBean"/> @@ -69,4 +70,9 @@ class="org.springframework.orm.jpa.JpaTransactionManager"> + + + + + diff --git a/domain/src/test/java/org/fao/geonet/repository/DoiServerRepositoryTest.java b/domain/src/test/java/org/fao/geonet/repository/DoiServerRepositoryTest.java new file mode 100644 index 000000000000..bc8daaf4bb61 --- /dev/null +++ b/domain/src/test/java/org/fao/geonet/repository/DoiServerRepositoryTest.java @@ -0,0 +1,142 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.repository; + +import org.fao.geonet.domain.DoiServer; +import org.fao.geonet.domain.Group; +import org.jasypt.encryption.pbe.StandardPBEStringEncryptor; +import org.jasypt.hibernate5.encryptor.HibernatePBEEncryptorRegistry; +import org.junit.BeforeClass; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.junit.Assert.assertEquals; + +public class DoiServerRepositoryTest extends AbstractSpringDataTest { + + @Autowired + private DoiServerRepository doiServerRepository; + + @Autowired + private GroupRepository groupRepository; + + @PersistenceContext + EntityManager entityManager; + + @BeforeClass + public static void init() { + StandardPBEStringEncryptor strongEncryptor = new StandardPBEStringEncryptor(); + strongEncryptor.setPassword("testpassword"); + + HibernatePBEEncryptorRegistry registry = + HibernatePBEEncryptorRegistry.getInstance(); + registry.registerPBEStringEncryptor("STRING_ENCRYPTOR", strongEncryptor); + } + + public static DoiServer newDoiServer(AtomicInteger nextId) { + int id = nextId.incrementAndGet(); + return new DoiServer() + .setName("Name " + id) + .setDescription("Desc " + id) + .setUrl("http://server" + id) + 
.setUsername("username" + id) + .setPassword("password" + id) + .setLandingPageTemplate("http://landingpage" + id) + .setPublicUrl("http://publicurl" + id) + .setPattern("pattern" + id) + .setPrefix("prefix" + id); + } + + @Test + public void test_Save_Count_FindOnly_DeleteAll() throws Exception { + assertEquals(0, doiServerRepository.count()); + DoiServer doiServer = newDoiServer(); + DoiServer savedDoiServer = doiServerRepository.save(doiServer); + + doiServerRepository.flush(); + entityManager.flush(); + entityManager.clear(); + + doiServer.setId(savedDoiServer.getId()); + assertEquals(1, doiServerRepository.count()); + Optional retrievedDoiServerByIdOpt = doiServerRepository.findOneById(doiServer.getId()); + assertEquals(true, retrievedDoiServerByIdOpt.isPresent()); + assertSameContents(doiServer, retrievedDoiServerByIdOpt.get()); + + doiServerRepository.deleteAll(); + + doiServerRepository.flush(); + entityManager.flush(); + entityManager.clear(); + + assertEquals(0, doiServerRepository.count()); + } + + @Test + public void testUpdate() throws Exception { + Group group1 = groupRepository.save(GroupRepositoryTest.newGroup(_inc)); + Group group2 = groupRepository.save(GroupRepositoryTest.newGroup(_inc)); + + assertEquals(0, doiServerRepository.count()); + DoiServer doiServer = newDoiServer(); + doiServer.getPublicationGroups().add(group1); + + DoiServer savedDoiServer = doiServerRepository.save(doiServer); + + doiServerRepository.flush(); + entityManager.flush(); + entityManager.clear(); + + doiServer.setId(savedDoiServer.getId()); + + assertEquals(1, doiServerRepository.count()); + Optional retrievedDoiServerByIdOpt = doiServerRepository.findOneById(doiServer.getId()); + assertEquals(true, retrievedDoiServerByIdOpt.isPresent()); + assertSameContents(doiServer, retrievedDoiServerByIdOpt.get()); + + doiServer.setName("New Name"); + doiServer.getPublicationGroups().add(group2); + DoiServer savedDoiServer2 = doiServerRepository.save(doiServer); + + 
doiServerRepository.flush(); + entityManager.flush(); + entityManager.clear(); + + assertSameContents(savedDoiServer, savedDoiServer2); + + assertEquals(1, doiServerRepository.count()); + retrievedDoiServerByIdOpt = doiServerRepository.findOneById(doiServer.getId()); + assertSameContents(doiServer, retrievedDoiServerByIdOpt.get()); + } + + + private DoiServer newDoiServer() { + return newDoiServer(_inc); + } +} diff --git a/domain/src/test/java/org/fao/geonet/repository/GroupRepositoryTest.java b/domain/src/test/java/org/fao/geonet/repository/GroupRepositoryTest.java index a7be16cc8225..f45f0b4c1bcc 100644 --- a/domain/src/test/java/org/fao/geonet/repository/GroupRepositoryTest.java +++ b/domain/src/test/java/org/fao/geonet/repository/GroupRepositoryTest.java @@ -25,6 +25,7 @@ import org.fao.geonet.domain.Group; +import org.fao.geonet.domain.Profile; import org.fao.geonet.domain.ReservedGroup; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; @@ -131,6 +132,20 @@ public void testFindByEmail() throws Exception { assertNull(_repo.findByEmail("some wrong email")); } + @Test + public void testFindByMinimumProfileForPrivilegesNotNull() throws Exception { + Group savedGroup = _repo.save(newGroup().setMinimumProfileForPrivileges(Profile.Reviewer)); + Group savedGroup2 = _repo.save(newGroup()); + + _repo.flush(); + _entityManager.flush(); + _entityManager.clear(); + + List groups = _repo.findByMinimumProfileForPrivilegesNotNull(); + assertEquals(1, groups.size()); + assertSameContents(savedGroup, groups.get(0)); + } + @Test public void testFindReservedGroup() throws Exception { Group savedGroup = _repo.save(ReservedGroup.all.getGroupEntityTemplate()); diff --git a/domain/src/test/java/org/fao/geonet/repository/UserRepositoryTest.java b/domain/src/test/java/org/fao/geonet/repository/UserRepositoryTest.java index a6e1ebb6dcae..66528b2e4ca0 100644 --- a/domain/src/test/java/org/fao/geonet/repository/UserRepositoryTest.java +++ 
b/domain/src/test/java/org/fao/geonet/repository/UserRepositoryTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -29,35 +29,34 @@ import org.fao.geonet.domain.*; import org.fao.geonet.repository.specification.UserGroupSpecs; import org.hamcrest.CoreMatchers; +import org.hamcrest.MatcherAssert; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.data.domain.Sort; import org.springframework.data.jpa.domain.Specification; + import javax.annotation.Nullable; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import java.util.Arrays; -import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; -import static junit.framework.Assert.assertNull; import static org.junit.Assert.*; public class UserRepositoryTest extends AbstractSpringDataTest { @Autowired - UserGroupRepository _userGroupRepository; + UserGroupRepository userGroupRepository; @Autowired - MetadataRepository _metadataRepo; + MetadataRepository metadataRepo; @Autowired - GroupRepository _groupRepo; + GroupRepository groupRepo; @Autowired - UserRepository _userRepo; + UserRepository userRepo; @PersistenceContext - private EntityManager _entityManager; + private EntityManager entityManager; public static User newUser(AtomicInteger inc) { String val = String.format("%04d", inc.incrementAndGet()); @@ -73,15 +72,15 @@ public void testNodeIdIsSetOnLoad() { assertNull(user.getSecurity().getNodeId()); - _userRepo.save(user); + userRepo.save(user); // save sets the nodeId assertNodeId(user); // loading should also set nodeid - assertNodeId(_userRepo.findAll().get(0)); - 
assertNodeId(_userRepo.findById(user.getId()).get()); - assertNodeId(_userRepo.findOneByUsername(user.getUsername())); - assertNodeId(_userRepo.findOneByEmail(user.getEmail())); - assertNodeId(_userRepo.findAllByProfile(user.getProfile()).get(0)); + assertNodeId(userRepo.findAll().get(0)); + assertNodeId(userRepo.findById(user.getId()).get()); + assertNodeId(userRepo.findOneByUsername(user.getUsername())); + assertNodeId(userRepo.findOneByEmail(user.getEmail())); + assertNodeId(userRepo.findAllByProfile(user.getProfile()).get(0)); } @@ -89,8 +88,8 @@ private void assertNodeId(User loaded4) { String testNodeId = "testNodeId"; assertEquals(testNodeId, loaded4.getSecurity().getNodeId()); loaded4.getSecurity().setNodeId(null); - _entityManager.flush(); - _entityManager.clear(); + entityManager.flush(); + entityManager.clear(); } @Test @@ -100,28 +99,33 @@ public void testFindByEmailAddress() { String add1b = "add1b"; user1.getEmailAddresses().add(add1); user1.getEmailAddresses().add(add1b); - user1 = _userRepo.save(user1); + user1 = userRepo.save(user1); User user2 = newUser(); String add2 = "add2"; String add2b = "add2b"; user2.getEmailAddresses().add(add2); user2.getEmailAddresses().add(add2b); - user2 = _userRepo.save(user2); + user2 = userRepo.save(user2); - User foundUser = _userRepo.findOneByEmail(add1); + User foundUser = userRepo.findOneByEmail(add1); assertNotNull(foundUser); assertEquals(user1.getId(), foundUser.getId()); - foundUser = _userRepo.findOneByEmail(add1b); + foundUser = userRepo.findOneByEmail(add1b); assertNotNull(foundUser); assertEquals(user1.getId(), foundUser.getId()); - foundUser = _userRepo.findOneByEmail(add2b); + foundUser = userRepo.findOneByEmail(add2b); assertNotNull(foundUser); assertEquals(user2.getId(), foundUser.getId()); - foundUser = _userRepo.findOneByEmail("xjkjk"); + // Test case-insensitive + foundUser = userRepo.findOneByEmail(add2b.toUpperCase()); + assertNotNull(foundUser); + assertEquals(user2.getId(), foundUser.getId()); 
+ + foundUser = userRepo.findOneByEmail("xjkjk"); assertNull(foundUser); } @@ -129,73 +133,114 @@ public void testFindByEmailAddress() { public void testFindByUsernameAndAuthTypeIsNullOrEmpty() { User user1 = newUser(); user1.getSecurity().setAuthType(""); - user1 = _userRepo.save(user1); + user1 = userRepo.save(user1); User user2 = newUser(); user2.getSecurity().setAuthType(null); - user2 = _userRepo.save(user2); + user2 = userRepo.save(user2); User user3 = newUser(); user3.getSecurity().setAuthType("nonull"); - _userRepo.save(user3); + userRepo.save(user3); - User foundUser = _userRepo.findOneByUsernameAndSecurityAuthTypeIsNullOrEmpty(user1.getUsername()); + User foundUser = userRepo.findOneByUsernameAndSecurityAuthTypeIsNullOrEmpty(user1.getUsername()); assertNotNull(foundUser); assertEquals(user1.getId(), foundUser.getId()); - foundUser = _userRepo.findOneByUsernameAndSecurityAuthTypeIsNullOrEmpty(user2.getUsername()); + foundUser = userRepo.findOneByUsernameAndSecurityAuthTypeIsNullOrEmpty(user2.getUsername()); assertNotNull(foundUser); assertEquals(user2.getId(), foundUser.getId()); - foundUser = _userRepo.findOneByUsernameAndSecurityAuthTypeIsNullOrEmpty(user3.getUsername()); + foundUser = userRepo.findOneByUsernameAndSecurityAuthTypeIsNullOrEmpty(user3.getUsername()); assertNull(foundUser); - foundUser = _userRepo.findOneByUsernameAndSecurityAuthTypeIsNullOrEmpty("blarg"); + // Test case-insensitive + foundUser = userRepo.findOneByUsernameAndSecurityAuthTypeIsNullOrEmpty(user3.getUsername().toUpperCase()); + assertNull(foundUser); + + foundUser = userRepo.findOneByUsernameAndSecurityAuthTypeIsNullOrEmpty("blarg"); + assertNull(foundUser); + } + + + @Test + public void testFindOneByEmailAndSecurityAuthTypeIsNullOrEmpty() { + User user1 = newUser(); + user1.getSecurity().setAuthType(""); + user1.getEmailAddresses().add("user1@geonetwork.com"); + user1 = userRepo.save(user1); + + User user2 = newUser(); + user2.getSecurity().setAuthType(null); + 
user2.getEmailAddresses().add("user2@geonetwork.com"); + user2 = userRepo.save(user2); + + User user3 = newUser(); + user3.getSecurity().setAuthType("nonull"); + user3.getEmailAddresses().add("user3@geonetwork.com"); + userRepo.save(user3); + + User foundUser = userRepo.findOneByEmailAndSecurityAuthTypeIsNullOrEmpty(user1.getEmail()); + assertNotNull(foundUser); + assertEquals(user1.getId(), foundUser.getId()); + + foundUser = userRepo.findOneByEmailAndSecurityAuthTypeIsNullOrEmpty(user2.getEmail()); + assertNotNull(foundUser); + assertEquals(user2.getId(), foundUser.getId()); + + foundUser = userRepo.findOneByEmailAndSecurityAuthTypeIsNullOrEmpty(user3.getEmail()); + assertNull(foundUser); + + // Test case-insensitive + foundUser = userRepo.findOneByEmailAndSecurityAuthTypeIsNullOrEmpty(user3.getEmail().toUpperCase()); + assertNull(foundUser); + + foundUser = userRepo.findOneByEmailAndSecurityAuthTypeIsNullOrEmpty("blarg"); assertNull(foundUser); } @Test public void testFindByUsername() { User user1 = newUser(); - user1 = _userRepo.save(user1); + user1 = userRepo.save(user1); - User foundUser = _userRepo.findOneByUsernameAndSecurityAuthTypeIsNullOrEmpty(user1.getUsername()); + User foundUser = userRepo.findOneByUsernameAndSecurityAuthTypeIsNullOrEmpty(user1.getUsername()); assertNotNull(foundUser); assertEquals(user1.getId(), foundUser.getId()); - foundUser = _userRepo.findOneByUsernameAndSecurityAuthTypeIsNullOrEmpty("blarg"); + foundUser = userRepo.findOneByUsernameAndSecurityAuthTypeIsNullOrEmpty("blarg"); assertNull(foundUser); } @Test public void testFindAllByGroupOwnerNameAndProfile() { - Group group1 = _groupRepo.save(GroupRepositoryTest.newGroup(_inc)); - Group group2 = _groupRepo.save(GroupRepositoryTest.newGroup(_inc)); + Group group1 = groupRepo.save(GroupRepositoryTest.newGroup(_inc)); + Group group2 = groupRepo.save(GroupRepositoryTest.newGroup(_inc)); - User editUser = _userRepo.save(newUser().setProfile(Profile.Editor)); - User reviewerUser = 
_userRepo.save(newUser().setProfile(Profile.Reviewer)); - User registeredUser = _userRepo.save(newUser().setProfile(Profile.RegisteredUser)); - _userRepo.save(newUser().setProfile(Profile.Administrator)); + User editUser = userRepo.save(newUser().setProfile(Profile.Editor)); + User reviewerUser = userRepo.save(newUser().setProfile(Profile.Reviewer)); + User registeredUser = userRepo.save(newUser().setProfile(Profile.RegisteredUser)); + userRepo.save(newUser().setProfile(Profile.Administrator)); Metadata md1 = MetadataRepositoryTest.newMetadata(_inc); md1.getSourceInfo().setGroupOwner(group1.getId()); - md1 = _metadataRepo.save(md1); + md1 = metadataRepo.save(md1); Metadata md2 = MetadataRepositoryTest.newMetadata(_inc); md2.getSourceInfo().setGroupOwner(group1.getId()); - md2 = _metadataRepo.save(md2); + md2 = metadataRepo.save(md2); Metadata md3 = MetadataRepositoryTest.newMetadata(_inc); md3.getSourceInfo().setGroupOwner(group2.getId()); - _metadataRepo.save(md3); + metadataRepo.save(md3); - _userGroupRepository.save(new UserGroup().setGroup(group1).setUser(editUser).setProfile(Profile.Editor)); - _userGroupRepository.save(new UserGroup().setGroup(group2).setUser(registeredUser).setProfile(Profile.RegisteredUser)); - _userGroupRepository.save(new UserGroup().setGroup(group2).setUser(reviewerUser).setProfile(Profile.Editor)); - _userGroupRepository.save(new UserGroup().setGroup(group1).setUser(reviewerUser).setProfile(Profile.Reviewer)); + userGroupRepository.save(new UserGroup().setGroup(group1).setUser(editUser).setProfile(Profile.Editor)); + userGroupRepository.save(new UserGroup().setGroup(group2).setUser(registeredUser).setProfile(Profile.RegisteredUser)); + userGroupRepository.save(new UserGroup().setGroup(group2).setUser(reviewerUser).setProfile(Profile.Editor)); + userGroupRepository.save(new UserGroup().setGroup(group1).setUser(reviewerUser).setProfile(Profile.Reviewer)); - List> found = 
_userRepo.findAllByGroupOwnerNameAndProfile(Arrays.asList(md1.getId()), null); - Collections.sort(found, Comparator.comparing(s -> s.two().getName())); + List> found = userRepo.findAllByGroupOwnerNameAndProfile(List.of(md1.getId()), null); + found.sort(Comparator.comparing(s -> s.two().getName())); assertEquals(2, found.size()); assertEquals(md1.getId(), found.get(0).one().intValue()); @@ -203,9 +248,9 @@ public void testFindAllByGroupOwnerNameAndProfile() { assertEquals(editUser, found.get(0).two()); assertEquals(reviewerUser, found.get(1).two()); - found = _userRepo.findAllByGroupOwnerNameAndProfile(Arrays.asList(md1.getId()), null); + found = userRepo.findAllByGroupOwnerNameAndProfile(List.of(md1.getId()), null); // Sort by user name descending - Collections.sort(found, Comparator.comparing(s -> s.two().getName(), Comparator.reverseOrder())); + found.sort(Comparator.comparing(s -> s.two().getName(), Comparator.reverseOrder())); assertEquals(2, found.size()); assertEquals(md1.getId(), found.get(0).one().intValue()); @@ -214,13 +259,13 @@ public void testFindAllByGroupOwnerNameAndProfile() { assertEquals(reviewerUser, found.get(0).two()); - found = _userRepo.findAllByGroupOwnerNameAndProfile(Arrays.asList(md1.getId(), md2.getId()), null); + found = userRepo.findAllByGroupOwnerNameAndProfile(Arrays.asList(md1.getId(), md2.getId()), null); assertEquals(4, found.size()); int md1Found = 0; int md2Found = 0; - for (Pair record : found) { - if (record.one() == md1.getId()) { + for (Pair info : found) { + if (info.one() == md1.getId()) { md1Found++; } else { md2Found++; @@ -232,21 +277,21 @@ public void testFindAllByGroupOwnerNameAndProfile() { @Test public void testFindAllUsersInUserGroups() { - Group group1 = _groupRepo.save(GroupRepositoryTest.newGroup(_inc)); - Group group2 = _groupRepo.save(GroupRepositoryTest.newGroup(_inc)); + Group group1 = groupRepo.save(GroupRepositoryTest.newGroup(_inc)); + Group group2 = groupRepo.save(GroupRepositoryTest.newGroup(_inc)); - 
User editUser = _userRepo.save(newUser().setProfile(Profile.Editor)); - User reviewerUser = _userRepo.save(newUser().setProfile(Profile.Reviewer)); - User registeredUser = _userRepo.save(newUser().setProfile(Profile.RegisteredUser)); - _userRepo.save(newUser().setProfile(Profile.Administrator)); + User editUser = userRepo.save(newUser().setProfile(Profile.Editor)); + User reviewerUser = userRepo.save(newUser().setProfile(Profile.Reviewer)); + User registeredUser = userRepo.save(newUser().setProfile(Profile.RegisteredUser)); + userRepo.save(newUser().setProfile(Profile.Administrator)); - _userGroupRepository.save(new UserGroup().setGroup(group1).setUser(editUser).setProfile(Profile.Editor)); - _userGroupRepository.save(new UserGroup().setGroup(group2).setUser(registeredUser).setProfile(Profile.RegisteredUser)); - _userGroupRepository.save(new UserGroup().setGroup(group2).setUser(reviewerUser).setProfile(Profile.Editor)); - _userGroupRepository.save(new UserGroup().setGroup(group1).setUser(reviewerUser).setProfile(Profile.Reviewer)); + userGroupRepository.save(new UserGroup().setGroup(group1).setUser(editUser).setProfile(Profile.Editor)); + userGroupRepository.save(new UserGroup().setGroup(group2).setUser(registeredUser).setProfile(Profile.RegisteredUser)); + userGroupRepository.save(new UserGroup().setGroup(group2).setUser(reviewerUser).setProfile(Profile.Editor)); + userGroupRepository.save(new UserGroup().setGroup(group1).setUser(reviewerUser).setProfile(Profile.Reviewer)); - List found = Lists.transform(_userRepo.findAllUsersInUserGroups(UserGroupSpecs.hasGroupId(group1.getId())), - new Function() { + List found = Lists.transform(userRepo.findAllUsersInUserGroups(UserGroupSpecs.hasGroupId(group1.getId())), + new Function<>() { @Nullable @Override @@ -259,7 +304,7 @@ public Integer apply(@Nullable User input) { assertTrue(found.contains(editUser.getId())); assertTrue(found.contains(reviewerUser.getId())); - found = 
Lists.transform(_userRepo.findAllUsersInUserGroups(Specification.not(UserGroupSpecs.hasProfile(Profile.RegisteredUser) + found = Lists.transform(userRepo.findAllUsersInUserGroups(Specification.not(UserGroupSpecs.hasProfile(Profile.RegisteredUser) )), new Function() { @Nullable @@ -278,21 +323,20 @@ public Integer apply(@Nullable User input) { @Test public void testFindAllUsersThatOwnMetadata() { - - User editUser = _userRepo.save(newUser().setProfile(Profile.Editor)); - User reviewerUser = _userRepo.save(newUser().setProfile(Profile.Reviewer)); - _userRepo.save(newUser().setProfile(Profile.RegisteredUser)); - _userRepo.save(newUser().setProfile(Profile.Administrator)); + User editUser = userRepo.save(newUser().setProfile(Profile.Editor)); + User reviewerUser = userRepo.save(newUser().setProfile(Profile.Reviewer)); + userRepo.save(newUser().setProfile(Profile.RegisteredUser)); + userRepo.save(newUser().setProfile(Profile.Administrator)); Metadata md1 = MetadataRepositoryTest.newMetadata(_inc); md1.getSourceInfo().setOwner(editUser.getId()); - _metadataRepo.save(md1); + metadataRepo.save(md1); Metadata md2 = MetadataRepositoryTest.newMetadata(_inc); md2.getSourceInfo().setOwner(reviewerUser.getId()); - _metadataRepo.save(md2); + metadataRepo.save(md2); - List found = _userRepo.findAllUsersThatOwnMetadata(); + List found = userRepo.findAllUsersThatOwnMetadata(); assertEquals(2, found.size()); boolean editUserFound = false; @@ -318,20 +362,18 @@ public void testFindDuplicatedUsernamesCaseInsensitive() { User userNonDuplicated1 = newUser(); usernameDuplicated1.setUsername("userNamE1"); usernameDuplicated2.setUsername("usERNAME1"); - _userRepo.save(usernameDuplicated1); - _userRepo.save(usernameDuplicated2); - _userRepo.save(userNonDuplicated1); + userRepo.save(usernameDuplicated1); + userRepo.save(usernameDuplicated2); + userRepo.save(userNonDuplicated1); - List duplicatedUsernames = _userRepo.findDuplicatedUsernamesCaseInsensitive(); - assertThat("Duplicated usernames 
don't match the expected ones", + List duplicatedUsernames = userRepo.findDuplicatedUsernamesCaseInsensitive(); + MatcherAssert.assertThat("Duplicated usernames don't match the expected ones", duplicatedUsernames, CoreMatchers.is(Lists.newArrayList("username1"))); assertEquals(1, duplicatedUsernames.size()); } private User newUser() { - User user = newUser(_inc); - return user; + return newUser(_inc); } - } diff --git a/domain/src/test/java/org/fao/geonet/repository/specification/UserGroupSpecsTest.java b/domain/src/test/java/org/fao/geonet/repository/specification/UserGroupSpecsTest.java index 3a2af23e22ca..ba6e2ebcd6da 100644 --- a/domain/src/test/java/org/fao/geonet/repository/specification/UserGroupSpecsTest.java +++ b/domain/src/test/java/org/fao/geonet/repository/specification/UserGroupSpecsTest.java @@ -23,6 +23,7 @@ package org.fao.geonet.repository.specification; +import org.fao.geonet.domain.Profile; import org.fao.geonet.domain.ReservedGroup; import org.fao.geonet.domain.UserGroup; import org.fao.geonet.repository.*; @@ -34,6 +35,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import static org.fao.geonet.repository.specification.UserGroupSpecs.*; @@ -89,6 +91,24 @@ public void testHasProfile() throws Exception { } } + @Test + public void testHasProfileIn() throws Exception { + UserGroup ug1 = newUserGroup(); + ug1.setProfile(Profile.Reviewer); + _repo.save(ug1); + _repo.save(newUserGroup()); + _repo.save(newUserGroup()); + _repo.save(newUserGroup()); + + Set profiles = Set.of(Profile.Editor, Profile.Reviewer); + + List found = _repo.findAll(hasProfileIn(profiles)); + + for (UserGroup userGroup : found) { + assertTrue(profiles.contains(userGroup.getProfile())); + } + } + @Test public void testIsReservedGroup() throws Exception { UserGroup ug1 = _repo.save(newUserGroup()); diff --git a/es/README.md b/es/README.md index c4b8cb15ac2b..d46d0574d06c 100644 --- 
a/es/README.md +++ b/es/README.md @@ -11,7 +11,7 @@ These configurations should not be used for a production deployment. 1. Use docker pull to download the image (you can check version in the :file:`pom.xml` file): ``` - docker pull docker.elastic.co/elasticsearch/elasticsearch:8.14.0 + docker pull docker.elastic.co/elasticsearch/elasticsearch:8.14.3 ``` 2. Use docker run, leaving 9200 available: @@ -21,7 +21,7 @@ These configurations should not be used for a production deployment. -e "discovery.type=single-node" \ -e "xpack.security.enabled=false" \ -e "xpack.security.enrollment.enabled=false" \ - docker.elastic.co/elasticsearch/elasticsearch:8.14.0 + docker.elastic.co/elasticsearch/elasticsearch:8.14.3 ``` 3. Check that elasticsearch is running by visiting http://localhost:9200 in a browser @@ -61,8 +61,8 @@ Maven installation ensure you always are using the ``es.version`` version specif ## Manual installation -1. Download Elasticsearch 8.14.0 from https://www.elastic.co/downloads/elasticsearch -and copy to the ES module, e.g., ``es/elasticsearch-8.14.0` +1. Download Elasticsearch 8.14.3 from https://www.elastic.co/downloads/elasticsearch +and copy to the ES module, e.g., ``es/elasticsearch-8.14.3` 2. Disable the security @@ -127,7 +127,7 @@ Don't hesitate to propose a Pull Request with the new language. 1. Configure ES to start on server startup. It is recommended to protect `gn-records` index from the Internet access. 
- * Note that for debian-based servers the current deb download (8.14.0) can be installed rather than installing manually and can be configured to run as a service using the instructions here: https://www.elastic.co/guide/en/elasticsearch/reference/current/starting-elasticsearch.html + * Note that for debian-based servers the current deb download (8.14.3) can be installed rather than installing manually and can be configured to run as a service using the instructions here: https://www.elastic.co/guide/en/elasticsearch/reference/current/starting-elasticsearch.html # Troubleshoot diff --git a/es/docker-compose.yml b/es/docker-compose.yml index 6d30f675bb18..994c6089a01a 100644 --- a/es/docker-compose.yml +++ b/es/docker-compose.yml @@ -2,7 +2,7 @@ version: '3' services: elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:8.14.0 + image: docker.elastic.co/elasticsearch/elasticsearch:8.14.3 container_name: elasticsearch8 environment: - cluster.name=docker-cluster @@ -20,7 +20,7 @@ services: ports: - "9200:9200" kibana: - image: docker.elastic.co/kibana/kibana:8.14.0 + image: docker.elastic.co/kibana/kibana:8.14.3 container_name: kibana8 ports: - "5601:5601" diff --git a/es/es-dashboards/README.md b/es/es-dashboards/README.md index 9a2b75274878..e5e87790e31e 100644 --- a/es/es-dashboards/README.md +++ b/es/es-dashboards/README.md @@ -39,7 +39,7 @@ ## Manual installation -1. Download Kibana 8.14.0 from https://www.elastic.co/downloads/kibana +1. Download Kibana 8.14.3 from https://www.elastic.co/downloads/kibana 2. Set Kibana base path and index name in config/kibana.yml: @@ -81,7 +81,7 @@ Visit Kibana in a browser using one of the above links and go to 'Saved Objects' ### Production Use -Kibana can be installed from the debian files, and Kibana 8.14.0 is confirmed as working with Geonetwork 4.4.x. +Kibana can be installed from the debian files, and Kibana 8.14.3 is confirmed as working with Geonetwork 4.4.x. 
Set Kibana to start when the server starts up, using the instructions at https://www.elastic.co/guide/en/kibana/current/start-stop.html diff --git a/es/es-dashboards/pom.xml b/es/es-dashboards/pom.xml index 81ab69fcab16..19ccbe6aefee 100644 --- a/es/es-dashboards/pom.xml +++ b/es/es-dashboards/pom.xml @@ -27,7 +27,7 @@ gn-es org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT diff --git a/es/pom.xml b/es/pom.xml index 77d91442f846..4397414592bc 100644 --- a/es/pom.xml +++ b/es/pom.xml @@ -5,7 +5,7 @@ geonetwork org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 gn-es diff --git a/estest/pom.xml b/estest/pom.xml index e1285a801bd9..2c447fa74dd9 100644 --- a/estest/pom.xml +++ b/estest/pom.xml @@ -5,7 +5,7 @@ geonetwork org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/events/pom.xml b/events/pom.xml index 332f074acbb2..f1a335196a48 100644 --- a/events/pom.xml +++ b/events/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT GeoNetwork Events diff --git a/harvesters/pom.xml b/harvesters/pom.xml index ad15ba83add7..7a69abe98fab 100644 --- a/harvesters/pom.xml +++ b/harvesters/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/HarvesterUtil.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/HarvesterUtil.java index cf30c71312cb..ce411b33256c 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/HarvesterUtil.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/HarvesterUtil.java @@ -23,18 +23,19 @@ package org.fao.geonet.kernel.harvest.harvester; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.HashMap; +import java.util.Map; +import org.fao.geonet.ApplicationContextHolder; import org.fao.geonet.constants.Geonet; import org.fao.geonet.domain.Pair; +import 
org.fao.geonet.kernel.GeonetworkDataDirectory; import org.fao.geonet.kernel.schema.MetadataSchema; import org.fao.geonet.utils.Xml; import org.jdom.Element; import org.slf4j.LoggerFactory; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.HashMap; -import java.util.Map; - /** * Created by francois on 3/7/14. */ @@ -74,8 +75,7 @@ public static Element processMetadata(MetadataSchema metadataSchema, Element md, String processName, Map processParams) { - - Path filePath = metadataSchema.getSchemaDir().resolve("process").resolve(processName + ".xsl"); + Path filePath = ApplicationContextHolder.get().getBean(GeonetworkDataDirectory.class).getXsltConversion(processName); if (!Files.exists(filePath)) { LOGGER.info(" processing instruction not found for {} schema. metadata not filtered.", metadataSchema.getName()); } else { diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java index 8ba9e1e31af3..5097d9a600cb 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -232,7 +232,7 @@ private void insertOrUpdate(Collection records, Collection records, List err params.useChangeDateForUpdate(), localUuids.getChangeDate(ri.uuid), true); log.info("Overriding record with uuid " + ri.uuid); - result.updatedMetadata++; if (params.isIfRecordExistAppendPrivileges()) { addPrivileges(id, params.getPrivileges(), localGroups, context); diff --git 
a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/simpleurl/Harvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/simpleurl/Harvester.java index 2cd1100dc6de..254fac91f840 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/simpleurl/Harvester.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/simpleurl/Harvester.java @@ -105,6 +105,7 @@ public HarvestResult harvest(Logger log) throws Exception { String[] urlList = params.url.split("\n"); boolean error = false; Aligner aligner = new Aligner(cancelMonitor, context, params, log); + Set listOfUuids = new HashSet<>(); for (String url : urlList) { log.debug("Loading URL: " + url); @@ -151,7 +152,6 @@ public HarvestResult harvest(Logger log) throws Exception { params.numberOfRecordPath, e.getMessage())); } } - Map allUuids = new HashMap<>(); try { List listOfUrlForPages = buildListOfUrl(params, numberOfRecordsToHarvest); for (int i = 0; i < listOfUrlForPages.size(); i++) { @@ -166,7 +166,6 @@ public HarvestResult harvest(Logger log) throws Exception { if (StringUtils.isNotEmpty(params.loopElement) || type == SimpleUrlResourceType.RDFXML) { Map uuids = new HashMap<>(); - try { if (type == SimpleUrlResourceType.XML) { collectRecordsFromXml(xmlObj, uuids, aligner); @@ -176,7 +175,7 @@ public HarvestResult harvest(Logger log) throws Exception { collectRecordsFromJson(jsonObj, uuids, aligner); } aligner.align(uuids, errors); - allUuids.putAll(uuids); + listOfUuids.addAll(uuids.keySet()); } catch (Exception e) { errors.add(new HarvestError(this.context, e)); log.error(String.format("Failed to collect record in response at path %s. 
Error is: %s", @@ -184,7 +183,6 @@ public HarvestResult harvest(Logger log) throws Exception { } } } - aligner.cleanupRemovedRecords(allUuids.keySet()); } catch (Exception t) { error = true; log.error("Unknown error trying to harvest"); @@ -198,11 +196,12 @@ public HarvestResult harvest(Logger log) throws Exception { errors.add(new HarvestError(context, t)); } - log.info("Total records processed in all searches :" + allUuids.size()); + log.info("Total records processed in all searches :" + listOfUuids.size()); if (error) { log.warning("Due to previous errors the align process has not been called"); } } + aligner.cleanupRemovedRecords(listOfUuids); return aligner.getResult(); } diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/simpleurl/RDFUtils.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/simpleurl/RDFUtils.java index 9995652e4d1b..6694bf047f8f 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/simpleurl/RDFUtils.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/simpleurl/RDFUtils.java @@ -7,6 +7,7 @@ import org.apache.jena.riot.Lang; import org.apache.jena.riot.RDFDataMgr; import org.fao.geonet.Constants; +import org.fao.geonet.constants.Geonet; import org.fao.geonet.domain.ISODate; import org.fao.geonet.domain.Pair; import org.fao.geonet.utils.Log; @@ -81,6 +82,9 @@ public static HashMap getAllUuids(String feedUrl) throws Except public static HashMap getAllUuids(Element feed) throws Exception { Element rdfDocument = checkForMissingRdfAbout(feed); + // if xsi:schemaLocation is present on the rdf:RDF element Jena fails to parse the Element. 
+ rdfDocument.removeAttribute("schemaLocation", Geonet.Namespaces.XSI); + Model model = ModelFactory.createMemModelMaker().createDefaultModel(); RDFDataMgr.read(model, IOUtils.toInputStream(Xml.getString(rdfDocument), StandardCharsets.UTF_8), diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/Harvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/Harvester.java index 81dad939cad1..cf8717e52133 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/Harvester.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/Harvester.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -22,32 +22,21 @@ //============================================================================== package org.fao.geonet.kernel.harvest.harvester.webdav; -import java.util.LinkedList; -import java.util.List; -import java.util.UUID; +import java.util.*; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.commons.lang.StringUtils; import org.fao.geonet.GeonetContext; import org.fao.geonet.Logger; import org.fao.geonet.constants.Geonet; -import org.fao.geonet.domain.AbstractMetadata; -import org.fao.geonet.domain.ISODate; -import org.fao.geonet.domain.Metadata; -import org.fao.geonet.domain.MetadataType; +import org.fao.geonet.domain.*; import org.fao.geonet.exceptions.NoSchemaMatchesException; import org.fao.geonet.kernel.DataManager; import org.fao.geonet.kernel.SchemaManager; import org.fao.geonet.kernel.UpdateDatestamp; import org.fao.geonet.kernel.datamanager.IMetadataManager; import org.fao.geonet.kernel.harvest.BaseAligner; -import 
org.fao.geonet.kernel.harvest.harvester.CategoryMapper; -import org.fao.geonet.kernel.harvest.harvester.GroupMapper; -import org.fao.geonet.kernel.harvest.harvester.HarvestError; -import org.fao.geonet.kernel.harvest.harvester.HarvestResult; -import org.fao.geonet.kernel.harvest.harvester.IHarvester; -import org.fao.geonet.kernel.harvest.harvester.RecordInfo; -import org.fao.geonet.kernel.harvest.harvester.UriMapper; +import org.fao.geonet.kernel.harvest.harvester.*; import org.fao.geonet.kernel.search.IndexingMode; import org.fao.geonet.repository.MetadataRepository; import org.fao.geonet.repository.OperationAllowedRepository; @@ -94,7 +83,9 @@ class Harvester extends BaseAligner implements IHarvester errors = new LinkedList(); + private List errors = new LinkedList<>(); + private String processName; + private Map processParams = new HashMap<>(); public Harvester(AtomicBoolean cancelMonitor, Logger log, ServiceContext context, WebDavParams params) { super(cancelMonitor); @@ -154,6 +145,10 @@ private void align(final List files) throws Exception { localGroups = new GroupMapper(context); localUris = new UriMapper(context, params.getUuid()); + Pair> filter = HarvesterUtil.parseXSLFilter(params.xslfilter); + processName = filter.one(); + processParams = filter.two(); + //----------------------------------------------------------------------- //--- remove old metadata for (final String uri : localUris.getUris()) { @@ -259,6 +254,7 @@ private void addMetadata(RemoteFile rf) throws Exception { case SKIP: log.info("Skipping record with uuid " + uuid); result.uuidSkipped++; + return; default: return; } @@ -292,6 +288,13 @@ private void addMetadata(RemoteFile rf) throws Exception { md = translateMetadataContent(context, md, schema); } + if (StringUtils.isNotEmpty(params.xslfilter)) { + md = HarvesterUtil.processMetadata(dataMan.getSchema(schema), + md, processName, processParams); + + schema = dataMan.autodetectSchema(md); + } + // // insert metadata // @@ -310,6 +313,11 @@ 
private void addMetadata(RemoteFile rf) throws Exception { date = rf.getChangeDate(); } } + + if (date == null) { + date = new ISODate(); + } + AbstractMetadata metadata = new Metadata(); metadata.setUuid(uuid); metadata.getDataInfo(). @@ -385,11 +393,11 @@ private Element retrieveMetadata(RemoteFile rf) { * harvester are applied. Also, it changes the ownership of the record so it is assigned to the * new harvester that last updated it. * @param rf - * @param record + * @param recordInfo * @param force * @throws Exception */ - private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) throws Exception { + private void updateMetadata(RemoteFile rf, RecordInfo recordInfo, boolean force) throws Exception { Element md = null; // Get the change date from the metadata content. If not possible, get it from the file change date if available @@ -411,8 +419,8 @@ private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) thr //Update only if different String uuid = dataMan.extractUUID(schema, md); - if (!record.uuid.equals(uuid)) { - md = dataMan.setUUID(schema, record.uuid, md); + if (!recordInfo.uuid.equals(uuid)) { + md = dataMan.setUUID(schema, recordInfo.uuid, md); } } catch (Exception e) { log.error(" - Failed to set uuid for metadata with remote path : " + rf.getPath()); @@ -424,7 +432,7 @@ private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) thr date = dataMan.extractDateModified(schema, md); } catch (Exception ex) { log.error("WebDavHarvester - updateMetadata - Can't get metadata modified date for metadata id= " - + record.id + ", using current date for modified date"); + + recordInfo.id + ", using current date for modified date"); // WAF harvester, rf.getChangeDate() returns null if (rf.getChangeDate() != null) { date = rf.getChangeDate().getDateAndTime(); @@ -434,7 +442,7 @@ private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) thr } - if (!force && 
!rf.isMoreRecentThan(record.changeDate)) { + if (!force && !rf.isMoreRecentThan(recordInfo.changeDate)) { if (log.isDebugEnabled()) log.debug(" - Metadata XML not changed for path : " + rf.getPath()); result.unchangedMetadata++; @@ -454,8 +462,8 @@ private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) thr //Update only if different String uuid = dataMan.extractUUID(schema, md); - if (!record.uuid.equals(uuid)) { - md = dataMan.setUUID(schema, record.uuid, md); + if (!recordInfo.uuid.equals(uuid)) { + md = dataMan.setUUID(schema, recordInfo.uuid, md); } } catch (Exception e) { log.error(" - Failed to set uuid for metadata with remote path : " + rf.getPath()); @@ -467,7 +475,7 @@ private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) thr date = dataMan.extractDateModified(schema, md); } catch (Exception ex) { log.error("WebDavHarvester - updateMetadata - Can't get metadata modified date for metadata id= " - + record.id + ", using current date for modified date"); + + recordInfo.id + ", using current date for modified date"); // WAF harvester, rf.getChangeDate() returns null if (rf.getChangeDate() != null) { date = rf.getChangeDate().getDateAndTime(); @@ -475,12 +483,16 @@ private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) thr } } - // Translate metadata if (params.isTranslateContent()) { md = translateMetadataContent(context, md, schema); } + if (StringUtils.isNotEmpty(params.xslfilter)) { + md = HarvesterUtil.processMetadata(dataMan.getSchema(schema), + md, processName, processParams); + } + // // update metadata // @@ -488,7 +500,7 @@ private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) thr boolean ufo = false; String language = context.getLanguage(); - final AbstractMetadata metadata = metadataManager.updateMetadata(context, record.id, md, validate, ufo, language, + final AbstractMetadata metadata = metadataManager.updateMetadata(context, recordInfo.id, md, validate, 
ufo, language, date, false, IndexingMode.none); if(force) { @@ -502,15 +514,15 @@ private void updateMetadata(RemoteFile rf, RecordInfo record, Boolean force) thr //--- the administrator could change privileges and categories using the //--- web interface so we have to re-set both OperationAllowedRepository repository = context.getBean(OperationAllowedRepository.class); - repository.deleteAllByMetadataId(Integer.parseInt(record.id)); - addPrivileges(record.id, params.getPrivileges(), localGroups, context); + repository.deleteAllByMetadataId(Integer.parseInt(recordInfo.id)); + addPrivileges(recordInfo.id, params.getPrivileges(), localGroups, context); metadata.getCategories().clear(); addCategories(metadata, params.getCategories(), localCateg, context, null, true); dataMan.flush(); - dataMan.indexMetadata(record.id, true); + dataMan.indexMetadata(recordInfo.id, true); } } diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavHarvester.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavHarvester.java index e6cc3af1a9d8..e745a5b33115 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavHarvester.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavHarvester.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -28,40 +28,23 @@ import java.sql.SQLException; -//============================================================================= - public class WebDavHarvester extends AbstractHarvester { - //--------------------------------------------------------------------------- - //--- - //--- Add - //--- - 
//--------------------------------------------------------------------------- - @Override protected WebDavParams createParams() { return new WebDavParams(dataMan); } //--------------------------------------------------------------------------- + @Override protected void storeNodeExtra(WebDavParams params, String path, String siteId, String optionsId) throws SQLException { harvesterSettingsManager.add("id:" + siteId, "url", params.url); harvesterSettingsManager.add("id:" + siteId, "icon", params.icon); harvesterSettingsManager.add("id:" + optionsId, "validate", params.getValidate()); harvesterSettingsManager.add("id:" + optionsId, "recurse", params.recurse); harvesterSettingsManager.add("id:" + optionsId, "subtype", params.subtype); + harvesterSettingsManager.add("id:" + siteId, "xslfilter", params.xslfilter); } - //--------------------------------------------------------------------------- - //--- - //--- Variables - //--- - //--------------------------------------------------------------------------- - - //--------------------------------------------------------------------------- - //--- - //--- Harvest - //--- - //--------------------------------------------------------------------------- public void doHarvest(Logger log) throws Exception { log.info("WebDav doHarvest start"); Harvester h = new Harvester(cancelMonitor, log, context, params); diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavParams.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavParams.java index d264bb908fb3..c32bfd40cda7 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavParams.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/webdav/WebDavParams.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food 
and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -29,61 +29,44 @@ import org.fao.geonet.kernel.harvest.harvester.AbstractParams; import org.jdom.Element; -//============================================================================= - public class WebDavParams extends AbstractParams { - //-------------------------------------------------------------------------- - //--- - //--- Constructor - //--- - //-------------------------------------------------------------------------- + /** * url of webdav folder to harvest */ public String url; - //--------------------------------------------------------------------------- - //--- - //--- Create : called when a new entry must be added. Reads values from the - //--- provided entry, providing default values - //--- - //--------------------------------------------------------------------------- /** * Icon to use for harvester */ public String icon; - //--------------------------------------------------------------------------- - //--- - //--- Update : called when an entry has changed and variables must be updated - //--- - //--------------------------------------------------------------------------- /** * If true recurse into directories. */ public boolean recurse; - //--------------------------------------------------------------------------- - //--- - //--- Other API methods - //--- - //--------------------------------------------------------------------------- /** * Flag indicating if WAFRetriever or WebDavRetriever should be used. */ public String subtype; - //--------------------------------------------------------------------------- - //--- - //--- Variables - //--- - //--------------------------------------------------------------------------- + /** + * The filter is a process (see schema/process folder) which depends on the schema. 
It could be + * composed of parameter which will be sent to XSL transformation using the following syntax : + *
+     * anonymizer?protocol=MYLOCALNETWORK:FILEPATH&email=gis@organisation.org&thesaurus=MYORGONLYTHESAURUS
+     * 
+ */ + public String xslfilter; + public WebDavParams(DataManager dm) { super(dm); } + @Override public void create(Element node) throws BadInputEx { super.create(node); @@ -92,12 +75,14 @@ public void create(Element node) throws BadInputEx { url = Util.getParam(site, "url", ""); icon = Util.getParam(site, "icon", ""); + xslfilter = Util.getParam(site, "xslfilter", ""); recurse = Util.getParam(opt, "recurse", false); subtype = Util.getParam(opt, "subtype", ""); } + @Override public void update(Element node) throws BadInputEx { super.update(node); @@ -106,6 +91,7 @@ public void update(Element node) throws BadInputEx { url = Util.getParam(site, "url", url); icon = Util.getParam(site, "icon", icon); + xslfilter = Util.getParam(site, "xslfilter", ""); recurse = Util.getParam(opt, "recurse", recurse); subtype = Util.getParam(opt, "subtype", subtype); @@ -117,6 +103,7 @@ public WebDavParams copy() { copy.url = url; copy.icon = icon; + copy.xslfilter = xslfilter; copy.setValidate(getValidate()); copy.recurse = recurse; @@ -131,7 +118,3 @@ public String getIcon() { return icon; } } - -//============================================================================= - - diff --git a/healthmonitor/pom.xml b/healthmonitor/pom.xml index adcc4d6f283e..6f71092a00fb 100644 --- a/healthmonitor/pom.xml +++ b/healthmonitor/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/index/pom.xml b/index/pom.xml index 8aa023929ec3..ec7d47af0ea3 100644 --- a/index/pom.xml +++ b/index/pom.xml @@ -5,7 +5,7 @@ geonetwork org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 gn-index diff --git a/inspire-atom/pom.xml b/inspire-atom/pom.xml index effbea670891..7d4bf0cf74fc 100644 --- a/inspire-atom/pom.xml +++ b/inspire-atom/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/inspire-atom/src/main/java/org/fao/geonet/inspireatom/util/InspireAtomUtil.java 
b/inspire-atom/src/main/java/org/fao/geonet/inspireatom/util/InspireAtomUtil.java index a452d0733d0e..622f8fe4ca3f 100644 --- a/inspire-atom/src/main/java/org/fao/geonet/inspireatom/util/InspireAtomUtil.java +++ b/inspire-atom/src/main/java/org/fao/geonet/inspireatom/util/InspireAtomUtil.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2023 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -63,7 +63,7 @@ * @author Jose García */ public class InspireAtomUtil { - private final static String EXTRACT_DATASETS_FROM_SERVICE_XSLT = "extract-datasetinfo-from-service-feed.xsl"; + private static final String EXTRACT_DATASETS_FROM_SERVICE_XSLT = "extract-datasetinfo-from-service-feed.xsl"; /** * Xslt process to get the related datasets in service metadata. 
@@ -395,7 +395,15 @@ public static String retrieveDatasetUuidFromIdentifier(EsSearchManager searchMan " \"value\": \"%s\"" + " }" + " }" + + " }," + + " {" + + " \"term\": {" + + " \"isPublishedToAll\": {" + + " \"value\": \"true\"" + + " }" + + " }" + " }" + + " ]" + " }" + "}"; diff --git a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomDescribe.java b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomDescribe.java index 97091e008e1f..95871555b1db 100644 --- a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomDescribe.java +++ b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomDescribe.java @@ -24,6 +24,8 @@ import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -107,7 +109,7 @@ public class AtomDescribe { ) @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Feeds."), - @ApiResponse(responseCode = "204", description = "Not authenticated.") + @ApiResponse(responseCode = "204", description = "Not authenticated.", content = {@Content(schema = @Schema(hidden = true))}) }) @ResponseStatus(OK) @ResponseBody diff --git a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomGetData.java b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomGetData.java index a9133fe38a73..33d0ace61289 100644 --- a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomGetData.java +++ b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomGetData.java @@ -23,6 +23,8 @@ package org.fao.geonet.services.inspireatom; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import 
io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -84,7 +86,7 @@ public class AtomGetData { ) @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Get a data file related to dataset"), - @ApiResponse(responseCode = "204", description = "Not authenticated.") + @ApiResponse(responseCode = "204", description = "Not authenticated.", content = {@Content(schema = @Schema(hidden = true))}) }) @ResponseStatus(OK) @ResponseBody diff --git a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomHarvester.java b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomHarvester.java index 94eeb33e4ceb..a30dcbb03313 100644 --- a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomHarvester.java +++ b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomHarvester.java @@ -23,6 +23,8 @@ package org.fao.geonet.services.inspireatom; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -61,7 +63,7 @@ public class AtomHarvester { @PreAuthorize("hasAuthority('Administrator')") @ApiResponses(value = { @ApiResponse(responseCode = "201", description = "Scan completed."), - @ApiResponse(responseCode = "204", description = "Not authenticated.") + @ApiResponse(responseCode = "204", description = "Not authenticated.", content = {@Content(schema = @Schema(hidden = true))}) }) @ResponseStatus(CREATED) @ResponseBody diff --git a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomSearch.java b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomSearch.java index 0e27e9c87632..5253d3146ac2 100644 --- 
a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomSearch.java +++ b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomSearch.java @@ -27,6 +27,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -114,7 +115,7 @@ public class AtomSearch { ) @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Get a list of feeds."), - @ApiResponse(responseCode = "204", description = "Not authenticated.") + @ApiResponse(responseCode = "204", description = "Not authenticated.", content = {@io.swagger.v3.oas.annotations.media.Content(schema = @Schema(hidden = true))}) }) @ResponseStatus(OK) public Object feeds( diff --git a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomServiceDescription.java b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomServiceDescription.java index 87a255411b28..6c7b99ffbc20 100644 --- a/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomServiceDescription.java +++ b/inspire-atom/src/main/java/org/fao/geonet/services/inspireatom/AtomServiceDescription.java @@ -23,6 +23,8 @@ package org.fao.geonet.services.inspireatom; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -91,7 +93,7 @@ public class AtomServiceDescription { produces = MediaType.APPLICATION_XML_VALUE) @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Feeds."), - 
@ApiResponse(responseCode = "204", description = "Not authenticated.") + @ApiResponse(responseCode = "204", description = "Not authenticated.", content = {@Content(schema = @Schema(hidden = true))}) }) @ResponseStatus(OK) @ResponseBody diff --git a/jmeter/pom.xml b/jmeter/pom.xml index 4d7e0c7f02c3..f1906b9d728a 100644 --- a/jmeter/pom.xml +++ b/jmeter/pom.xml @@ -29,7 +29,7 @@ org.geonetwork-opensource geonetwork - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT diff --git a/listeners/pom.xml b/listeners/pom.xml index e60deaa21e43..082cf9bad297 100644 --- a/listeners/pom.xml +++ b/listeners/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT GeoNetwork Events diff --git a/listeners/src/main/java/org/fao/geonet/listener/history/AttachmentAddedListener.java b/listeners/src/main/java/org/fao/geonet/listener/history/AttachmentAddedListener.java index e8df7be7bb42..91faa0fc5018 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/history/AttachmentAddedListener.java +++ b/listeners/src/main/java/org/fao/geonet/listener/history/AttachmentAddedListener.java @@ -31,7 +31,7 @@ public class AttachmentAddedListener extends GenericMetadataEventListener implements ApplicationListener { private String changeMessage = ""; - private String eventType = StatusValue.Events.ATTACHMENTADDED; + private final StatusValue.Events eventType = StatusValue.Events.ATTACHMENTADDED; @Override public String getChangeMessage() { @@ -40,7 +40,7 @@ public String getChangeMessage() { @Override public String getEventType() { - return eventType; + return eventType.getCode(); } @Override diff --git a/listeners/src/main/java/org/fao/geonet/listener/history/AttachmentDeletedListener.java b/listeners/src/main/java/org/fao/geonet/listener/history/AttachmentDeletedListener.java index e94a9680efc8..548e60dbc499 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/history/AttachmentDeletedListener.java +++ 
b/listeners/src/main/java/org/fao/geonet/listener/history/AttachmentDeletedListener.java @@ -31,7 +31,7 @@ public class AttachmentDeletedListener extends GenericMetadataEventListener implements ApplicationListener { private String changeMessage = ""; - private String eventType = StatusValue.Events.ATTACHMENTDELETED; + private final StatusValue.Events eventType = StatusValue.Events.ATTACHMENTDELETED; @Override public String getChangeMessage() { @@ -40,7 +40,7 @@ public String getChangeMessage() { @Override public String getEventType() { - return eventType; + return eventType.getCode(); } @Override diff --git a/listeners/src/main/java/org/fao/geonet/listener/history/RecordCategoryChangeListener.java b/listeners/src/main/java/org/fao/geonet/listener/history/RecordCategoryChangeListener.java index 3412db6657a1..3c74a73c604a 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/history/RecordCategoryChangeListener.java +++ b/listeners/src/main/java/org/fao/geonet/listener/history/RecordCategoryChangeListener.java @@ -31,7 +31,7 @@ public class RecordCategoryChangeListener extends GenericMetadataEventListener implements ApplicationListener { private String changeMessage = ""; - private String eventType = StatusValue.Events.RECORDCATEGORYCHANGE; + private final StatusValue.Events eventType = StatusValue.Events.RECORDCATEGORYCHANGE; @Override public String getChangeMessage() { @@ -40,7 +40,7 @@ public String getChangeMessage() { @Override public String getEventType() { - return eventType; + return eventType.getCode(); } @Override diff --git a/listeners/src/main/java/org/fao/geonet/listener/history/RecordCreatedListener.java b/listeners/src/main/java/org/fao/geonet/listener/history/RecordCreatedListener.java index 99909f079a6c..581f695f1cc5 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/history/RecordCreatedListener.java +++ b/listeners/src/main/java/org/fao/geonet/listener/history/RecordCreatedListener.java @@ -31,7 +31,7 @@ public class 
RecordCreatedListener extends GenericMetadataEventListener implements ApplicationListener { private String changeMessage = ""; - private String eventType = StatusValue.Events.RECORDCREATED; + private final StatusValue.Events eventType = StatusValue.Events.RECORDCREATED; @Override public String getChangeMessage() { @@ -40,7 +40,7 @@ public String getChangeMessage() { @Override public String getEventType() { - return eventType; + return eventType.getCode(); } @Override diff --git a/listeners/src/main/java/org/fao/geonet/listener/history/RecordDeletedListener.java b/listeners/src/main/java/org/fao/geonet/listener/history/RecordDeletedListener.java index d3b02d855959..5f3b4d249b34 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/history/RecordDeletedListener.java +++ b/listeners/src/main/java/org/fao/geonet/listener/history/RecordDeletedListener.java @@ -31,7 +31,7 @@ public class RecordDeletedListener extends GenericMetadataEventListener implements ApplicationListener { private String changeMessage = ""; - private String eventType = StatusValue.Events.RECORDDELETED; + private final StatusValue.Events eventType = StatusValue.Events.RECORDDELETED; @Override public String getChangeMessage() { @@ -40,7 +40,7 @@ public String getChangeMessage() { @Override public String getEventType() { - return eventType; + return eventType.getCode(); } @Override diff --git a/listeners/src/main/java/org/fao/geonet/listener/history/RecordGroupOwnerChangeListener.java b/listeners/src/main/java/org/fao/geonet/listener/history/RecordGroupOwnerChangeListener.java index bebd54bfa7c0..4f6d4a5a5414 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/history/RecordGroupOwnerChangeListener.java +++ b/listeners/src/main/java/org/fao/geonet/listener/history/RecordGroupOwnerChangeListener.java @@ -32,7 +32,7 @@ public class RecordGroupOwnerChangeListener extends GenericMetadataEventListener implements ApplicationListener { private String changeMessage = ""; - private String 
eventType = StatusValue.Events.RECORDGROUPOWNERCHANGE; + private final StatusValue.Events eventType = StatusValue.Events.RECORDGROUPOWNERCHANGE; @Override public String getChangeMessage() { @@ -41,7 +41,7 @@ public String getChangeMessage() { @Override public String getEventType() { - return eventType; + return eventType.getCode(); } @Override diff --git a/listeners/src/main/java/org/fao/geonet/listener/history/RecordImportedListener.java b/listeners/src/main/java/org/fao/geonet/listener/history/RecordImportedListener.java index b3569c018c13..dae5ed3068aa 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/history/RecordImportedListener.java +++ b/listeners/src/main/java/org/fao/geonet/listener/history/RecordImportedListener.java @@ -31,7 +31,7 @@ public class RecordImportedListener extends GenericMetadataEventListener implements ApplicationListener { private String changeMessage = ""; - private String eventType = StatusValue.Events.RECORDIMPORTED; + private final StatusValue.Events eventType = StatusValue.Events.RECORDIMPORTED; @Override public String getChangeMessage() { @@ -40,7 +40,7 @@ public String getChangeMessage() { @Override public String getEventType() { - return eventType; + return eventType.getCode(); } @Override diff --git a/listeners/src/main/java/org/fao/geonet/listener/history/RecordOwnerChangeListener.java b/listeners/src/main/java/org/fao/geonet/listener/history/RecordOwnerChangeListener.java index 434f8b5d5197..9ab4620aec58 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/history/RecordOwnerChangeListener.java +++ b/listeners/src/main/java/org/fao/geonet/listener/history/RecordOwnerChangeListener.java @@ -31,7 +31,7 @@ public class RecordOwnerChangeListener extends GenericMetadataEventListener implements ApplicationListener { private String changeMessage = ""; - private String eventType = StatusValue.Events.RECORDOWNERCHANGE; + private final StatusValue.Events eventType = StatusValue.Events.RECORDOWNERCHANGE; @Override 
public String getChangeMessage() { @@ -40,7 +40,7 @@ public String getChangeMessage() { @Override public String getEventType() { - return eventType; + return eventType.getCode(); } @Override diff --git a/listeners/src/main/java/org/fao/geonet/listener/history/RecordPrivilegesChangeListener.java b/listeners/src/main/java/org/fao/geonet/listener/history/RecordPrivilegesChangeListener.java index 1662c04f5d6a..9ba5204054c2 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/history/RecordPrivilegesChangeListener.java +++ b/listeners/src/main/java/org/fao/geonet/listener/history/RecordPrivilegesChangeListener.java @@ -32,7 +32,7 @@ public class RecordPrivilegesChangeListener extends GenericMetadataEventListener implements ApplicationListener { private String changeMessage = ""; - private String eventType = StatusValue.Events.RECORDPRIVILEGESCHANGE; + private final StatusValue.Events eventType = StatusValue.Events.RECORDPRIVILEGESCHANGE; @Override public String getChangeMessage() { @@ -41,7 +41,7 @@ public String getChangeMessage() { @Override public String getEventType() { - return eventType; + return eventType.getCode(); } @Override diff --git a/listeners/src/main/java/org/fao/geonet/listener/history/RecordProcessingChangeListener.java b/listeners/src/main/java/org/fao/geonet/listener/history/RecordProcessingChangeListener.java index 811eb0dc8336..2134a0239d9e 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/history/RecordProcessingChangeListener.java +++ b/listeners/src/main/java/org/fao/geonet/listener/history/RecordProcessingChangeListener.java @@ -32,7 +32,7 @@ public class RecordProcessingChangeListener extends GenericMetadataEventListener implements ApplicationListener { private String changeMessage = ""; - private String eventType = StatusValue.Events.RECORDPROCESSINGCHANGE; + private final StatusValue.Events eventType = StatusValue.Events.RECORDPROCESSINGCHANGE; @Override public String getChangeMessage() { @@ -41,7 +41,7 @@ public String 
getChangeMessage() { @Override public String getEventType() { - return eventType; + return eventType.getCode(); } @Override diff --git a/listeners/src/main/java/org/fao/geonet/listener/history/RecordRestoredListener.java b/listeners/src/main/java/org/fao/geonet/listener/history/RecordRestoredListener.java index ad9a94b3c7c5..578ba20bdcba 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/history/RecordRestoredListener.java +++ b/listeners/src/main/java/org/fao/geonet/listener/history/RecordRestoredListener.java @@ -31,7 +31,7 @@ public class RecordRestoredListener extends GenericMetadataEventListener implements ApplicationListener { private String changeMessage = ""; - private String eventType = StatusValue.Events.RECORDRESTORED; + private final StatusValue.Events eventType = StatusValue.Events.RECORDRESTORED; @Override public String getChangeMessage() { @@ -40,7 +40,7 @@ public String getChangeMessage() { @Override public String getEventType() { - return eventType; + return eventType.getCode(); } @Override diff --git a/listeners/src/main/java/org/fao/geonet/listener/history/RecordUpdatedListener.java b/listeners/src/main/java/org/fao/geonet/listener/history/RecordUpdatedListener.java index bcda0cf43d8d..958dbf8657cb 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/history/RecordUpdatedListener.java +++ b/listeners/src/main/java/org/fao/geonet/listener/history/RecordUpdatedListener.java @@ -31,7 +31,7 @@ public class RecordUpdatedListener extends GenericMetadataEventListener implements ApplicationListener { private String changeMessage = ""; - private String eventType = StatusValue.Events.RECORDUPDATED; + private final StatusValue.Events eventType = StatusValue.Events.RECORDUPDATED; @Override public String getChangeMessage() { @@ -40,7 +40,7 @@ public String getChangeMessage() { @Override public String getEventType() { - return eventType; + return eventType.getCode(); } @Override diff --git 
a/listeners/src/main/java/org/fao/geonet/listener/history/RecordValidationTriggeredListener.java b/listeners/src/main/java/org/fao/geonet/listener/history/RecordValidationTriggeredListener.java index a34d961f1603..e0e433d4aacb 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/history/RecordValidationTriggeredListener.java +++ b/listeners/src/main/java/org/fao/geonet/listener/history/RecordValidationTriggeredListener.java @@ -32,7 +32,7 @@ public class RecordValidationTriggeredListener extends GenericMetadataEventListe implements ApplicationListener { private String changeMessage = ""; - private String eventType = StatusValue.Events.RECORDVALIDATIONTRIGGERED; + private final StatusValue.Events eventType = StatusValue.Events.RECORDVALIDATIONTRIGGERED; @Override public String getChangeMessage() { @@ -41,7 +41,7 @@ public String getChangeMessage() { @Override public String getEventType() { - return eventType; + return eventType.getCode(); } @Override diff --git a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApprovePublishedRecord.java b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApprovePublishedRecord.java index b335fc9cdeca..546571cec962 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApprovePublishedRecord.java +++ b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApprovePublishedRecord.java @@ -121,7 +121,7 @@ private void changeToApproved(AbstractMetadata md, MetadataStatus previousStatus status.setChangeDate(new ISODate()); status.setUserId(ServiceContext.get().getUserSession().getUserIdAsInt()); - metadataStatus.setStatusExt(status, false); + metadataStatus.setStatusExt(status, true); Log.trace(Geonet.DATA_MANAGER, "Metadata with id " + md.getId() + " automatically approved due to publishing."); } diff --git a/messaging/pom.xml b/messaging/pom.xml index 0bf61674795f..f9261e439327 100644 --- a/messaging/pom.xml +++ b/messaging/pom.xml @@ -5,7 +5,7 @@ geonetwork 
org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/oaipmh/pom.xml b/oaipmh/pom.xml index dbe912e5e318..9ebbe0674c90 100644 --- a/oaipmh/pom.xml +++ b/oaipmh/pom.xml @@ -30,7 +30,7 @@ org.geonetwork-opensource geonetwork - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT diff --git a/pom.xml b/pom.xml index 4c491177d3fc..2dad9a15d5fb 100644 --- a/pom.xml +++ b/pom.xml @@ -29,7 +29,7 @@ org.geonetwork-opensource geonetwork pom - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT GeoNetwork opensource GeoNetwork opensource is a standards based, Free and Open Source catalog application to manage spatially referenced @@ -469,7 +469,7 @@ org.apache.jena apache-jena-libs pom - 3.17.0 + 4.10.0
@@ -554,6 +554,11 @@ commons-email 1.5 + + commons-codec + commons-codec + 1.15 + org.apache.xmlgraphics xmlgraphics-commons @@ -919,6 +924,13 @@ + + com.jayway.jsonpath + json-path + 2.4.0 + + + junit @@ -1155,6 +1167,11 @@ spring-data-jpa ${spring.jpa.version} + + org.springframework.data + spring-data-envers + ${spring.jpa.version} + com.fasterxml.jackson.datatype jackson-datatype-hibernate5 @@ -1280,7 +1297,7 @@ org.apache.jclouds jclouds-all - 2.3.0 + 2.5.0 @@ -1294,7 +1311,6 @@ json 20240205 - @@ -1408,6 +1424,7 @@ index datastorages translationproviders + auditable @@ -1437,6 +1454,21 @@ jmeter
+ + macOS-M-series + + + mac + aarch64 + + + + darwin-aarch64 + kibana.sh + darwin-aarch64 + tar.gz + + macOS @@ -1451,6 +1483,21 @@ darwin-x86 tar.gz + + + macOS_aarch64 + + + mac + aarch64 + + + + darwin-aarch64 + kibana.sh + darwin-aarch64 + tar.gz + windows @@ -1550,7 +1597,7 @@ 8080 8090 - 8.14.0 + 8.14.3 linux-x86_64 tar.gz http @@ -1614,12 +1661,12 @@ - 30.0 - 1.19.0 + 32.0 + 1.20.0 42.7.3 - 5.3.33 - 5.8.11 + 5.3.39 + 5.8.15 2.7.18 2.7.0 1.8.0 diff --git a/release/build.properties b/release/build.properties index f77dcb0ce66c..d730f4f0d033 100644 --- a/release/build.properties +++ b/release/build.properties @@ -5,11 +5,11 @@ homepage=https://geonetwork-opensource.org supportEmail=geonetwork-users@lists.sourceforge.net # Application version properties -version=4.4.6 +version=4.4.7 subVersion=SNAPSHOT # Java runtime properties javaVersion=11 javaDisplayVersion=11 -jreUrl=https://adoptium.net/en-GB/temurin/releases/?version=4.4.6 +jreUrl=https://adoptium.net/en-GB/temurin/releases/?version=4.4.7 jreName=AdoptOpenJDK diff --git a/release/pom.xml b/release/pom.xml index 41e95a1b7824..b63014f0f57b 100644 --- a/release/pom.xml +++ b/release/pom.xml @@ -7,7 +7,7 @@ org.geonetwork-opensource geonetwork - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT gn-release diff --git a/schemas-test/pom.xml b/schemas-test/pom.xml index e6e2bbe40211..60a6f0c668c5 100644 --- a/schemas-test/pom.xml +++ b/schemas-test/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 jar diff --git a/schemas/config-editor.xsd b/schemas/config-editor.xsd index 31455f489a34..b98b85fd1a61 100644 --- a/schemas/config-editor.xsd +++ b/schemas/config-editor.xsd @@ -1094,6 +1094,7 @@ the mandatory section with no name and then the inner elements. + @@ -2530,4 +2531,66 @@ added in the directive panel eg. 
`text` for guidelines or add `action` to easily + + + + + + + + + + + + + + + + + + + + + diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/layout/layout-custom-fields-keywords.xsl b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/layout/layout-custom-fields-keywords.xsl index 10620e6653e5..209b8c35e531 100644 --- a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/layout/layout-custom-fields-keywords.xsl +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/layout/layout-custom-fields-keywords.xsl @@ -179,7 +179,7 @@ in default language --> diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/layout/layout-custom-fields.xsl b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/layout/layout-custom-fields.xsl index d0c6585bd351..50b94e8a144f 100644 --- a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/layout/layout-custom-fields.xsl +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/layout/layout-custom-fields.xsl @@ -139,81 +139,6 @@ - - - - - - - - - - - - - - -
- -
- - -
-
- - -
-
- - - - -
-
-
- - - - diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/layout/layout.xsl b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/layout/layout.xsl index 4f807c0ecdec..841360a02b43 100644 --- a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/layout/layout.xsl +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/layout/layout.xsl @@ -34,6 +34,7 @@ +
+ + + + + + + + + @@ -371,12 +383,25 @@
+ + + + + + + + + @@ -385,6 +410,7 @@ + - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/eng/labels.xml b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/eng/labels.xml index d24c02eb5c86..1820bb3241d8 100644 --- a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/eng/labels.xml +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/eng/labels.xml @@ -588,14 +588,6 @@ Degree of detail in the grid dataset optional - - - - - - - -
@@ -1323,6 +1315,10 @@ Designation of the locale language + + + ISO 3 letters code. + Additional metadata language @@ -2643,14 +2639,6 @@ Ground sample distance Provide a distance if no equivalent Scale is documented - - - - - - - - diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/eng/schematron-rules-url-check.xml b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/eng/schematron-rules-url-check.xml new file mode 100644 index 000000000000..1563da639bee --- /dev/null +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/eng/schematron-rules-url-check.xml @@ -0,0 +1,6 @@ + + URL Validation + Online Resource Link Check +
URL test failed. Current status is
+
Url is valid
+
diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/eng/strings.xml b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/eng/strings.xml index b3a0372af0a5..21fa0925a451 100644 --- a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/eng/strings.xml +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/eng/strings.xml @@ -15,6 +15,9 @@ Value is not valid for the field Potential values are: + Select a feature type + New feature type + Information about the dataset diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/fre/codelists.xml b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/fre/codelists.xml index 98cd9e48a1d2..46a6a71ae76c 100644 --- a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/fre/codelists.xml +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/fre/codelists.xml @@ -38,7 +38,7 @@ Le téléphone offre un service voacal - facsimilie + facsimile Le téléphone offre un service de fax @@ -1726,12 +1726,12 @@ - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/fre/labels.xml b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/fre/labels.xml index b17c9b295b2e..8d2dfd946997 100644 --- a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/fre/labels.xml +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/fre/labels.xml @@ -1396,6 +1396,10 @@ Langue principale de la fiche. Code ISO de la langue à 3 caractères.
+ + + Langue de la donnée. Code ISO de la langue à 3 caractères. + Langue additionnelle. Code ISO de la langue à 3 caractères. @@ -4213,14 +4217,6 @@ version en tant que mot-clé complémentaire(s). Le numéro de version doit comp Distance de référence, mesurée au sol Résolution au sol Saisir une distance si pas d'échelle - - - - - - - - diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/fre/schematron-rules-url-check.xml b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/fre/schematron-rules-url-check.xml new file mode 100644 index 000000000000..1563da639bee --- /dev/null +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/fre/schematron-rules-url-check.xml @@ -0,0 +1,6 @@ + + URL Validation + Online Resource Link Check +
URL test failed. Current status is
+
Url is valid
+
diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/fre/strings.xml b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/fre/strings.xml index cce5262ac026..081e6c154979 100644 --- a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/fre/strings.xml +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/fre/strings.xml @@ -14,6 +14,8 @@ La valeur n'est pas valide pour le champ Les valeurs possibles sont : + Choisir une table attributaire + Nouvelle table Comprendre la ressource Service disponible et données associées diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/ger/schematron-rules-url-check.xml b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/ger/schematron-rules-url-check.xml new file mode 100644 index 000000000000..1563da639bee --- /dev/null +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/loc/ger/schematron-rules-url-check.xml @@ -0,0 +1,6 @@ + + URL Validation + Online Resource Link Check +
URL test failed. Current status is
+
Url is valid
+
diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/dcat-full.xsl b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/dcat-core.xsl similarity index 88% rename from schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/dcat-full.xsl rename to schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/dcat-core.xsl index 534d8dabef34..2dbaa3c0dd36 100644 --- a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/dcat-full.xsl +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/dcat-core.xsl @@ -1,7 +1,6 @@ - - - - - + + diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/dcat-brief.xsl b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/dcat.xsl similarity index 100% rename from schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/dcat-brief.xsl rename to schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/dcat.xsl diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/eu-dcat-ap-hvd.xsl b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/eu-dcat-ap-hvd.xsl new file mode 100644 index 000000000000..5fa84c146f13 --- /dev/null +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/eu-dcat-ap-hvd.xsl @@ -0,0 +1,27 @@ + + + + + diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/eu-dcat-ap-mobility.xsl b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/eu-dcat-ap-mobility.xsl new file mode 100644 index 000000000000..b9168b58b977 --- /dev/null +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/eu-dcat-ap-mobility.xsl @@ -0,0 +1,27 @@ + + + + + diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/eu-dcat-ap.xsl b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/eu-dcat-ap.xsl new file mode 100644 index 
000000000000..1541e24dae01 --- /dev/null +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/eu-dcat-ap.xsl @@ -0,0 +1,27 @@ + + + + + diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/eu-geodcat-ap-semiceu.xsl b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/eu-geodcat-ap-semiceu.xsl new file mode 100644 index 000000000000..f6a3f2910a25 --- /dev/null +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/eu-geodcat-ap-semiceu.xsl @@ -0,0 +1,27 @@ + + + + + diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/eu-geodcat-ap.xsl b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/eu-geodcat-ap.xsl new file mode 100644 index 000000000000..e0578159f69c --- /dev/null +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/eu-geodcat-ap.xsl @@ -0,0 +1,27 @@ + + + + + diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/process/encode-keyword-as-anchor.xsl b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/process/encode-keyword-as-anchor.xsl new file mode 100644 index 000000000000..9210acaed5da --- /dev/null +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/process/encode-keyword-as-anchor.xsl @@ -0,0 +1,85 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/schematron/schematron-rules-dcat-ap-hvd.sch b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/schematron/schematron-rules-dcat-ap-hvd.sch new file mode 100644 index 000000000000..c10fc68d32c9 --- /dev/null +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/schematron/schematron-rules-dcat-ap-hvd.sch @@ -0,0 +1,432 @@ + + + + + + DCAT-AP High Value Dataset (HVD) + + + + + + + + + + + + + + + + + + + + + + + + + + Applicable legislation is mandatory. 
Use a keyword with an Anchor pointing to + http://data.europa.eu/eli/reg_impl/2023/138/oj. + + + La législation applicable est obligatoire. Utilisez un mot-clé avec une ancre pointant vers + http://data.europa.eu/eli/reg_impl/2023/138/oj. + + Applicable legislation keyword found. + + La législation applicable HVD est encodée. + + + + No implementing rule or other specification found. Check the data quality + report specification to add one. For INSPIRE datasets, this is a data specification conformity. + + + Aucune règle d'implémentation ou autre spécification n'a été trouvée. Vérifiez la spécification du rapport de + qualité des données + pour en ajouter une. Pour les ensembles de données INSPIRE, il s'agit d'une conformité aux spécifications des + données. + + + Implementing rules or specifications found:. + + + Règles ou spécifications encodées :. + + + + + Contact information that can be used for sending comments about the Dataset is missing. + + + Les informations de contact pouvant être utilisées pour envoyer des commentaires sur l'ensemble de données sont + manquantes. + + + Contact information that can be used for sending comments about the Dataset defined:. + + + Contact pouvant être utilisées pour envoyer des commentaires sur l'ensemble de données encodé :. + + + + + The HVD category to which this Dataset belongs is missing. + + + La catégorie HVD à laquelle appartient cet ensemble de données est manquante. + + + HVD categories found:. + + + Catégories HVD encodées :. + + + + + The HVD IR is a quality improvement of existing datasets. The intention is that HVD datasets are publicly and open + accessible. Therefore a Distribution is expected to be present. Add an online resource with a download protocol or + function. + + + Les règles d'implémentation HVD ont pour objectif une amélioration de la qualité des ensembles de données existants. + L'objectif est que les ensembles de données HVD soient accessibles au public et en libre accès. 
Par conséquent, une + distribution est attendue. Ajoutez une ressource en ligne avec un protocole ou une fonction de téléchargement. + + + Distribution URLs found:. + + + URL(s) de distribution encodées :. + + + + The root location or primary endpoint of the service (an IRI) is missing. Add an operation with a protocol which is + not considered as an endpoint description (ie.) or a URL containing . + + + L'URL principale du service (un IRI) est manquant. Ajoutez une opération avec un protocole qui n'est pas une + description de service + (ie.) ou une URL contenant . + + + End point URL found:. + + + URL(s) du service encodées :. + + + + An API in the context of HVD is not a standalone resource. It is used to open up HVD datasets. Therefore each Data + Service is at least tightly connected with a Dataset. + Add at least one operatesOn element with a xlink:href or uuidref. + + + Une API dans le contexte de HVD n'est pas une ressource autonome. Elle est utilisée pour ouvrir des ensembles de + données HVD. Par conséquent, chaque service de données est au moins étroitement lié à un ensemble de données. + Ajoutez au moins un élément operateOn avec un xlink:href ou un uuidref. + + + Operates on dataset found:. + + + Données associées encodées :. + + + + + A page that provides additional information about the Data Service is missing. + Add at least one online resource with a function documentation, an additional documentation or a URL pointing to https://directory.spatineo.com. + + + Il manque une page qui fournit des informations supplémentaires sur le service de données. + Ajoutez au moins une ressource en ligne avec une function documentation, une documentation supplémentaire ou une URL pointant vers https://directory.spatineo.com. + + + Documentation pages found:. + + + Documentations encodées :. 
+ + + + + + HVD + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/schematron/schematron-rules-url-check.sch b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/schematron/schematron-rules-url-check.sch new file mode 100644 index 000000000000..969f3ff5a71c --- /dev/null +++ b/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/schematron/schematron-rules-url-check.sch @@ -0,0 +1,44 @@ + + + + URL checks + + + + + + + + + + + + + + + + + + + + + + $loc/strings/invalidURLCheck + + + + + + + + - + + + + + '' + + + + + diff --git a/schemas/iso19115-3.2018/src/main/resources/config-spring-geonetwork.xml b/schemas/iso19115-3.2018/src/main/resources/config-spring-geonetwork.xml index ee08ec080ca3..390c62c2c686 100644 --- a/schemas/iso19115-3.2018/src/main/resources/config-spring-geonetwork.xml +++ b/schemas/iso19115-3.2018/src/main/resources/config-spring-geonetwork.xml @@ -6,6 +6,21 @@ + + + + + + + + + + + + + + + mdb:identificationInfo/*/mri:citation/*/cit:title/gco:CharacterString diff --git a/schemas/iso19115-3.2018/src/test/java/org/fao/geonet/schema/LanguageXslProcessTest.java b/schemas/iso19115-3.2018/src/test/java/org/fao/geonet/schema/LanguageXslProcessTest.java index c3f5a966ce3f..b91a3eddd626 100644 --- a/schemas/iso19115-3.2018/src/test/java/org/fao/geonet/schema/LanguageXslProcessTest.java +++ b/schemas/iso19115-3.2018/src/test/java/org/fao/geonet/schema/LanguageXslProcessTest.java @@ -1,3 +1,25 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software 
Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ package org.fao.geonet.schema; import org.fao.geonet.schema.iso19115_3_2018.ISO19115_3_2018SchemaPlugin; @@ -14,9 +36,6 @@ import static org.junit.Assert.assertThat; import static org.xmlunit.matchers.EvaluateXPathMatcher.hasXPath; -/** - * Created by francois on 3/24/14. - */ public class LanguageXslProcessTest extends XslProcessTest { public LanguageXslProcessTest() { diff --git a/schemas/iso19115-3.2018/src/test/java/org/fao/geonet/schema/XslConversionTest.java b/schemas/iso19115-3.2018/src/test/java/org/fao/geonet/schema/XslConversionTest.java new file mode 100644 index 000000000000..926e6045a99e --- /dev/null +++ b/schemas/iso19115-3.2018/src/test/java/org/fao/geonet/schema/XslConversionTest.java @@ -0,0 +1,75 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ +package org.fao.geonet.schema; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import org.fao.geonet.schema.iso19115_3_2018.ISO19115_3_2018SchemaPlugin; +import org.fao.geonet.schemas.XslProcessTest; +import org.fao.geonet.utils.Xml; +import org.jdom.Element; +import static org.junit.Assert.assertFalse; +import org.junit.Test; +import org.xmlunit.builder.DiffBuilder; +import org.xmlunit.builder.Input; +import org.xmlunit.diff.DefaultNodeMatcher; +import org.xmlunit.diff.Diff; +import org.xmlunit.diff.ElementSelectors; + +public class XslConversionTest extends XslProcessTest { + + public XslConversionTest() { + super(); + this.setNs(ISO19115_3_2018SchemaPlugin.allNamespaces); + } + + @Test + public void testOdsConversion() throws Exception { + xslFile = Paths.get(testClass.getClassLoader().getResource("convert/fromJsonOpenDataSoft.xsl").toURI()); + xmlFile = Paths.get(testClass.getClassLoader().getResource("ods.xml").toURI()); + Path jsonFile = Paths.get(testClass.getClassLoader().getResource("ods.json").toURI()); + String jsonString = Files.readString(jsonFile); + Element xmlFromJSON = Xml.getXmlFromJSON(jsonString); + xmlFromJSON.setName("record"); + xmlFromJSON.addContent(new Element("nodeUrl").setText("https://www.odwb.be")); + + Element inputElement = Xml.loadFile(xmlFile); + String expectedXml = Xml.getString(inputElement); + + Element 
resultElement = Xml.transform(xmlFromJSON, xslFile); + String resultOfConversion = Xml.getString(resultElement); + + Diff diff = DiffBuilder + .compare(Input.fromString(resultOfConversion)) + .withTest(Input.fromString(expectedXml)) + .withNodeMatcher(new DefaultNodeMatcher(ElementSelectors.byName)) + .normalizeWhitespace() + .ignoreComments() + .checkForSimilar() + .build(); + assertFalse( + String.format("Differences: %s", diff.toString()), + diff.hasDifferences()); + } +} diff --git a/schemas/iso19115-3.2018/src/test/java/org/fao/geonet/util/XslUtil.java b/schemas/iso19115-3.2018/src/test/java/org/fao/geonet/util/XslUtil.java index d26836d4400f..8d40393b2e08 100644 --- a/schemas/iso19115-3.2018/src/test/java/org/fao/geonet/util/XslUtil.java +++ b/schemas/iso19115-3.2018/src/test/java/org/fao/geonet/util/XslUtil.java @@ -1,7 +1,32 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ package org.fao.geonet.util; public class XslUtil { public static String twoCharLangCode(String iso3code) { return iso3code.substring(0, 2); } + public static String threeCharLangCode(String iso2code) { + return "fre"; + } } diff --git a/schemas/iso19115-3.2018/src/test/resources/metadata-for-editing-light.xml b/schemas/iso19115-3.2018/src/test/resources/metadata-for-editing-light.xml index 5a2447a7e43b..d4cc5342f647 100644 --- a/schemas/iso19115-3.2018/src/test/resources/metadata-for-editing-light.xml +++ b/schemas/iso19115-3.2018/src/test/resources/metadata-for-editing-light.xml @@ -1813,12 +1813,12 @@ Acteur qui a assuré la réalisation de la ressource,éventuellement en faisant - staticMap + map-static - interactiveMap + map-interactive @@ -8929,12 +8929,12 @@ Acteur qui a assuré la réalisation de la ressource,éventuellement en faisant - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19115-3.2018/src/test/resources/metadata-for-editing.xml b/schemas/iso19115-3.2018/src/test/resources/metadata-for-editing.xml index 147c2608c84c..0b0b5b435e6b 100644 --- a/schemas/iso19115-3.2018/src/test/resources/metadata-for-editing.xml +++ b/schemas/iso19115-3.2018/src/test/resources/metadata-for-editing.xml @@ -584,7 +584,7 @@ La liste des changements d'état sont accessibles ici : Import de fichiers Mode d'insertion : Copier/Coller - Carte interactive + Carte interactive

Information et cartes interactives

Vous pouvez trouver des cartes interactives en cherchant dans Geonetwork pour des jeux de données numérique avec une carte interactive, ou bien en vous connectant directement à un serveur cartographique existant

@@ -20334,12 +20334,12 @@ Acteur qui a assuré la réalisation de la ressource,éventuellement en faisant - staticMap + map-static - interactiveMap + map-interactive @@ -27450,12 +27450,12 @@ Acteur qui a assuré la réalisation de la ressource,éventuellement en faisant - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19115-3.2018/src/test/resources/metadata-iso19139-for-editing.xml b/schemas/iso19115-3.2018/src/test/resources/metadata-iso19139-for-editing.xml index 034c615c0dc7..a6e50d22f17d 100644 --- a/schemas/iso19115-3.2018/src/test/resources/metadata-iso19139-for-editing.xml +++ b/schemas/iso19115-3.2018/src/test/resources/metadata-iso19139-for-editing.xml @@ -584,7 +584,7 @@ La liste des changements d'état sont accessibles ici : Import de fichiers Mode d'insertion : Copier/Coller - Carte interactive + Carte interactive

Information et cartes interactives

Vous pouvez trouver des cartes interactives en cherchant dans Geonetwork pour des jeux de données numérique avec une carte interactive, ou bien en vous connectant directement à un serveur cartographique existant

@@ -20334,12 +20334,12 @@ Acteur qui a assuré la réalisation de la ressource,éventuellement en faisant - staticMap + map-static - interactiveMap + map-interactive @@ -27450,12 +27450,12 @@ Acteur qui a assuré la réalisation de la ressource,éventuellement en faisant - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19115-3.2018/src/test/resources/ods.json b/schemas/iso19115-3.2018/src/test/resources/ods.json new file mode 100644 index 000000000000..daaba03772d9 --- /dev/null +++ b/schemas/iso19115-3.2018/src/test/resources/ods.json @@ -0,0 +1,381 @@ +{ + "links": [ + { + "rel": "self", + "href": "https://www.odwb.be/api/explore/v2.0/catalog/datasets/collecte-de-sang-centre-de-prelevement-fixes" + }, + { + "rel": "datasets", + "href": "https://www.odwb.be/api/explore/v2.0/catalog/datasets" + }, + { + "rel": "records", + "href": "https://www.odwb.be/api/explore/v2.0/catalog/datasets/collecte-de-sang-centre-de-prelevement-fixes/records" + }, + { + "rel": "exports", + "href": "https://www.odwb.be/api/explore/v2.0/catalog/datasets/collecte-de-sang-centre-de-prelevement-fixes/exports" + }, + { + "rel": "facets", + "href": "https://www.odwb.be/api/explore/v2.0/catalog/datasets/collecte-de-sang-centre-de-prelevement-fixes/facets" + }, + { + "rel": "reuses", + "href": "https://www.odwb.be/api/explore/v2.0/catalog/datasets/collecte-de-sang-centre-de-prelevement-fixes/reuses" + } + ], + "dataset": { + "visibility": "domain", + "dataset_id": "collecte-de-sang-centre-de-prelevement-fixes", + "dataset_uid": "da_lac5du", + "has_records": true, + "features": [ + "calendar", + "geo", + "analyze", + "timeserie", + "custom_view" + ], + "attachments": [], + "alternative_exports": [], + "data_visible": true, + "fields": [ + { + "name": "identifiant", + "description": null, + "annotations": { + + }, + "label": "identifiant", + "type": "text" + }, + { + "name": "code_collecte", + "description": null, + "annotations": { + + }, + "label": "code_collecte", + 
"type": "text" + }, + { + "name": "nom", + "description": null, + "annotations": { + "sortable": true + }, + "label": "nom", + "type": "text" + }, + { + "name": "type_de_collecte", + "description": null, + "annotations": { + "facet": true + }, + "label": "type_de_collecte", + "type": "text" + }, + { + "name": "description", + "description": null, + "annotations": { + + }, + "label": "description", + "type": "text" + }, + { + "name": "url_source", + "description": null, + "annotations": { + + }, + "label": "url_source", + "type": "text" + }, + { + "name": "latitude", + "description": null, + "annotations": { + + }, + "label": "latitude", + "type": "text" + }, + { + "name": "longitude", + "description": null, + "annotations": { + + }, + "label": "longitude", + "type": "text" + }, + { + "name": "rue", + "description": null, + "annotations": { + + }, + "label": "rue", + "type": "text" + }, + { + "name": "code_postal", + "description": null, + "annotations": { + + }, + "label": "code postal", + "type": "text" + }, + { + "name": "ville", + "description": null, + "annotations": { + "facet": true + }, + "label": "ville", + "type": "text" + }, + { + "name": "date_collecte", + "description": null, + "annotations": { + "facetsort": "-count", + "timeserie_precision": "day" + }, + "label": "date_collecte", + "type": "date" + }, + { + "name": "horaire_am1", + "description": null, + "annotations": { + + }, + "label": "horaire_am1", + "type": "text" + }, + { + "name": "horaire_am2", + "description": null, + "annotations": { + + }, + "label": "horaire_am2", + "type": "text" + }, + { + "name": "horaire_pm1", + "description": null, + "annotations": { + + }, + "label": "horaire_pm1", + "type": "text" + }, + { + "name": "horaire_pm2", + "description": null, + "annotations": { + + }, + "label": "horaire_pm2", + "type": "text" + }, + { + "name": "collecte_publique", + "description": null, + "annotations": { + + }, + "label": "collecte_publique", + "type": "text" + }, + { + "name": 
"collecte_avec_rdv", + "description": null, + "annotations": { + + }, + "label": "collecte_avec_rdv", + "type": "text" + }, + { + "name": "statut", + "description": null, + "annotations": { + + }, + "label": "statut", + "type": "text" + }, + { + "name": "infos_complementaires", + "description": null, + "annotations": { + + }, + "label": "Infos complémentaires", + "type": "text" + }, + { + "name": "geopointarcgis", + "description": null, + "annotations": { + + }, + "label": "geopointarcgis", + "type": "geo_point_2d" + }, + { + "name": "commune", + "description": null, + "annotations": { + + }, + "label": "commune", + "type": "text" + }, + { + "name": "province", + "description": null, + "annotations": { + + }, + "label": "province", + "type": "text" + } + ], + "metas": { + "dcat": { + "created": null, + "issued": null, + "creator": null, + "contributor": null, + "contact_name": "Thomas Paulus 084 32 16 00 pendant les heures d'ouverture.", + "contact_email": "sfs.communication@croix-rouge.be", + "accrualperiodicity": "daily", + "spatial": null, + "temporal": null, + "granularity": null, + "dataquality": "lineage", + "publisher_type": [ + "NonProfitOrganisation" + ], + "conforms_to": null, + "temporal_coverage_start": "2020", + "temporal_coverage_end": "2022", + "accessRights": null + }, + "semantic": { + "rml_mapping": null, + "classes": null, + "properties": null + }, + "default": { + "title": "Tous les lieux de collecte de sang en Région wallonne et à Bruxelles", + "description": "\u003Cp\u003E\u003Cfont face=\"Open Sans, sans-serif\"\u003E\u003Cspan style=\"font-size: 12px;\"\u003EToutes les collectes de sang organisées par le Service du Sang de la Croix-Rouge, en Région wallonne et à Bruxelles, y compris les dons de plasma et de plaquettes. 
\u003C/span\u003E\u003C/font\u003E\u003C/p\u003E\u003Cp\u003E\u003Cfont face=\"Open Sans, sans-serif\"\u003E\u003Cspan style=\"font-size: 12px;\"\u003ERecherchez le lieu de collecte le plus proche de chez vous et prenez rendez-vous si nécessaire.\u003C/span\u003E\u003C/font\u003E\u003Cbr\u003E\u003C/p\u003E", + "theme": [ + "Qualité de Vie" + ], + "keyword": [ + "Centre de prélèvement", + "Sang", + "don de sang", + "Croix rouge", + "santé", + "Collecte" + ], + "license": "Creative Commons - CC0", + "license_url": "http://www.opendefinition.org/licenses/cc-by/", + "language": "fr", + "metadata_languages": [ + "fr" + ], + "timezone": [ + "Europe/Brussels" + ], + "modified": "2024-08-29T08:09:20.651000+00:00", + "modified_updates_on_metadata_change": true, + "modified_updates_on_data_change": true, + "data_processed": "2024-08-29T08:09:20.651000+00:00", + "metadata_processed": "2024-08-29T08:09:20.896000+00:00", + "geographic_reference": [ + "be_40_03000", + "be_40_04000" + ], + "geographic_reference_auto": false, + "territory": [ + "Région wallonne", + "Région de Bruxelles-Capitale" + ], + "geometry_types": [ + "Point" + ], + "bbox": { + "type": "Feature", + "geometry": { + "coordinates": [ + [ + [6.17299164645374, 50.8502430282533], + [3.97492295131087, 50.8502430282533], + [3.97492295131087, 49.9810485122725], + [6.17299164645374, 49.9810485122725], + [6.17299164645374, 50.8502430282533] + ] + ], + "type": "Polygon" + }, + "properties": { + + } + }, + "publisher": "Croix-rouge de Belgique - Service du sang", + "references": "https://www.donneurdesang.be", + "records_count": 2355, + "attributions": [ + "National Geographic Institute (NGI-IGN, ngi.be)" + ], + "source_domain": null, + "source_domain_title": null, + "source_domain_address": null, + "source_dataset": null, + "shared_catalog": null, + "federated": false, + "oauth_scope": null, + "parent_domain": null, + "update_frequency": null + }, + "inspire": { + "theme": null, + "type": null, + "file_identifier": 
null, + "hierarchy_level": null, + "hierarchy_level_name": null, + "spatial_resolution": null, + "topologic_consistency": null, + "contact_individual_name": null, + "contact_position": null, + "contact_address": null, + "contact_email": null, + "identification_purpose": null, + "extend_description": null, + "extend_bounding_box_westbound_longitude": null, + "extend_bounding_box_eastbound_longitude": null, + "extend_bounding_box_southbound_latitude": null, + "extend_bounding_box_northbound_latitude": null + }, + "custom": { + "echelon-territorial": [ + "Régional" + ], + "high-value-dataset": false, + "nom-moissonneur": null + } + } + } +} diff --git a/schemas/iso19115-3.2018/src/test/resources/ods.xml b/schemas/iso19115-3.2018/src/test/resources/ods.xml new file mode 100644 index 000000000000..5caa315c5696 --- /dev/null +++ b/schemas/iso19115-3.2018/src/test/resources/ods.xml @@ -0,0 +1,854 @@ + + + + + + collecte-de-sang-centre-de-prelevement-fixes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Croix-rouge de Belgique - Service du sang + + + + + + + + + + + + + + + + + + + + 2024-08-29T08:09:20.651000+00:00 + + + + + + + + + + 2024-08-29T08:09:20.651000+00:00 + + + + + + + + + + ISO 19115-3 + + + + + + + + + + + + 2355 + + + + + + + + + + + Tous les lieux de collecte de sang en Région wallonne et à Bruxelles + + + + + 2024-08-29T08:09:20.651000+00:00 + + + + + + + + + + 2024-08-29T08:09:20.651000+00:00 + + + + + + + + + + collecte-de-sang-centre-de-prelevement-fixes + + + + + + + <p><font face="Open Sans, sans-serif"><span style="font-size: 12px;">Toutes les collectes de sang organisées par le Service du Sang de la Croix-Rouge, en Région wallonne et à Bruxelles, y compris les dons de plasma et de plaquettes. 
</span></font></p><p><font face="Open Sans, sans-serif"><span style="font-size: 12px;">Recherchez le lieu de collecte le plus proche de chez vous et prenez rendez-vous si nécessaire.</span></font><br></p> + + + National Geographic Institute (NGI-IGN, ngi.be) + + + + + publisher + + + + + Croix-rouge de Belgique - Service du sang + + + + + + + + + + + + + + Croix-rouge de Belgique - Service du sang + + + + + + + sfs.communication@croix-rouge.be + + + + + + + + + Thomas Paulus 084 32 16 00 pendant les heures d'ouverture. + + + + + + + + + + health + + + + + + + + 3.97492295131087 + + + 6.17299164645374 + + + 49.9810485122725 + + + 50.8502430282533 + + + + + + + + + 2020 + + + 2022 + + + + + + + + + + + be_40_03000 + + + + + + + + + + + be_40_04000 + + + + + + + + + + + + + + + + + + Région wallonne + + + + + + + + + + Région de Bruxelles-Capitale + + + + + + + + + + Qualité de Vie + + + + + + + + + + Centre de prélèvement + + + Sang + + + don de sang + + + Croix rouge + + + santé + + + Collecte + + + + + + + + + Creative Commons - CC0 + + + + + http://www.opendefinition.org/licenses/cc-by/ + + + + + + + + + + + + + Creative Commons - CC0 + + + + + + + + + + + + + + + + + + + + + + + Tous les lieux de collecte de sang en Région wallonne et à Bruxelles + + false + + + + identifiant + + identifiant + + 1 + + + + text + + + + + + + + code_collecte + + code_collecte + + 1 + + + + text + + + + + + + + nom + + nom + + 1 + + + + text + + + + + + + + type_de_collecte + + type_de_collecte + + 1 + + + + text + + + + + + + + description + + description + + 1 + + + + text + + + + + + + + url_source + + url_source + + 1 + + + + text + + + + + + + + latitude + + latitude + + 1 + + + + text + + + + + + + + longitude + + longitude + + 1 + + + + text + + + + + + + + rue + + rue + + 1 + + + + text + + + + + + + + code_postal + + code postal + + 1 + + + + text + + + + + + + + ville + + ville + + 1 + + + + text + + + + + + + + date_collecte + + date_collecte + + 1 + + + + date + + + + + 
+ + + horaire_am1 + + horaire_am1 + + 1 + + + + text + + + + + + + + horaire_am2 + + horaire_am2 + + 1 + + + + text + + + + + + + + horaire_pm1 + + horaire_pm1 + + 1 + + + + text + + + + + + + + horaire_pm2 + + horaire_pm2 + + 1 + + + + text + + + + + + + + collecte_publique + + collecte_publique + + 1 + + + + text + + + + + + + + collecte_avec_rdv + + collecte_avec_rdv + + 1 + + + + text + + + + + + + + statut + + statut + + 1 + + + + text + + + + + + + + infos_complementaires + + Infos complémentaires + + 1 + + + + text + + + + + + + + geopointarcgis + + geopointarcgis + + 1 + + + + geo_point_2d + + + + + + + + commune + + commune + + 1 + + + + text + + + + + + + + province + + province + + 1 + + + + text + + + + + + + + + + + + + + + + + + + + https://www.odwb.be/api/explore/v2.1/catalog/datasets/collecte-de-sang-centre-de-prelevement-fixes/exports/csv?use_labels=true + + + WWW:DOWNLOAD:text/csv + + + csv + + + csv + + + + + + + + + + https://www.odwb.be/api/explore/v2.1/catalog/datasets/collecte-de-sang-centre-de-prelevement-fixes/exports/json?use_labels=true + + + WWW:DOWNLOAD:application/json + + + json + + + json + + + + + + + + + + https://www.odwb.be/api/explore/v2.1/catalog/datasets/collecte-de-sang-centre-de-prelevement-fixes/exports/geojson?use_labels=true + + + WWW:DOWNLOAD:application/vnd.geo+json + + + geojson + + + geojson + + + + + + + + + + https://www.odwb.be/api/explore/v2.1/catalog/datasets/collecte-de-sang-centre-de-prelevement-fixes/exports/shp?use_labels=true + + + WWW:DOWNLOAD:x-gis/x-shapefile + + + shp + + + shp + + + + + + + + + + + + + + https://www.odwb.be/explore/dataset/collecte-de-sang-centre-de-prelevement-fixes/information/ + + + WWW:LINK:LANDING_PAGE + + + Landing Page + + + + + + + + + + https://www.donneurdesang.be + + + WWW:LINK + + + + + + + + + + + lineage + + + + + + + + + + + diff --git a/schemas/iso19139/pom.xml b/schemas/iso19139/pom.xml index 1f0fb2404f10..303647a36c4c 100644 --- a/schemas/iso19139/pom.xml +++ 
b/schemas/iso19139/pom.xml @@ -5,7 +5,7 @@ gn-schemas org.geonetwork-opensource.schemas - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/schemas/iso19139/src/main/java/org/fao/geonet/schema/iso19139/ISO19139SchemaPlugin.java b/schemas/iso19139/src/main/java/org/fao/geonet/schema/iso19139/ISO19139SchemaPlugin.java index a5eef93bb38e..e3a484700658 100644 --- a/schemas/iso19139/src/main/java/org/fao/geonet/schema/iso19139/ISO19139SchemaPlugin.java +++ b/schemas/iso19139/src/main/java/org/fao/geonet/schema/iso19139/ISO19139SchemaPlugin.java @@ -607,6 +607,31 @@ public Element processElement(Element el, } + @Override + public boolean duplicateElementsForMultilingual() { + return false; + } + + @Override + public List getMetadataLanguages(Element metadata) { + try { + return Xml.selectNodes(metadata, ".//gmd:locale/gmd:PT_Locale/@id", allNamespaces.asList()) + .stream() + .filter(Attribute.class::isInstance) + .map(node -> ((Attribute)node).getValue()) + .filter(s -> s != null && !s.isBlank()) + .collect(Collectors.toList()); + } catch (JDOMException ignored) { + } + return Collections.emptyList(); + } + + @Override + public boolean isMultilingualElementType(String elementType) { + // Not required in ISO schemas, only required for schemas where duplicateElementsForMultilingual returns true. + return false; + } + /** * Checks if an element requires processing in {@link #processElement(Element, String, String, String)}. 
* diff --git a/schemas/iso19139/src/main/plugin/iso19139/convert/OGCWMCtoISO19139/OGCWMC-OR-OWSC-to-ISO19139.xsl b/schemas/iso19139/src/main/plugin/iso19139/convert/OGCWMCtoISO19139/OGCWMC-OR-OWSC-to-ISO19139.xsl index 9d5eb8d5384b..6753f9c1f823 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/convert/OGCWMCtoISO19139/OGCWMC-OR-OWSC-to-ISO19139.xsl +++ b/schemas/iso19139/src/main/plugin/iso19139/convert/OGCWMCtoISO19139/OGCWMC-OR-OWSC-to-ISO19139.xsl @@ -45,9 +45,8 @@ - - - + @@ -75,45 +74,7 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + @@ -318,4 +279,45 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
diff --git a/schemas/iso19139/src/main/plugin/iso19139/convert/OGCWMCtoISO19139/identification.xsl b/schemas/iso19139/src/main/plugin/iso19139/convert/OGCWMCtoISO19139/identification.xsl index 49a8f59750cd..0e07348d9108 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/convert/OGCWMCtoISO19139/identification.xsl +++ b/schemas/iso19139/src/main/plugin/iso19139/convert/OGCWMCtoISO19139/identification.xsl @@ -22,7 +22,6 @@ ows-context:General/ows:Title"/> - @@ -70,50 +69,10 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - - @@ -128,13 +87,11 @@ - - @@ -151,7 +108,5 @@ - - - + \ No newline at end of file diff --git a/schemas/iso19139/src/main/plugin/iso19139/convert/thesaurus-transformation.xsl b/schemas/iso19139/src/main/plugin/iso19139/convert/thesaurus-transformation.xsl index a27b85bcb185..9e3103d71c43 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/convert/thesaurus-transformation.xsl +++ b/schemas/iso19139/src/main/plugin/iso19139/convert/thesaurus-transformation.xsl @@ -136,6 +136,10 @@ + + + + + + else $thesaurus/title"/> - + @@ -341,95 +347,31 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + - - + + - - - - - - - - - - - - + + + + codeListValue="{name(current-group()[1])}"/> - + + + + + + + + + + + + + + core + extended + http://data.europa.eu/r5r/ + http://data.europa.eu/930/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + yes + + + + + + + + + + + enabled + + + disabled + + + + + + + + + abcdefghijklmnopqrstuvwxyz + ABCDEFGHIJKLMNOPQRSTUVWXYZ + + + + http://www.opengis.net/def/crs/EPSG/0 + urn:ogc:def:crs:EPSG + EPSG Coordinate Reference Systems + http://www.opengis.net/def/crs/OGC + urn:ogc:def:crs:OGC + OGC Coordinate Reference Systems + + + + + + + + + + + + + + + + + + + + + LonLat + + + + + http://www.w3.org/ns/dcat# + http://purl.org/dc/terms/ + http://purl.org/dc/dcmitype/ + 
http://xmlns.com/foaf/0.1/ + http://data.europa.eu/930/ + http://www.opengis.net/ont/geosparql# + http://www.w3.org/ns/prov# + http://www.w3.org/2004/02/skos/core# + http://www.w3.org/2006/vcard/ns# + http://www.w3.org/2001/XMLSchema# + + + + + + + + + + http://publications.europa.eu/resource/authority/ + + + + + + + + + + + + + + https://www.iana.org/assignments/ + + + + + + + + + + + http://www.qudt.org/vocab/unit + + + http://www.wurvoc.org/vocabularies/om-1.8 + http://www.ontology-of-units-of-measure.org/resource/om-2 + + + + + + + + + + + + http://inspire.ec.europa.eu/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ResourceUri is . Ignored: . + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + bg + + + cs + + + da + + + de + + + el + + + en + + + es + + + et + + + fi + + + fr + + + ga + + + hr + + + it + + + lv + + + lt + + + hu + + + mt + + + nl + + + pl + + + pt + + + ru + + + sk + + + sl + + + sv + + + + + + + + + + + + + + + + + + + + + + + + + + + bg + + + cs + + + da + + + de + + + el + + + en + + + es + + + et + + + fi + + + fr + + + ga + + + hr + + + it + + + lv + + + lt + + + hu + + + mt + + + nl + + + pl + + + pt + + + ru + + + sk + + + sl + + + sv + + + + + + + + + + + + + + + + + + + + + dataset + + + + + + + + + + + + + + + + + + dct:title + + + + + + + + + + + dct:description + + + + + + + + + + + dct:description + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dct:title + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dct:description + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dct:title + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dct:title + + + + + + + + + + + dct:description + + + + + + + + + + + + + + + + + + + + N/A + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + foaf:name + + + + + + + + + vcard:fn + + + + + + + + + + + + + + + + + + + + + + + + + + + foaf:name + + + + + + + + + vcard:organization-name + + + + + + + + + vcard:fn + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dct:title + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + <gml:Envelope srsName=""><gml:lowerCorner> </gml:lowerCorner><gml:upperCorner> </gml:upperCorner></gml:Envelope> + <gml:Envelope srsName=""><gml:lowerCorner> </gml:lowerCorner><gml:upperCorner> </gml:upperCorner></gml:Envelope> + <gml:Envelope srsName=""><gml:lowerCorner> </gml:lowerCorner><gml:upperCorner> </gml:upperCorner></gml:Envelope> + + + + + + + + POLYGON(( , , , , )) + <> POLYGON(( , , , , )) + <> POLYGON(( , , , , )) + + + + + + + + + + {"type":"Polygon","coordinates":[[[,],[,],[,],[,],[,]]]} + + {"type":"Polygon","crs":{"type":"name","properties":{"name":""}},"coordinates":[[[,],[,],[,],[,],[,]]]} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + gYear + + + date + + + dateTime + + + date + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dct:description + + + + + + + + + + + + + + + + + + + + + + + + + dct:description + + + + + + + + + + + + + + + + + + + + + + + + + + + dct:description + + + + + + + + + + + + + + + + + + + + + + + + dct:description + + + + + + + + + + + + + + + + + + + + + + + + + dct:description + + + + + + + + + + + + + + + + + + + + + + + + dct:description + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dct:title + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dcat:keyword + + + + + + + + + dc:subject + + + + + + + + + + dcat:keyword + + + + + + + + + + + + + + + + + skos:prefLabel + + + + + + + + + + + + + + + skos:prefLabel + + + + + + + + + + + + + + skos:prefLabel + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Spatial resolution (distance):   + + + + + + + + + + + + + + + + + + + + + + + + + Spatial resolution (equivalent scale): + + + + + + + + + + + + + + ISO-10646-UCS-2 + + + ISO-10646-UCS-4 + + + UTF-7 + + + UTF-8 + + + UTF-16 + + + ISO-8859-1 + + + ISO-8859-2 + + + ISO-8859-3 + + + ISO-8859-4 + + + ISO-8859-5 + + + ISO-8859-6 + + + ISO-8859-7 + + + ISO-8859-8 + + + ISO-8859-9 + + + ISO-8859-10 + + + ISO-8859-11 + + + ISO-8859-12 + + + ISO-8859-13 + + + ISO-8859-14 + + + ISO-8859-15 + + + ISO-8859-16 + + + + JIS_Encoding + + + Shift_JIS + + + EUC-JP + + + US-ASCII + + + + IBM037 + + + EUC-KR + + + Big5 + + + GB2312 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + ETRS89 - European Terrestrial Reference System 1989 + ETRS89 - European Terrestrial Reference System 1989 + + + + + + + + + + + + + + + + + + + + CRS84 + CRS84 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + bg + + + cs + + + da + + + de + + + el + + + en + + + es + + + et + + + fi + + + fr + + + ga + + + hr + + + it + + + lv + + + lt + + + hu + + + mt + + + nl + + + pl + + + pt + + + ru + + + sk + + + sl + + + sv + + + + + + + + + + + + + + + + yes + + + no + + + + + + + + + + + csw + + + csw + + + sos + + + sos + + + sps + + + sps + + + wcs + + + wcs + + + wfs + + + wfs + + + wms + + + wms + + + wmts + + + wmts + + + wps + + + wps + + + + + + + + + http://www.opengeospatial.org/standards/cat + + + http://www.opengeospatial.org/standards/sos + + + http://www.opengeospatial.org/standards/sps + + + http://www.opengeospatial.org/standards/wcs + + + http://www.opengeospatial.org/standards/wfs + + + http://www.opengeospatial.org/standards/wms + + + http://www.opengeospatial.org/standards/wmts + + + http://www.opengeospatial.org/standards/wps + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/schemas/iso19139/src/main/plugin/iso19139/formatter/eu-geodcat-ap/view.xsl b/schemas/iso19139/src/main/plugin/iso19139/formatter/eu-geodcat-ap/view.xsl new file mode 100644 index 000000000000..cfc878ef7def --- /dev/null +++ b/schemas/iso19139/src/main/plugin/iso19139/formatter/eu-geodcat-ap/view.xsl @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/schemas/iso19139/src/main/plugin/iso19139/index-fields/index.xsl b/schemas/iso19139/src/main/plugin/iso19139/index-fields/index.xsl index d2fdc3f4a78a..9511c1ad4d08 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/index-fields/index.xsl +++ b/schemas/iso19139/src/main/plugin/iso19139/index-fields/index.xsl @@ -34,7 +34,6 @@ 
xmlns:gn-fn-index="http://geonetwork-opensource.org/xsl/functions/index" xmlns:index="java:org.fao.geonet.kernel.search.EsSearchManager" xmlns:digestUtils="java:org.apache.commons.codec.digest.DigestUtils" - xmlns:exslt="http://exslt.org/common" xmlns:util="java:org.fao.geonet.util.XslUtil" xmlns:date-util="java:org.fao.geonet.utils.DateUtil" xmlns:daobs="http://daobs.org" @@ -193,19 +192,6 @@ - - - dataset - - - - - - - - - - - map - map/static + map-static - map/interactive + map-interactive + + dataset + + + + + + + + @@ -288,14 +283,16 @@ + - + - + - + @@ -384,7 +381,7 @@ { - "url": "" + "url": "" , "nameObject": @@ -657,6 +654,22 @@ + + { + "frequency": "" + + ,"nextUpdateDate": "" + + + ,"userDefinedFrequency": "" + + + ,"noteObject": + + + } + + @@ -1020,12 +1033,14 @@ select="(../../gmd:measureDescription/gco:CharacterString)[1]"/> + { "name": "", "description": "", - + "date": "", @@ -1047,10 +1062,13 @@ + + { "descriptionObject": - + ,"date": "" @@ -1122,8 +1140,7 @@ - + @@ -1147,7 +1164,7 @@ { - "hash": "", + "hash": "", "idx": , "protocol":"", "mimeType":" + --> + + + + + + + + + - + @@ -81,7 +81,7 @@ - + diff --git a/schemas/iso19139/src/main/plugin/iso19139/layout/layout-custom-fields.xsl b/schemas/iso19139/src/main/plugin/iso19139/layout/layout-custom-fields.xsl index 02d78c944bdc..bcd1f66e0914 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/layout/layout-custom-fields.xsl +++ b/schemas/iso19139/src/main/plugin/iso19139/layout/layout-custom-fields.xsl @@ -80,8 +80,9 @@ + + <!– Measure elements, gco:Distance, gco:Angle, gco:Scale, gco:Length, ... 
–> @@ -148,6 +149,7 @@ +--> diff --git a/schemas/iso19139/src/main/plugin/iso19139/layout/layout.xsl b/schemas/iso19139/src/main/plugin/iso19139/layout/layout.xsl index 726eeb40d785..482a5080ef31 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/layout/layout.xsl +++ b/schemas/iso19139/src/main/plugin/iso19139/layout/layout.xsl @@ -295,7 +295,7 @@ + select="*/@*[name() != 'uom']"> @@ -311,6 +311,16 @@ + + + + + + + + + @@ -355,6 +365,18 @@ + + + + + + + + @@ -387,6 +409,7 @@ + + + + diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/ara/codelists.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/ara/codelists.xml index 26a2d53b2f49..2c8a1f3382f7 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/ara/codelists.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/ara/codelists.xml @@ -1539,12 +1539,12 @@ - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/cat/codelists.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/cat/codelists.xml index 593b2d950315..c702f9bdc155 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/cat/codelists.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/cat/codelists.xml @@ -1536,12 +1536,12 @@ - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/cat/labels.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/cat/labels.xml index f52c2d513409..a15eaef7aa89 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/cat/labels.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/cat/labels.xml @@ -921,14 +921,6 @@ Distància a la mostra sobre la terra Provide a distance if no equivalent Scale is documented - - - - - - - -
diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/chi/codelists.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/chi/codelists.xml index 5a8ad8cecee5..8651f7229f59 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/chi/codelists.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/chi/codelists.xml @@ -1538,12 +1538,12 @@ - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/dut/codelists.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/dut/codelists.xml index a6d68c0fc0f0..f3c796011206 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/dut/codelists.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/dut/codelists.xml @@ -1615,12 +1615,12 @@ - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/eng/codelists.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/eng/codelists.xml index c94257f9c439..dd1041e96b89 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/eng/codelists.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/eng/codelists.xml @@ -1626,12 +1626,12 @@ - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/eng/labels.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/eng/labels.xml index 9734aca0800c..eefc5a9a28e9 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/eng/labels.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/eng/labels.xml @@ -1126,17 +1126,6 @@ Ground sample distance Provide a distance if no equivalent Scale is documented - - - - - - - - - diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/fin/codelists.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/fin/codelists.xml index a3f437fe6859..e3fdd033c408 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/fin/codelists.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/fin/codelists.xml @@ -1518,12 +1518,12 @@ 
- staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/fin/labels.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/fin/labels.xml index e80ffafe0a00..e3bc0da716d0 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/fin/labels.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/fin/labels.xml @@ -1011,17 +1011,6 @@ Näytetiheys maastossa. Pakollinen, jos mittakaavaa ei kuvailtu. - - - - - - - - - diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/fre/codelists.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/fre/codelists.xml index b6536b5fc719..b85fbc7f6f06 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/fre/codelists.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/fre/codelists.xml @@ -1558,12 +1558,12 @@ - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/fre/labels.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/fre/labels.xml index b2e388944d97..4ee4947b9542 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/fre/labels.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/fre/labels.xml @@ -359,7 +359,10 @@ n’est pas respectée. - + + + + Type de méthode d'évaluation d'une mesure de qualité de données identifiée @@ -2489,14 +2492,6 @@ version en tant que mot-clé complémentaire(s). 
Le numéro de version doit comp Distance de référence, mesurée au sol Résolution au sol Saisir une distance si pas d'échelle - - - - - - - - diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/ger/codelists.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/ger/codelists.xml index fdf6efea6602..bafdec7674de 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/ger/codelists.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/ger/codelists.xml @@ -1585,12 +1585,12 @@ - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/ger/labels.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/ger/labels.xml index 9992dceb00e1..694390351d98 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/ger/labels.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/ger/labels.xml @@ -2969,14 +2969,6 @@ Abstand der Rastermittelpunkte bzw. Gitterstützpunkte auszufüllen, wenn kein Vergleichsmaßstab angegeben wird Bodenauflösung - - - - - - - - diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/ita/codelists.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/ita/codelists.xml index 664af8b9d985..155e059e3c1c 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/ita/codelists.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/ita/codelists.xml @@ -1607,12 +1607,12 @@ - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/ita/labels.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/ita/labels.xml index 71dffb075eed..2572c8f4feba 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/ita/labels.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/ita/labels.xml @@ -938,14 +938,6 @@ Risoluzione geometrica al suolo Provide a distance if no equivalent Scale is documented - - - - - - - - diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/nor/codelists.xml 
b/schemas/iso19139/src/main/plugin/iso19139/loc/nor/codelists.xml index c6bcf08174a8..5a6e4a75d327 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/nor/codelists.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/nor/codelists.xml @@ -1604,12 +1604,12 @@ - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/nor/labels.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/nor/labels.xml index 6498f529f646..9316fca442c5 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/nor/labels.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/nor/labels.xml @@ -1119,14 +1119,6 @@ Distanse målt i terrenget Provide a distance if no equivalent Scale is documented - - - - - - - - diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/pol/codelists.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/pol/codelists.xml index 0e485ba94fb6..ad11fa36e91a 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/pol/codelists.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/pol/codelists.xml @@ -1526,12 +1526,12 @@ - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/por/codelists.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/por/codelists.xml index 2150a5c7957b..934e121bdca7 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/por/codelists.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/por/codelists.xml @@ -1542,12 +1542,12 @@ - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/por/labels.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/por/labels.xml index c78c04236724..0b735e7410ae 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/por/labels.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/por/labels.xml @@ -730,14 +730,6 @@ Distância da amostra do solo condicional - - - - - - - - diff --git 
a/schemas/iso19139/src/main/plugin/iso19139/loc/rus/codelists.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/rus/codelists.xml index c11faa922667..56934e812bcc 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/rus/codelists.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/rus/codelists.xml @@ -1537,12 +1537,12 @@ - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/slo/codelists.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/slo/codelists.xml index d60dbfa0697f..ddeff854ae98 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/slo/codelists.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/slo/codelists.xml @@ -1448,12 +1448,12 @@ Informácie o mapách a grafoch, z ktorých dátová sada pochádza - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/slo/labels.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/slo/labels.xml index b25c966c9711..29b8f0457f93 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/slo/labels.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/slo/labels.xml @@ -934,17 +934,6 @@ duševného Vzorkovacia vzdialenosť terénu Uveďte vzdialenosť, ak nie je zdokumentovaná žiadna ekvivalentná mierka - - - - - - - - - diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/spa/codelists.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/spa/codelists.xml index 5a682d3c1e77..aeec293ff865 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/spa/codelists.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/spa/codelists.xml @@ -1543,12 +1543,12 @@ - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/spa/labels.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/spa/labels.xml index ca84fda5dde4..9587c9fe2341 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/spa/labels.xml +++ 
b/schemas/iso19139/src/main/plugin/iso19139/loc/spa/labels.xml @@ -958,14 +958,6 @@ Distancia a la muestra sobre la tierra Provide a distance if no equivalent Scale is documented - - - - - - - - diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/swe/labels.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/swe/labels.xml index a7dfcc1841dd..441567bd09de 100755 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/swe/labels.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/swe/labels.xml @@ -1113,14 +1113,6 @@ bearbetningens genomförande Avståndet mellan observationspunkter i terrängen. Provide a distance if no equivalent Scale is documented - - - - - - - - diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/tur/codelists.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/tur/codelists.xml index 5535a16635cc..7308138bb181 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/tur/codelists.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/tur/codelists.xml @@ -1579,12 +1579,12 @@ - staticMap + map-static - interactiveMap + map-interactive diff --git a/schemas/iso19139/src/main/plugin/iso19139/loc/tur/labels.xml b/schemas/iso19139/src/main/plugin/iso19139/loc/tur/labels.xml index 45c029ca16ee..ff53873d7359 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/loc/tur/labels.xml +++ b/schemas/iso19139/src/main/plugin/iso19139/loc/tur/labels.xml @@ -1010,14 +1010,6 @@ Ground sample distance Provide a distance if no equivalent Scale is documented - - - - - - - - diff --git a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/dcat-summary.xsl b/schemas/iso19139/src/main/plugin/iso19139/present/csw/dcat-core.xsl similarity index 88% rename from schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/dcat-summary.xsl rename to schemas/iso19139/src/main/plugin/iso19139/present/csw/dcat-core.xsl index 534d8dabef34..2dbaa3c0dd36 100644 --- 
a/schemas/iso19115-3.2018/src/main/plugin/iso19115-3.2018/present/csw/dcat-summary.xsl +++ b/schemas/iso19139/src/main/plugin/iso19139/present/csw/dcat-core.xsl @@ -1,7 +1,6 @@ - - - - - + + diff --git a/schemas/iso19139/src/main/plugin/iso19139/present/csw/eu-dcat-ap-hvd.xsl b/schemas/iso19139/src/main/plugin/iso19139/present/csw/eu-dcat-ap-hvd.xsl new file mode 100644 index 000000000000..5fa84c146f13 --- /dev/null +++ b/schemas/iso19139/src/main/plugin/iso19139/present/csw/eu-dcat-ap-hvd.xsl @@ -0,0 +1,27 @@ + + + + + diff --git a/schemas/iso19139/src/main/plugin/iso19139/present/csw/eu-dcat-ap-mobility.xsl b/schemas/iso19139/src/main/plugin/iso19139/present/csw/eu-dcat-ap-mobility.xsl new file mode 100644 index 000000000000..b9168b58b977 --- /dev/null +++ b/schemas/iso19139/src/main/plugin/iso19139/present/csw/eu-dcat-ap-mobility.xsl @@ -0,0 +1,27 @@ + + + + + diff --git a/schemas/iso19139/src/main/plugin/iso19139/present/csw/eu-dcat-ap.xsl b/schemas/iso19139/src/main/plugin/iso19139/present/csw/eu-dcat-ap.xsl new file mode 100644 index 000000000000..1541e24dae01 --- /dev/null +++ b/schemas/iso19139/src/main/plugin/iso19139/present/csw/eu-dcat-ap.xsl @@ -0,0 +1,27 @@ + + + + + diff --git a/schemas/iso19139/src/main/plugin/iso19139/present/csw/eu-geodcat-ap-semiceu.xsl b/schemas/iso19139/src/main/plugin/iso19139/present/csw/eu-geodcat-ap-semiceu.xsl new file mode 100644 index 000000000000..f6a3f2910a25 --- /dev/null +++ b/schemas/iso19139/src/main/plugin/iso19139/present/csw/eu-geodcat-ap-semiceu.xsl @@ -0,0 +1,27 @@ + + + + + diff --git a/schemas/iso19139/src/main/plugin/iso19139/present/csw/eu-geodcat-ap.xsl b/schemas/iso19139/src/main/plugin/iso19139/present/csw/eu-geodcat-ap.xsl new file mode 100644 index 000000000000..e0578159f69c --- /dev/null +++ b/schemas/iso19139/src/main/plugin/iso19139/present/csw/eu-geodcat-ap.xsl @@ -0,0 +1,27 @@ + + + + + diff --git a/schemas/iso19139/src/main/plugin/iso19139/process/onlinesrc-add.xsl 
b/schemas/iso19139/src/main/plugin/iso19139/process/onlinesrc-add.xsl index 0afe80eaa3d2..e9c86757ca43 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/process/onlinesrc-add.xsl +++ b/schemas/iso19139/src/main/plugin/iso19139/process/onlinesrc-add.xsl @@ -187,19 +187,29 @@ Note: It assumes that it will be adding new items in - + + + + + - + and ($resourceHash = '' or digestUtils:md5Hex(normalize-space(.)) = $resourceHash)"> + + + + + + + + - @@ -243,7 +253,7 @@ Note: It assumes that it will be adding new items in - + diff --git a/schemas/iso19139/src/main/plugin/iso19139/process/onlinesrc-remove.xsl b/schemas/iso19139/src/main/plugin/iso19139/process/onlinesrc-remove.xsl index 718f483eced0..5ea7b2107736 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/process/onlinesrc-remove.xsl +++ b/schemas/iso19139/src/main/plugin/iso19139/process/onlinesrc-remove.xsl @@ -53,15 +53,25 @@ Stylesheet used to remove a reference to a online resource. - + + + + + + and ($resourceHash = '' or digestUtils:md5Hex(normalize-space(.)) = $resourceHash) + )"> + + + + + diff --git a/schemas/iso19139/src/main/plugin/iso19139/process/thumbnail-add.xsl b/schemas/iso19139/src/main/plugin/iso19139/process/thumbnail-add.xsl index 16b975a837cc..4c1735560398 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/process/thumbnail-add.xsl +++ b/schemas/iso19139/src/main/plugin/iso19139/process/thumbnail-add.xsl @@ -27,7 +27,9 @@ xmlns:srv="http://www.isotc211.org/2005/srv" xmlns:gco="http://www.isotc211.org/2005/gco" xmlns:geonet="http://www.fao.org/geonetwork" - exclude-result-prefixes="#all" + xmlns:xs="http://www.w3.org/2001/XMLSchema" + xmlns:digestUtils="java:org.apache.commons.codec.digest.DigestUtils" + exclude-result-prefixes="#all" version="2.0"> - - - + + + + + + + + + + + @@ -56,9 +69,7 @@ - - - + @@ -83,12 +94,19 @@ - - + + + + diff --git a/schemas/iso19139/src/main/plugin/iso19139/process/thumbnail-remove.xsl 
b/schemas/iso19139/src/main/plugin/iso19139/process/thumbnail-remove.xsl index a856ed23dd1f..cd20ebb81478 100644 --- a/schemas/iso19139/src/main/plugin/iso19139/process/thumbnail-remove.xsl +++ b/schemas/iso19139/src/main/plugin/iso19139/process/thumbnail-remove.xsl @@ -25,22 +25,47 @@ - + + + - + + + + + + + + + + + + + + + - + diff --git a/schemas/iso19139/src/main/plugin/iso19139/schematron/schematron-rules-dcat-ap-hvd.sch b/schemas/iso19139/src/main/plugin/iso19139/schematron/schematron-rules-dcat-ap-hvd.sch new file mode 100644 index 000000000000..c10fc68d32c9 --- /dev/null +++ b/schemas/iso19139/src/main/plugin/iso19139/schematron/schematron-rules-dcat-ap-hvd.sch @@ -0,0 +1,432 @@ + + + + + + DCAT-AP High Value Dataset (HVD) + + + + + + + + + + + + + + + + + + + + + + + + + + Applicable legislation is mandatory. Use a keyword with an Anchor pointing to + http://data.europa.eu/eli/reg_impl/2023/138/oj. + + + La législation applicable est obligatoire. Utilisez un mot-clé avec une ancre pointant vers + http://data.europa.eu/eli/reg_impl/2023/138/oj. + + Applicable legislation keyword found. + + La législation applicable HVD est encodée. + + + + No implementing rule or other specification found. Check the data quality + report specification to add one. For INSPIRE datasets, this is a data specification conformity. + + + Aucune règle d'implémentation ou autre spécification n'a été trouvée. Vérifiez la spécification du rapport de + qualité des données + pour en ajouter une. Pour les ensembles de données INSPIRE, il s'agit d'une conformité aux spécifications des + données. + + + Implementing rules or specifications found:. + + + Règles ou spécifications encodées :. + + + + + Contact information that can be used for sending comments about the Dataset is missing. + + + Les informations de contact pouvant être utilisées pour envoyer des commentaires sur l'ensemble de données sont + manquantes. 
+ + + Contact information that can be used for sending comments about the Dataset defined:. + + + Contact pouvant être utilisées pour envoyer des commentaires sur l'ensemble de données encodé :. + + + + + The HVD category to which this Dataset belongs is missing. + + + La catégorie HVD à laquelle appartient cet ensemble de données est manquante. + + + HVD categories found:. + + + Catégories HVD encodées :. + + + + + The HVD IR is a quality improvement of existing datasets. The intention is that HVD datasets are publicly and open + accessible. Therefore a Distribution is expected to be present. Add an online resource with a download protocol or + function. + + + Les règles d'implémentation HVD ont pour objectif une amélioration de la qualité des ensembles de données existants. + L'objectif est que les ensembles de données HVD soient accessibles au public et en libre accès. Par conséquent, une + distribution est attendue. Ajoutez une ressource en ligne avec un protocole ou une fonction de téléchargement. + + + Distribution URLs found:. + + + URL(s) de distribution encodées :. + + + + The root location or primary endpoint of the service (an IRI) is missing. Add an operation with a protocol which is + not considered as an endpoint description (ie.) or a URL containing . + + + L'URL principale du service (un IRI) est manquant. Ajoutez une opération avec un protocole qui n'est pas une + description de service + (ie.) ou une URL contenant . + + + End point URL found:. + + + URL(s) du service encodées :. + + + + An API in the context of HVD is not a standalone resource. It is used to open up HVD datasets. Therefore each Data + Service is at least tightly connected with a Dataset. + Add at least one operatesOn element with a xlink:href or uuidref. + + + Une API dans le contexte de HVD n'est pas une ressource autonome. Elle est utilisée pour ouvrir des ensembles de + données HVD. Par conséquent, chaque service de données est au moins étroitement lié à un ensemble de données. 
+ Ajoutez au moins un élément operateOn avec un xlink:href ou un uuidref. + + + Operates on dataset found:. + + + Données associées encodées :. + + + + + A page that provides additional information about the Data Service is missing. + Add at least one online resource with a function documentation, an additional documentation or a URL pointing to https://directory.spatineo.com. + + + Il manque une page qui fournit des informations supplémentaires sur le service de données. + Ajoutez au moins une ressource en ligne avec une function documentation, une documentation supplémentaire ou une URL pointant vers https://directory.spatineo.com. + + + Documentation pages found:. + + + Documentations encodées :. + + + + + + HVD + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/schemas/iso19139/src/main/resources/config-spring-geonetwork.xml b/schemas/iso19139/src/main/resources/config-spring-geonetwork.xml index 2f93be956973..97e8f27ea919 100644 --- a/schemas/iso19139/src/main/resources/config-spring-geonetwork.xml +++ b/schemas/iso19139/src/main/resources/config-spring-geonetwork.xml @@ -31,6 +31,21 @@ + + + + + + + + + + + + + + + gmd:identificationInfo/*/gmd:citation/*/gmd:title/gco:CharacterString diff --git a/schemas/pom.xml b/schemas/pom.xml index a53759dfc16b..721025696623 100644 --- a/schemas/pom.xml +++ b/schemas/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/schemas/schema-core/pom.xml b/schemas/schema-core/pom.xml index 2404ad904d8b..de7242be3359 100644 --- a/schemas/schema-core/pom.xml +++ b/schemas/schema-core/pom.xml @@ -28,7 +28,7 @@ gn-schemas org.geonetwork-opensource.schemas - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 diff --git a/schemas/schema-core/src/main/java/org/fao/geonet/kernel/schema/CSWPlugin.java 
b/schemas/schema-core/src/main/java/org/fao/geonet/kernel/schema/CSWPlugin.java index 02d8a1d8dfbc..260acdb957ae 100644 --- a/schemas/schema-core/src/main/java/org/fao/geonet/kernel/schema/CSWPlugin.java +++ b/schemas/schema-core/src/main/java/org/fao/geonet/kernel/schema/CSWPlugin.java @@ -23,7 +23,6 @@ package org.fao.geonet.kernel.schema; -import org.jdom.Element; import org.jdom.Namespace; import java.util.Map; @@ -33,4 +32,5 @@ public interface CSWPlugin { * Return the list of typenames and corresponding namespace for the plugin. */ Map getCswTypeNames(); + } diff --git a/schemas/schema-core/src/main/java/org/fao/geonet/kernel/schema/MultilingualSchemaPlugin.java b/schemas/schema-core/src/main/java/org/fao/geonet/kernel/schema/MultilingualSchemaPlugin.java index 511e51b10964..effd23e06aa5 100644 --- a/schemas/schema-core/src/main/java/org/fao/geonet/kernel/schema/MultilingualSchemaPlugin.java +++ b/schemas/schema-core/src/main/java/org/fao/geonet/kernel/schema/MultilingualSchemaPlugin.java @@ -26,6 +26,7 @@ import org.jdom.Element; import org.jdom.JDOMException; +import java.util.ArrayList; import java.util.List; /** @@ -40,8 +41,66 @@ public interface MultilingualSchemaPlugin { */ public abstract List getTranslationForElement(Element element, String languageIdentifier); + /** + * Updates an element with the related multilingual information using the language and value provided. + * + * @param element XML element to update. + * @param languageIdentifier Language identifier. + * @param value Value for the element. + */ public abstract void addTranslationToElement(Element element, String languageIdentifier, String value); + /** + * Remove all multilingual aspect of an element. + * + * @param element XML element to update. + * @param mdLang Metadata languages. 
+ * @return + * @throws JDOMException + */ public abstract Element removeTranslationFromElement(Element element, List mdLang) throws JDOMException; + /** + * Retrieves the list of metadata languages used in the metadata. + * @param metadata + * @return + */ + public abstract List getMetadataLanguages(Element metadata); + + /** + * Checks if an element type is multilingual. For example, in DCAT schema, rdf:PlainLiteral type. + * + * @param elementType Element type to check. + * @return true if the element type is multilingual, otherwise false. + */ + public abstract boolean isMultilingualElementType(String elementType); + + + /** + * Flag to indicate when adding an element to the metadata editor, if should be duplicated for each metadata language. + * For example, in DCAT schema adding vcard:organization-name in a metadata that has English and French languages + * (similar case for Dublin Core), should duplicate the element for each language: + * + * + * + * + * For ISO profiles should be set to false, as the multilingual elements are not duplicated. Multilingual values + * are added as children elements, requiring a different processing. 
Adding gmd:organisationName in a metadata + * that has English and French languages: + * + * + * + * + * + * + * + * + * + * + * + * + * + * @return + */ + public abstract boolean duplicateElementsForMultilingual(); } diff --git a/schemas/schema-core/src/main/java/org/fao/geonet/kernel/schema/SchemaPlugin.java b/schemas/schema-core/src/main/java/org/fao/geonet/kernel/schema/SchemaPlugin.java index a2396ae937a6..605358b1a45f 100644 --- a/schemas/schema-core/src/main/java/org/fao/geonet/kernel/schema/SchemaPlugin.java +++ b/schemas/schema-core/src/main/java/org/fao/geonet/kernel/schema/SchemaPlugin.java @@ -29,17 +29,18 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.Set; +import java.util.*; -/** - * Created by francois on 6/16/14. - */ public abstract class SchemaPlugin implements CSWPlugin { public static final String LOGGER_NAME = "geonetwork.schema-plugin"; + /** + * List of output schemas supported by the CSW for this plugin. + * The key correspond to the XSLT filename to use for the corresponding value (usually URI). 
+ * XSLT are in the folder present/csw/{key-?(brief|summary|full)?}.xsl + */ + private Map outputSchemas = new HashMap<>(); + protected SchemaPlugin(String identifier, ImmutableSet allNamespaces) { this.identifier = identifier; @@ -126,4 +127,12 @@ public List getAnalyzedLinks() { public Element processElement(Element el, String attributeName, String parsedAttributeName, String attributeValue) { return el; }; + + public Map getOutputSchemas() { + return outputSchemas; + } + + public void setOutputSchemas(Map outputSchemas) { + this.outputSchemas = outputSchemas; + } } diff --git a/sde/pom.xml b/sde/pom.xml index 4d0e33f5c8b6..2b30169e4efb 100644 --- a/sde/pom.xml +++ b/sde/pom.xml @@ -30,7 +30,7 @@ org.geonetwork-opensource geonetwork - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT diff --git a/services/pom.xml b/services/pom.xml index e0ccd7873f1a..c640bb577a3e 100644 --- a/services/pom.xml +++ b/services/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT 4.0.0 @@ -99,8 +99,11 @@ gn-translationproviders ${project.version} - - + + ${project.groupId} + gn-auditable + ${project.version} + org.mockito mockito-all @@ -116,6 +119,11 @@ org.springframework spring-test + + org.xmlunit + xmlunit-core + test + com.h2database h2 @@ -232,11 +240,14 @@ powermock-api-mockito test - org.apache.commons commons-csv + + commons-codec + commons-codec + co.elastic.clients elasticsearch-java diff --git a/services/src/main/java/org/fao/geonet/api/ApiUtils.java b/services/src/main/java/org/fao/geonet/api/ApiUtils.java index 1c11fc79ba46..b3bcd593a560 100644 --- a/services/src/main/java/org/fao/geonet/api/ApiUtils.java +++ b/services/src/main/java/org/fao/geonet/api/ApiUtils.java @@ -259,11 +259,19 @@ public static Path downloadUrlInTemp(String url) throws IOException, URISyntaxEx } /** - * Check if the current user can edit this record. 
+ * Check if the current user can edit this record */ public static AbstractMetadata canEditRecord(String metadataUuid, HttpServletRequest request) throws Exception { + return canEditRecord(metadataUuid, false, request); + } + + /** + * Check if the current user can edit this record. + */ + public static AbstractMetadata canEditRecord(String metadataUuid, boolean approved, HttpServletRequest request) throws Exception { ApplicationContext appContext = ApplicationContextHolder.get(); - AbstractMetadata metadata = getRecord(metadataUuid); + String metadataId = getInternalId(metadataUuid, approved); + AbstractMetadata metadata = getRecord(metadataId); AccessManager accessManager = appContext.getBean(AccessManager.class); if (!accessManager.canEdit(createServiceContext(request), String.valueOf(metadata.getId()))) { throw new SecurityException(String.format( @@ -297,8 +305,7 @@ public static AbstractMetadata canViewRecord(String metadataUuid, HttpServletReq * Check if the current user can view this record. 
*/ public static AbstractMetadata canViewRecord(String metadataUuid, boolean approved, HttpServletRequest request) throws Exception { - String metadataId; - metadataId = getInternalId(metadataUuid, approved); + String metadataId = getInternalId(metadataUuid, approved); AbstractMetadata metadata = getRecord(metadataId); try { diff --git a/services/src/main/java/org/fao/geonet/api/GlobalExceptionController.java b/services/src/main/java/org/fao/geonet/api/GlobalExceptionController.java index e7a89ad1ec96..2c4e1b80be94 100644 --- a/services/src/main/java/org/fao/geonet/api/GlobalExceptionController.java +++ b/services/src/main/java/org/fao/geonet/api/GlobalExceptionController.java @@ -35,6 +35,7 @@ import org.fao.geonet.exceptions.UserNotFoundEx; import org.fao.geonet.exceptions.XSDValidationErrorEx; import org.fao.geonet.inspire.validator.InspireValidatorException; +import org.fao.geonet.util.FileUtil; import org.fao.geonet.utils.Log; import org.json.JSONException; import org.springframework.beans.factory.annotation.Autowired; @@ -148,15 +149,47 @@ public Object securityHandler(final HttpServletRequest request, final Exception } @ResponseBody - @ResponseStatus(HttpStatus.BAD_REQUEST) + @ResponseStatus(HttpStatus.PAYLOAD_TOO_LARGE) @ApiResponse(content = {@Content(mediaType = APPLICATION_JSON_VALUE)}) @ExceptionHandler({ MaxUploadSizeExceededException.class }) - public ApiError maxFileExceededHandler(final Exception exception) { - storeApiErrorCause(exception); + public ApiError maxFileExceededHandler(final Exception exception, final HttpServletRequest request) { + Exception ex; + // Convert exception to a localized exception so that it can be translated. + if (exception instanceof MaxUploadSizeExceededException) { + long maxUploadSize = ((MaxUploadSizeExceededException) exception).getMaxUploadSize(); + long contentLength = exception instanceof InputStreamLimitExceededException ? 
+ ((InputStreamLimitExceededException) exception).getRemoteFileSize() : + request.getContentLengthLong(); + + // This can occur if the content length header is present on a resource but does not reflect the actual file size. + // This could indicate an attempt to bypass the maximum upload size. + if (contentLength > 0 && contentLength < maxUploadSize) { + Log.warning(Geonet.RESOURCES, "Request content length is less than the maximum upload size but still caused an exception."); + } + + if (contentLength > maxUploadSize) { + ex = new GeonetMaxUploadSizeExceededException("uploadedResourceSizeExceededException", exception) + .withMessageKey("exception.maxUploadSizeExceeded", + new String[]{FileUtil.humanizeFileSize(maxUploadSize)}) + .withDescriptionKey("exception.maxUploadSizeExceeded.description", + new String[]{FileUtil.humanizeFileSize(contentLength), + FileUtil.humanizeFileSize(maxUploadSize)}); + } else { + ex = new GeonetMaxUploadSizeExceededException("uploadedResourceSizeExceededException", exception) + .withMessageKey("exception.maxUploadSizeExceeded", + new String[]{FileUtil.humanizeFileSize(maxUploadSize)}) + .withDescriptionKey("exception.maxUploadSizeExceededUnknownSize.description", + new String[]{FileUtil.humanizeFileSize(maxUploadSize)}); + } + } else { + ex = exception; + } + + storeApiErrorCause(ex); - return new ApiError("max_file_exceeded", exception); + return new ApiError("max_file_exceeded", ex); } @ResponseBody diff --git a/services/src/main/java/org/fao/geonet/api/auditable/AuditableApi.java b/services/src/main/java/org/fao/geonet/api/auditable/AuditableApi.java new file mode 100644 index 000000000000..880c6e48ec87 --- /dev/null +++ b/services/src/main/java/org/fao/geonet/api/auditable/AuditableApi.java @@ -0,0 +1,92 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free 
software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.api.auditable; + +import javax.servlet.ServletRequest; +import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.responses.ApiResponses; +import io.swagger.v3.oas.annotations.tags.Tag; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.fao.geonet.auditable.BaseAuditableService; +import org.fao.geonet.auditable.model.RevisionInfo; +import org.fao.geonet.domain.auditable.AuditableEntity; +import org.springframework.beans.factory.ListableBeanFactory; +import org.springframework.http.HttpStatus; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; + + +@RequestMapping(value = { + "/{portal}/api/auditable" +}) +@Tag(name = "auditable", + description = "Entity 
auditable operations") +@RestController("auditable") +public class AuditableApi { + + // Auditable service beans + private final Map> factory = new HashMap<>(); + + public AuditableApi(ListableBeanFactory beanFactory) { + Collection auditableServiceBeans = beanFactory.getBeansOfType(BaseAuditableService.class).values(); + auditableServiceBeans.forEach(filter -> factory.put(filter.getEntityType().toLowerCase(), filter)); + } + + @io.swagger.v3.oas.annotations.Operation( + summary = "Get an entity history", + description = "") + @GetMapping( + value = "/{entityType}/{entityIdentifier}" + ) + @ResponseStatus(HttpStatus.OK) + @ApiResponses(value = { + @ApiResponse(responseCode = "200", description = "Entity history details.") + }) + @PreAuthorize("hasAuthority('UserAdmin')") + public List getEntityHistory( + @Parameter( + description = "Entity type", + required = true + ) + @PathVariable + String entityType, + @Parameter( + description = "Entity identifier", + required = true + ) + @PathVariable + Integer entityIdentifier + ) { + BaseAuditableService service = factory.get(entityType); + return service.getEntityHistory(entityIdentifier); + } +} diff --git a/services/src/main/java/org/fao/geonet/api/categories/TagsApi.java b/services/src/main/java/org/fao/geonet/api/categories/TagsApi.java index d87bab7908a6..3447a171fac1 100644 --- a/services/src/main/java/org/fao/geonet/api/categories/TagsApi.java +++ b/services/src/main/java/org/fao/geonet/api/categories/TagsApi.java @@ -24,6 +24,8 @@ package org.fao.geonet.api.categories; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -180,7 +182,7 @@ public org.fao.geonet.domain.MetadataCategory getTag( @PreAuthorize("hasAuthority('UserAdmin')") 
@ResponseStatus(HttpStatus.NO_CONTENT) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Tag updated."), + @ApiResponse(responseCode = "204", description = "Tag updated.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_USER_ADMIN) }) @ResponseBody @@ -239,7 +241,7 @@ private void updateCategory( method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Tag removed."), + @ApiResponse(responseCode = "204", description = "Tag removed.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_USER_ADMIN) }) @PreAuthorize("hasAuthority('UserAdmin')") diff --git a/services/src/main/java/org/fao/geonet/api/doiservers/DoiServersApi.java b/services/src/main/java/org/fao/geonet/api/doiservers/DoiServersApi.java new file mode 100644 index 000000000000..68f248c0ffa8 --- /dev/null +++ b/services/src/main/java/org/fao/geonet/api/doiservers/DoiServersApi.java @@ -0,0 +1,327 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.api.doiservers; + +import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.responses.ApiResponses; +import io.swagger.v3.oas.annotations.tags.Tag; +import org.fao.geonet.api.ApiParams; +import org.fao.geonet.api.doiservers.model.AnonymousDoiServer; +import org.fao.geonet.api.doiservers.model.DoiServerDto; +import org.fao.geonet.api.exception.ResourceNotFoundException; +import org.fao.geonet.domain.AbstractMetadata; +import org.fao.geonet.domain.DoiServer; +import org.fao.geonet.kernel.datamanager.IMetadataUtils; +import org.fao.geonet.repository.DoiServerRepository; +import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.*; + +import java.util.*; +import java.util.stream.Collectors; + +@RequestMapping(value = { + "/{portal}/api/doiservers" +}) +@Tag(name = "doiservers", + description = "DOI servers related operations") +@RestController("doiservers") +public class DoiServersApi { + private static final String API_PARAM_DOISERVER_IDENTIFIER = "DOI server identifier"; + + private static final String API_PARAM_DOISERVER_DETAILS = "DOI server details"; + + public static final String MSG_DOISERVER_WITH_ID_NOT_FOUND = "DOI server with id '%s' not found."; + + + private final DoiServerRepository doiServerRepository; + + private final IMetadataUtils metadataUtils; + + DoiServersApi(final DoiServerRepository doiServerRepository, 
final IMetadataUtils metadataUtils) { + this.doiServerRepository = doiServerRepository; + this.metadataUtils = metadataUtils; + + } + + @io.swagger.v3.oas.annotations.Operation( + summary = "Get DOI servers" + ) + @GetMapping( + produces = { + MediaType.APPLICATION_JSON_VALUE + }) + public + @ResponseStatus(HttpStatus.OK) + @PreAuthorize("hasAuthority('Administrator')") + @ApiResponses(value = { + @ApiResponse(responseCode = "200", description = "List of all DOI servers."), + @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_ADMIN) + }) + List getDoiServers() { + List doiServers = doiServerRepository.findAll(); + List list = new ArrayList<>(doiServers.size()); + doiServers.stream().forEach(e -> list.add(new AnonymousDoiServer(DoiServerDto.from(e)))); + return list; + } + + + @io.swagger.v3.oas.annotations.Operation( + summary = "Get DOI servers that can be used with a metadata" + ) + @GetMapping(value = "metadata/{metadataId}", + produces = { + MediaType.APPLICATION_JSON_VALUE + }) + public + @ResponseStatus(HttpStatus.OK) + @PreAuthorize("hasAuthority('Administrator')") + @ApiResponses(value = { + @ApiResponse(responseCode = "200", description = "List of all DOI servers where a metadata can be published."), + @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_ADMIN) + }) + List getDoiServers( + @Parameter(description = "Metadata UUID", + required = true, + example = "") + @PathVariable Integer metadataId) { + + List doiServers = doiServerRepository.findAll(); + List list = new ArrayList<>(doiServers.size()); + + AbstractMetadata metadata = metadataUtils.findOne(metadataId); + Integer groupOwner = metadata.getSourceInfo().getGroupOwner(); + + // Find servers related to the metadata groups owner + List doiServersForMetadata = doiServers.stream().filter( + s -> s.getPublicationGroups().stream().anyMatch(g -> g.getId() == groupOwner)).collect(Collectors.toList()); + + if 
(doiServersForMetadata.isEmpty()) { + // If no servers related to the metadata groups owner, + // find the servers that are not related to any metadata group + doiServersForMetadata = doiServers.stream() + .filter(s -> s.getPublicationGroups().isEmpty()) + .collect(Collectors.toList()); + } + + doiServersForMetadata.forEach(s -> { + DoiServerDto doiServerDto = DoiServerDto.from(s); + list.add(new AnonymousDoiServer(doiServerDto)); + }); + + Collections.sort(list, Comparator.comparing(DoiServerDto::getName)); + + return list; + } + + @io.swagger.v3.oas.annotations.Operation( + summary = "Get a DOI Server" + ) + @GetMapping(value = "/{doiServerId}", + produces = { + MediaType.APPLICATION_JSON_VALUE + }) + @PreAuthorize("hasAuthority('Administrator')") + @ApiResponses(value = { + @ApiResponse(responseCode = "404", description = ApiParams.API_RESPONSE_RESOURCE_NOT_FOUND), + @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_EDITOR) + }) + public AnonymousDoiServer getDoiServer( + @Parameter(description = API_PARAM_DOISERVER_IDENTIFIER, + required = true, + example = "") + @PathVariable String doiServerId + ) throws ResourceNotFoundException { + Optional doiServerOpt = doiServerRepository.findOneById(Integer.parseInt(doiServerId)); + if (doiServerOpt.isEmpty()) { + throw new ResourceNotFoundException(String.format( + MSG_DOISERVER_WITH_ID_NOT_FOUND, + doiServerId + )); + } else { + return new AnonymousDoiServer(DoiServerDto.from(doiServerOpt.get())); + } + } + + @io.swagger.v3.oas.annotations.Operation( + summary = "Add a DOI server", + description = "Return the id of the newly created DOI server." 
+ ) + @PutMapping( + produces = { + MediaType.APPLICATION_JSON_VALUE + }) + @PreAuthorize("hasAuthority('Administrator')") + @ApiResponses(value = { + @ApiResponse(responseCode = "201", description = "DOI server created."), + @ApiResponse(responseCode = "400", description = "Bad parameters."), + @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_ADMIN) + }) + @ResponseStatus(HttpStatus.CREATED) + public ResponseEntity addDoiServer( + @Parameter( + description = API_PARAM_DOISERVER_DETAILS, + required = true + ) + @RequestBody + DoiServerDto doiServerDto + ) { + Optional existingDoiServerOpt = doiServerRepository.findOneById(doiServerDto.getId()); + if (existingDoiServerOpt.isPresent()) { + throw new IllegalArgumentException(String.format( + "DOI server with id '%d' already exists.", + doiServerDto.getId() + )); + } else { + DoiServer doiServer = doiServerDto.asDoiServer(); + doiServerRepository.save(doiServer); + + return new ResponseEntity<>(doiServer.getId(), HttpStatus.CREATED); + } + } + + @io.swagger.v3.oas.annotations.Operation( + summary = "Update a DOI server" + ) + @PutMapping( + value = "/{doiServerId}", + produces = { + MediaType.APPLICATION_JSON_VALUE + }) + @PreAuthorize("hasAuthority('Administrator')") + @ApiResponses(value = { + @ApiResponse(responseCode = "204", description = "DOI server updated."), + @ApiResponse(responseCode = "404", description = ApiParams.API_RESPONSE_RESOURCE_NOT_FOUND), + @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_ADMIN) + }) + @ResponseStatus(HttpStatus.NO_CONTENT) + public void updateDoiServer( + @Parameter(description = API_PARAM_DOISERVER_IDENTIFIER, + required = true, + example = "") + @PathVariable Integer doiServerId, + @Parameter(description = API_PARAM_DOISERVER_DETAILS, + required = true) + @RequestBody + DoiServerDto doiServerDto + ) throws ResourceNotFoundException { + Optional existingMapserverOpt = 
doiServerRepository.findOneById(doiServerId); + if (existingMapserverOpt.isPresent()) { + DoiServer doiServer = doiServerDto.asDoiServer(); + + doiServerRepository.update(doiServerId, entity -> { + entity.setName(doiServer.getName()); + entity.setDescription(doiServer.getDescription()); + entity.setUrl(doiServer.getUrl()); + entity.setUsername(doiServer.getUsername()); + entity.setPublicUrl(doiServer.getPublicUrl()); + entity.setLandingPageTemplate(doiServer.getLandingPageTemplate()); + entity.setPattern(doiServer.getPattern()); + entity.setPrefix(doiServer.getPrefix()); + entity.setPublicationGroups(doiServer.getPublicationGroups()); + }); + } else { + throw new ResourceNotFoundException(String.format( + MSG_DOISERVER_WITH_ID_NOT_FOUND, + doiServerId + )); + } + } + + @io.swagger.v3.oas.annotations.Operation( + summary = "Remove a DOI server" + ) + @DeleteMapping( + value = "/{doiServerId}", + produces = { + MediaType.APPLICATION_JSON_VALUE + }) + @PreAuthorize("hasAuthority('Administrator')") + @ApiResponses(value = { + @ApiResponse(responseCode = "204", description = "DOI server removed."), + @ApiResponse(responseCode = "404", description = ApiParams.API_RESPONSE_RESOURCE_NOT_FOUND), + @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_ADMIN) + }) + @ResponseStatus(HttpStatus.NO_CONTENT) + public void deleteMapserver( + @Parameter(description = API_PARAM_DOISERVER_IDENTIFIER, + required = true + ) + @PathVariable Integer doiServerId + ) throws ResourceNotFoundException { + Optional existingMapserverOpt = doiServerRepository.findOneById(doiServerId); + if (existingMapserverOpt.isPresent()) { + doiServerRepository.delete(existingMapserverOpt.get()); + } else { + throw new ResourceNotFoundException(String.format( + MSG_DOISERVER_WITH_ID_NOT_FOUND, + doiServerId + )); + } + } + + + @io.swagger.v3.oas.annotations.Operation( + summary = "Update a DOI server authentication" + ) + @PostMapping( + value = "/{doiServerId}/auth", + 
produces = { + MediaType.APPLICATION_JSON_VALUE + }) + @PreAuthorize("hasAuthority('Administrator')") + @ApiResponses(value = { + @ApiResponse(responseCode = "204", description = "DOI server updated."), + @ApiResponse(responseCode = "404", description = ApiParams.API_RESPONSE_RESOURCE_NOT_FOUND), + @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_ADMIN) + }) + @ResponseStatus(HttpStatus.NO_CONTENT) + public void updateDoiServerAuth( + @Parameter( + description = API_PARAM_DOISERVER_IDENTIFIER, + required = true, + example = "") + @PathVariable Integer doiServerId, + @Parameter( + description = "Password", + required = true) + @RequestParam + String password + ) throws ResourceNotFoundException { + Optional existingMapserverOpt = doiServerRepository.findOneById(doiServerId); + if (existingMapserverOpt.isPresent()) { + doiServerRepository.update(doiServerId, entity -> { + entity.setPassword(password); + }); + } else { + throw new ResourceNotFoundException(String.format( + MSG_DOISERVER_WITH_ID_NOT_FOUND, + doiServerId + )); + } + } +} diff --git a/services/src/main/java/org/fao/geonet/api/doiservers/model/AnonymousDoiServer.java b/services/src/main/java/org/fao/geonet/api/doiservers/model/AnonymousDoiServer.java new file mode 100644 index 000000000000..7760a19b9324 --- /dev/null +++ b/services/src/main/java/org/fao/geonet/api/doiservers/model/AnonymousDoiServer.java @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.api.doiservers.model; + +public class AnonymousDoiServer extends DoiServerDto { + + public AnonymousDoiServer(DoiServerDto doiServer) { + super(); + this + .setId(doiServer.getId()) + .setName(doiServer.getName()) + .setUsername(doiServer.getUsername()) + .setDescription(doiServer.getDescription()) + .setUrl(doiServer.getUrl()) + .setLandingPageTemplate(doiServer.getLandingPageTemplate()) + .setPattern(doiServer.getPattern()) + .setPublicUrl(doiServer.getPublicUrl()) + .setPrefix(doiServer.getPrefix()) + .setPublicationGroups(doiServer.getPublicationGroups()); + } + + @Override + public String getPassword() { + return "***"; + } +} diff --git a/services/src/main/java/org/fao/geonet/api/doiservers/model/DoiServerDto.java b/services/src/main/java/org/fao/geonet/api/doiservers/model/DoiServerDto.java new file mode 100644 index 000000000000..f20514181c17 --- /dev/null +++ b/services/src/main/java/org/fao/geonet/api/doiservers/model/DoiServerDto.java @@ -0,0 +1,196 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of 
the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.api.doiservers.model; + +import org.fao.geonet.ApplicationContextHolder; +import org.fao.geonet.domain.DoiServer; +import org.fao.geonet.domain.Group; +import org.fao.geonet.repository.GroupRepository; + +import java.util.HashSet; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +public class DoiServerDto { + private int id; + private String name; + private String description; + private String url; + private String username; + private String password; + private String landingPageTemplate; + private String publicUrl; + private String pattern = "{{uuid}}"; + private String prefix; + private Set publicationGroups = new HashSet<>(); + + + public int getId() { + return id; + } + + public DoiServerDto setId(int id) { + this.id = id; + return this; + } + + public String getName() { + return name; + } + + public DoiServerDto setName(String name) { + this.name = name; + return this; + } + + public String getDescription() { + return description; + } + + public DoiServerDto setDescription(String description) { + this.description = description; + return this; + } + + public String getUrl() { + return url; + } + + public DoiServerDto setUrl(String url) { + this.url = url; + return this; + } + + public String getUsername() { + return username; + } + + public DoiServerDto 
setUsername(String username) { + this.username = username; + return this; + } + + public String getPassword() { + return password; + } + + public DoiServerDto setPassword(String password) { + this.password = password; + return this; + } + + public String getLandingPageTemplate() { + return landingPageTemplate; + } + + public DoiServerDto setLandingPageTemplate(String landingPageTemplate) { + this.landingPageTemplate = landingPageTemplate; + return this; + } + + public String getPublicUrl() { + return publicUrl; + } + + public DoiServerDto setPublicUrl(String publicUrl) { + this.publicUrl = publicUrl; + return this; + } + + public String getPattern() { + return pattern; + } + + public DoiServerDto setPattern(String pattern) { + this.pattern = pattern; + return this; + } + + public String getPrefix() { + return prefix; + } + + public DoiServerDto setPrefix(String prefix) { + this.prefix = prefix; + return this; + } + + public Set getPublicationGroups() { + return publicationGroups; + } + + public DoiServerDto setPublicationGroups(Set publicationGroups) { + this.publicationGroups = publicationGroups; + return this; + } + + public static DoiServerDto from(DoiServer doiServer) { + DoiServerDto doiServerDto = new DoiServerDto(); + + doiServerDto.setId(doiServer.getId()); + doiServerDto.setName(doiServer.getName()); + doiServerDto.setDescription(doiServer.getDescription()); + doiServerDto.setUrl(doiServer.getUrl()); + doiServerDto.setUsername(doiServer.getUsername()); + doiServerDto.setPassword(doiServer.getPassword()); + doiServerDto.setPattern(doiServer.getPattern()); + doiServerDto.setLandingPageTemplate(doiServer.getLandingPageTemplate()); + doiServerDto.setPublicUrl(doiServer.getPublicUrl()); + doiServerDto.setPrefix(doiServer.getPrefix()); + doiServerDto.setPublicationGroups(doiServer.getPublicationGroups().stream().map(Group::getId).collect(Collectors.toSet())); + + return doiServerDto; + } + + public DoiServer asDoiServer() { + DoiServer doiServer = new 
DoiServer(); + + doiServer.setId(getId()); + doiServer.setName(getName()); + doiServer.setDescription(getDescription()); + doiServer.setUrl(getUrl()); + doiServer.setUsername(getUsername()); + doiServer.setPassword(getPassword()); + doiServer.setPattern(getPattern()); + doiServer.setLandingPageTemplate(getLandingPageTemplate()); + doiServer.setPublicUrl(getPublicUrl()); + doiServer.setPrefix(getPrefix()); + + GroupRepository groupRepository = ApplicationContextHolder.get().getBean(GroupRepository.class); + Set groups = new HashSet<>(); + getPublicationGroups().forEach(groupId -> { + if (groupId != null) { + Optional g = groupRepository.findById(groupId); + + if (g.isPresent()) { + groups.add(g.get()); + } + } + }); + doiServer.setPublicationGroups(groups); + + return doiServer; + } +} diff --git a/services/src/main/java/org/fao/geonet/api/es/EsHTTPProxy.java b/services/src/main/java/org/fao/geonet/api/es/EsHTTPProxy.java index a2a0cd1bcb2d..dca972556b78 100644 --- a/services/src/main/java/org/fao/geonet/api/es/EsHTTPProxy.java +++ b/services/src/main/java/org/fao/geonet/api/es/EsHTTPProxy.java @@ -301,7 +301,7 @@ private static boolean hasOperation(ObjectNode doc, ReservedGroup group, Reserve @ResponseStatus(value = HttpStatus.OK) @ResponseBody public void search( - @RequestParam(defaultValue = SelectionManager.SELECTION_METADATA) + @RequestParam(defaultValue = SelectionManager.SELECTION_BUCKET) String bucket, @Parameter(description = "Type of related resource. If none, no associated resource returned.", required = false @@ -330,11 +330,15 @@ public void search( description = "The multi search API executes several searches from a single API request. 
See https://www.elastic.co/guide/en/elasticsearch/reference/current/search-multi-search.html for search parameters, and https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl.html Query DSL.") @RequestMapping(value = "/search/records/_msearch", method = RequestMethod.POST, - produces = MediaType.APPLICATION_JSON_VALUE, - consumes = MediaType.APPLICATION_JSON_VALUE) + produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_NDJSON_VALUE}, + consumes = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_NDJSON_VALUE}) @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Search results.", - content = @Content(mediaType = MediaType.APPLICATION_JSON_VALUE, schema = @Schema(type = "string"))) + content = { + @Content(mediaType = MediaType.APPLICATION_JSON_VALUE, schema = @Schema(type = "string")), + @Content(mediaType = MediaType.APPLICATION_NDJSON_VALUE, schema = @Schema(type = "string")) + } + ) }) @ResponseStatus(value = HttpStatus.OK) @ResponseBody @@ -383,7 +387,7 @@ public void msearch( @PreAuthorize("hasAuthority('Administrator')") @ResponseBody public void call( - @RequestParam(defaultValue = SelectionManager.SELECTION_METADATA) + @RequestParam(defaultValue = SelectionManager.SELECTION_BUCKET) String bucket, @Parameter(description = "'_search' for search service.") @PathVariable String endPoint, diff --git a/services/src/main/java/org/fao/geonet/api/groups/GroupsApi.java b/services/src/main/java/org/fao/geonet/api/groups/GroupsApi.java index 0b0fb4980d2a..2e88e8a42bc2 100644 --- a/services/src/main/java/org/fao/geonet/api/groups/GroupsApi.java +++ b/services/src/main/java/org/fao/geonet/api/groups/GroupsApi.java @@ -26,11 +26,14 @@ import com.google.common.base.Functions; import com.google.common.collect.Lists; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import 
io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; import jeeves.server.UserSession; import jeeves.server.context.ServiceContext; +import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.io.FileUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.exception.ExceptionUtils; @@ -45,8 +48,10 @@ import org.fao.geonet.api.tools.i18n.TranslationPackBuilder; import org.fao.geonet.constants.Geonet; import org.fao.geonet.domain.*; +import org.fao.geonet.domain.page.Page; import org.fao.geonet.kernel.DataManager; import org.fao.geonet.repository.*; +import org.fao.geonet.repository.page.PageRepository; import org.fao.geonet.repository.specification.GroupSpecs; import org.fao.geonet.repository.specification.MetadataSpecs; import org.fao.geonet.repository.specification.OperationAllowedSpecs; @@ -77,6 +82,7 @@ import java.nio.file.attribute.FileTime; import java.sql.SQLException; import java.util.*; +import java.util.stream.Collectors; import static org.springframework.data.jpa.domain.Specification.where; @@ -149,6 +155,9 @@ public class GroupsApi { @Autowired private MetadataRepository metadataRepository; + @Autowired + private PageRepository pageRepository; + private static Resources.ResourceHolder getImage(Resources resources, ServiceContext serviceContext, Group group) throws IOException { final Path logosDir = resources.locateLogosDir(serviceContext); final Path harvesterLogosDir = resources.locateHarvesterLogosDir(serviceContext); @@ -430,7 +439,7 @@ public List getGroupUsers( @ResponseStatus(value = HttpStatus.NO_CONTENT) @PreAuthorize("hasAuthority('UserAdmin')") @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Group updated."), + @ApiResponse(responseCode = "204", description = "Group updated.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = 
"404", description = ApiParams.API_RESPONSE_RESOURCE_NOT_FOUND), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_USER_ADMIN) }) @@ -485,7 +494,7 @@ public void updateGroup( @ResponseStatus(value = HttpStatus.NO_CONTENT) @PreAuthorize("hasAuthority('Administrator')") @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Group removed."), + @ApiResponse(responseCode = "204", description = "Group removed.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = ApiParams.API_RESPONSE_RESOURCE_NOT_FOUND), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_USER_ADMIN) }) @@ -541,6 +550,19 @@ public void deleteGroup( )); } + List staticPages = pageRepository.findPageByStatus(Page.PageStatus.GROUPS); + List staticPagesAssignedToGroup = + staticPages.stream().filter(p -> + !p.getGroups().stream().filter(g -> g.getId() == groupIdentifier).collect(Collectors.toList()).isEmpty()) + .collect(Collectors.toList()); + + if (!staticPagesAssignedToGroup.isEmpty()) { + throw new NotAllowedException(String.format( + "Group %s is associated with '%s' static page(s). 
Please remove the static page(s) associated with that group first.", + group.get().getName(), staticPagesAssignedToGroup.stream().map(p -> p.getLabel()).collect(Collectors.joining(", ")) + )); + } + groupRepository.deleteById(groupIdentifier); translationPackBuilder.clearCache(); diff --git a/services/src/main/java/org/fao/geonet/api/harvesting/HarvestersApi.java b/services/src/main/java/org/fao/geonet/api/harvesting/HarvestersApi.java index ca2b45d30f73..1e6973dde27c 100644 --- a/services/src/main/java/org/fao/geonet/api/harvesting/HarvestersApi.java +++ b/services/src/main/java/org/fao/geonet/api/harvesting/HarvestersApi.java @@ -24,6 +24,8 @@ package org.fao.geonet.api.harvesting; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -98,7 +100,7 @@ public class HarvestersApi { @ResponseStatus(value = HttpStatus.NO_CONTENT) @PreAuthorize("hasAuthority('UserAdmin')") @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Harvester records transfered to new source."), + @ApiResponse(responseCode = "204", description = "Harvester records transferred to new source.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = ApiParams.API_RESPONSE_RESOURCE_NOT_FOUND), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_USER_ADMIN) }) diff --git a/services/src/main/java/org/fao/geonet/api/identifiers/IdentifiersApi.java b/services/src/main/java/org/fao/geonet/api/identifiers/IdentifiersApi.java index ddf0c3be3575..b658f37e006f 100644 --- a/services/src/main/java/org/fao/geonet/api/identifiers/IdentifiersApi.java +++ b/services/src/main/java/org/fao/geonet/api/identifiers/IdentifiersApi.java @@ -24,6 +24,8 
@@ package org.fao.geonet.api.identifiers; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -61,9 +63,9 @@ public class IdentifiersApi { @io.swagger.v3.oas.annotations.Operation( summary = "Get identifier templates", description = "Identifier templates are used to create record UUIDs " + - "havind a particular structure. The template will be used " + - "when user creates a new record. The template identifier to " + - "use is defined in the administration > settings." + "having a particular structure. The template will be used " + + "when user creates a new record. The identifier template to " + + "use is defined in the admin console > metadata and templates." // authorizations = { // @Authorization(value = "basicAuth") // }) @@ -153,7 +155,7 @@ public ResponseEntity addIdentifier( ) @ResponseStatus(HttpStatus.NO_CONTENT) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Identifier template updated."), + @ApiResponse(responseCode = "204", description = "Identifier template updated.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = "Resource not found."), @ApiResponse(responseCode = "403", description = "Operation not allowed. 
Only Editor can access it.") }) @@ -198,7 +200,7 @@ public void updateIdentifier( ) @ResponseStatus(HttpStatus.NO_CONTENT) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Template identifier removed."), + @ApiResponse(responseCode = "204", description = "Template identifier removed.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = "Resource not found."), @ApiResponse(responseCode = "403", description = "Operation not allowed. Only Editor can access it.") }) diff --git a/services/src/main/java/org/fao/geonet/api/languages/LanguagesApi.java b/services/src/main/java/org/fao/geonet/api/languages/LanguagesApi.java index e62541f05a45..c9cbcc7d59dc 100644 --- a/services/src/main/java/org/fao/geonet/api/languages/LanguagesApi.java +++ b/services/src/main/java/org/fao/geonet/api/languages/LanguagesApi.java @@ -24,6 +24,8 @@ package org.fao.geonet.api.languages; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -185,7 +187,7 @@ public void addLanguages( @PreAuthorize("hasAuthority('Administrator')") @ResponseStatus(HttpStatus.NO_CONTENT) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Language translations removed."), + @ApiResponse(responseCode = "204", description = "Language translations removed.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = "Resource not found."), @ApiResponse(responseCode = "403", description = "Operation not allowed. 
Only Administrator can access it.") }) diff --git a/services/src/main/java/org/fao/geonet/api/mapservers/GeoServerRest.java b/services/src/main/java/org/fao/geonet/api/mapservers/GeoServerRest.java index a6f4d19a4462..d7d347cbd4f3 100644 --- a/services/src/main/java/org/fao/geonet/api/mapservers/GeoServerRest.java +++ b/services/src/main/java/org/fao/geonet/api/mapservers/GeoServerRest.java @@ -346,11 +346,14 @@ public boolean createDatastore(String ws, String ds, String file) throws IOExcep } else if (file.startsWith("file://")) { type = "external"; } + boolean isZip = ".zip".equals(extension); Log.debug(Geonet.GEOPUBLISH, "Creating datastore " + ds + " in workspace " + ws + " from file " + file); - int status = sendREST(GeoServerRest.METHOD_PUT, "/workspaces/" + ws - + "/datastores/" + ds + "/" + type + extension, file, null, - "text/plain", false); + int status = sendREST(GeoServerRest.METHOD_PUT, + "/workspaces/" + ws + "/datastores/" + ds + "/" + type + (isZip ? ".shp" : extension), + file, null, + (isZip ? 
"application/zip" : "text/plain"), + false); return status == 201; } @@ -473,18 +476,20 @@ public boolean createStyle(String ws, String layer, String sldbody) { } if (sldbody.isEmpty() || (!sldbody.isEmpty() && status != 200)) { String info = getLayerInfo(layer); - Element layerProperties = Xml.loadString(info, false); - String styleName = layerProperties.getChild("defaultStyle") - .getChild("name").getText(); - - Log.debug(Geonet.GEOPUBLISH, "Getting default style for " + styleName + " to apply to layer " + layer + " in workspace " + ws); - /* get the default style (polygon, line, point) from the global styles */ - status = sendREST(GeoServerRest.METHOD_GET, "/styles/" + styleName - + ".sld?quietOnNotFound=true", null, null, null, true); - - status = sendREST(GeoServerRest.METHOD_PUT, url + "/" + layer - + "_style", getResponse(), null, - "application/vnd.ogc.sld+xml", true); + if (info != null) { + Element layerProperties = Xml.loadString(info, false); + String styleName = layerProperties.getChild("defaultStyle") + .getChild("name").getText(); + + Log.debug(Geonet.GEOPUBLISH, "Getting default style for " + styleName + " to apply to layer " + layer + " in workspace " + ws); + /* get the default style (polygon, line, point) from the global styles */ + status = sendREST(GeoServerRest.METHOD_GET, "/styles/" + styleName + + ".sld?quietOnNotFound=true", null, null, null, true); + + status = sendREST(GeoServerRest.METHOD_PUT, url + "/" + layer + + "_style", getResponse(), null, + "application/vnd.ogc.sld+xml", true); + } } checkResponseCode(status); diff --git a/services/src/main/java/org/fao/geonet/api/mapservers/MapServersApi.java b/services/src/main/java/org/fao/geonet/api/mapservers/MapServersApi.java index f6a862622475..db1f0814de1e 100644 --- a/services/src/main/java/org/fao/geonet/api/mapservers/MapServersApi.java +++ b/services/src/main/java/org/fao/geonet/api/mapservers/MapServersApi.java @@ -24,6 +24,8 @@ package org.fao.geonet.api.mapservers; import 
io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -209,7 +211,7 @@ public ResponseEntity addMapserver( }) @PreAuthorize("hasAuthority('Reviewer')") @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Mapserver updated."), + @ApiResponse(responseCode = "204", description = "Mapserver updated.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = ApiParams.API_RESPONSE_RESOURCE_NOT_FOUND), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_REVIEWER) }) @@ -253,7 +255,7 @@ public void updateMapserver( }) @PreAuthorize("hasAuthority('Reviewer')") @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Mapserver updated."), + @ApiResponse(responseCode = "204", description = "Mapserver updated.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = ApiParams.API_RESPONSE_RESOURCE_NOT_FOUND), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_REVIEWER) }) @@ -323,7 +325,7 @@ private void updateMapserver( }) @PreAuthorize("hasAuthority('Reviewer')") @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Mapserver removed."), + @ApiResponse(responseCode = "204", description = "Mapserver removed.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = ApiParams.API_RESPONSE_RESOURCE_NOT_FOUND), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_REVIEWER) }) diff --git a/services/src/main/java/org/fao/geonet/api/mapservers/MapServersUtils.java 
b/services/src/main/java/org/fao/geonet/api/mapservers/MapServersUtils.java index 57c4aa03f542..83a19c251a88 100644 --- a/services/src/main/java/org/fao/geonet/api/mapservers/MapServersUtils.java +++ b/services/src/main/java/org/fao/geonet/api/mapservers/MapServersUtils.java @@ -259,7 +259,10 @@ public static boolean publishExternal(String file, GeoServerRest g, ACTION actio if (!g.deleteLayer(dsName)) report += "Layer: " + g.getStatus(); if (isRaster) { - + if (!g.deleteCoverage(dsName, dsName)) + report += "Coverage: " + g.getStatus(); + if (!g.deleteCoverageStore(dsName)) + report += "Coveragestore: " + g.getStatus(); } else { if (!g.deleteFeatureType(dsName, dsName)) report += "Feature type: " + g.getStatus(); diff --git a/services/src/main/java/org/fao/geonet/api/pages/PageProperties.java b/services/src/main/java/org/fao/geonet/api/pages/PageProperties.java index 227908a40824..d19070345c7e 100644 --- a/services/src/main/java/org/fao/geonet/api/pages/PageProperties.java +++ b/services/src/main/java/org/fao/geonet/api/pages/PageProperties.java @@ -1,11 +1,14 @@ package org.fao.geonet.api.pages; +import org.apache.commons.collections4.CollectionUtils; +import org.fao.geonet.domain.Group; import org.fao.geonet.domain.page.Page; import org.fao.geonet.domain.page.Page.PageFormat; import org.fao.geonet.domain.page.Page.PageSection; import org.fao.geonet.domain.page.Page.PageStatus; import java.io.Serializable; +import java.util.ArrayList; import java.util.List; public class PageProperties implements Serializable { @@ -21,6 +24,7 @@ public class PageProperties implements Serializable { private String label; private String icon; private Page.PageFormat format; + private List groups; private Page page; public PageProperties() { @@ -36,6 +40,12 @@ public PageProperties(Page p) { status = p.getStatus(); label = p.getLabel(); icon = p.getIcon(); + if (CollectionUtils.isNotEmpty(p.getGroups())) { + groups = new ArrayList<>(); + for (Group g : p.getGroups()) { + 
groups.add(g.getName()); + } + } } @Override @@ -114,4 +124,12 @@ public String getIcon() { public void setIcon(String icon) { this.icon = icon; } + + public List getGroups() { + return groups; + } + + public void setGroups(List groups) { + this.groups = groups; + } } diff --git a/services/src/main/java/org/fao/geonet/api/pages/PagesAPI.java b/services/src/main/java/org/fao/geonet/api/pages/PagesAPI.java index 491c04878e27..14da2d9e93c3 100644 --- a/services/src/main/java/org/fao/geonet/api/pages/PagesAPI.java +++ b/services/src/main/java/org/fao/geonet/api/pages/PagesAPI.java @@ -27,6 +27,7 @@ import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; import jeeves.server.UserSession; +import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.io.FilenameUtils; import org.apache.commons.lang.StringUtils; import org.fao.geonet.api.ApiParams; @@ -35,10 +36,16 @@ import org.fao.geonet.api.exception.ResourceNotFoundException; import org.fao.geonet.api.exception.WebApplicationException; import org.fao.geonet.api.tools.i18n.LanguageUtils; +import org.fao.geonet.domain.Group; import org.fao.geonet.domain.Profile; +import org.fao.geonet.domain.UserGroup; import org.fao.geonet.domain.page.Page; import org.fao.geonet.domain.page.PageIdentity; +import org.fao.geonet.repository.GroupRepository; +import org.fao.geonet.repository.UserGroupRepository; import org.fao.geonet.repository.page.PageRepository; +import org.fao.geonet.repository.specification.UserGroupSpecs; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; @@ -55,8 +62,10 @@ import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; +import java.util.LinkedHashSet; import java.util.List; import java.util.Optional; +import java.util.Set; import static 
org.springframework.http.HttpHeaders.CONTENT_TYPE; @@ -75,11 +84,16 @@ public class PagesAPI { private static final String ERROR_CREATE = "Wrong parameters are provided"; private final PageRepository pageRepository; + private final GroupRepository groupRepository; + + @Autowired + UserGroupRepository userGroupRepository; private final LanguageUtils languageUtils; - public PagesAPI(PageRepository pageRepository, LanguageUtils languageUtils) { + public PagesAPI(PageRepository pageRepository, GroupRepository groupRepository, LanguageUtils languageUtils) { this.pageRepository = pageRepository; + this.groupRepository = groupRepository; this.languageUtils = languageUtils; } @@ -157,6 +171,7 @@ private ResponseEntity createPage(PageProperties pageProperties, String language = pageProperties.getLanguage(); String pageId = pageProperties.getPageId(); Page.PageFormat format = pageProperties.getFormat(); + List groups = pageProperties.getGroups(); if (language != null) { checkValidLanguage(language); @@ -182,6 +197,15 @@ private ResponseEntity createPage(PageProperties pageProperties, if (status != null) { newPage.setStatus(status); + + if (status == Page.PageStatus.GROUPS && CollectionUtils.isNotEmpty(groups)) { + Set pageGroups = new LinkedHashSet<>(); + for (String groupName : groups) { + Group group = groupRepository.findByName(groupName); + pageGroups.add(group); + } + newPage.setGroups(pageGroups); + } } pageRepository.save(newPage); @@ -203,6 +227,30 @@ private ResponseEntity updatePageInternal(@NotNull String language, String newLabel = pageProperties.getLabel(); String newIcon = pageProperties.getIcon(); + Set _groups = new LinkedHashSet<>(); + if (CollectionUtils.isNotEmpty(pageProperties.getGroups())) { + for (String groupName : pageProperties.getGroups()) { + Group group = groupRepository.findByName(groupName); + + Group groupToAdd= new Group(); + groupToAdd.setId(group.getId()); + groupToAdd.setAllowedCategories(group.getAllowedCategories()); + 
groupToAdd.setDescription(group.getDescription()); + groupToAdd.setEmail(group.getEmail()); + groupToAdd.setLogo(group.getLogo()); + groupToAdd.setEnableAllowedCategories(group.getEnableAllowedCategories()); + groupToAdd.setDefaultCategory(group.getDefaultCategory()); + groupToAdd.setName(group.getName()); + groupToAdd.setReferrer(group.getReferrer()); + groupToAdd.setWebsite(group.getWebsite()); + groupToAdd.setLabelTranslations(group.getLabelTranslations()); + + _groups.add(groupToAdd); + } + + } + + checkValidLanguage(language); if (newLanguage != null) { @@ -240,7 +288,8 @@ private ResponseEntity updatePageInternal(@NotNull String language, pageProperties.getSections() != null ? pageProperties.getSections() : pageToUpdate.getSections(), pageProperties.getStatus() != null ? pageProperties.getStatus() : pageToUpdate.getStatus(), newLabel != null ? newLabel : pageToUpdate.getLabel(), - newIcon != null ? newIcon : pageToUpdate.getIcon()); + newIcon != null ? newIcon : pageToUpdate.getIcon(), + CollectionUtils.isNotEmpty(_groups)? _groups: null); pageRepository.save(pageCopy); pageRepository.delete(pageToUpdate); @@ -251,7 +300,14 @@ private ResponseEntity updatePageInternal(@NotNull String language, pageToUpdate.setStatus(pageProperties.getStatus() != null ? 
pageProperties.getStatus() : pageToUpdate.getStatus()); pageToUpdate.setLabel(newLabel); pageToUpdate.setIcon(newIcon); + + pageToUpdate.getGroups().clear(); + if (pageToUpdate.getStatus() == Page.PageStatus.GROUPS) { + pageToUpdate.getGroups().addAll(_groups); + } + pageRepository.save(pageToUpdate); + } return ResponseEntity.noContent().build(); @@ -349,7 +405,7 @@ public ResponseEntity getPageContent( final UserSession us = ApiUtils.getUserSession(session); if (page.get().getStatus().equals(Page.PageStatus.HIDDEN) && us.getProfile() != Profile.Administrator) { return ResponseEntity.status(HttpStatus.FORBIDDEN).build(); - } else if (page.get().getStatus().equals(Page.PageStatus.PRIVATE) && (us.getProfile() == null || us.getProfile() == Profile.Guest)) { + } else if ((page.get().getStatus().equals(Page.PageStatus.PRIVATE) || page.get().getStatus().equals(Page.PageStatus.GROUPS)) && (us.getProfile() == null || us.getProfile() == Profile.Guest)) { return ResponseEntity.status(HttpStatus.FORBIDDEN).build(); } else { String content; @@ -383,6 +439,7 @@ public ResponseEntity> listPages( @Parameter(hidden = true) final HttpSession session) { final UserSession us = ApiUtils.getUserSession(session); + List unfilteredResult; if (language == null) { @@ -396,15 +453,16 @@ public ResponseEntity> listPages( for (final Page page : unfilteredResult) { if (page.getStatus().equals(Page.PageStatus.HIDDEN) && us.getProfile() == Profile.Administrator || page.getStatus().equals(Page.PageStatus.PRIVATE) && us.getProfile() != null && us.getProfile() != Profile.Guest + || page.getStatus().equals(Page.PageStatus.GROUPS) && us.getProfile() != null && us.getProfile() != Profile.Guest && checkGroupPermission(us, page) || page.getStatus().equals(Page.PageStatus.PUBLIC) || page.getStatus().equals(Page.PageStatus.PUBLIC_ONLY) && !us.isAuthenticated()) { if (section == null) { - filteredResult.add(new org.fao.geonet.api.pages.PageProperties(page)); + filteredResult.add(new PageProperties(page)); 
} else { final List sections = page.getSections(); final boolean containsRequestedSection = sections.contains(section); if (containsRequestedSection) { - filteredResult.add(new org.fao.geonet.api.pages.PageProperties(page)); + filteredResult.add(new PageProperties(page)); } } } @@ -497,17 +555,17 @@ private void checkValidLanguage(String language) { * @param page the page * @return the response entity */ - private ResponseEntity checkPermissionsOnSinglePageAndReturn(final HttpSession session, final Page page) { + private ResponseEntity checkPermissionsOnSinglePageAndReturn(final HttpSession session, final Page page) { if (page == null) { return new ResponseEntity<>(HttpStatus.NOT_FOUND); } else { final UserSession us = ApiUtils.getUserSession(session); if (page.getStatus().equals(Page.PageStatus.HIDDEN) && us.getProfile() != Profile.Administrator) { return new ResponseEntity<>(HttpStatus.FORBIDDEN); - } else if (page.getStatus().equals(Page.PageStatus.PRIVATE) && (us.getProfile() == null || us.getProfile() == Profile.Guest)) { + } else if ((page.getStatus().equals(Page.PageStatus.PRIVATE) || page.getStatus().equals(Page.PageStatus.GROUPS)) && (us.getProfile() == null || us.getProfile() == Profile.Guest)) { return new ResponseEntity<>(HttpStatus.FORBIDDEN); } else { - return new ResponseEntity<>(new org.fao.geonet.api.pages.PageProperties(page), HttpStatus.OK); + return new ResponseEntity<>(new PageProperties(page), HttpStatus.OK); } } } @@ -536,7 +594,7 @@ private Page searchPage(final String language, final String pageId, final PageRe */ protected Page getEmptyHiddenDraftPage(final String language, final String pageId, final String label, final String icon, final Page.PageFormat format) { final List sections = new ArrayList<>(); - return new Page(new PageIdentity(language, pageId), null, null, format, sections, Page.PageStatus.HIDDEN, label, icon); + return new Page(new PageIdentity(language, pageId), null, null, format, sections, Page.PageStatus.HIDDEN, label, 
icon, null); } /** @@ -569,4 +627,36 @@ private void fillContent(final MultipartFile data, } } + + /** + * Check is the user is in designated group to access the static page when page permission level is set to GROUP + * @param us Current User Session + * @param page static page object + * @return permission granted + */ + private boolean checkGroupPermission (UserSession us, Page page) { + boolean isGranted = false; + String currentUserId = us.getUserId(); + + if (us.getProfile() == Profile.Administrator) { + isGranted = true; + } else if (page.getStatus().equals(Page.PageStatus.GROUPS) && StringUtils.isNotEmpty(currentUserId)) { + List userGroups = userGroupRepository.findAll(UserGroupSpecs.hasUserId(Integer.parseInt(currentUserId))); + + Set accessingGroups = page.getGroups(); + + if (CollectionUtils.isNotEmpty(userGroups) && CollectionUtils.isNotEmpty(accessingGroups)) { + for (UserGroup userGroup : userGroups) { + for (Group group : accessingGroups) { + if (org.apache.commons.lang3.StringUtils.equals(userGroup.getGroup().getName(), group.getName())) { + isGranted = true; + break; + } + } + } + } + } + + return isGranted; + } } diff --git a/services/src/main/java/org/fao/geonet/api/processing/DatabaseProcessApi.java b/services/src/main/java/org/fao/geonet/api/processing/DatabaseProcessApi.java index afd1f15d037e..6b709470b447 100644 --- a/services/src/main/java/org/fao/geonet/api/processing/DatabaseProcessApi.java +++ b/services/src/main/java/org/fao/geonet/api/processing/DatabaseProcessApi.java @@ -92,6 +92,9 @@ public class DatabaseProcessApi { @Autowired DataManager dataMan; + @Autowired + IMetadataUtils metadataUtils; + @Autowired SchemaManager schemaMan; @@ -192,7 +195,7 @@ public ResponseEntity previewProcessSearchAndReplace( final String siteURL = request.getRequestURL().toString() + "?" 
+ request.getQueryString(); for (String uuid : records) { - String id = dataMan.getMetadataId(uuid); + String id = String.valueOf(metadataUtils.findOneByUuid(uuid).getId()); Log.info("org.fao.geonet.services.metadata", "Processing metadata for preview with id:" + id); @@ -203,7 +206,6 @@ public ResponseEntity previewProcessSearchAndReplace( false, processingReport); if (record != null) { if (diffType != null) { - IMetadataUtils metadataUtils = serviceContext.getBean(IMetadataUtils.class); AbstractMetadata metadata = metadataUtils.findOne(id); preview.addContent( Diff.diff(metadata.getData(), Xml.getString(record), diffType)); @@ -393,7 +395,7 @@ public void process(String catalogueId) throws Exception { DataManager dataMan = context.getBean(DataManager.class); ApplicationContext appContext = ApplicationContextHolder.get(); for (String uuid : this.records) { - String id = getDataManager().getMetadataId(uuid); + String id = String.valueOf(context.getBean(IMetadataUtils.class).findOneByUuid(uuid).getId()); Log.info("org.fao.geonet.services.metadata", "Processing metadata with id:" + id); diff --git a/services/src/main/java/org/fao/geonet/api/processing/DatabaseProcessUtils.java b/services/src/main/java/org/fao/geonet/api/processing/DatabaseProcessUtils.java index 1387a2c8e870..70808011c86e 100644 --- a/services/src/main/java/org/fao/geonet/api/processing/DatabaseProcessUtils.java +++ b/services/src/main/java/org/fao/geonet/api/processing/DatabaseProcessUtils.java @@ -38,7 +38,6 @@ import org.fao.geonet.kernel.search.IndexingMode; import org.fao.geonet.kernel.setting.SettingManager; import org.fao.geonet.lib.Lib; -import org.fao.geonet.repository.MetadataRepository; import org.fao.geonet.repository.MetadataValidationRepository; import org.fao.geonet.utils.Xml; import org.jdom.Element; @@ -61,7 +60,6 @@ public static Element process(ServiceContext context, String id, AccessManager accessMan = context.getBean(AccessManager.class); DataManager dataMan = 
context.getBean(DataManager.class); IMetadataUtils metadataUtils = context.getBean(IMetadataUtils.class); - MetadataRepository metadataRepository = context.getBean(MetadataRepository.class); report.incrementProcessedRecords(); @@ -88,11 +86,11 @@ public static Element process(ServiceContext context, String id, String updatedXml = useRegexp ? (StringUtils.isNotEmpty(flags) - ? metadataRepository.selectOneWithRegexSearchAndReplaceWithFlags( + ? metadataUtils.selectOneWithRegexSearchAndReplaceWithFlags( info.getUuid(), search, replace, flags) - : metadataRepository.selectOneWithRegexSearchAndReplace( + : metadataUtils.selectOneWithRegexSearchAndReplace( info.getUuid(), search, replace)) - : metadataRepository.selectOneWithSearchAndReplace( + : metadataUtils.selectOneWithSearchAndReplace( info.getUuid(), search, replace); // Check XML is still well formed. diff --git a/services/src/main/java/org/fao/geonet/api/processing/ProcessApi.java b/services/src/main/java/org/fao/geonet/api/processing/ProcessApi.java index 775874d572de..94a7ed8ca860 100644 --- a/services/src/main/java/org/fao/geonet/api/processing/ProcessApi.java +++ b/services/src/main/java/org/fao/geonet/api/processing/ProcessApi.java @@ -24,6 +24,8 @@ package org.fao.geonet.api.processing; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -99,7 +101,7 @@ public List getProcessReport() throws Exception { MediaType.APPLICATION_JSON_VALUE }) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Report registry cleared."), + @ApiResponse(responseCode = "204", description = "Report registry cleared.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = 
ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_AUTHENTICATED) }) @ResponseBody diff --git a/services/src/main/java/org/fao/geonet/api/records/CatalogApi.java b/services/src/main/java/org/fao/geonet/api/records/CatalogApi.java index e75476782456..08d060e3ca5d 100644 --- a/services/src/main/java/org/fao/geonet/api/records/CatalogApi.java +++ b/services/src/main/java/org/fao/geonet/api/records/CatalogApi.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -125,8 +125,8 @@ public class CatalogApi { .add("geom") .add(SOURCE_CATALOGUE) .add(Geonet.IndexFieldNames.DATABASE_CHANGE_DATE) - .add("resourceTitleObject.default") // TODOES multilingual - .add("resourceAbstractObject.default").build(); + .add(Geonet.IndexFieldNames.RESOURCETITLE + "Object") + .add(Geonet.IndexFieldNames.RESOURCEABSTRACT + "Object").build(); } @Autowired @@ -167,7 +167,7 @@ private static String paramsAsString(Map requestParams) { StringBuilder paramNonPaging = new StringBuilder(); for (Entry pair : requestParams.entrySet()) { if (!pair.getKey().equals("from") && !pair.getKey().equals("to")) { - paramNonPaging.append(paramNonPaging.toString().equals("") ? "" : "&").append(pair.getKey()).append("=").append(pair.getValue()); + paramNonPaging.append(paramNonPaging.toString().isEmpty() ? 
"" : "&").append(pair.getKey()).append("=").append(pair.getValue()); } } return paramNonPaging.toString(); @@ -364,6 +364,11 @@ public void exportAsPdf( required = false ) String bucket, + @RequestParam( + required = false, + defaultValue = "eng" + ) + String language, @Parameter(hidden = true) @RequestParam Map allRequestParams, @@ -384,76 +389,82 @@ public void exportAsPdf( final SearchResponse searchResponse = searchManager.query( String.format( - "uuid:(\"%s\")", - String.join("\" or \"", uuidList)), + "uuid:(\"%s\") AND NOT draft:\"y\"", // Skip working copies as duplicate UUIDs cause the PDF xslt to fail + String.join("\" OR \"", uuidList)), EsFilterBuilder.buildPermissionsFilter(ApiUtils.createServiceContext(httpRequest)), searchFieldsForPdf, 0, maxhits); Map params = new HashMap<>(); Element request = new Element("request"); - allRequestParams.entrySet().forEach(e -> { - Element n = new Element(e.getKey()); - n.setText(e.getValue()); + allRequestParams.forEach((key, value) -> { + Element n = new Element(key); + n.setText(value); request.addContent(n); }); + if (!languageUtils.getUiLanguages().contains(language)) { + language = languageUtils.getDefaultUiLanguage(); + } + + String langCode = "lang" + language; + Element response = new Element("response"); ObjectMapper objectMapper = new ObjectMapper(); searchResponse.hits().hits().forEach(h1 -> { Hit h = (Hit) h1; Element r = new Element("metadata"); final Map source = objectMapper.convertValue(h.source(), Map.class); - source.entrySet().forEach(e -> { - Object v = e.getValue(); + source.forEach((key, v) -> { if (v instanceof String) { - Element t = new Element(e.getKey()); + Element t = new Element(key); t.setText((String) v); r.addContent(t); - } else if (v instanceof HashMap && e.getKey().endsWith("Object")) { - Element t = new Element(e.getKey()); - Map textFields = (HashMap) e.getValue(); - t.setText(textFields.get("default")); + } else if (v instanceof HashMap && key.endsWith("Object")) { + Element t = 
new Element(key); + Map textFields = (HashMap) v; + String textValue = textFields.get(langCode) != null ? textFields.get(langCode) : textFields.get("default"); + t.setText(textValue); r.addContent(t); - } else if (v instanceof ArrayList && e.getKey().equals("link")) { + } else if (v instanceof ArrayList && key.equals("link")) { //landform|Physiography of North and Central Eurasia Landform|http://geonetwork3.fao.org/ows/7386_landf|OGC:WMS-1.1.1-http-get-map|application/vnd.ogc.wms_xml ((ArrayList) v).forEach(i -> { - Element t = new Element(e.getKey()); + Element t = new Element(key); Map linkProperties = (HashMap) i; t.setText(linkProperties.get("description") + "|" + linkProperties.get("name") + "|" + linkProperties.get("url") + "|" + linkProperties.get("protocol")); r.addContent(t); }); - } else if (v instanceof HashMap && e.getKey().equals("overview")) { - Element t = new Element(e.getKey()); + } else if (v instanceof HashMap && key.equals("overview")) { + Element t = new Element(key); Map overviewProperties = (HashMap) v; t.setText(overviewProperties.get("url") + "|" + overviewProperties.get("name")); r.addContent(t); } else if (v instanceof ArrayList) { ((ArrayList) v).forEach(i -> { - if (i instanceof HashMap && e.getKey().equals("overview")) { - Element t = new Element(e.getKey()); + if (i instanceof HashMap && key.equals("overview")) { + Element t = new Element(key); Map overviewProperties = (HashMap) i; t.setText(overviewProperties.get("url") + "|" + overviewProperties.get("name")); r.addContent(t); } else if (i instanceof HashMap) { - Element t = new Element(e.getKey()); + Element t = new Element(key); Map tags = (HashMap) i; t.setText(tags.get("default")); // TODOES: Multilingual support r.addContent(t); } else { - Element t = new Element(e.getKey()); + Element t = new Element(key); t.setText((String) i); r.addContent(t); } }); - } else if (v instanceof HashMap && e.getKey().equals("geom")) { - Element t = new Element(e.getKey()); + } else if (v 
instanceof HashMap && key.equals("geom")) { + Element t = new Element(key); t.setText(((HashMap) v).get("coordinates").toString()); r.addContent(t); } else if (v instanceof HashMap) { // Skip. } else { - Element t = new Element(e.getKey()); + Element t = new Element(key); t.setText(v.toString()); r.addContent(t); } @@ -461,14 +472,13 @@ public void exportAsPdf( response.addContent(r); }); - Locale locale = languageUtils.parseAcceptLanguage(httpRequest.getLocales()); - String language = IsoLanguagesMapper.iso639_2T_to_iso639_2B(locale.getISO3Language()); - language = XslUtil.twoCharLangCode(language, "eng").toLowerCase(); - new XsltResponseWriter("env", "search") - .withJson(String.format("catalog/locales/%s-v4.json", language)) - .withJson(String.format("catalog/locales/%s-core.json", language)) - .withJson(String.format("catalog/locales/%s-search.json", language)) + String language2Code = XslUtil.twoCharLangCode(language, "eng").toLowerCase(); + + new XsltResponseWriter("env", "search", language) + .withJson(String.format("catalog/locales/%s-v4.json", language2Code)) + .withJson(String.format("catalog/locales/%s-core.json", language2Code)) + .withJson(String.format("catalog/locales/%s-search.json", language2Code)) .withXml(response) .withParams(params) .withXsl("xslt/services/pdf/portal-present-fop.xsl") @@ -504,6 +514,11 @@ public void exportAsCsv( required = false ) String bucket, + @RequestParam( + required = false, + defaultValue = "eng" + ) + String language, @Parameter(description = "XPath pointing to the XML element to loop on.", required = false, example = "Use . 
for the metadata, " + @@ -575,7 +590,11 @@ public void exportAsCsv( } }); - Element r = new XsltResponseWriter(null, "search") + if (!languageUtils.getUiLanguages().contains(language)) { + language = languageUtils.getDefaultUiLanguage(); + } + + Element r = new XsltResponseWriter(null, "search", language) .withParams(allRequestParams.entrySet().stream() .collect(Collectors.toMap( Entry::getKey, diff --git a/services/src/main/java/org/fao/geonet/api/records/DoiApi.java b/services/src/main/java/org/fao/geonet/api/records/DoiApi.java index ce59aa1d8e43..f786642641ad 100644 --- a/services/src/main/java/org/fao/geonet/api/records/DoiApi.java +++ b/services/src/main/java/org/fao/geonet/api/records/DoiApi.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -23,34 +23,35 @@ package org.fao.geonet.api.records; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; import jeeves.server.context.ServiceContext; import jeeves.services.ReadWriteController; -import org.fao.geonet.api.API; import org.fao.geonet.api.ApiParams; import org.fao.geonet.api.ApiUtils; +import org.fao.geonet.api.exception.ResourceNotFoundException; import org.fao.geonet.doi.client.DoiManager; import org.fao.geonet.domain.AbstractMetadata; -import org.springframework.beans.factory.annotation.Autowired; +import org.fao.geonet.domain.DoiServer; +import org.fao.geonet.repository.DoiServerRepository; import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import 
org.springframework.http.ResponseEntity; import org.springframework.security.access.prepost.PreAuthorize; -import org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.ResponseBody; +import org.springframework.web.bind.annotation.*; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import java.util.Map; +import java.util.Optional; import static org.fao.geonet.api.ApiParams.API_CLASS_RECORD_OPS; import static org.fao.geonet.api.ApiParams.API_CLASS_RECORD_TAG; import static org.fao.geonet.api.ApiParams.API_PARAM_RECORD_UUID; +import static org.fao.geonet.api.doiservers.DoiServersApi.MSG_DOISERVER_WITH_ID_NOT_FOUND; /** * Handle DOI creation. @@ -60,19 +61,24 @@ }) @Tag(name = API_CLASS_RECORD_TAG, description = API_CLASS_RECORD_OPS) -@Controller("doi") +@RestController("doi") @PreAuthorize("hasAuthority('Editor')") @ReadWriteController public class DoiApi { - @Autowired - private DoiManager doiManager; + private final DoiManager doiManager; + + private final DoiServerRepository doiServerRepository; + + DoiApi(final DoiManager doiManager, final DoiServerRepository doiServerRepository) { + this.doiManager = doiManager; + this.doiServerRepository = doiServerRepository; + } @io.swagger.v3.oas.annotations.Operation( summary = "Check that a record can be submitted to DataCite for DOI creation. 
" + "DataCite requires some fields to be populated.") - @RequestMapping(value = "/{metadataUuid}/doi/checkPreConditions", - method = RequestMethod.GET, + @GetMapping(value = "/{metadataUuid}/doi/{doiServerId}/checkPreConditions", produces = { MediaType.APPLICATION_JSON_VALUE } @@ -86,27 +92,31 @@ public class DoiApi { @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_EDIT) }) public - @ResponseBody ResponseEntity> checkDoiStatus( @Parameter( description = API_PARAM_RECORD_UUID, required = true) @PathVariable String metadataUuid, + @Parameter( + description = "DOI server identifier", + required = true) + @PathVariable + Integer doiServerId, @Parameter(hidden = true) HttpServletRequest request ) throws Exception { AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, request); ServiceContext serviceContext = ApiUtils.createServiceContext(request); - final Map reportStatus = doiManager.check(serviceContext, metadata, null); + DoiServer doiServer = retrieveDoiServer(doiServerId); + final Map reportStatus = doiManager.check(serviceContext, doiServer, metadata, null); return new ResponseEntity<>(reportStatus, HttpStatus.OK); } @io.swagger.v3.oas.annotations.Operation( summary = "Check the DOI URL created based on current configuration and pattern.") - @RequestMapping(value = "/{metadataUuid}/doi/checkDoiUrl", - method = RequestMethod.GET, + @GetMapping(value = "/{metadataUuid}/doi/{doiServerId}/checkDoiUrl", produces = { MediaType.TEXT_PLAIN_VALUE } @@ -119,26 +129,30 @@ ResponseEntity> checkDoiStatus( @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_EDIT) }) public - @ResponseBody ResponseEntity checkDoiUrl( @Parameter( description = API_PARAM_RECORD_UUID, required = true) @PathVariable String metadataUuid, + @Parameter( + description = "DOI server identifier", + required = true) + @PathVariable + Integer doiServerId, @Parameter(hidden = true) HttpServletRequest request ) throws 
Exception { AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, request); - return new ResponseEntity<>(doiManager.checkDoiUrl(metadata), HttpStatus.OK); + DoiServer doiServer = retrieveDoiServer(doiServerId); + return new ResponseEntity<>(doiManager.checkDoiUrl(doiServer, metadata), HttpStatus.OK); } @io.swagger.v3.oas.annotations.Operation( summary = "Submit a record to the Datacite metadata store in order to create a DOI.") - @RequestMapping(value = "/{metadataUuid}/doi", - method = RequestMethod.PUT, + @PutMapping(value = "/{metadataUuid}/doi/{doiServerId}", produces = { MediaType.APPLICATION_JSON_VALUE } @@ -151,13 +165,17 @@ ResponseEntity checkDoiUrl( @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_EDIT) }) public - @ResponseBody ResponseEntity> createDoi( @Parameter( description = API_PARAM_RECORD_UUID, required = true) @PathVariable String metadataUuid, + @Parameter( + description = "DOI server identifier", + required = true) + @PathVariable + Integer doiServerId, @Parameter(hidden = true) HttpServletRequest request, @Parameter(hidden = true) @@ -166,7 +184,8 @@ ResponseEntity> createDoi( AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, request); ServiceContext serviceContext = ApiUtils.createServiceContext(request); - Map doiInfo = doiManager.register(serviceContext, metadata); + DoiServer doiServer = retrieveDoiServer(doiServerId); + Map doiInfo = doiManager.register(serviceContext, doiServer, metadata); return new ResponseEntity<>(doiInfo, HttpStatus.CREATED); } @@ -174,22 +193,20 @@ ResponseEntity> createDoi( @io.swagger.v3.oas.annotations.Operation( summary = "Remove a DOI (this is not recommended, DOI are supposed to be persistent once created. 
This is mainly here for testing).") - @RequestMapping(value = "/{metadataUuid}/doi", - method = RequestMethod.DELETE, + @DeleteMapping(value = "/{metadataUuid}/doi/{doiServerId}", produces = { MediaType.APPLICATION_JSON_VALUE } ) @PreAuthorize("hasAuthority('Administrator')") @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "DOI unregistered."), + @ApiResponse(responseCode = "204", description = "DOI unregistered.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = "Metadata or DOI not found."), @ApiResponse(responseCode = "500", description = "Service unavailable."), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_ADMIN) }) public - @ResponseBody - ResponseEntity unregisterDoi( + ResponseEntity unregisterDoi( @Parameter( description = API_PARAM_RECORD_UUID, required = true) @@ -197,16 +214,34 @@ ResponseEntity unregisterDoi( String metadataUuid, @Parameter(hidden = true) HttpServletRequest request, + @Parameter( + description = "DOI server identifier", + required = true) + @PathVariable + Integer doiServerId, @Parameter(hidden = true) HttpSession session ) throws Exception { AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, request); ServiceContext serviceContext = ApiUtils.createServiceContext(request); - doiManager.unregisterDoi(metadata, serviceContext); + DoiServer doiServer = retrieveDoiServer(doiServerId); + doiManager.unregisterDoi(doiServer, metadata, serviceContext); return new ResponseEntity<>(HttpStatus.NO_CONTENT); } + private DoiServer retrieveDoiServer(Integer doiServerId) throws ResourceNotFoundException { + Optional doiServerOpt = doiServerRepository.findOneById(doiServerId); + if (doiServerOpt.isEmpty()) { + throw new ResourceNotFoundException(String.format( + MSG_DOISERVER_WITH_ID_NOT_FOUND, + doiServerId + )); + } + + return doiServerOpt.get(); + } + // TODO: At some point we may add support for DOI States 
management // https://support.datacite.org/docs/mds-api-guide#section-doi-states } diff --git a/services/src/main/java/org/fao/geonet/api/records/MetadataApi.java b/services/src/main/java/org/fao/geonet/api/records/MetadataApi.java index 64bb64c4200f..05bfe8599b6e 100644 --- a/services/src/main/java/org/fao/geonet/api/records/MetadataApi.java +++ b/services/src/main/java/org/fao/geonet/api/records/MetadataApi.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -51,6 +51,7 @@ import org.fao.geonet.kernel.SchemaManager; import org.fao.geonet.kernel.datamanager.IMetadataUtils; import org.fao.geonet.kernel.mef.MEFLib; +import org.fao.geonet.kernel.search.EsSearchManager; import org.fao.geonet.lib.Lib; import org.fao.geonet.repository.MetadataRepository; import org.fao.geonet.utils.Log; @@ -63,6 +64,7 @@ import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; +import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.*; @@ -108,6 +110,9 @@ public class MetadataApi { private ApplicationContext context; + @Autowired + EsSearchManager esSearchManager; + public static RelatedResponse getRelatedResources( String language, ServiceContext context, AbstractMetadata md, RelatedItemType[] type, int start, int rows) throws Exception { @@ -366,7 +371,7 @@ private Object getRecordAs( throws Exception { AbstractMetadata metadata; try { - metadata = ApiUtils.canViewRecord(metadataUuid, request); + metadata = ApiUtils.canViewRecord(metadataUuid, approved, request); } catch (ResourceNotFoundException e) { Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e); throw e; @@ -404,6 
+409,13 @@ private Object getRecordAs( } } + if (!metadataUtils.isMetadataAvailableInPortal(Integer.parseInt(mdId))) { + Log.debug(API.LOG_MODULE_NAME, String.format("Metadata with UUID '%s' is not available in the portal", metadataUuid)); + throw new ResourceNotFoundException(String.format("Metadata with UUID '%s' not found.", metadataUuid)) + .withMessageKey("exception.resourceNotFound.metadata") + .withDescriptionKey("exception.resourceNotFound.metadata.description", new String[]{ metadataUuid }); + } + Element xml = withInfo ? dataManager.getMetadata(context, mdId, forEditing, withValidationErrors, keepXlinkAttributes) : @@ -780,6 +792,47 @@ public FeatureResponse getFeatureCatalog( } + @io.swagger.v3.oas.annotations.Operation(summary = "Check if metadata field value is duplicated in another metadata", + description = "Verifies if a metadata field value is in use. Fields supported: title (title), " + + "alternate title (altTitle) or resource identifier (identifier)") + @PostMapping(value = "/{metadataUuid:.+}/checkDuplicatedFieldValue", + produces = {MediaType.APPLICATION_JSON_VALUE}) + @PreAuthorize("hasAuthority('Editor')") + @ApiResponses(value = { + @ApiResponse(responseCode = "200", description = "Return true if the field value is duplicated in another metadata or false in other case."), + @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_VIEW) + }) + public ResponseEntity checkDuplicatedFieldValue( + @Parameter(description = API_PARAM_RECORD_UUID, + required = true) + @PathVariable + String metadataUuid, + @Parameter(description = "Metadata field information to check", + required = true) + @RequestBody DuplicatedValueDto duplicatedValueDto, + HttpServletRequest request + ) throws Exception { + try { + ApiUtils.canViewRecord(metadataUuid, request); + } catch (SecurityException e) { + Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e); + throw new NotAllowedException(ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_VIEW); + } + + 
List validFields = Arrays.asList("title", "altTitle", "identifier"); + + if (!validFields.contains(duplicatedValueDto.getField())) { + throw new IllegalArgumentException(String.format("A valid field name is required:", String.join(",", validFields))); + } + + if (StringUtils.isEmpty(duplicatedValueDto.getValue())) { + throw new IllegalArgumentException("A non-empty value is required."); + } + + + boolean uuidsWithSameTitle = MetadataUtils.isMetadataFieldValueExistingInOtherRecords(duplicatedValueDto.getValue(), duplicatedValueDto.getField(), metadataUuid); + return ResponseEntity.ok(uuidsWithSameTitle); + } private boolean isIncludedAttributeTable(RelatedResponse.Fcat fcat) { return fcat != null @@ -789,4 +842,25 @@ private boolean isIncludedAttributeTable(RelatedResponse.Fcat fcat) { && fcat.getItem().get(0).getFeatureType().getAttributeTable() != null && fcat.getItem().get(0).getFeatureType().getAttributeTable().getElement() != null; } + + private static class DuplicatedValueDto { + private String field; + private String value; + + public String getField() { + return field; + } + + public void setField(String field) { + this.field = field; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + } } diff --git a/services/src/main/java/org/fao/geonet/api/records/MetadataInsertDeleteApi.java b/services/src/main/java/org/fao/geonet/api/records/MetadataInsertDeleteApi.java index 0dfb298d2c3c..ad25403313c6 100644 --- a/services/src/main/java/org/fao/geonet/api/records/MetadataInsertDeleteApi.java +++ b/services/src/main/java/org/fao/geonet/api/records/MetadataInsertDeleteApi.java @@ -27,6 +27,8 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import 
io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -206,7 +208,7 @@ public class MetadataInsertDeleteApi { + "from the index and then from the database.") @RequestMapping(value = "/{metadataUuid}", method = RequestMethod.DELETE) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Record deleted."), + @ApiResponse(responseCode = "204", description = "Record deleted.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "401", description = "This template is referenced"), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_EDIT) }) @@ -706,6 +708,9 @@ public SimpleMetadataProcessingReport insertOgcMapContextFile( @Parameter(description = "Publish record.", required = false) @RequestParam(required = false, defaultValue = "false") final boolean publishToAll, @Parameter(description = API_PARAM_RECORD_UUID_PROCESSING, required = false) @RequestParam(required = false, defaultValue = "NOTHING") final MEFLib.UuidAction uuidProcessing, @Parameter(description = API_PARAM_RECORD_GROUP, required = false) @RequestParam(required = false) final String group, + @Parameter(description = "Schema", required = false) + @RequestParam(required = false, defaultValue = "iso19139") + final String schema, HttpServletRequest request) throws Exception { if (StringUtils.isEmpty(xml) && StringUtils.isEmpty(url)) { throw new IllegalArgumentException("A context as XML or a remote URL MUST be provided."); @@ -716,12 +721,17 @@ public SimpleMetadataProcessingReport insertOgcMapContextFile( } ServiceContext context = ApiUtils.createServiceContext(request); - Path styleSheetWmc = dataDirectory.getXsltConversion("schema:iso19139:convert/fromOGCWMC-OR-OWSC"); + Path styleSheetWmc = dataDirectory.getXsltConversion( + String.format("schema:%s:convert/fromOGCWMC-OR-OWSC", + schema)); FilePathChecker.verify(filename); + String uuid = UUID.randomUUID().toString(); + 
// Convert the context in an ISO19139 records Map xslParams = new HashMap<>(); + xslParams.put("uuid", uuid); xslParams.put("viewer_url", viewerUrl); xslParams.put("map_url", url); xslParams.put("topic", topic); @@ -747,7 +757,6 @@ public SimpleMetadataProcessingReport insertOgcMapContextFile( // 4. Inserts the metadata (does basically the same as the metadata.insert.paste // service (see Insert.java) - String uuid = UUID.randomUUID().toString(); String date = new ISODate().toString(); SimpleMetadataProcessingReport report = new SimpleMetadataProcessingReport(); @@ -758,7 +767,7 @@ public SimpleMetadataProcessingReport insertOgcMapContextFile( md.add(transformedMd); // Import record - Importer.importRecord(uuid, uuidProcessing, md, "iso19139", 0, settingManager.getSiteId(), + Importer.importRecord(uuid, uuidProcessing, md, schema, 0, settingManager.getSiteId(), settingManager.getSiteName(), null, context, id, date, date, group, MetadataType.METADATA); final Store store = context.getBean("resourceStore", Store.class); @@ -777,7 +786,7 @@ public SimpleMetadataProcessingReport insertOgcMapContextFile( onlineSrcParams.put("name", filename); onlineSrcParams.put("desc", title); transformedMd = Xml.transform(transformedMd, - schemaManager.getSchemaDir("iso19139").resolve("process").resolve("onlinesrc-add.xsl"), + schemaManager.getSchemaDir(schema).resolve("process").resolve("onlinesrc-add.xsl"), onlineSrcParams); dataManager.updateMetadata(context, id.get(0), transformedMd, false, true, context.getLanguage(), null, true, IndexingMode.none); @@ -792,7 +801,7 @@ public SimpleMetadataProcessingReport insertOgcMapContextFile( onlineSrcParams.put("thumbnail_url", settingManager.getNodeURL() + String.format("api/records/%s/attachments/%s", uuid, overviewFilename)); transformedMd = Xml.transform(transformedMd, - schemaManager.getSchemaDir("iso19139").resolve("process").resolve("thumbnail-add.xsl"), + 
schemaManager.getSchemaDir(schema).resolve("process").resolve("thumbnail-add.xsl"), onlineSrcParams); dataManager.updateMetadata(context, id.get(0), transformedMd, false, true, context.getLanguage(), null, true, IndexingMode.none); diff --git a/services/src/main/java/org/fao/geonet/api/records/MetadataSharingApi.java b/services/src/main/java/org/fao/geonet/api/records/MetadataSharingApi.java index e7d9da4e0a94..8d81e77c7f32 100644 --- a/services/src/main/java/org/fao/geonet/api/records/MetadataSharingApi.java +++ b/services/src/main/java/org/fao/geonet/api/records/MetadataSharingApi.java @@ -25,6 +25,8 @@ import com.google.common.base.Optional; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -132,6 +134,9 @@ public class MetadataSharingApi implements ApplicationEventPublisherAware @Autowired IMetadataManager metadataManager; + @Autowired + IMetadataOperations metadataOperations; + @Autowired MetadataValidationRepository metadataValidationRepository; @@ -215,7 +220,7 @@ public List getPublicationOptions() { method = RequestMethod.PUT ) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Settings updated."), + @ApiResponse(responseCode = "204", description = "Settings updated.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_EDIT) }) @PreAuthorize("hasAuthority('Reviewer')") @@ -260,7 +265,7 @@ public void publish( method = RequestMethod.PUT ) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Settings updated."), + @ApiResponse(responseCode = "204", description = "Settings updated.", content = {@Content(schema = @Schema(hidden = true))}), 
@ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_EDIT) }) @PreAuthorize("hasAuthority('Reviewer')") @@ -314,7 +319,7 @@ public void unpublish( method = RequestMethod.PUT ) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Settings updated."), + @ApiResponse(responseCode = "204", description = "Settings updated.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_EDIT) }) @PreAuthorize("hasAuthority('Editor')") @@ -560,8 +565,24 @@ private void setOperations( // Check if the user profile can change the privileges for publication/un-publication of the reserved groups checkChangesAllowedToUserProfileForReservedGroups(context.getUserSession(), sharingBefore, privileges, !sharing.isClear()); + List excludeFromDelete = new ArrayList(); + + // Exclude deleting privileges for groups in which the user does not have the minimum profile for privileges + for (Group group: groupRepository.findByMinimumProfileForPrivilegesNotNull()) { + if (!canUserChangePrivilegesForGroup(context, group)) { + excludeFromDelete.add(group.getId()); + } + } + + // Exclude deleting privileges for reserved groups if the skipAllReservedGroup flag is set + if (skipAllReservedGroup) { + excludeFromDelete.add(ReservedGroup.all.getId()); + excludeFromDelete.add(ReservedGroup.intranet.getId()); + excludeFromDelete.add(ReservedGroup.guest.getId()); + } + if (sharing.isClear()) { - dataManager.deleteMetadataOper(context, String.valueOf(metadata.getId()), skipAllReservedGroup); + metadataOperations.deleteMetadataOper(String.valueOf(metadata.getId()), excludeFromDelete); } for (GroupOperations p : privileges) { @@ -740,6 +761,8 @@ public SharingResponse getRecordSharingSettings( } groupPrivilege.setUserProfile(userGroupProfile); + // Restrict changing privileges for groups with a minimum profile for setting privileges set + 
groupPrivilege.setRestricted(!canUserChangePrivilegesForGroup(context, g)); //--- get all operations that this group can do on given metadata Specification hasGroupIdAndMetadataId = @@ -775,7 +798,7 @@ public SharingResponse getRecordSharingSettings( method = RequestMethod.PUT ) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Record group updated."), + @ApiResponse(responseCode = "204", description = "Record group updated.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_EDIT) }) @PreAuthorize("hasAuthority('Editor')") @@ -818,9 +841,9 @@ public void setRecordGroup( metadataManager.save(metadata); dataManager.indexMetadata(String.valueOf(metadata.getId()), true); - new RecordGroupOwnerChangeEvent(metadata.getId(), - ApiUtils.getUserSession(request.getSession()).getUserIdAsInt(), - ObjectJSONUtils.convertObjectInJsonObject(oldGroup, RecordGroupOwnerChangeEvent.FIELD), + new RecordGroupOwnerChangeEvent(metadata.getId(), + ApiUtils.getUserSession(request.getSession()).getUserIdAsInt(), + ObjectJSONUtils.convertObjectInJsonObject(oldGroup, RecordGroupOwnerChangeEvent.FIELD), ObjectJSONUtils.convertObjectInJsonObject(group.get(), RecordGroupOwnerChangeEvent.FIELD)).publish(appContext); } @@ -1472,6 +1495,22 @@ private void checkUserProfileToUnpublishMetadata(UserSession userSession) { } } + /** + * Checks if the user can change the privileges for the group. + * + * @param context The {@link ServiceContext} object. + * @param group The {@link Group} to change the privileges for. + * @return True if the user can change the privileges for the group, false otherwise. 
+ */ + private boolean canUserChangePrivilegesForGroup(final ServiceContext context, Group group) { + Profile minimumProfileForPrivileges = group.getMinimumProfileForPrivileges(); + if (minimumProfileForPrivileges == null) { + return true; + } else { + return accessManager.isProfileOrMoreOnGroup(context, minimumProfileForPrivileges, group.getId()); + } + } + /** * Returns the list of privilege changes for the reserved groups. * diff --git a/services/src/main/java/org/fao/geonet/api/records/MetadataTagApi.java b/services/src/main/java/org/fao/geonet/api/records/MetadataTagApi.java index 0e326143429d..f13a0b66e3d0 100644 --- a/services/src/main/java/org/fao/geonet/api/records/MetadataTagApi.java +++ b/services/src/main/java/org/fao/geonet/api/records/MetadataTagApi.java @@ -25,6 +25,8 @@ import com.google.common.collect.Sets; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -212,7 +214,7 @@ private void indexTags(AbstractMetadata metadata) throws Exception { @DeleteMapping(value = "/{metadataUuid}/tags") @ResponseStatus(value = HttpStatus.NO_CONTENT) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Record tags removed."), + @ApiResponse(responseCode = "204", description = "Record tags removed.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_EDIT) }) @PreAuthorize("hasAuthority('Editor')") diff --git a/services/src/main/java/org/fao/geonet/api/records/MetadataUtils.java b/services/src/main/java/org/fao/geonet/api/records/MetadataUtils.java index 76bb7f594f3f..09a9794a4890 100644 --- a/services/src/main/java/org/fao/geonet/api/records/MetadataUtils.java +++ 
b/services/src/main/java/org/fao/geonet/api/records/MetadataUtils.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -37,6 +37,7 @@ import org.fao.geonet.ApplicationContextHolder; import org.fao.geonet.GeonetContext; import org.fao.geonet.NodeInfo; +import org.fao.geonet.api.API; import org.fao.geonet.api.es.EsHTTPProxy; import org.fao.geonet.api.records.model.related.AssociatedRecord; import org.fao.geonet.api.records.model.related.RelatedItemOrigin; @@ -306,7 +307,7 @@ public static Map> getAssociated( if (!e.fields().isEmpty()) { FIELDLIST_RELATED_SCRIPTED.keySet().forEach(f -> { JsonData dc = (JsonData) e.fields().get(f); - + if (dc != null) { if (associatedRecord.getProperties() == null) { associatedRecord.setProperties(new HashMap<>()); @@ -774,6 +775,48 @@ public static boolean retrieveMetadataValidationStatus(AbstractMetadata metadata return isInvalid; } + /** + * Check if other metadata records exist apart from the one with {code}metadataUuidToExclude{code} with the same + * {code}metadataValue{code} for the field {code}metadataField{code}. + * + * @param metadataValue Metadata value to check. + * @param metadataField Metadata field to check the value. + * @param metadataUuidToExclude Metadata identifier to exclude from the search. + * @return A list of metadata uuids that have the same value for the field provided. 
+ */ + public static boolean isMetadataFieldValueExistingInOtherRecords(String metadataValue, String metadataField, String metadataUuidToExclude) { + ApplicationContext applicationContext = ApplicationContextHolder.get(); + EsSearchManager searchMan = applicationContext.getBean(EsSearchManager.class); + + String esFieldName = "resourceTitleObject.\\\\*.keyword"; + if (metadataField.equals("altTitle")) { + esFieldName = "resourceAltTitleObject.\\\\*.keyword"; + } else if (metadataField.equals("identifier")) { + esFieldName = "resourceIdentifier.code"; + } + + boolean duplicatedMetadataValue = false; + String jsonQuery = " {" + + " \"query_string\": {" + + " \"query\": \"+" + esFieldName + ":\\\"%s\\\" -uuid:\\\"%s\\\"\"" + + " }" + + "}"; + + ObjectMapper objectMapper = new ObjectMapper(); + try { + JsonNode esJsonQuery = objectMapper.readTree(String.format(jsonQuery, metadataValue, metadataUuidToExclude)); + + final SearchResponse queryResult = searchMan.query( + esJsonQuery, + FIELDLIST_UUID, + 0, 5); + + duplicatedMetadataValue = !queryResult.hits().hits().isEmpty(); + } catch (Exception ex) { + Log.error(API.LOG_MODULE_NAME, ex.getMessage(), ex); + } + return duplicatedMetadataValue; + } /** * Checks if a result for a search query has results. 
diff --git a/services/src/main/java/org/fao/geonet/api/records/MetadataWorkflowApi.java b/services/src/main/java/org/fao/geonet/api/records/MetadataWorkflowApi.java index 054a72b0d86c..ce1fd6fa8880 100644 --- a/services/src/main/java/org/fao/geonet/api/records/MetadataWorkflowApi.java +++ b/services/src/main/java/org/fao/geonet/api/records/MetadataWorkflowApi.java @@ -24,6 +24,8 @@ package org.fao.geonet.api.records; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -81,6 +83,7 @@ import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import java.util.*; +import java.util.stream.Collectors; import static org.fao.geonet.api.ApiParams.*; import static org.fao.geonet.kernel.setting.Settings.SYSTEM_METADATAPRIVS_PUBLICATION_NOTIFICATIONLEVEL; @@ -158,11 +161,7 @@ public class MetadataWorkflowApi { RoleHierarchy roleHierarchy; // The restore function currently supports these states - static final Integer[] supportedRestoreStatuses = { - Integer.parseInt(StatusValue.Events.RECORDUPDATED), - Integer.parseInt(StatusValue.Events.RECORDPROCESSINGCHANGE), - Integer.parseInt(StatusValue.Events.RECORDDELETED), - Integer.parseInt(StatusValue.Events.RECORDRESTORED)}; + static final StatusValue.Events[] supportedRestoreStatuses = StatusValue.Events.getSupportedRestoreStatuses(); private enum State { BEFORE, AFTER @@ -177,9 +176,18 @@ public List getRecordStatusHistory( @Parameter(description = API_PARAM_RECORD_UUID, required = true) @PathVariable String metadataUuid, @RequestParam(required = false) boolean details, @Parameter(description = "Sort direction", required = false) @RequestParam(defaultValue = "DESC") Sort.Direction sortOrder, + @Parameter(description = "Use approved 
version or not", example = "true") + @RequestParam(required = false, defaultValue = "true") Boolean approved, HttpServletRequest request) throws Exception { ServiceContext context = ApiUtils.createServiceContext(request); - AbstractMetadata metadata = ApiUtils.canViewRecord(metadataUuid, request); + + AbstractMetadata metadata; + try { + metadata = ApiUtils.canViewRecord(metadataUuid, approved, request); + } catch (SecurityException e) { + Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e); + throw new NotAllowedException(ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_VIEW); + } String sortField = SortUtils.createPath(MetadataStatus_.changeDate); @@ -200,9 +208,17 @@ public List getRecordStatusHistoryByType( @Parameter(description = "Type", required = true) @PathVariable StatusValueType type, @RequestParam(required = false) boolean details, @Parameter(description = "Sort direction", required = false) @RequestParam(defaultValue = "DESC") Sort.Direction sortOrder, + @Parameter(description = "Use approved version or not", example = "true") + @RequestParam(required = false, defaultValue = "true") Boolean approved, HttpServletRequest request) throws Exception { ServiceContext context = ApiUtils.createServiceContext(request); - AbstractMetadata metadata = ApiUtils.canViewRecord(metadataUuid, request); + AbstractMetadata metadata; + try { + metadata = ApiUtils.canViewRecord(metadataUuid, approved, request); + } catch (SecurityException e) { + Log.debug(API.LOG_MODULE_NAME, e.getMessage(), e); + throw new NotAllowedException(ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_VIEW); + } String sortField = SortUtils.createPath(MetadataStatus_.changeDate); @@ -224,8 +240,10 @@ public List getRecordStatusHistoryByType( @ResponseBody public MetadataWorkflowStatusResponse getStatus( @Parameter(description = API_PARAM_RECORD_UUID, required = true) @PathVariable String metadataUuid, + @Parameter(description = "Use approved version or not", example = "true") + @RequestParam(required = false, 
defaultValue = "true") Boolean approved, HttpServletRequest request) throws Exception { - AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, request); + AbstractMetadata metadata = ApiUtils.canEditRecord(metadataUuid, approved, request); Locale locale = languageUtils.parseAcceptLanguage(request.getLocales()); ResourceBundle messages = ApiUtils.getMessagesResourceBundle(request.getLocales()); ServiceContext context = ApiUtils.createServiceContext(request, locale.getISO3Language()); @@ -576,7 +594,7 @@ public Map setStatus(@Parameter(description = API_PAR method = RequestMethod.PUT ) @PreAuthorize("hasAuthority('Editor')") - @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "Task closed."), + @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "Task closed.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = "Status not found."), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_EDIT)}) @ResponseStatus(HttpStatus.NO_CONTENT) @@ -604,7 +622,7 @@ public void closeTask(@Parameter(description = API_PARAM_RECORD_UUID, required = @io.swagger.v3.oas.annotations.Operation(summary = "Delete a record status", description = "") @RequestMapping(value = "/{metadataUuid}/status/{statusId:[0-9]+}.{userId:[0-9]+}.{changeDate}", method = RequestMethod.DELETE) @PreAuthorize("hasAuthority('Administrator')") - @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "Status removed."), + @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "Status removed.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = "Status not found."), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_ADMIN)}) @ResponseStatus(HttpStatus.NO_CONTENT) @@ -631,7 +649,7 @@ public void deleteRecordStatus( 
@io.swagger.v3.oas.annotations.Operation(summary = "Delete all record status", description = "") @RequestMapping(value = "/{metadataUuid}/status", method = RequestMethod.DELETE) @PreAuthorize("hasAuthority('Administrator')") - @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "Status removed."), + @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "Status removed.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = "Status not found."), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_ADMIN)}) @ResponseStatus(HttpStatus.NO_CONTENT) @@ -1047,15 +1065,15 @@ private List buildMetadataStatusResponses(List categories = ObjectJSONUtils.extractListOfFieldFromJSONString(s.getCurrentState(), "category", "name"); StringBuilder categoriesAsString = new StringBuilder("[ "); @@ -1064,7 +1082,7 @@ private String extractCurrentStatus(MetadataStatus s) { } categoriesAsString.append("]"); return categoriesAsString.toString(); - case StatusValue.Events.RECORDVALIDATIONTRIGGERED: + case RECORDVALIDATIONTRIGGERED: if (s.getCurrentState() == null) { return "UNKNOWN"; } else if (s.getCurrentState().equals("1")) { @@ -1078,11 +1096,11 @@ private String extractCurrentStatus(MetadataStatus s) { } private String extractPreviousStatus(MetadataStatus s) { - switch (Integer.toString(s.getStatusValue().getId())) { - case StatusValue.Events.ATTACHMENTDELETED: + switch (StatusValue.Events.fromId(s.getStatusValue().getId())) { + case ATTACHMENTDELETED: return s.getPreviousState(); - case StatusValue.Events.RECORDOWNERCHANGE: - case StatusValue.Events.RECORDGROUPOWNERCHANGE: + case RECORDOWNERCHANGE: + case RECORDGROUPOWNERCHANGE: return ObjectJSONUtils.extractFieldFromJSONString(s.getPreviousState(), "owner", "name"); default: return ""; @@ -1228,20 +1246,31 @@ private MetadataStatus getMetadataStatus(String uuidOrInternalId, int statusId, private String 
getValidatedStateText(MetadataStatus metadataStatus, State state, HttpServletRequest request, HttpSession httpSession) throws Exception { if (!StatusValueType.event.equals(metadataStatus.getStatusValue().getType()) - || !ArrayUtils.contains(supportedRestoreStatuses, metadataStatus.getStatusValue().getId())) { + || !ArrayUtils.contains(supportedRestoreStatuses, StatusValue.Events.fromId(metadataStatus.getStatusValue().getId()))) { throw new NotAllowedException("Unsupported action on status type '" + metadataStatus.getStatusValue().getType() + "' for metadata '" + metadataStatus.getUuid() + "'. Supports status type '" - + StatusValueType.event + "' with the status id '" + Arrays.toString(supportedRestoreStatuses) + "'."); + + StatusValueType.event + "' with the status id '" + Arrays.stream(supportedRestoreStatuses).map(StatusValue.Events::getId).collect(Collectors.toList()) + "'."); } String stateText; + MediaType stateFormat; if (state.equals(State.AFTER)) { stateText = metadataStatus.getCurrentState(); + stateFormat = StatusValue.Events.fromId(metadataStatus.getStatusValue().getId()).getCurrentStateFormat(); } else { stateText = metadataStatus.getPreviousState(); + stateFormat = StatusValue.Events.fromId(metadataStatus.getStatusValue().getId()).getPreviousStateFormat(); + } + + String xmlStateText; + if (stateFormat.equals(MediaType.APPLICATION_JSON)) { + // Any status with JSON format will have the XML stored in the field 'xmlRecord' + xmlStateText = ObjectJSONUtils.extractFieldFromJSONString(stateText, "xmlRecord"); + } else { + xmlStateText = stateText; } - if (stateText == null) { + if (xmlStateText == null) { throw new ResourceNotFoundException( String.format("No data exists for previous state on metadata record '%s', user '%d' at date '%s'", metadataStatus.getUuid(), metadataStatus.getUserId(), metadataStatus.getChangeDate())); @@ -1256,10 +1285,10 @@ private String getValidatedStateText(MetadataStatus metadataStatus, State state, } catch 
(ResourceNotFoundException e) { // If metadata record does not exists then it was deleted so // we will only allow the administrator, owner to view the contents - checkCanViewStatus(stateText, httpSession); + checkCanViewStatus(xmlStateText, httpSession); } - return stateText; + return xmlStateText; } /** diff --git a/services/src/main/java/org/fao/geonet/api/records/attachments/AttachmentsApi.java b/services/src/main/java/org/fao/geonet/api/records/attachments/AttachmentsApi.java index b1b27ecfa300..80fe2c53a6ae 100644 --- a/services/src/main/java/org/fao/geonet/api/records/attachments/AttachmentsApi.java +++ b/services/src/main/java/org/fao/geonet/api/records/attachments/AttachmentsApi.java @@ -1,6 +1,6 @@ /* * ============================================================================= - * === Copyright (C) 2001-2023 Food and Agriculture Organization of the + * === Copyright (C) 2001-2024 Food and Agriculture Organization of the * === United Nations (FAO-UN), United Nations World Food Programme (WFP) * === and United Nations Environment Programme (UNEP) * === @@ -68,6 +68,7 @@ import javax.servlet.http.HttpServletResponse; import java.awt.image.BufferedImage; import java.io.IOException; +import java.io.InputStream; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; @@ -171,14 +172,13 @@ public List getAllResources( @RequestParam(required = false, defaultValue = FilesystemStore.DEFAULT_FILTER) String filter, @Parameter(hidden = true) HttpServletRequest request) throws Exception { ServiceContext context = ApiUtils.createServiceContext(request); - List list = store.getResources(context, metadataUuid, sort, filter, approved); - return list; + return store.getResources(context, metadataUuid, sort, filter, approved); } @io.swagger.v3.oas.annotations.Operation(summary = "Delete all uploaded metadata resources") @RequestMapping(method = RequestMethod.DELETE, produces = MediaType.APPLICATION_JSON_VALUE) @PreAuthorize("hasAuthority('Editor')") - 
@ApiResponses(value = {@ApiResponse(responseCode = "204", description = "Attachment added."), + @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "Attachment added.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_EDIT)}) @ResponseStatus(value = HttpStatus.NO_CONTENT) public void delResources( @@ -200,7 +200,7 @@ public void delResources( @io.swagger.v3.oas.annotations.Operation(summary = "Create a new resource for a given metadata") @PreAuthorize("hasAuthority('Editor')") @RequestMapping(method = RequestMethod.POST, - consumes = MediaType.ALL_VALUE, + consumes = MediaType.MULTIPART_FORM_DATA_VALUE, produces = MediaType.APPLICATION_JSON_VALUE) @ResponseStatus(value = HttpStatus.CREATED) @ApiResponses(value = {@ApiResponse(responseCode = "201", description = "Attachment uploaded."), @@ -228,7 +228,8 @@ public MetadataResource putResource( @io.swagger.v3.oas.annotations.Operation(summary = "Create a new resource from a URL for a given metadata") @PreAuthorize("hasAuthority('Editor')") - @RequestMapping(method = RequestMethod.PUT) + @RequestMapping(method = RequestMethod.PUT, + produces = MediaType.APPLICATION_JSON_VALUE) @ResponseStatus(value = HttpStatus.CREATED) @ApiResponses(value = {@ApiResponse(responseCode = "201", description = "Attachment added."), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_EDIT)}) @@ -290,7 +291,11 @@ public void getResource( MIN_IMAGE_SIZE, MAX_IMAGE_SIZE, size)); } } else { - StreamUtils.copy(Files.newInputStream(file.getPath()), response.getOutputStream()); + response.setContentLengthLong(Files.size(file.getPath())); + + try (InputStream inputStream = Files.newInputStream(file.getPath())) { + StreamUtils.copy(inputStream, response.getOutputStream()); + } } } } @@ -316,7 +321,7 @@ public MetadataResource patchResource( @io.swagger.v3.oas.annotations.Operation(summary = "Delete a 
metadata resource") @PreAuthorize("hasAuthority('Editor')") @RequestMapping(value = "/{resourceId:.+}", method = RequestMethod.DELETE) - @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "Attachment visibility removed."), + @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "Attachment visibility removed.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_EDIT)}) @ResponseStatus(value = HttpStatus.NO_CONTENT) public void delResource( diff --git a/services/src/main/java/org/fao/geonet/api/records/editing/AjaxEditUtils.java b/services/src/main/java/org/fao/geonet/api/records/editing/AjaxEditUtils.java index cf06d7c9e342..a1af85ff1278 100644 --- a/services/src/main/java/org/fao/geonet/api/records/editing/AjaxEditUtils.java +++ b/services/src/main/java/org/fao/geonet/api/records/editing/AjaxEditUtils.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -36,6 +36,7 @@ import org.fao.geonet.kernel.SchemaManager; import org.fao.geonet.kernel.UpdateDatestamp; import org.fao.geonet.kernel.schema.MetadataSchema; +import org.fao.geonet.kernel.schema.MultilingualSchemaPlugin; import org.fao.geonet.kernel.schema.SchemaPlugin; import org.fao.geonet.lib.Lib; import org.fao.geonet.schema.iso19139.ISO19139Namespaces; @@ -340,8 +341,16 @@ public Element getMetadataEmbedded(ServiceContext srvContext, String id, boolean /** * For Ajax Editing : adds an element or an attribute to a metadata element ([add] link). + * + * @param session User session. + * @param id Metadata identifier. + * @param ref Reference of the parent element to add the element. 
+ * @param name Name of the element or attribute to add, with the namespace + * @param childName Empty for inserting element, `geonet:attribute` for attributes. + * @return + * @throws Exception */ - public synchronized Element addElementEmbedded(UserSession session, String id, String ref, String name, String childName) throws Exception { + public synchronized List addElementEmbedded(UserSession session, String id, String ref, String name, String childName) throws Exception { Lib.resource.checkEditPrivilege(context, id); String schema = dataManager.getMetadataSchema(id); //--- get metadata from session @@ -362,10 +371,13 @@ public synchronized Element addElementEmbedded(UserSession session, String id, S md.removeChild(Edit.RootChild.INFO, Edit.NAMESPACE); } - Element child = null; + List children = new ArrayList<>(); MetadataSchema mds = dataManager.getSchema(schema); + if (childName != null) { if (childName.equals("geonet:attribute")) { + Element child = null; + String defaultValue = ""; @SuppressWarnings("unchecked") List attributeDefs = el.getChildren(Edit.RootChild.ATTRIBUTE, Edit.NAMESPACE); @@ -385,9 +397,11 @@ public synchronized Element addElementEmbedded(UserSession session, String id, S el.setAttribute(new Attribute(attInfo.two(), defaultValue, attInfo.one())); child = el; + children.add(child); + } else { //--- normal element - child = editLib.addElement(mds, el, name); + Element child = editLib.addElement(mds, el, name); if (!childName.equals("")) { //--- or element String uChildName = editLib.getUnqualifiedName(childName); @@ -403,20 +417,35 @@ public synchronized Element addElementEmbedded(UserSession session, String id, S //--- add mandatory sub-tags editLib.fillElement(schema, child, orChild); } + + children.add(child); } } else { - child = editLib.addElement(mds, el, name); + List metadataLanguages = new ArrayList<>(); + if (mds.getSchemaPlugin() instanceof MultilingualSchemaPlugin) { + // Metadata languages are only required if the schema plugin 
requires to duplicate the added + // element for each language and the element to add is multilingual. + // See {@link org.fao.geonet.kernel.schema.MultilingualSchemaPlugin#duplicateElementsForMultilingual()} + metadataLanguages = ((MultilingualSchemaPlugin) mds.getSchemaPlugin()).getMetadataLanguages(md); + } + + children = editLib.addElements(mds, el, name, metadataLanguages); } + //--- now enumerate the new child (if not a simple attribute) - if (childName == null || !childName.equals("geonet:attribute")) { - int iRef = editLib.findMaximumRef(md); - editLib.enumerateTreeStartingAt(child, iRef + 1, Integer.parseInt(ref)); - editLib.expandTree(mds, child); + if ((childName == null || !childName.equals("geonet:attribute")) && (children != null)) { + for (Element c: children) { + int iRef = editLib.findMaximumRef(md); + editLib.enumerateTreeStartingAt(c, iRef + 1, Integer.parseInt(ref)); + editLib.expandTree(mds, c); + } } - if (info != null) { - //--- remove and re-attach the info element to the child - child.removeChild(Edit.RootChild.INFO, Edit.NAMESPACE); - child.addContent(info); + if ((info != null) && (children != null)) { + for (Element c: children) { + //--- remove and re-attach the info element to the child + c.removeChild(Edit.RootChild.INFO, Edit.NAMESPACE); + c.addContent((Element) info.clone()); + } } /* When adding an gmx:Anchor to an element, due to the following code gets also a gco:CharacterString in EditLib. 
@@ -436,9 +465,11 @@ public synchronized Element addElementEmbedded(UserSession session, String id, S Element child = isoPlugin.createBasicTypeCharacterString(); element.addContent(child); */ - if (childName != null && childName.equals("gmx:Anchor")) { - if (child.getChild("CharacterString", ISO19139Namespaces.GCO) != null) { - child.removeChild("CharacterString", ISO19139Namespaces.GCO); + if (childName != null && childName.equals("gmx:Anchor") && (children != null)) { + for (Element c: children) { + if (c.getChild("CharacterString", ISO19139Namespaces.GCO) != null) { + c.removeChild("CharacterString", ISO19139Namespaces.GCO); + } } } @@ -451,8 +482,7 @@ public synchronized Element addElementEmbedded(UserSession session, String id, S setMetadataIntoSession(session, (Element) md.clone(), id); // Return element added - return child; - + return children; } /** @@ -592,8 +622,6 @@ private Pair parseAttributeName(String attributeName, String public synchronized void swapElementEmbedded(UserSession session, String id, String ref, boolean down) throws Exception { Lib.resource.checkEditPrivilege(context, id); - dataManager.getMetadataSchema(id); - //--- get metadata from session Element md = getMetadataFromSession(session, id); @@ -605,29 +633,87 @@ public synchronized void swapElementEmbedded(UserSession session, String id, Str throw new IllegalStateException(EditLib.MSG_ELEMENT_NOT_FOUND_AT_REF + ref); //--- swap the elements - int iSwapIndex = -1; - @SuppressWarnings("unchecked") + // For DCAT records, swap all elements of a translation group + // ie. 
all elements sibling with same name and different language code List list = elSwap.getParentElement().getChildren(elSwap.getName(), elSwap.getNamespace()); - int i = -1; - for (Element element : list) { - i++; - if (element == elSwap) { - iSwapIndex = i; - break; - } + String schemaId = dataManager.getMetadataSchema(id); + MetadataSchema metadataSchema = dataManager.getSchema(schemaId); + SchemaPlugin schemaPlugin = metadataSchema.getSchemaPlugin(); + List languages = new ArrayList<>(); + if (schemaPlugin instanceof MultilingualSchemaPlugin) { + languages = ((MultilingualSchemaPlugin) schemaPlugin).getMetadataLanguages(md); } - if (iSwapIndex == -1) + // Get element index and first index of the group + int swapIndex = getElementSwapIndex(list, elSwap); + int groupSwapIndex = getGroupSwapIndex(list, swapIndex, languages); + + if (swapIndex == -1) throw new IllegalStateException("Index not found for element --> " + elSwap); - if (down) swapElements(elSwap, list.get(iSwapIndex + 1)); - else swapElements(elSwap, list.get(iSwapIndex - 1)); + // Swap the element or all the group of element up or down + if (groupSwapIndex == -1) { + swapElements(elSwap, list.get(down ? swapIndex + 1 : swapIndex - 1)); + } else { + for (int i = 0; i < languages.size(); i ++) { + int currentElementToSwapIndex = groupSwapIndex + i; + Element topElement = list.get(currentElementToSwapIndex); + Element bottomElement = list.get(currentElementToSwapIndex + languages.size()); + if (down) { + swapElements(topElement, bottomElement); + } else { + swapElements(bottomElement, topElement); + } + } + } //--- store the metadata in the session again setMetadataIntoSession(session, (Element) md.clone(), id); + } + + /** + * Swap index is the target element from the API call. + * Can be one element of a group of translations. + * Collect here the index of the first element of the group. + * + *

+ * The list of languages is ordered. eg. "nl", "fr", "en" + *

+     *             Rivier
+     *             Rivière
+     *             River
+     *             Kwaliteit
+     *             Qualité
+     *             Qualiy
+     * 
+ *

+ */ + private int getGroupSwapIndex(List list, int swapIndex, List languages) { + String elementLanguage = list.get(swapIndex).getAttributeValue("lang", Namespace.XML_NAMESPACE); + if (list.size() == 1 || languages.size() == 1 || elementLanguage == null) { + return -1; + } + + // Consider element are always in order (see update-fixed-info.xsl) + // River index = 3, group index = 3 - (en lang index) + int indexInGroup = languages.indexOf(elementLanguage); + if (indexInGroup == -1) { + return swapIndex; // Language not declared? + } + return swapIndex - indexInGroup; + } + private static int getElementSwapIndex(List list, Element elSwap) { + int i = -1; + for (Element element : list) { + i++; + if (element == elSwap) { + return i; + } + } + return i; } /** diff --git a/services/src/main/java/org/fao/geonet/api/records/editing/MetadataEditingApi.java b/services/src/main/java/org/fao/geonet/api/records/editing/MetadataEditingApi.java index 3899f9e19674..0551c66936d2 100644 --- a/services/src/main/java/org/fao/geonet/api/records/editing/MetadataEditingApi.java +++ b/services/src/main/java/org/fao/geonet/api/records/editing/MetadataEditingApi.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -24,6 +24,8 @@ package org.fao.geonet.api.records.editing; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -40,6 +42,7 @@ import org.fao.geonet.api.records.MetadataUtils; import org.fao.geonet.api.records.model.Direction; import org.fao.geonet.api.tools.i18n.LanguageUtils; +import 
org.fao.geonet.constants.Edit; import org.fao.geonet.constants.Geonet; import org.fao.geonet.constants.Params; import org.fao.geonet.domain.*; @@ -532,7 +535,7 @@ public void saveEdits( MediaType.ALL_VALUE}, produces = {MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasAuthority('Editor')") @ResponseStatus(HttpStatus.NO_CONTENT) - @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "Editing session cancelled."), + @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "Editing session cancelled.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_EDIT)}) @ResponseBody public void cancelEdits(@Parameter(description = API_PARAM_RECORD_UUID, required = true) @PathVariable String metadataUuid, @@ -556,7 +559,7 @@ public void cancelEdits(@Parameter(description = API_PARAM_RECORD_UUID, required public void addElement(@Parameter(description = API_PARAM_RECORD_UUID, required = true) @PathVariable String metadataUuid, @Parameter(description = "Reference of the insertion point.", required = true) @RequestParam String ref, @Parameter(description = "Name of the element to add (with prefix)", required = true) @RequestParam String name, - @Parameter(description = "Use geonet:attribute for attributes or child name.", required = false) @RequestParam(required = false) String child, + @Parameter(description = "Empty for inserting element, `geonet:attribute` for attributes.", required = false) @RequestParam(required = false) String child, @Parameter(description = "Should attributes be shown on the editor snippet?", required = false) @RequestParam(defaultValue = "false") boolean displayAttributes, @Parameter(hidden = true) @RequestParam Map allRequestParams, HttpServletRequest request, HttpServletResponse response, @@ -578,11 +581,26 @@ public void addElement(@Parameter(description = API_PARAM_RECORD_UUID, required // -- Note that the 
metadata-embedded.xsl stylesheet // -- only applies the templating to the added element, not to // -- the entire metadata so performance should not be a big issue - Element elResp = new AjaxEditUtils(context).addElementEmbedded(ApiUtils.getUserSession(httpSession), + List elResp = new AjaxEditUtils(context).addElementEmbedded(ApiUtils.getUserSession(httpSession), String.valueOf(metadata.getId()), ref, name, child); - EditLib.tagForDisplay(elResp); - Element md = (Element) findRoot(elResp).clone(); - EditLib.removeDisplayTag(elResp); + Element md = null; + + EditLib editLib = context.getBean(DataManager.class).getEditLib(); + + for(Element el: elResp) { + if (md == null) { + EditLib.tagForDisplay(el); + md = (Element) findRoot(el).clone(); + EditLib.removeDisplayTag(el); + } else { + Element el2 = editLib.findElement(md, el.getChild("element", Edit.NAMESPACE).getAttribute("ref").getValue()); + EditLib.tagForDisplay(el2); + md = (Element) md.clone(); + EditLib.removeDisplayTag(el2); + } + + } + buildEditorForm(allRequestParams.get("currTab"), httpSession, allRequestParams, request, md, metadata.getDataInfo().getSchemaId(), context, applicationContext, true, true, response); @@ -614,7 +632,7 @@ public void reorderElement(@Parameter(description = API_PARAM_RECORD_UUID, requi MediaType.ALL_VALUE}, produces = {MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasAuthority('Editor')") @ResponseStatus(HttpStatus.NO_CONTENT) - @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "Element removed."), + @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "Element removed.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_EDIT)}) @ResponseBody public void deleteElement( @@ -638,7 +656,7 @@ public void deleteElement( MediaType.ALL_VALUE}, produces = {MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasAuthority('Editor')") 
@ResponseStatus(HttpStatus.NO_CONTENT) - @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "Attribute removed."), + @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "Attribute removed.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_EDIT)}) @ResponseBody public void deleteAttribute( diff --git a/services/src/main/java/org/fao/geonet/api/records/extent/ExpandFactor.java b/services/src/main/java/org/fao/geonet/api/records/extent/ExpandFactor.java index 8e0ff0b4b795..15afe6c0ae18 100644 --- a/services/src/main/java/org/fao/geonet/api/records/extent/ExpandFactor.java +++ b/services/src/main/java/org/fao/geonet/api/records/extent/ExpandFactor.java @@ -38,6 +38,12 @@ public final class ExpandFactor implements Comparable { double proportion; double factor; + /** + * If true, the image will be square. If false, the image will be proportional to the geometry. + * Usually, use square image when factor is high (geometry will be small). 
+ */ + boolean squareImage = false; + public double getProportion() { return proportion; } @@ -54,6 +60,14 @@ public void setFactor(double factor) { this.factor = factor; } + public boolean isSquareImage() { + return squareImage; + } + + public void setSquareImage(boolean squareImage) { + this.squareImage = squareImage; + } + @Override public int compareTo(@Nonnull ExpandFactor o) { return Double.compare(proportion, o.proportion); diff --git a/services/src/main/java/org/fao/geonet/api/records/extent/MapRenderer.java b/services/src/main/java/org/fao/geonet/api/records/extent/MapRenderer.java index 63fd8125da9d..64387e568621 100644 --- a/services/src/main/java/org/fao/geonet/api/records/extent/MapRenderer.java +++ b/services/src/main/java/org/fao/geonet/api/records/extent/MapRenderer.java @@ -37,9 +37,7 @@ import org.geotools.referencing.CRS; import org.locationtech.jts.awt.IdentityPointTransformation; import org.locationtech.jts.awt.PointShapeFactory; -import org.locationtech.jts.awt.PointTransformation; import org.locationtech.jts.awt.ShapeWriter; -import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.geotools.api.metadata.extent.Extent; @@ -51,7 +49,6 @@ import java.awt.*; import java.awt.geom.AffineTransform; -import java.awt.geom.Point2D; import java.awt.image.BufferedImage; import java.util.Collection; import java.util.Map; @@ -181,8 +178,14 @@ public BufferedImage render(String id, String srs, Integer width, Integer height Envelope bboxOfImage = new Envelope(isPoint ? 
geom.buffer(pointBufferSize).getEnvelopeInternal() : geom.getEnvelopeInternal()); - double expandFactor = calculateExpandFactor(regionGetMapExpandFactors, bboxOfImage, srs); - bboxOfImage.expandBy(bboxOfImage.getWidth() * expandFactor, bboxOfImage.getHeight() * expandFactor); + ExpandFactor factor = calculateExpandFactor(regionGetMapExpandFactors, bboxOfImage, srs); + double expandFactor = factor.factor; + if (factor.isSquareImage()) { + height = width; + bboxOfImage.expandBy(bboxOfImage.getWidth() * expandFactor, bboxOfImage.getWidth() * expandFactor); + } else { + bboxOfImage.expandBy(bboxOfImage.getWidth() * expandFactor, bboxOfImage.getHeight() * expandFactor); + } Dimension imageDimensions = calculateImageSize(bboxOfImage, width, height); Exception error = null; @@ -272,8 +275,8 @@ private Color getColor(String color, Color defaultColor) { return defaultColor; } - private double calculateExpandFactor(SortedSet regionGetMapExpandFactors, Envelope bboxOfImage, - String srs) throws Exception { + private ExpandFactor calculateExpandFactor(SortedSet regionGetMapExpandFactors, Envelope bboxOfImage, + String srs) throws Exception { CoordinateReferenceSystem crs = Region.decodeCRS(srs); ReferencedEnvelope env = new ReferencedEnvelope(bboxOfImage, crs); env = env.transform(Region.WGS84, true); @@ -284,10 +287,10 @@ private double calculateExpandFactor(SortedSet regionGetMapExpandF for (ExpandFactor factor : regionGetMapExpandFactors) { if (scale < factor.proportion) { - return factor.factor; + return factor; } } - return regionGetMapExpandFactors.last().factor; + return regionGetMapExpandFactors.last(); } private Dimension calculateImageSize(Envelope bboxOfImage, Integer width, Integer height) { diff --git a/services/src/main/java/org/fao/geonet/api/records/formatters/CacheApi.java b/services/src/main/java/org/fao/geonet/api/records/formatters/CacheApi.java index d90ea166d703..eb9b39200b3c 100644 --- 
a/services/src/main/java/org/fao/geonet/api/records/formatters/CacheApi.java +++ b/services/src/main/java/org/fao/geonet/api/records/formatters/CacheApi.java @@ -23,6 +23,8 @@ package org.fao.geonet.api.records.formatters; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import static org.fao.geonet.api.ApiParams.API_CLASS_FORMATTERS_OPS; import static org.fao.geonet.api.ApiParams.API_CLASS_FORMATTERS_TAG; @@ -67,7 +69,7 @@ public class CacheApi { @ResponseStatus(HttpStatus.NO_CONTENT) @PreAuthorize("hasAuthority('Administrator')") @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Cache cleared."), + @ApiResponse(responseCode = "204", description = "Cache cleared.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = "Operation not allowed. Only Administrator can access it.") }) public void clearFormatterCache() throws Exception { diff --git a/services/src/main/java/org/fao/geonet/api/records/formatters/FormatType.java b/services/src/main/java/org/fao/geonet/api/records/formatters/FormatType.java index 81796022bf13..c2f1e5a1f36e 100644 --- a/services/src/main/java/org/fao/geonet/api/records/formatters/FormatType.java +++ b/services/src/main/java/org/fao/geonet/api/records/formatters/FormatType.java @@ -62,4 +62,21 @@ public static FormatType find(String acceptHeader) { } return null; } + + public static FormatType findByFormatterKey(String formatterId) { + if (formatterId == null) { + return null; + } + + if (formatterId.contains("dcat")) { + return FormatType.xml; + } + for (FormatType c : FormatType.values()) { + if (formatterId.contains(c.name())) { + return c; + } + } + + return null; + } } diff --git a/services/src/main/java/org/fao/geonet/api/records/formatters/FormatterApi.java b/services/src/main/java/org/fao/geonet/api/records/formatters/FormatterApi.java index 1a9830c3ef6e..96092a3a1189 100644 --- 
a/services/src/main/java/org/fao/geonet/api/records/formatters/FormatterApi.java +++ b/services/src/main/java/org/fao/geonet/api/records/formatters/FormatterApi.java @@ -232,6 +232,9 @@ public void getRecordFormattedBy( if (MediaType.ALL_VALUE.equals(acceptHeader)) { acceptHeader = MediaType.TEXT_HTML_VALUE; } + if (formatType == null) { + formatType = FormatType.findByFormatterKey(formatterId); + } if (formatType == null) { formatType = FormatType.find(acceptHeader); } @@ -252,7 +255,7 @@ public void getRecordFormattedBy( language = isoLanguagesMapper.iso639_2T_to_iso639_2B(locale.getISO3Language()); } - AbstractMetadata metadata = ApiUtils.canViewRecord(metadataUuid, servletRequest); + AbstractMetadata metadata = ApiUtils.canViewRecord(metadataUuid, approved, servletRequest); if (approved) { metadata = ApplicationContextHolder.get().getBean(MetadataRepository.class).findOneByUuid(metadataUuid); diff --git a/services/src/main/java/org/fao/geonet/api/records/model/GroupPrivilege.java b/services/src/main/java/org/fao/geonet/api/records/model/GroupPrivilege.java index ae5de2fa1d53..289ad3c58c61 100644 --- a/services/src/main/java/org/fao/geonet/api/records/model/GroupPrivilege.java +++ b/services/src/main/java/org/fao/geonet/api/records/model/GroupPrivilege.java @@ -33,6 +33,7 @@ public class GroupPrivilege extends GroupOperations { private List userProfiles; private boolean userGroup; private boolean reserved; + private boolean restricted; public List getUserProfiles() { return userProfiles; @@ -57,4 +58,8 @@ public boolean isReserved() { public void setReserved(boolean reserved) { this.reserved = reserved; } + + public boolean isRestricted() { return restricted; } + + public void setRestricted(boolean restricted) { this.restricted = restricted; } } diff --git a/services/src/main/java/org/fao/geonet/api/regions/RegionsApi.java b/services/src/main/java/org/fao/geonet/api/regions/RegionsApi.java index 69197e66e583..4953ee3210b6 100644 --- 
a/services/src/main/java/org/fao/geonet/api/regions/RegionsApi.java +++ b/services/src/main/java/org/fao/geonet/api/regions/RegionsApi.java @@ -203,7 +203,19 @@ public List getRegionTypes( @io.swagger.v3.oas.annotations.Operation( summary = "Get geometry as image", - description = "A rendering of the geometry as a png.") + description = "A rendering of the geometry as a `png`.\n " + + "\n " + + "The coverage of the image is computed from the geometry envelope and size using scale factor configuration " + + "(See `regionGetMapExpandFactors` bean in `config-spring-geonetwork.xml`) " + + "to give enough context on where the geometry is. The smaller the geometry, the bigger the expand factor.\n" + + "\n " + + "If needed, when the factor is high, square image mode can be enabled (instead of proportional geometry size):\n" + + "\n " + + "```xml\n" + + " \n" + + " \n" + + "```\n") @RequestMapping( value = "/geom.png", produces = { diff --git a/services/src/main/java/org/fao/geonet/api/registries/vocabularies/KeywordsApi.java b/services/src/main/java/org/fao/geonet/api/registries/vocabularies/KeywordsApi.java index 2ba1e976bc1f..f94043fcadd3 100644 --- a/services/src/main/java/org/fao/geonet/api/registries/vocabularies/KeywordsApi.java +++ b/services/src/main/java/org/fao/geonet/api/registries/vocabularies/KeywordsApi.java @@ -46,7 +46,6 @@ import org.fao.geonet.api.exception.ResourceNotFoundException; import org.fao.geonet.api.exception.WebApplicationException; import org.fao.geonet.api.registries.model.ThesaurusInfo; -import org.fao.geonet.api.tools.i18n.LanguageUtils; import org.fao.geonet.constants.Geonet; import org.fao.geonet.domain.ISODate; import org.fao.geonet.exceptions.BadParameterEx; @@ -110,12 +109,6 @@ description = ApiParams.API_CLASS_REGISTRIES_OPS) public class KeywordsApi { - /** - * The language utils. 
- */ - @Autowired - LanguageUtils languageUtils; - @Autowired SettingManager settingManager; @@ -161,8 +154,7 @@ public class KeywordsApi { * @throws Exception the exception */ @io.swagger.v3.oas.annotations.Operation( - summary = "Search keywords", - description = "") + summary = "Search keywords") @RequestMapping( path = "/search", method = RequestMethod.GET, @@ -175,16 +167,14 @@ public class KeywordsApi { @ResponseBody public Object searchKeywords( @Parameter( - description = "Query", - required = false + description = "Query" ) @RequestParam( required = false ) String q, @Parameter( - description = "Query in that language", - required = false + description = "Query in that language" ) @RequestParam( value = "lang", @@ -192,8 +182,7 @@ public Object searchKeywords( ) String lang, @Parameter( - description = "Number of rows", - required = false + description = "Number of rows" ) @RequestParam( required = false, @@ -201,8 +190,7 @@ public Object searchKeywords( ) int rows, @Parameter( - description = "Start from", - required = false + description = "Start from" ) @RequestParam( defaultValue = "0", @@ -210,8 +198,7 @@ public Object searchKeywords( ) int start, @Parameter( - description = "Return keyword information in one or more languages", - required = false + description = "Return keyword information in one or more languages" ) @RequestParam( value = XmlParams.pLang, @@ -219,8 +206,7 @@ public Object searchKeywords( ) List targetLangs, @Parameter( - description = "Thesaurus identifier", - required = false + description = "Thesaurus identifier" ) @RequestParam( required = false @@ -235,24 +221,21 @@ public Object searchKeywords( // ) // String thesauriDomainName, @Parameter( - description = "Type of search", - required = false + description = "Type of search" ) @RequestParam( defaultValue = "CONTAINS" ) KeywordSearchType type, @Parameter( - description = "URI query", - required = false + description = "URI query" ) @RequestParam( required = false ) String uri, 
@Parameter( - description = "Sort by", - required = false + description = "Sort by" ) @RequestParam( required = false, @@ -354,23 +337,19 @@ public Object getKeywordById( @RequestParam(name = "thesaurus") String sThesaurusName, @Parameter( - description = "Languages.", - required = false) + description = "Languages.") @RequestParam(name = "lang", required = false) String[] langs, @Parameter( - description = "Only print the keyword, no thesaurus information.", - required = false) + description = "Only print the keyword, no thesaurus information.") @RequestParam(required = false, defaultValue = "false") boolean keywordOnly, @Parameter( - description = "XSL template to use (ISO19139 keyword by default, see convert.xsl).", - required = false) + description = "XSL template to use (ISO19139 keyword by default, see convert.xsl).") @RequestParam(required = false) String transformation, @Parameter( - description = "langMap, that converts the values in the 'lang' parameter to how they will be actually represented in the record. {'fre':'fra'} or {'fre':'fr'}. Missing/empty means to convert to iso 2 letter.", - required = false) + description = "langMap, that converts the values in the 'lang' parameter to how they will be actually represented in the record. {'fre':'fra'} or {'fre':'fr'}. 
Missing/empty means to convert to iso 2 letter.") @RequestParam (name = "langMap", required = false) String langMapJson, @Parameter(hidden = true) @@ -428,23 +407,19 @@ public Object getKeywordByIds( @RequestParam(name = "thesaurus") String sThesaurusName, @Parameter( - description = "Languages.", - required = false) + description = "Languages.") @RequestParam(name = "lang", required = false) String[] langs, @Parameter( - description = "Only print the keyword, no thesaurus information.", - required = false) + description = "Only print the keyword, no thesaurus information.") @RequestParam(required = false, defaultValue = "false") boolean keywordOnly, @Parameter( - description = "XSL template to use (ISO19139 keyword by default, see convert.xsl).", - required = false) + description = "XSL template to use (ISO19139 keyword by default, see convert.xsl).") @RequestParam(required = false) String transformation, @Parameter( - description = "langMap, that converts the values in the 'lang' parameter to how they will be actually represented in the record. {'fre':'fra'} or {'fre':'fr'}. Missing/empty means to convert to iso 2 letter.", - required = false) + description = "langMap, that converts the values in the 'lang' parameter to how they will be actually represented in the record. {'fre':'fra'} or {'fre':'fr'}. 
Missing/empty means to convert to iso 2 letter.") @RequestParam (name = "langMap", required = false) String langMapJson, @Parameter(hidden = true) @@ -489,7 +464,7 @@ private Object getKeyword( if (thesaurus == null) { String finalSThesaurusName = sThesaurusName; Optional thesaurusEntry = thesaurusManager.getThesauriMap().values().stream().filter(t -> t.getKey().endsWith(finalSThesaurusName)).findFirst(); - if (!thesaurusEntry.isPresent()) { + if (thesaurusEntry.isEmpty()) { throw new IllegalArgumentException(String.format( "Thesaurus '%s' not found.", sThesaurusName)); } else { @@ -501,7 +476,12 @@ private Object getKeyword( if (langs == null) { langs = context.getLanguage().split(","); } - String[] iso3langCodes = Arrays.copyOf(langs, langs.length); + List langList = new ArrayList<>(List.of(langs)); + if (!langList.contains("eng")) { + langList.add("eng"); + } + + String[] iso3langCodes = langList.toArray(String[]::new); for (int i = 0; i < langs.length; i++) { if (StringUtils.isNotEmpty(langs[i])) { langs[i] = mapper.iso639_2_to_iso639_1(langs[i], langs[i].substring(0,2)); //default: fra -> fr @@ -511,7 +491,7 @@ private Object getKeyword( Element descKeys; Map> jsonResponse = new HashMap<>(); - uri = URLDecoder.decode(uri, "UTF-8"); + uri = URLDecoder.decode(uri, StandardCharsets.UTF_8); if (uri == null) { descKeys = new Element("descKeys"); @@ -566,8 +546,8 @@ private Object getKeyword( JSONObject obj = JSONObject.fromObject(langMapJson); langConversion = new Element("languageConversions"); for(Object entry : obj.entrySet()) { - String key = ((Map.Entry) entry).getKey().toString(); - String value = ((Map.Entry) entry).getValue().toString(); + String key = ((Map.Entry) entry).getKey().toString(); + String value = ((Map.Entry) entry).getValue().toString(); Element conv = new Element("conversion"); conv.setAttribute("from", key); conv.setAttribute("to", value.replace("#","")); @@ -593,12 +573,7 @@ private Object getKeyword( Element requestParams = new 
Element("request"); for (Map.Entry e : allRequestParams.entrySet()) { - if (e.getKey().equals("lang")) { - requestParams.addContent(new Element(e.getKey()) - .setText(String.join(",", iso3langCodes))); - } else { - requestParams.addContent(new Element(e.getKey()).setText(e.getValue())); - } + requestParams.addContent(new Element(e.getKey()).setText(e.getValue())); } if (langConversion != null) { requestParams.addContent(langConversion); @@ -618,7 +593,6 @@ private Object getKeyword( * * @param thesaurus the thesaurus * @param response the response - * @return the thesaurus * @throws Exception the exception */ @io.swagger.v3.oas.annotations.Operation( @@ -672,7 +646,6 @@ public void getThesaurus( * Delete thesaurus. * * @param thesaurus the thesaurus - * @return the element * @throws Exception the exception */ @io.swagger.v3.oas.annotations.Operation( @@ -703,7 +676,7 @@ public void deleteThesaurus( throw new ResourceNotFoundException(String.format( "Thesaurus with identifier '%s' not found in the catalogue. 
Should be one of: %s", thesaurus, - thesaurusMan.getThesauriMap().keySet().toString() + thesaurusMan.getThesauriMap().keySet() )); } Path item = thesaurusObject.getFile(); @@ -767,9 +740,9 @@ public String uploadThesaurus( boolean fileUpload = file != null && !file.isEmpty(); // Upload RDF file - Path rdfFile = null; - String fname = null; - File tempDir = null; + Path rdfFile; + String fname; + File tempDir; if (fileUpload) { @@ -795,7 +768,7 @@ public String uploadThesaurus( } long fsize; - if (rdfFile != null && Files.exists(rdfFile)) { + if (Files.exists(rdfFile)) { fsize = Files.size(rdfFile); } else { throw new MissingServletRequestParameterException("Thesaurus file doesn't exist", "file"); @@ -1012,7 +985,7 @@ public Element convertCsvToSkos(Path csvFile, CSVParser csvParser = new CSVParser(reader, CSVFormat.DEFAULT .withFirstRecordAsHeader() .withIgnoreHeaderCase() - .withTrim()); + .withTrim()) ) { Map allConcepts = new LinkedHashMap<>(); @@ -1075,7 +1048,7 @@ public Element convertCsvToSkos(Path csvFile, } Element scheme = buildConceptScheme(csvFile, thesaurusTitle, thesaurusNamespaceUrl); - if(broaderLinks.size() > 0 && !topConcepts.isEmpty()) { + if(!broaderLinks.isEmpty() && !topConcepts.isEmpty()) { topConcepts.forEach(t -> { Element topConcept = new Element("hasTopConcept", SKOS_NAMESPACE); topConcept.setAttribute("resource", t, RDF_NAMESPACE); @@ -1210,7 +1183,7 @@ public String uploadThesaurusFromUrl( boolean registryUpload = !StringUtils.isEmpty(registryUrl); // Upload RDF file - Path rdfFile = null; + Path rdfFile; String fname = null; // Specific upload steps @@ -1307,9 +1280,9 @@ public String uploadThesaurusFromUrl( /** * Update the information related to a local thesaurus . * - * @param thesaurusInfo - * @return - * @throws Exception + * @param thesaurus name of the thesaurus to update. + * @param thesaurusInfo thesaurus information to update. 
+ * @throws Exception the exception */ @io.swagger.v3.oas.annotations.Operation( summary = "Updates the information of a local thesaurus", @@ -1368,11 +1341,11 @@ public void updateThesaurus( * Download for each language the codelist from the registry. Combine * them into one XML document which is then XSLT processed for SKOS conversion. * - * @param registryUrl the registry url - * @param registryType - * @param itemName the item name - * @param lang the selected languages - * @param context the context + * @param registryUrl the registry url + * @param registryType type of registry + * @param itemName the item name + * @param lang the selected languages + * @param context the context * @return the path * @throws Exception the exception */ @@ -1448,7 +1421,6 @@ private Path getXMLContentFromUrl(String url, ServiceContext context) throws URI * @param fname the fname * @param type the type * @param dir the dir - * @return Element thesaurus uploaded * @throws Exception the exception */ private void uploadThesaurus(Path rdfFile, String style, diff --git a/services/src/main/java/org/fao/geonet/api/reports/ReportUsers.java b/services/src/main/java/org/fao/geonet/api/reports/ReportUsers.java index ba3d83bb70c5..236237c24d03 100644 --- a/services/src/main/java/org/fao/geonet/api/reports/ReportUsers.java +++ b/services/src/main/java/org/fao/geonet/api/reports/ReportUsers.java @@ -20,12 +20,24 @@ * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, * Rome - Italy. 
email: geonetwork@osgeo.org */ - package org.fao.geonet.api.reports; +import java.io.PrintWriter; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Locale; +import java.util.Optional; +import java.util.ResourceBundle; import jeeves.server.context.ServiceContext; import org.apache.commons.csv.CSVPrinter; -import org.fao.geonet.domain.*; +import org.fao.geonet.auditable.UserAuditableService; +import org.fao.geonet.domain.Group; +import org.fao.geonet.domain.User; +import org.fao.geonet.domain.UserGroup; +import org.fao.geonet.domain.User_; +import org.fao.geonet.kernel.setting.SettingManager; +import org.fao.geonet.kernel.setting.Settings; import org.fao.geonet.repository.SortUtils; import org.fao.geonet.repository.UserGroupRepository; import org.fao.geonet.repository.UserRepository; @@ -34,17 +46,10 @@ import org.springframework.data.domain.Sort; import org.springframework.util.StringUtils; -import java.io.PrintWriter; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Optional; - import static org.fao.geonet.api.reports.ReportUtils.CSV_FORMAT; /** * Creates a users report including last login date. 
- * */ public class ReportUsers implements IReport { /** @@ -72,8 +77,15 @@ public ReportUsers(final ReportFilter filter) { */ public void create(final ServiceContext context, final PrintWriter writer) throws Exception { + SettingManager settingManager = context.getBean(SettingManager.class); + UserAuditableService userAuditableService = context.getBean(UserAuditableService.class); + boolean isUserHistoryEnabled = settingManager.getValueAsBool(Settings.SYSTEM_AUDITABLE_ENABLE, false); + String lang = context.getLanguage(); + ResourceBundle messages = ResourceBundle.getBundle("org.fao.geonet.api.Messages", + new Locale(lang)); + // Initialize CSVPrinter object - try(CSVPrinter csvFilePrinter = new CSVPrinter(writer, CSV_FORMAT)) { + try (CSVPrinter csvFilePrinter = new CSVPrinter(writer, CSV_FORMAT)) { // Retrieve users final UserRepository userRepository = context.getBean(UserRepository.class); @@ -91,7 +103,7 @@ public void create(final ServiceContext context, csvFilePrinter.println(); String[] entries = ("Username#Surname#Name#" - + "Email#User groups/Profile#Last login date").split("#"); + + "Email#User groups/Profile#Last login date" + (isUserHistoryEnabled ? "#Change history" : "")).split("#"); csvFilePrinter.printRecord(Arrays.asList(entries)); for (User user : records) { @@ -112,7 +124,7 @@ public void create(final ServiceContext context, } // Build the record element with the information for the report - List metadataRecord = new ArrayList<>(); + List metadataRecord = new ArrayList<>(isUserHistoryEnabled ? 
7 : 6); metadataRecord.add(username); metadataRecord.add(surname); metadataRecord.add(name); @@ -120,6 +132,13 @@ public void create(final ServiceContext context, metadataRecord.add(userGroupsInfo); metadataRecord.add(lastLoginDate); + if (isUserHistoryEnabled) { + String userChanges = userAuditableService.getEntityHistoryAsString(user.getId(), messages); + if (StringUtils.hasLength(userChanges)) { + metadataRecord.add(userChanges); + } + } + csvFilePrinter.printRecord(metadataRecord); } } finally { @@ -129,8 +148,8 @@ public void create(final ServiceContext context, /** * Creates a string with the list of groups / profiles of a user: - * - * group1/profileGroup1-group2/profileGroup2 ... + *

+ * group1/profileGroup1-group2/profileGroup2 ... * * @param context * @param user @@ -159,7 +178,7 @@ private String retrieveGroupsListInfo(final ServiceContext context, User user) { if (i++ > 0) { userGroupsList.append("-"); } - userGroupsList.append(groupName + "/" + groupProfile); + userGroupsList.append(groupName).append("/").append(groupProfile); } return userGroupsList.toString(); diff --git a/services/src/main/java/org/fao/geonet/api/selections/UserSelectionsApi.java b/services/src/main/java/org/fao/geonet/api/selections/UserSelectionsApi.java index f4d501f7d90d..595ec8516983 100644 --- a/services/src/main/java/org/fao/geonet/api/selections/UserSelectionsApi.java +++ b/services/src/main/java/org/fao/geonet/api/selections/UserSelectionsApi.java @@ -23,6 +23,8 @@ package org.fao.geonet.api.selections; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -160,7 +162,7 @@ public ResponseEntity createPersistentSelectionType( method = RequestMethod.PUT) @ResponseStatus(HttpStatus.NO_CONTENT) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Selection updated."), + @ApiResponse(responseCode = "204", description = "Selection updated.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = "Selection not found."), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_USER_ADMIN) }) @@ -209,7 +211,7 @@ public ResponseEntity updateUserSelection( method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Selection removed."), + @ApiResponse(responseCode = "204", description = "Selection removed.", content = 
{@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = "Selection not found."), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_USER_ADMIN) }) @@ -348,7 +350,7 @@ ResponseEntity addToUserSelection( public @ResponseBody @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Items removed from a set."), + @ApiResponse(responseCode = "204", description = "Items removed from a set.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = "Selection or user not found."), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_USER_ADMIN) }) diff --git a/services/src/main/java/org/fao/geonet/api/site/LogosApi.java b/services/src/main/java/org/fao/geonet/api/site/LogosApi.java index b448bfe45305..a0dc037a1a3a 100644 --- a/services/src/main/java/org/fao/geonet/api/site/LogosApi.java +++ b/services/src/main/java/org/fao/geonet/api/site/LogosApi.java @@ -24,6 +24,8 @@ package org.fao.geonet.api.site; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -240,7 +242,7 @@ public void getLogo( @ResponseStatus(value = HttpStatus.NO_CONTENT) @PreAuthorize("hasAuthority('UserAdmin')") @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Logo removed."), + @ApiResponse(responseCode = "204", description = "Logo removed.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = ApiParams.API_RESPONSE_RESOURCE_NOT_FOUND), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_USER_ADMIN) }) diff --git 
a/services/src/main/java/org/fao/geonet/api/site/SiteApi.java b/services/src/main/java/org/fao/geonet/api/site/SiteApi.java index a2bd724fa59b..2345806d9346 100644 --- a/services/src/main/java/org/fao/geonet/api/site/SiteApi.java +++ b/services/src/main/java/org/fao/geonet/api/site/SiteApi.java @@ -25,7 +25,12 @@ import co.elastic.clients.elasticsearch.core.CountRequest; import co.elastic.clients.elasticsearch.core.CountResponse; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.ExampleObject; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -47,7 +52,6 @@ import org.fao.geonet.api.tools.i18n.LanguageUtils; import org.fao.geonet.api.users.recaptcha.RecaptchaChecker; import org.fao.geonet.constants.Geonet; -import org.fao.geonet.doi.client.DoiManager; import org.fao.geonet.domain.*; import org.fao.geonet.exceptions.OperationAbortedEx; import org.fao.geonet.index.Status; @@ -87,10 +91,7 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import java.awt.image.BufferedImage; -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; +import java.io.*; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; @@ -169,8 +170,6 @@ public static void reloadServices(ServiceContext context) throws Exception { context.error(e); throw new OperationAbortedEx("Parameters saved but cannot set proxy information: " + e.getMessage()); } - DoiManager doiManager = gc.getBean(DoiManager.class); - doiManager.loadConfig(); HarvestManager harvestManager = context.getBean(HarvestManager.class); 
harvestManager.rescheduleActiveHarvesters(); @@ -388,11 +387,45 @@ public List getSettingsDetails( @io.swagger.v3.oas.annotations.Operation( summary = "Save settings", - description = "") - @RequestMapping( + description = "Save the provided settings.", + requestBody = @io.swagger.v3.oas.annotations.parameters.RequestBody( + description = "Map of settings to be saved", + required = true, + content = { + @Content( + mediaType = MediaType.APPLICATION_FORM_URLENCODED_VALUE, + schema = @Schema(implementation = Map.class), + examples = { + @ExampleObject( + name = "Example setting (application/x-www-form-urlencoded)", + value = "{\n \"additionalProp1\": \"string\",\n \"additionalProp2\": \"string\",\n \"additionalProp3\": \"string\"\n}" + ), + @ExampleObject( + name = "Example setting selection manager max records to 1000 (application/x-www-form-urlencoded)", + value = "{\n \"system/selectionmanager/maxrecords\": \"1000\"\n}" + ) + } + ), + @Content( + mediaType = MediaType.APPLICATION_JSON_VALUE, + schema = @Schema(implementation = Map.class), + examples = { + @ExampleObject( + name = "Example setting (application/json)", + value = "{\n \"additionalProp1\": \"string\",\n \"additionalProp2\": \"string\",\n \"additionalProp3\": \"string\"\n}" + ), + @ExampleObject( + name = "Example setting selection manager max records to 1000 (application/json)", + value = "{\n \"system/selectionmanager/maxrecords\": \"1000\"\n}" + ) + } + ) + } + ) + ) + @PostMapping( path = "/settings", - produces = MediaType.APPLICATION_JSON_VALUE, - method = RequestMethod.POST + consumes = {MediaType.APPLICATION_FORM_URLENCODED_VALUE, MediaType.APPLICATION_JSON_VALUE} ) @PreAuthorize("hasAuthority('Administrator')") @ResponseStatus(HttpStatus.NO_CONTENT) @@ -401,11 +434,20 @@ public List getSettingsDetails( @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_ADMIN) }) public void saveSettings( - @Parameter(hidden = false) - @RequestParam - Map allRequestParams, 
+ // Mark parameter as hidden in open api specification as the Operation requestBody(above) will describe the format to be supplied + // Without this fix, the swagger ui will fail to work correctly. + @Parameter(description = "Map of settings to be saved", + required = true, hidden = true) + @RequestParam Map allRequestParams, HttpServletRequest request ) throws Exception { + //If sent as JSON then the allRequestParams will be empty, and we need to manually load it from the request body + if (MediaType.APPLICATION_JSON_VALUE.equals(request.getContentType()) && allRequestParams.isEmpty()) { + BufferedReader reader = request.getReader(); + ObjectMapper mapper = new ObjectMapper(); + allRequestParams = mapper.readValue(reader, new TypeReference>() {}); + } + ApplicationContext applicationContext = ApplicationContextHolder.get(); String currentUuid = settingManager.getSiteId(); String oldSiteName = settingManager.getSiteName(); @@ -516,7 +558,7 @@ public boolean isCasEnabled( method = RequestMethod.PUT) @ResponseStatus(HttpStatus.NO_CONTENT) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Staging profile saved."), + @ApiResponse(responseCode = "204", description = "Staging profile saved.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_ADMIN) }) @PreAuthorize("hasAuthority('Administrator')") @@ -761,7 +803,7 @@ public ProxyConfiguration getProxyConfiguration( @PreAuthorize("hasAuthority('Administrator')") @ResponseStatus(HttpStatus.NO_CONTENT) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Logo set."), + @ApiResponse(responseCode = "204", description = "Logo set.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_USER_ADMIN) }) public void setLogo( diff --git 
a/services/src/main/java/org/fao/geonet/api/sources/SourcesApi.java b/services/src/main/java/org/fao/geonet/api/sources/SourcesApi.java index 5b50c543e5cb..da0f179a4fc7 100644 --- a/services/src/main/java/org/fao/geonet/api/sources/SourcesApi.java +++ b/services/src/main/java/org/fao/geonet/api/sources/SourcesApi.java @@ -227,7 +227,7 @@ private void copySourceLogo(Source source, HttpServletRequest request) { @PreAuthorize("hasAuthority('UserAdmin')") @ResponseStatus(HttpStatus.NO_CONTENT) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Source updated."), + @ApiResponse(responseCode = "204", description = "Source updated.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = "Source not found."), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_USER_ADMIN) }) @@ -278,7 +278,7 @@ public ResponseEntity updateSource( @PreAuthorize("hasAuthority('Administrator')") @ResponseStatus(HttpStatus.NO_CONTENT) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Source deleted."), + @ApiResponse(responseCode = "204", description = "Source deleted.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_ADMIN) }) @ResponseBody diff --git a/services/src/main/java/org/fao/geonet/api/status/StatusApi.java b/services/src/main/java/org/fao/geonet/api/status/StatusApi.java index df46dfe0bed3..fa3715dca084 100644 --- a/services/src/main/java/org/fao/geonet/api/status/StatusApi.java +++ b/services/src/main/java/org/fao/geonet/api/status/StatusApi.java @@ -24,6 +24,8 @@ package org.fao.geonet.api.status; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import 
io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -79,7 +81,7 @@ public List getStatusByType( @RequestMapping(method = RequestMethod.DELETE) @PreAuthorize("hasAuthority('Administrator')") @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "Status removed."), + @ApiResponse(responseCode = "204", description = "Status removed.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_ADMIN) }) @ResponseStatus(HttpStatus.NO_CONTENT) diff --git a/services/src/main/java/org/fao/geonet/api/uisetting/UiSettingApi.java b/services/src/main/java/org/fao/geonet/api/uisetting/UiSettingApi.java index 8fa7193367d6..1696eca05a99 100644 --- a/services/src/main/java/org/fao/geonet/api/uisetting/UiSettingApi.java +++ b/services/src/main/java/org/fao/geonet/api/uisetting/UiSettingApi.java @@ -24,6 +24,8 @@ package org.fao.geonet.api.uisetting; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -178,7 +180,7 @@ public UiSetting getUiConfiguration( @PreAuthorize("hasAuthority('UserAdmin')") @ResponseStatus(HttpStatus.NO_CONTENT) @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "UI configuration updated."), + @ApiResponse(responseCode = "204", description = "UI configuration updated.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_USER_ADMIN) }) @ResponseBody @@ -232,7 +234,7 @@ public ResponseEntity updateUiConfiguration( method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) @ApiResponses(value = { - 
@ApiResponse(responseCode = "204", description = "UI Configuration removed."), + @ApiResponse(responseCode = "204", description = "UI Configuration removed.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = "UI Configuration not found."), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_USER_ADMIN) }) diff --git a/services/src/main/java/org/fao/geonet/api/userfeedback/UserFeedbackAPI.java b/services/src/main/java/org/fao/geonet/api/userfeedback/UserFeedbackAPI.java index a782fb814f19..9f66a2208172 100644 --- a/services/src/main/java/org/fao/geonet/api/userfeedback/UserFeedbackAPI.java +++ b/services/src/main/java/org/fao/geonet/api/userfeedback/UserFeedbackAPI.java @@ -24,6 +24,8 @@ package org.fao.geonet.api.userfeedback; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -146,7 +148,7 @@ public List getRatingCriteria( @RequestMapping(value = "/userfeedback/{uuid}", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) @PreAuthorize("hasAuthority('Reviewer')") - @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "User feedback removed."), + @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "User feedback removed.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_REVIEWER)}) @ResponseBody public ResponseEntity deleteUserFeedback( @@ -719,7 +721,7 @@ private void printOutputMessage(final HttpServletResponse response, final HttpSt @RequestMapping(value = "/userfeedback/{uuid}/publish", produces = 
MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.GET) @ResponseStatus(value = HttpStatus.NO_CONTENT) @PreAuthorize("hasAuthority('Reviewer')") - @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "User feedback published."), + @ApiResponses(value = {@ApiResponse(responseCode = "204", description = "User feedback published.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "403", description = ApiParams.API_RESPONSE_NOT_ALLOWED_ONLY_REVIEWER), @ApiResponse(responseCode = "404", description = ApiParams.API_RESPONSE_RESOURCE_NOT_FOUND)}) @ResponseBody diff --git a/services/src/main/java/org/fao/geonet/api/users/MeApi.java b/services/src/main/java/org/fao/geonet/api/users/MeApi.java index 955f9e4f3926..ee5e882dc20f 100644 --- a/services/src/main/java/org/fao/geonet/api/users/MeApi.java +++ b/services/src/main/java/org/fao/geonet/api/users/MeApi.java @@ -25,6 +25,8 @@ import io.swagger.v3.oas.annotations.OpenAPIDefinition; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -66,7 +68,7 @@ public class MeApi { @GetMapping(produces = MediaType.APPLICATION_JSON_VALUE) @ApiResponses(value = { @ApiResponse(responseCode = "200", description = "Authenticated. 
Return user details."), - @ApiResponse(responseCode = "204", description = "Not authenticated.") + @ApiResponse(responseCode = "204", description = "Not authenticated.", content = {@Content(schema = @Schema(hidden = true))}) }) @ResponseStatus(OK) @ResponseBody diff --git a/services/src/main/java/org/fao/geonet/api/users/PasswordApi.java b/services/src/main/java/org/fao/geonet/api/users/PasswordApi.java index 13dcce6d877b..00e4010dad8c 100644 --- a/services/src/main/java/org/fao/geonet/api/users/PasswordApi.java +++ b/services/src/main/java/org/fao/geonet/api/users/PasswordApi.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -27,7 +27,6 @@ import io.swagger.v3.oas.annotations.tags.Tag; import jeeves.server.context.ServiceContext; import org.fao.geonet.ApplicationContextHolder; -import org.fao.geonet.api.API; import org.fao.geonet.api.ApiUtils; import org.fao.geonet.api.tools.i18n.LanguageUtils; import org.fao.geonet.constants.Geonet; @@ -57,6 +56,7 @@ import javax.servlet.http.HttpServletRequest; import java.text.SimpleDateFormat; import java.util.Calendar; +import java.util.List; import java.util.Locale; import java.util.ResourceBundle; @@ -76,6 +76,7 @@ public class PasswordApi { public static final String LOGGER = Geonet.GEONETWORK + ".api.user"; public static final String DATE_FORMAT = "yyyy-MM-dd"; + public static final String USER_PASSWORD_SENT = "user_password_sent"; @Autowired LanguageUtils languageUtils; @Autowired @@ -85,14 +86,13 @@ public class PasswordApi { @Autowired FeedbackLanguages feedbackLanguages; - @Autowired(required=false) + @Autowired(required = false) SecurityProviderConfiguration securityProviderConfiguration; 
@io.swagger.v3.oas.annotations.Operation(summary = "Update user password", description = "Get a valid changekey by email first and then update your password.") - @RequestMapping( + @PatchMapping( value = "/{username}", - method = RequestMethod.PATCH, produces = MediaType.TEXT_PLAIN_VALUE) @ResponseStatus(value = HttpStatus.CREATED) @ResponseBody @@ -100,13 +100,12 @@ public ResponseEntity updatePassword( @Parameter(description = "The user name", required = true) @PathVariable - String username, + String username, @Parameter(description = "The new password and a valid change key", required = true) @RequestBody - PasswordUpdateParameter passwordAndChangeKey, - HttpServletRequest request) - throws Exception { + PasswordUpdateParameter passwordAndChangeKey, + HttpServletRequest request) { Locale locale = languageUtils.parseAcceptLanguage(request.getLocales()); ResourceBundle messages = ResourceBundle.getBundle("org.fao.geonet.api.Messages", locale); Locale[] feedbackLocales = feedbackLanguages.getLocales(locale); @@ -117,8 +116,9 @@ public ResponseEntity updatePassword( ServiceContext context = ApiUtils.createServiceContext(request); - User user = userRepository.findOneByUsername(username); - if (user == null) { + List existingUsers = userRepository.findByUsernameIgnoreCase(username); + + if (existingUsers.isEmpty()) { Log.warning(LOGGER, String.format("User update password. Can't find user '%s'", username)); @@ -128,6 +128,9 @@ public ResponseEntity updatePassword( XslUtil.encodeForJavaScript(username) ), HttpStatus.PRECONDITION_FAILED); } + + User user = existingUsers.get(0); + if (LDAPConstants.LDAP_FLAG.equals(user.getSecurity().getAuthType())) { Log.warning(LOGGER, String.format("User '%s' is authenticated using LDAP. 
Password can't be sent by email.", username)); @@ -183,14 +186,16 @@ public ResponseEntity updatePassword( String content = localizedEmail.getParsedMessage(feedbackLocales); // send change link via email with admin in CC - if (!MailUtil.sendMail(user.getEmail(), + Boolean mailSent = MailUtil.sendMail(user.getEmail(), subject, content, null, sm, - adminEmail, "")) { + adminEmail, ""); + if (Boolean.FALSE.equals(mailSent)) { return new ResponseEntity<>(String.format( messages.getString("mail_error")), HttpStatus.PRECONDITION_FAILED); } + return new ResponseEntity<>(String.format( messages.getString("user_password_changed"), XslUtil.encodeForJavaScript(username) @@ -202,9 +207,8 @@ public ResponseEntity updatePassword( "reset his password. User MUST have an email to get the link. " + "LDAP users will not be able to retrieve their password " + "using this service.") - @RequestMapping( + @PutMapping( value = "/actions/forgot-password", - method = RequestMethod.PUT, produces = MediaType.TEXT_PLAIN_VALUE) @ResponseStatus(value = HttpStatus.CREATED) @ResponseBody @@ -212,9 +216,8 @@ public ResponseEntity sendPasswordByEmail( @Parameter(description = "The user name", required = true) @RequestParam - String username, - HttpServletRequest request) - throws Exception { + String username, + HttpServletRequest request) { Locale locale = languageUtils.parseAcceptLanguage(request.getLocales()); ResourceBundle messages = ResourceBundle.getBundle("org.fao.geonet.api.Messages", locale); Locale[] feedbackLocales = feedbackLanguages.getLocales(locale); @@ -225,17 +228,19 @@ public ResponseEntity sendPasswordByEmail( ServiceContext serviceContext = ApiUtils.createServiceContext(request); - final User user = userRepository.findOneByUsername(username); - if (user == null) { + List existingUsers = userRepository.findByUsernameIgnoreCase(username); + + if (existingUsers.isEmpty()) { Log.warning(LOGGER, String.format("User reset password. 
Can't find user '%s'", username)); // Return response not providing details about the issue, that should be logged. return new ResponseEntity<>(String.format( - messages.getString("user_password_sent"), + messages.getString(USER_PASSWORD_SENT), XslUtil.encodeForJavaScript(username) ), HttpStatus.CREATED); } + User user = existingUsers.get(0); if (LDAPConstants.LDAP_FLAG.equals(user.getSecurity().getAuthType())) { Log.warning(LOGGER, String.format("User '%s' is authenticated using LDAP. Password can't be sent by email.", @@ -243,19 +248,19 @@ public ResponseEntity sendPasswordByEmail( // Return response not providing details about the issue, that should be logged. return new ResponseEntity<>(String.format( - messages.getString("user_password_sent"), + messages.getString(USER_PASSWORD_SENT), XslUtil.encodeForJavaScript(username) ), HttpStatus.CREATED); } String email = user.getEmail(); - if (StringUtils.isEmpty(email)) { + if (!StringUtils.hasLength(email)) { Log.warning(LOGGER, String.format("User reset password. User '%s' has no email", username)); // Return response not providing details about the issue, that should be logged. 
return new ResponseEntity<>(String.format( - messages.getString("user_password_sent"), + messages.getString(USER_PASSWORD_SENT), XslUtil.encodeForJavaScript(username) ), HttpStatus.CREATED); } @@ -298,16 +303,18 @@ public ResponseEntity sendPasswordByEmail( String content = localizedEmail.getParsedMessage(feedbackLocales); // send change link via email with admin in CC - if (!MailUtil.sendMail(email, + Boolean mailSent = MailUtil.sendMail(email, subject, content, null, sm, - adminEmail, "")) { + adminEmail, ""); + if (Boolean.FALSE.equals(mailSent)) { return new ResponseEntity<>(String.format( messages.getString("mail_error")), HttpStatus.PRECONDITION_FAILED); } + return new ResponseEntity<>(String.format( - messages.getString("user_password_sent"), + messages.getString(USER_PASSWORD_SENT), XslUtil.encodeForJavaScript(username) ), HttpStatus.CREATED); } diff --git a/services/src/main/java/org/fao/geonet/api/users/UsersApi.java b/services/src/main/java/org/fao/geonet/api/users/UsersApi.java index ef49de62a212..bc2116541be5 100644 --- a/services/src/main/java/org/fao/geonet/api/users/UsersApi.java +++ b/services/src/main/java/org/fao/geonet/api/users/UsersApi.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2021 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -38,7 +38,9 @@ import org.fao.geonet.api.users.model.UserDto; import org.fao.geonet.api.users.validation.PasswordResetDtoValidator; import org.fao.geonet.api.users.validation.UserDtoValidator; +import org.fao.geonet.auditable.UserAuditableService; import org.fao.geonet.domain.*; +import org.fao.geonet.domain.auditable.UserAuditable; import org.fao.geonet.exceptions.UserNotFoundEx; import org.fao.geonet.kernel.DataManager; import org.fao.geonet.kernel.datamanager.IMetadataUtils; @@ -116,6 +118,9 @@ public class UsersApi { 
@Autowired(required=false) SecurityProviderConfiguration securityProviderConfiguration; + @Autowired + UserAuditableService userAuditableService; + private BufferedImage pixel; public UsersApi() { @@ -197,7 +202,7 @@ public User getUser( myUserId.equals(Integer.toString(userIdentifier))) { Optional user = userRepository.findById(userIdentifier); - if (!user.isPresent()) { + if (user.isEmpty()) { throw new UserNotFoundEx(Integer.toString(userIdentifier)); } @@ -246,7 +251,7 @@ public void getUserIdenticon( try { Optional user = userRepository.findById(userIdentifier); - if (!user.isPresent()) { + if (user.isEmpty()) { throw new UserNotFoundEx(Integer.toString(userIdentifier)); } @@ -341,17 +346,28 @@ public ResponseEntity deleteUser( } } + Optional userToDelete = userRepository.findById(userIdentifier); + List userGroups = userGroupRepository.findAll(UserGroupSpecs.hasUserId(userIdentifier)); + userGroupRepository.deleteAllByIdAttribute(UserGroupId_.userId, - Arrays.asList(userIdentifier)); + List.of(userIdentifier)); userSavedSelectionRepository.deleteAllByUser(userIdentifier); + + try { userRepository.deleteById(userIdentifier); } catch (org.springframework.dao.EmptyResultDataAccessException ex) { throw new UserNotFoundEx(Integer.toString(userIdentifier)); } + if (userToDelete.isPresent()) { + UserAuditable userAuditable = UserAuditable.build(userToDelete.get(), userGroups); + userAuditableService.auditDelete(userAuditable); + } + + return new ResponseEntity(HttpStatus.NO_CONTENT); } @@ -393,7 +409,7 @@ public ResponseEntity checkUserPropertyExist( return new ResponseEntity<>(HttpStatus.OK); } } else { - throw new IllegalArgumentException(String.format("Property '%s' is not supported. You can only check username and email")); + throw new IllegalArgumentException("Property is not supported. 
You can only check username and email"); } return new ResponseEntity<>(HttpStatus.NOT_FOUND); } @@ -442,7 +458,7 @@ public ResponseEntity createUser( // TODO: CheckAccessRights - if (!myProfile.getAll().contains(profile)) { + if (!myProfile.getProfileAndAllChildren().contains(profile)) { throw new IllegalArgumentException( "Trying to set profile to " + profile + " max profile permitted is: " + myProfile); @@ -488,6 +504,12 @@ public ResponseEntity createUser( user = userRepository.save(user); setUserGroups(user, groups); + List userGroups = userGroupRepository.findAll(UserGroupSpecs + .hasUserId(user.getId())); + + UserAuditable userAuditable = UserAuditable.build(user, userGroups); + userAuditableService.auditSave(userAuditable); + return new ResponseEntity(HttpStatus.NO_CONTENT); } @@ -535,16 +557,16 @@ public ResponseEntity updateUser( // TODO: CheckAccessRights - User user = userRepository.findById(userIdentifier).get(); - if (user == null) { - throw new IllegalArgumentException("No user found with id: " - + userDto.getId()); + Optional userOptional = userRepository.findById(userIdentifier); + if (userOptional.isEmpty()) { + throw new IllegalArgumentException(String.format("No user found with id: %s", userDto.getId())); } + User user = userOptional.get(); // Check no duplicated username and if we are adding a duplicate existing name with other case combination List usersWithUsernameIgnoreCase = userRepository.findByUsernameIgnoreCase(userDto.getUsername()); - if (usersWithUsernameIgnoreCase.size() != 0 && - (!usersWithUsernameIgnoreCase.stream().anyMatch(u -> u.getId() == userIdentifier) + if (!usersWithUsernameIgnoreCase.isEmpty() && + (usersWithUsernameIgnoreCase.stream().noneMatch(u -> u.getId() == userIdentifier) || usersWithUsernameIgnoreCase.stream().anyMatch(u -> u.getUsername().equals(userDto.getUsername()) && u.getId() != userIdentifier) )) { @@ -553,7 +575,7 @@ public ResponseEntity updateUser( } - if (!myProfile.getAll().contains(profile)) { + if 
(!myProfile.getProfileAndAllChildren().contains(profile)) { throw new IllegalArgumentException( "Trying to set profile to " + profile + " max profile permitted is: " + myProfile); @@ -566,7 +588,7 @@ public ResponseEntity updateUser( groups.addAll(processGroups(userDto.getGroupsReviewer(), Profile.Reviewer)); groups.addAll(processGroups(userDto.getGroupsUserAdmin(), Profile.UserAdmin)); - //If it is a useradmin updating, + //If it is a useradmin updating, //maybe we don't know all the groups the user is part of if (!Profile.Administrator.equals(myProfile)) { List myUserAdminGroups = userGroupRepository.findGroupIds(Specification.where( @@ -614,6 +636,12 @@ public ResponseEntity updateUser( if (securityProviderConfiguration == null || securityProviderConfiguration.isUserGroupUpdateEnabled()) { setUserGroups(user, groups); } + + List userGroups = userGroupRepository.findAll(UserGroupSpecs + .hasUserId(user.getId())); + + UserAuditable userAuditable = UserAuditable.build(user, userGroups); + userAuditableService.auditSave(userAuditable); return new ResponseEntity(HttpStatus.NO_CONTENT); } @@ -667,14 +695,14 @@ public ResponseEntity resetUserPassword( Profile myProfile = session.getProfile(); String myUserId = session.getUserId(); - if (!Profile.Administrator.equals(myProfile) - && !Profile.UserAdmin.equals(myProfile) + if (!Profile.Administrator.equals(myProfile) + && !Profile.UserAdmin.equals(myProfile) && !myUserId.equals(Integer.toString(userIdentifier))) { throw new IllegalArgumentException("You don't have rights to do this"); } Optional user = userRepository.findById(userIdentifier); - if (!user.isPresent()) { + if (user.isEmpty()) { throw new UserNotFoundEx(Integer.toString(userIdentifier)); } @@ -724,10 +752,12 @@ public List retrieveUserGroups( if (Profile.Administrator.equals(myProfile) || Profile.UserAdmin.equals(myProfile) || myUserId.equals(Integer.toString(userIdentifier))) { // -- get the profile of the user id supplied - User user =
userRepository.findById(userIdentifier).get(); - if (user == null) { + Optional userOptional = userRepository.findById(userIdentifier); + + if (userOptional.isEmpty()) { throw new IllegalArgumentException("user " + userIdentifier + " doesn't exist"); } + User user = userOptional.get(); String userProfile = user.getProfile().name(); @@ -793,7 +823,7 @@ private void setUserGroups(final User user, List userGroups) .hasUserId(user.getId())); // Have a quick reference of existing groups and profiles for this user - Set listOfAddedProfiles = new HashSet(); + Set listOfAddedProfiles = new HashSet<>(); for (UserGroup ug : all) { String key = ug.getProfile().name() + ug.getGroup().getId(); listOfAddedProfiles.add(key); @@ -801,11 +831,10 @@ private void setUserGroups(final User user, List userGroups) // We start removing all old usergroup objects. We will remove the // explicitly defined for this call - Collection toRemove = new ArrayList(); - toRemove.addAll(all); + Collection toRemove = new ArrayList<>(all); // New pairs of group-profile we need to add - Collection toAdd = new ArrayList(); + Collection toAdd = new ArrayList<>(); // For each of the parameters on the request, make sure the group is // updated. 
@@ -865,7 +894,7 @@ private void setUserGroups(final User user, List userGroups) private List processGroups(List groupsToProcessList, Profile profile) { - List groups = new LinkedList(); + List groups = new LinkedList<>(); for (String g : groupsToProcessList) { groups.add(new GroupElem(profile.name(), Integer.parseInt(g))); } diff --git a/services/src/main/java/org/fao/geonet/api/users/transfer/OwnershipUtils.java b/services/src/main/java/org/fao/geonet/api/users/transfer/OwnershipUtils.java index 70c95f1b8d2c..168dc816eb91 100644 --- a/services/src/main/java/org/fao/geonet/api/users/transfer/OwnershipUtils.java +++ b/services/src/main/java/org/fao/geonet/api/users/transfer/OwnershipUtils.java @@ -86,7 +86,7 @@ public Element apply(@Nonnull User input) { Set hsMyGroups = getUserGroups(context, id); - Set profileSet = us.getProfile().getAll(); + Set profileSet = us.getProfile().getProfileAndAllChildren(); //--- now filter them diff --git a/services/src/main/java/org/fao/geonet/api/users/validation/UserRegisterDtoValidator.java b/services/src/main/java/org/fao/geonet/api/users/validation/UserRegisterDtoValidator.java index 2ba53946b18d..f22d7cc821f3 100644 --- a/services/src/main/java/org/fao/geonet/api/users/validation/UserRegisterDtoValidator.java +++ b/services/src/main/java/org/fao/geonet/api/users/validation/UserRegisterDtoValidator.java @@ -60,12 +60,10 @@ public void validate(Object target, Errors errors) { } UserRepository userRepository = ApplicationContextHolder.get().getBean(UserRepository.class); - if (userRepository.findOneByEmail(userRegisterDto.getEmail()) != null) { - errors.rejectValue("", "user_with_that_email_found", "A user with this email or username already exists."); + if ((userRepository.findOneByEmail(userRegisterDto.getEmail()) != null) || + (!userRepository.findByUsernameIgnoreCase(userRegisterDto.getEmail()).isEmpty())) { + errors.rejectValue("", "user_with_that_email_username_found", "A user with this email or username already exists."); 
} - if (userRepository.findByUsernameIgnoreCase(userRegisterDto.getEmail()).size() != 0) { - errors.rejectValue("", "user_with_that_username_found", "A user with this email or username already exists."); - } } } diff --git a/services/src/main/java/org/fao/geonet/api/usersearches/UserSearchesApi.java b/services/src/main/java/org/fao/geonet/api/usersearches/UserSearchesApi.java index 517744f9ce70..23a4a148a855 100644 --- a/services/src/main/java/org/fao/geonet/api/usersearches/UserSearchesApi.java +++ b/services/src/main/java/org/fao/geonet/api/usersearches/UserSearchesApi.java @@ -24,6 +24,8 @@ package org.fao.geonet.api.usersearches; import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Content; +import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponses; import io.swagger.v3.oas.annotations.tags.Tag; @@ -350,7 +352,7 @@ public ResponseEntity createUserCustomSearch( @ResponseStatus(value = HttpStatus.NO_CONTENT) @PreAuthorize("hasAuthority('UserAdmin')") @ApiResponses(value = { - @ApiResponse(responseCode = "204", description = "User search updated."), + @ApiResponse(responseCode = "204", description = "User search updated.", content = {@Content(schema = @Schema(hidden = true))}), @ApiResponse(responseCode = "404", description = ApiParams.API_RESPONSE_RESOURCE_NOT_FOUND) }) @ResponseBody diff --git a/services/src/main/java/org/fao/geonet/guiapi/search/XsltResponseWriter.java b/services/src/main/java/org/fao/geonet/guiapi/search/XsltResponseWriter.java index 3623a242f02f..fd6a7c78bd56 100644 --- a/services/src/main/java/org/fao/geonet/guiapi/search/XsltResponseWriter.java +++ b/services/src/main/java/org/fao/geonet/guiapi/search/XsltResponseWriter.java @@ -1,6 +1,6 @@ /* * ============================================================================= - * === Copyright (C) 2001-2023 Food and Agriculture Organization of 
the + * === Copyright (C) 2001-2024 Food and Agriculture Organization of the * === United Nations (FAO-UN), United Nations World Food Programme (WFP) * === and United Nations Environment Programme (UNEP) * === @@ -36,9 +36,7 @@ import org.fao.geonet.utils.Log; import org.fao.geonet.utils.Xml; import org.jdom.Element; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; -import org.springframework.stereotype.Component; import javax.servlet.http.HttpServletResponse; import java.io.IOException; @@ -50,16 +48,17 @@ /** * Utility to mimic what Jeeves was doing */ -@Component public class XsltResponseWriter { public static final String TRANSLATIONS = "translations"; - @Autowired - GeonetworkDataDirectory dataDirectory; Element xml; Path xsl; Map xslParams = new HashMap<>(); public XsltResponseWriter(String envTagName, String serviceName) { + this(envTagName, serviceName, "eng"); + } + + public XsltResponseWriter(String envTagName, String serviceName, String lang) { SettingManager settingManager = ApplicationContextHolder.get().getBean(SettingManager.class); String url = settingManager.getBaseURL(); Element gui = new Element("gui"); @@ -70,8 +69,7 @@ public XsltResponseWriter(String envTagName, String serviceName) { gui.addContent(new Element("baseUrl").setText(settingManager.getBaseURL())); gui.addContent(new Element("serverUrl").setText(settingManager.getServerURL())); gui.addContent(new Element("nodeId").setText(settingManager.getNodeId())); - // TODO: set language based on header - gui.addContent(new Element("language").setText("eng")); + gui.addContent(new Element("language").setText(lang)); Element settings = settingManager.getAllAsXML(true); @@ -94,8 +92,7 @@ public XsltResponseWriter withXml(Element xml) { public XsltResponseWriter withXsl(String xsl) { ApplicationContext applicationContext = ApplicationContextHolder.get(); GeonetworkDataDirectory dataDirectory = 
applicationContext.getBean(GeonetworkDataDirectory.class); - Path xslt = dataDirectory.getWebappDir().resolve(xsl); - this.xsl = xslt; + this.xsl = dataDirectory.getWebappDir().resolve(xsl); return this; } @@ -153,7 +150,7 @@ public XsltResponseWriter withJson(String json) { }); } catch (IOException e) { Log.warning(Geonet.GEONETWORK, String.format( - "Can't find JSON file '%s'.", jsonPath.toString() + "Can't find JSON file '%s'.", jsonPath )); } diff --git a/services/src/main/java/org/fao/geonet/services/thesaurus/GetKeywordByIdAsConcept.java b/services/src/main/java/org/fao/geonet/services/thesaurus/GetKeywordByIdAsConcept.java index 1c86f3f694f8..4428f73c5862 100644 --- a/services/src/main/java/org/fao/geonet/services/thesaurus/GetKeywordByIdAsConcept.java +++ b/services/src/main/java/org/fao/geonet/services/thesaurus/GetKeywordByIdAsConcept.java @@ -73,7 +73,7 @@ public Element exec(Element params, ServiceContext context) searcher = new KeywordsSearcher(context, thesaurusMan); KeywordBean kb = null; - kb = searcher.searchById(uri, sThesaurusName, langForThesaurus); + kb = searcher.searchById(uri, sThesaurusName, langForThesaurus, "eng"); if (kb == null) { root = new Element("descKeys"); } else { @@ -93,11 +93,11 @@ public Element exec(Element params, ServiceContext context) reqType = KeywordRelation.RELATED; } - searcher.searchForRelated(params, reqType, KeywordSort.defaultLabelSorter(SortDirection.DESC), lang); + searcher.searchForRelated(params, reqType, KeywordSort.defaultLabelSorter(SortDirection.DESC), lang, "eng"); // build response for each request type Element keywordType = new Element(request); for (KeywordBean kbr : searcher.getResults()) { - keywordType.addContent(kbr.toElement(context.getLanguage())); + keywordType.addContent(kbr.toElement(context.getLanguage(), "eng")); } root.addContent(keywordType); } diff --git a/services/src/main/java/org/fao/geonet/services/thesaurus/GetTopConcept.java 
b/services/src/main/java/org/fao/geonet/services/thesaurus/GetTopConcept.java index 1654db883b24..86c6dcfb7a92 100644 --- a/services/src/main/java/org/fao/geonet/services/thesaurus/GetTopConcept.java +++ b/services/src/main/java/org/fao/geonet/services/thesaurus/GetTopConcept.java @@ -70,7 +70,7 @@ public Element exec(Element params, ServiceContext context) searcher = new KeywordsSearcher(context, thesaurusMan); try { - searcher.searchTopConcepts(sThesaurusName, langForThesaurus); + searcher.searchTopConcepts(sThesaurusName, langForThesaurus, "eng"); KeywordBean topConcept = new KeywordBean(the.getIsoLanguageMapper()); topConcept.setThesaurusInfo(the); diff --git a/services/src/main/java/org/fao/geonet/util/MetadataPublicationMailNotifier.java b/services/src/main/java/org/fao/geonet/util/MetadataPublicationMailNotifier.java index 02eab8975644..64e65f71a8ad 100644 --- a/services/src/main/java/org/fao/geonet/util/MetadataPublicationMailNotifier.java +++ b/services/src/main/java/org/fao/geonet/util/MetadataPublicationMailNotifier.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -34,6 +34,7 @@ import org.fao.geonet.repository.GroupRepository; import org.fao.geonet.utils.Log; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; import java.util.*; @@ -48,6 +49,9 @@ @Component public class MetadataPublicationMailNotifier { + @Value("${metadata.publicationmail.format.html:true}") + private boolean sendHtmlMail; + @Autowired SettingManager settingManager; @@ -136,7 +140,6 @@ private void sendMailPublicationNotification(Locale[] feedbackLocales, LocalizedEmailComponent emailMessageComponent = new 
LocalizedEmailComponent(MESSAGE, "metadata_published_text", KeyType.MESSAGE_KEY, POSITIONAL_FORMAT); for (Locale feedbackLocale : feedbackLocales) { - emailSubjectComponent.addParameters( feedbackLocale, new LocalizedEmailParameter(ParameterType.RAW_VALUE, 1, settingManager.getSiteName()) @@ -175,15 +178,19 @@ private void sendMailPublicationNotification(Locale[] feedbackLocales, ); } - LocalizedEmail localizedEmail = new LocalizedEmail(true); + LocalizedEmail localizedEmail = new LocalizedEmail(sendHtmlMail); localizedEmail.addComponents(emailSubjectComponent, emailMessageComponent); String subject = localizedEmail.getParsedSubject(feedbackLocales); - String htmlMessage = localizedEmail.getParsedMessage(feedbackLocales); + String message = localizedEmail.getParsedMessage(feedbackLocales); // Send mail to notify about metadata publication / un-publication try { - MailUtil.sendHtmlMail(toAddress, subject, htmlMessage, settingManager); + if (sendHtmlMail) { + MailUtil.sendHtmlMail(toAddress, subject, message, settingManager); + } else { + MailUtil.sendMail(toAddress, subject, message, settingManager); + } } catch (IllegalArgumentException ex) { Log.warning(API.LOG_MODULE_NAME, ex.getMessage(), ex); } diff --git a/services/src/test/java/org/fao/geonet/api/doiservers/DoiServersApiTest.java b/services/src/test/java/org/fao/geonet/api/doiservers/DoiServersApiTest.java new file mode 100644 index 000000000000..d91e6924cfc9 --- /dev/null +++ b/services/src/test/java/org/fao/geonet/api/doiservers/DoiServersApiTest.java @@ -0,0 +1,281 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later 
version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +package org.fao.geonet.api.doiservers; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import junit.framework.Assert; +import org.fao.geonet.api.JsonFieldNamingStrategy; +import org.fao.geonet.api.doiservers.model.DoiServerDto; +import org.fao.geonet.domain.*; +import org.fao.geonet.repository.DoiServerRepository; +import org.fao.geonet.repository.DoiServerRepositoryTest; +import org.fao.geonet.repository.GroupRepository; +import org.fao.geonet.repository.GroupRepositoryTest; +import org.fao.geonet.services.AbstractServiceIntegrationTest; +import org.jasypt.encryption.pbe.StandardPBEStringEncryptor; +import org.jasypt.hibernate5.encryptor.HibernatePBEEncryptorRegistry; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.MediaType; +import org.springframework.mock.web.MockHttpSession; +import org.springframework.test.web.servlet.MockMvc; +import org.springframework.test.web.servlet.MvcResult; +import org.springframework.test.web.servlet.setup.MockMvcBuilders; +import org.springframework.web.context.WebApplicationContext; + +import java.util.List; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static 
org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; + +public class DoiServersApiTest extends AbstractServiceIntegrationTest { + @Autowired + private WebApplicationContext wac; + + @Autowired + private DoiServerRepository doiServerRepository; + + @Autowired + private GroupRepository groupRepository; + + private MockMvc mockMvc; + + private MockHttpSession mockHttpSession; + + private AtomicInteger inc = new AtomicInteger(); + + @BeforeClass + public static void init() { + StandardPBEStringEncryptor strongEncryptor = new StandardPBEStringEncryptor(); + strongEncryptor.setPassword("testpassword"); + + HibernatePBEEncryptorRegistry registry = + HibernatePBEEncryptorRegistry.getInstance(); + registry.registerPBEStringEncryptor("STRING_ENCRYPTOR", strongEncryptor); + } + + @Before + public void setUp() { + createTestData(); + } + + @Test + public void getDoiServers() throws Exception { + this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build(); + + this.mockHttpSession = loginAsAdmin(); + + this.mockMvc.perform(get("/srv/api/doiservers") + .session(this.mockHttpSession) + .accept(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", hasSize(2))) + .andExpect(content().contentType(API_JSON_EXPECTED_ENCODING)); + } + + @Test + public void getDoiServer() throws Exception { + this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build(); + + List doiServers = doiServerRepository.findAll(); + assertEquals(2, doiServers.size()); + DoiServer doiServerToRetrieve = doiServers.get(0); + + this.mockHttpSession = loginAsAdmin(); + + this.mockMvc.perform(get("/srv/api/doiservers/" + doiServerToRetrieve.getId()) + .session(this.mockHttpSession) + 
.accept(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.name", is(doiServerToRetrieve.getName()))) + .andExpect(content().contentType(API_JSON_EXPECTED_ENCODING)); + } + + @Test + public void deleteDoiServer() throws Exception { + this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build(); + + List doiServers = doiServerRepository.findAll(); + assertEquals(2, doiServers.size()); + DoiServer doiServerToDelete = doiServers.get(0); + + this.mockHttpSession = loginAsAdmin(); + + this.mockMvc.perform(delete("/srv/api/doiservers/" + doiServerToDelete.getId()) + .session(this.mockHttpSession) + .accept(MediaType.parseMediaType("application/json"))) + .andExpect(status().isNoContent()); + + this.mockMvc.perform(get("/srv/api/doiservers/" + doiServerToDelete.getId()) + .session(this.mockHttpSession) + .accept(MediaType.parseMediaType("application/json"))) + .andExpect(status().isNotFound()); + + Optional doiServerOpt = doiServerRepository.findOneById(doiServerToDelete.getId()); + Assert.assertTrue(doiServerOpt.isEmpty()); + } + + @Test + public void deleteNonExistingDoiServer() throws Exception { + this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build(); + + Optional doiServerToDelete = doiServerRepository.findOneById(222); + Assert.assertFalse(doiServerToDelete.isPresent()); + + this.mockHttpSession = loginAsAdmin(); + + this.mockMvc.perform(delete("/srv/api/doiservers/222") + .session(this.mockHttpSession) + .accept(MediaType.parseMediaType("application/json"))) + .andExpect(status().isNotFound()) + .andExpect(content().contentType(API_JSON_EXPECTED_ENCODING)); + } + + @Test + public void updateDoiServer() throws Exception { + List doiServers = doiServerRepository.findAll(); + assertEquals(2, doiServers.size()); + DoiServer doiServerToUpdate = doiServers.get(0); + + DoiServerDto doiServerDto = DoiServerDto.from(doiServerToUpdate); + doiServerDto.setName("New name"); + doiServerDto.setDescription("New 
description"); + doiServerDto.setUrl("http://newurl"); + + Gson gson = new GsonBuilder() + .setFieldNamingStrategy(new JsonFieldNamingStrategy()) + .create(); + String json = gson.toJson(doiServerDto); + + this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build(); + + this.mockHttpSession = loginAsAdmin(); + + this.mockMvc.perform(put("/srv/api/doiservers/" + doiServerToUpdate.getId()) + .content(json) + .contentType(API_JSON_EXPECTED_ENCODING) + .session(this.mockHttpSession) + .accept(MediaType.parseMediaType("application/json"))) + .andExpect(status().isNoContent()); + + Optional doiServerUpdatedOpt = doiServerRepository.findOneById(doiServerToUpdate.getId()); + assertTrue(doiServerUpdatedOpt.isPresent()); + assertEquals(doiServerDto.getName(), doiServerUpdatedOpt.get().getName()); + assertEquals(doiServerDto.getDescription(), doiServerUpdatedOpt.get().getDescription()); + assertEquals(doiServerDto.getUrl(), doiServerUpdatedOpt.get().getUrl()); + } + + @Test + public void updateNonExistingDoiServer() throws Exception { + Optional doiServerToUpdateOptional = doiServerRepository.findOneById(222); + Assert.assertFalse(doiServerToUpdateOptional.isPresent()); + + DoiServer doiServerToUpdate = DoiServerRepositoryTest.newDoiServer(inc); + doiServerToUpdate.setId(222); + DoiServerDto doiServerToUpdateDto = DoiServerDto.from(doiServerToUpdate); + + Gson gson = new GsonBuilder() + .setFieldNamingStrategy(new JsonFieldNamingStrategy()) + .create(); + String json = gson.toJson(doiServerToUpdateDto); + + this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build(); + + this.mockHttpSession = loginAsAdmin(); + + this.mockMvc.perform(put("/srv/api/doiservers/" + doiServerToUpdate.getId()) + .content(json) + .contentType(API_JSON_EXPECTED_ENCODING) + .session(this.mockHttpSession) + .accept(MediaType.parseMediaType("application/json"))) + .andExpect(status().isNotFound()); + } + + @Test + public void updateDoiServerAuth() throws Exception { + List doiServers = 
doiServerRepository.findAll(); + assertEquals(2, doiServers.size()); + DoiServer doiServerToUpdate = doiServers.get(0); + + this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build(); + + this.mockHttpSession = loginAsAdmin(); + + this.mockMvc.perform(post("/srv/api/doiservers/" + doiServerToUpdate.getId() + "/auth") + .param("username", "newusername") + .param("password", "newpassword") + .session(this.mockHttpSession) + .accept(MediaType.parseMediaType("application/json"))) + .andExpect(status().isNoContent()); + } + + @Test + public void addDoiServer() throws Exception { + DoiServer doiServerToAdd = DoiServerRepositoryTest.newDoiServer(inc); + DoiServerDto doiServerToAddDto = DoiServerDto.from(doiServerToAdd); + + Gson gson = new GsonBuilder() + .setFieldNamingStrategy(new JsonFieldNamingStrategy()) + .create(); + String json = gson.toJson(doiServerToAddDto); + + this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build(); + + this.mockHttpSession = loginAsAdmin(); + + MvcResult result = this.mockMvc.perform(put("/srv/api/doiservers") + .content(json) + .contentType(API_JSON_EXPECTED_ENCODING) + .session(this.mockHttpSession) + .accept(MediaType.parseMediaType("application/json"))) + .andExpect(status().is(201)) + .andReturn(); + + int createdDoiServerId = Integer.parseInt(result.getResponse().getContentAsString()); + Optional doiServerAdded = doiServerRepository.findOneById(createdDoiServerId); + Assert.assertTrue(doiServerAdded.isPresent()); + } + + private void createTestData() { + Group group1 = GroupRepositoryTest.newGroup(_inc); + groupRepository.save(group1); + + DoiServer doiServer1 = DoiServerRepositoryTest.newDoiServer(inc); + doiServer1.getPublicationGroups().add(group1); + doiServerRepository.save(doiServer1); + + DoiServer doiServer2 = DoiServerRepositoryTest.newDoiServer(inc); + doiServerRepository.save(doiServer2); + } +} diff --git a/services/src/test/java/org/fao/geonet/api/records/MetadataApiTest.java 
b/services/src/test/java/org/fao/geonet/api/records/MetadataApiTest.java index dd9f38809160..800df1659fd8 100644 --- a/services/src/test/java/org/fao/geonet/api/records/MetadataApiTest.java +++ b/services/src/test/java/org/fao/geonet/api/records/MetadataApiTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -27,8 +27,13 @@ import org.fao.geonet.NodeInfo; import org.fao.geonet.api.ApiParams; import org.fao.geonet.domain.AbstractMetadata; +import org.fao.geonet.domain.Source; +import org.fao.geonet.domain.SourceType; import org.fao.geonet.kernel.SpringLocalServiceInvoker; +import org.fao.geonet.kernel.datamanager.IMetadataIndexer; +import org.fao.geonet.kernel.search.IndexingMode; import org.fao.geonet.repository.MetadataRepository; +import org.fao.geonet.repository.SourceRepository; import org.fao.geonet.services.AbstractServiceIntegrationTest; import org.jdom.Element; import org.junit.Assert; @@ -67,6 +72,12 @@ public class MetadataApiTest extends AbstractServiceIntegrationTest { private EntityManager _entityManager; @Autowired private MetadataRepository metadataRepository; + @Autowired + private IMetadataIndexer metadataIndexer; + @Autowired + private SourceRepository sourceRepository; + @Autowired + private NodeInfo nodeInfo; private int id; private String uuid; @@ -83,6 +94,15 @@ private void createTestData() throws Exception { AbstractMetadata metadata = injectMetadataInDb(getSampleMetadataXml(), context, true); id = metadata.getId(); uuid = metadata.getUuid(); + + metadataIndexer.indexMetadata(String.valueOf(id), true, IndexingMode.full); + + Source subportal = new Source(); + subportal.setName("external"); + subportal.setType(SourceType.subportal); + subportal.setFilter("-uuid:" + uuid); + 
subportal.setUuid(UUID.randomUUID().toString()); + sourceRepository.save(subportal); } @Test @@ -330,6 +350,35 @@ public void getNonAllowedRecordAsXml() throws Exception { .andExpect(xpath("/apiError/message").string(equalTo(ApiParams.API_RESPONSE_NOT_ALLOWED_CAN_VIEW))); } + @Test + public void getNonAvailableRecordInSubportalAsXml() throws Exception { + MockMvc mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build(); + MockHttpSession mockHttpSession = loginAsAdmin(); + + // Set the current node to the subportal + Source subportal = sourceRepository.findOneByName("external"); + Assert.assertNotNull(subportal); + nodeInfo.setId(subportal.getUuid()); + nodeInfo.setDefaultNode(false); + + mockMvc.perform(get("/external/api/records/" + this.uuid + "/formatters/json") + .session(mockHttpSession) + .accept(MediaType.APPLICATION_JSON)) + .andExpect(status().isNotFound()) + .andExpect(content().contentType(API_JSON_EXPECTED_ENCODING)) + .andExpect(jsonPath("$.code").value(equalTo("resource_not_found"))) + .andExpect(jsonPath("$.message").value(equalTo("Metadata not found"))); + + mockMvc.perform(get("/external/api/records/" + this.uuid + "/formatters/xml") + .session(mockHttpSession) + .accept(MediaType.APPLICATION_XML)) + .andExpect(status().isNotFound()) + .andExpect(content().contentType(MediaType.APPLICATION_XML)) + .andExpect(xpath("/apiError/code").string(equalTo("resource_not_found"))) + .andExpect(xpath("/apiError/message").string(equalTo("Metadata not found"))); + } + + @Test public void getNonExistentRecordAsXml() throws Exception { MockMvc mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build(); diff --git a/services/src/test/java/org/fao/geonet/api/records/attachments/AbstractStoreTest.java b/services/src/test/java/org/fao/geonet/api/records/attachments/AbstractStoreTest.java index 6f1b02bf2efa..1a17aa1f64fc 100644 --- a/services/src/test/java/org/fao/geonet/api/records/attachments/AbstractStoreTest.java +++ 
b/services/src/test/java/org/fao/geonet/api/records/attachments/AbstractStoreTest.java @@ -39,8 +39,8 @@ import org.springframework.web.multipart.MultipartFile; import java.io.IOException; +import java.net.HttpURLConnection; import java.net.URL; -import java.net.URLConnection; import java.net.URLStreamHandler; import java.nio.file.Files; import java.nio.file.Path; @@ -69,15 +69,19 @@ public static URL getMockUrl(final String filename, final Path file = Paths.get(resources, filename); assertTrue("Mock file " + filename + " not found", Files.exists(file)); - final URLConnection mockConnection = Mockito.mock(URLConnection.class); + final HttpURLConnection mockConnection = Mockito.mock(HttpURLConnection.class); Mockito.when(mockConnection.getInputStream()).thenReturn( Files.newInputStream(file) ); + Mockito.when(mockConnection.getResponseCode()).thenReturn(HttpURLConnection.HTTP_OK); + + Mockito.when(mockConnection.getContentLengthLong()).thenReturn(-1L); + final URLStreamHandler handler = new URLStreamHandler() { @Override - protected URLConnection openConnection(final URL arg0) { + protected HttpURLConnection openConnection(final URL arg0) { return mockConnection; } }; diff --git a/services/src/test/java/org/fao/geonet/api/records/formatters/FormatterAdminApiIntegrationTest.java b/services/src/test/java/org/fao/geonet/api/records/formatters/FormatterAdminApiIntegrationTest.java index e25b8015b4a2..149c5703e1f4 100644 --- a/services/src/test/java/org/fao/geonet/api/records/formatters/FormatterAdminApiIntegrationTest.java +++ b/services/src/test/java/org/fao/geonet/api/records/formatters/FormatterAdminApiIntegrationTest.java @@ -59,8 +59,8 @@ public void testExec() throws Exception { serviceConfig.setValue(FormatterConstants.USER_XSL_DIR, dataDirectory.getWebappDir() + "/formatters"); listService.init(dataDirectory.getWebappDir(), serviceConfig); - assertFormattersForSchema(true, "iso19139", listService, "datacite", "eu-po-doi", "jsonld", "iso19115-3.2018"); - 
assertFormattersForSchema(false, "iso19139", listService, "datacite", "eu-po-doi", "jsonld", "xsl-view", "citation", "iso19115-3.2018"); + assertFormattersForSchema(true, "iso19139", listService, "datacite", "eu-po-doi", "jsonld", "iso19115-3.2018", "dcat", "eu-dcat-ap", "eu-dcat-ap-hvd", "eu-geodcat-ap", "eu-geodcat-ap-semiceu"); + assertFormattersForSchema(false, "iso19139", listService, "datacite", "eu-po-doi", "jsonld", "xsl-view", "citation", "iso19115-3.2018", "dcat", "eu-dcat-ap", "eu-dcat-ap-hvd", "eu-geodcat-ap", "eu-geodcat-ap-semiceu"); assertFormattersForSchema(true, "dublin-core", listService); } diff --git a/services/src/test/java/org/fao/geonet/api/records/formatters/FormatterApiTest.java b/services/src/test/java/org/fao/geonet/api/records/formatters/FormatterApiTest.java index e0780b4f7231..4d0f3c79c314 100644 --- a/services/src/test/java/org/fao/geonet/api/records/formatters/FormatterApiTest.java +++ b/services/src/test/java/org/fao/geonet/api/records/formatters/FormatterApiTest.java @@ -22,11 +22,25 @@ */ package org.fao.geonet.api.records.formatters; +import java.io.File; import jeeves.server.context.ServiceContext; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; +import org.apache.jena.graph.Graph; +import org.apache.jena.rdf.model.Model; +import org.apache.jena.rdf.model.ModelFactory; +import org.apache.jena.riot.Lang; +import org.apache.jena.riot.RDFDataMgr; +import org.apache.jena.shacl.ShaclValidator; +import org.apache.jena.shacl.Shapes; +import org.apache.jena.shacl.ValidationReport; +import org.apache.jena.shacl.lib.ShLib; import org.fao.geonet.domain.AbstractMetadata; import org.fao.geonet.services.AbstractServiceIntegrationTest; +import org.fao.geonet.utils.Xml; import org.jdom.Element; import org.junit.Before; +import org.junit.Ignore; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.MediaType; @@ -36,12 +50,17 @@ import 
org.springframework.test.web.servlet.setup.MockMvcBuilders; import org.springframework.util.StreamUtils; import org.springframework.web.context.WebApplicationContext; +import org.xmlunit.builder.DiffBuilder; +import org.xmlunit.builder.Input; +import org.xmlunit.diff.DefaultNodeMatcher; +import org.xmlunit.diff.Diff; +import org.xmlunit.diff.ElementSelectors; +import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.*; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.Assert.*; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @@ -55,17 +74,26 @@ public class FormatterApiTest extends AbstractServiceIntegrationTest { public static Collection data() throws Exception { ArrayList data = new ArrayList<>(); - data.add(new String[]{"citation", "?format=?", "iso19139", "formats.txt"}); - data.add(new String[]{"citation", "?format=ris", "iso19139", "ris.txt"}); - data.add(new String[]{"citation", "?format=bibtex", "iso19139", "bibtex.txt"}); - data.add(new String[]{"citation", "?format=text", "iso19139", "text.txt"}); - data.add(new String[]{"citation", "?format=html", "iso19139", "html.html"}); - data.add(new String[]{"citation", "?format=?", "iso19115-3.2018", "formats.txt"}); - data.add(new String[]{"citation", "?format=ris", "iso19115-3.2018", "ris.txt"}); - data.add(new String[]{"citation", "?format=bibtex", "iso19115-3.2018", "bibtex.txt"}); - data.add(new String[]{"citation", "?format=text", "iso19115-3.2018", "text.txt"}); - data.add(new String[]{"citation", "?format=html", "iso19115-3.2018", "html.html"}); - data.add(new String[]{"citation", "?format=text&authorRoles=processor&publisherRoles=owner,custodian", "iso19115-3.2018", "text-custom-role.txt"}); + data.add(new String[]{"iso19139", "citation", "?format=?", "iso19139", "formats.txt"}); + 
data.add(new String[]{"iso19139", "citation", "?format=ris", "iso19139", "ris.txt"}); + data.add(new String[]{"iso19139", "citation", "?format=bibtex", "iso19139", "bibtex.txt"}); + data.add(new String[]{"iso19139", "citation", "?format=text", "iso19139", "text.txt"}); + data.add(new String[]{"iso19139", "citation", "?format=html", "iso19139", "html.html"}); + data.add(new String[]{"iso19139", "citation", "?format=?", "iso19115-3.2018", "formats.txt"}); + data.add(new String[]{"iso19115-3.2018", "citation", "?format=ris", "iso19115-3.2018", "ris.txt"}); + data.add(new String[]{"iso19115-3.2018", "citation", "?format=bibtex", "iso19115-3.2018", "bibtex.txt"}); + data.add(new String[]{"iso19115-3.2018", "citation", "?format=text", "iso19115-3.2018", "text.txt"}); + data.add(new String[]{"iso19115-3.2018", "citation", "?format=html", "iso19115-3.2018", "html.html"}); + data.add(new String[]{"iso19115-3.2018", "citation", "?format=text&authorRoles=processor&publisherRoles=owner,custodian", "iso19115-3.2018", "text-custom-role.txt"}); + + data.add(new String[]{"iso19115-3.2018-dcat-dataset.xml", "dcat", "", "iso19115-3.2018", "dataset-core.rdf"}); + data.add(new String[]{"iso19115-3.2018-dcat-dataset.xml", "eu-dcat-ap", "", "iso19115-3.2018", "dataset-core.rdf"}); + data.add(new String[]{"iso19115-3.2018-dcat-dataset.xml", "eu-dcat-ap", "?multipleAccrualPeriodicityAllowed=true", "iso19115-3.2018", "dataset-core-multipleAccrualPeriodicityAllowed.rdf"}); + data.add(new String[]{"iso19115-3.2018-dcat-dataset.xml", "eu-geodcat-ap", "", "iso19115-3.2018", "dataset-core.rdf"}); + data.add(new String[]{"iso19115-3.2018-dcat-dataset.xml", "eu-dcat-ap-mobility", "", "iso19115-3.2018", "dataset-core.rdf"}); + data.add(new String[]{"iso19115-3.2018-dcat-dataset.xml", "eu-dcat-ap-hvd", "", "iso19115-3.2018", "dataset-core.rdf"}); + data.add(new String[]{"iso19115-3.2018-dcat-service.xml", "dcat", "", "iso19115-3.2018", "service-core.rdf"}); + return data; } @@ -81,47 +109,196 @@ 
public void checkFormatter() throws Exception { MockHttpSession mockHttpSession = loginAsAdmin(); for (String[] testParameter : data()) { - String formatter = testParameter[0]; - String urlParams = testParameter[1]; - String schema = testParameter[2]; - String checkfile = testParameter[3]; + String testFile = testParameter[0]; + String formatter = testParameter[1]; + String urlParams = testParameter[2]; + String schema = testParameter[3]; + String checkfile = testParameter[4]; String url = "/srv/api/records/" - + testDataUuidBySchema.get(schema) - + "/formatters/" + formatter + urlParams; + + testDataUuidBySchema.get(testFile) + + "/formatters/" + formatter + urlParams; try { MvcResult result = mockMvc.perform(get(url) - .session(mockHttpSession) - .accept(MediaType.ALL_VALUE)) - .andExpect(status().isOk()) - .andReturn(); - - assertEquals( - url, - StreamUtils.copyToString( - FormatterApiTest.class.getResourceAsStream( - String.format("%s-%s-%s", - schema, formatter, checkfile) - ), - StandardCharsets.UTF_8) + .session(mockHttpSession) + .accept(MediaType.ALL_VALUE)) + .andExpect(status().isOk()) + .andReturn(); + + String expected = StreamUtils.copyToString( + FormatterApiTest.class.getResourceAsStream( + String.format("%s-%s-%s", + schema, formatter, checkfile) + ), + StandardCharsets.UTF_8) .trim() - .replace("{uuid}", testDataUuidBySchema.get(schema)), - result.getResponse().getContentAsString() - .replaceAll("\\r\\n?", "\n") - ); + .replace("{uuid}", testDataUuidBySchema.get(testFile)); + + String actual = result.getResponse().getContentAsString(); + + boolean isRdf = checkfile.endsWith(".rdf"); + boolean isXml = checkfile.endsWith(".xml"); + + if (isXml || isRdf) { + if (isRdf) { + try { + Model model = ModelFactory.createMemModelMaker().createDefaultModel(); + RDFDataMgr.read(model, + IOUtils.toInputStream(actual, StandardCharsets.UTF_8), + Lang.RDFXML); + } catch (Exception rdfException) { + fail(String.format("%s. Checked with %s. RDF model error. %s. 
Checked with: %s", + url, checkfile, rdfException.getMessage(), actual)); + } + } + + +// FileUtils.writeStringToFile(new File("/tmp/services/src/test/resources/org/fao/geonet/api/records/formatters/new/" + String.format("%s-%s-%s", +// schema, formatter, checkfile)), actual.replaceFirst("urn:uuid/.*", "urn:uuid/{uuid}"), StandardCharsets.UTF_8); + + Diff diff = DiffBuilder + .compare(Input.fromString(actual)) + .withTest(Input.fromString(expected)) + .withNodeMatcher(new DefaultNodeMatcher(ElementSelectors.byName)) + .normalizeWhitespace() + .ignoreComments() + .checkForSimilar() + .build(); + assertFalse( + String.format("%s. Checked with %s. Differences: %s", url, checkfile, diff.toString()), + diff.hasDifferences()); + + if (isRdf) { + String[] shaclValidation = {}; + if ("eu-dcat-ap".equalsIgnoreCase(formatter)) { + // https://github.com/ISAITB/validator-resources-dcat-ap/blob/master/resources/config.properties#L117-L128 +// shaclValidation = new String[]{ +// "shacl/eu-dcat-ap-3.0.0/shapes.ttl", +// "shacl/eu-dcat-ap-3.0.0/range.ttl", +// "shacl/eu-dcat-ap-3.0.0/shapes_recommended.ttl", +// "shacl/eu-dcat-ap-3.0.0/imports.ttl", +// "shacl/eu-dcat-ap-3.0.0/deprecateduris.ttl"}; +// } else if("eu-dcat-ap-hvd".equalsIgnoreCase(formatter)){ +// shaclValidation = new String[]{"shacl/dcat-ap-hvd-2.2.0-SHACL.ttl"}; +// } else if("eu-geodcat-ap".equalsIgnoreCase(formatter)){ +// shaclValidation = new String[]{"shacl/geodcat-ap-2.0.1-SHACL.ttl"}; + } + for (String shaclShapes : shaclValidation) { + applyShaclValidation(formatter, schema, checkfile, url, shaclShapes); + } + } + } else { + assertEquals( + url, + expected, + actual.replaceAll("\\r\\n?", "\n") + ); + } } catch (Exception e) { - fail(url); + fail(String.format("Failure on %s. 
Error is: %s", url, e.getMessage())); } } } + + @Test + @Ignore + public void quickTestToValidateRdfModelAndShaclRules() throws IOException { + String formatter = "eu-dcat-ap"; + String schema = "iso19115-3.2018"; + String checkfile = "dataset-core.rdf"; + String file = String.format("%s-%s-%s", schema, formatter, checkfile); + String expected = StreamUtils.copyToString( + FormatterApiTest.class.getResourceAsStream(file), + StandardCharsets.UTF_8); + try { + Model model = ModelFactory.createMemModelMaker().createDefaultModel(); + RDFDataMgr.read(model, + IOUtils.toInputStream(expected, StandardCharsets.UTF_8), + Lang.RDFXML); + } catch (Exception rdfException) { + fail(String.format("%s. RDF model error. %s.", + file, rdfException.getMessage())); + } +// String[] shaclValidation = new String[]{"shacl/dcat-ap-2.1.1-base-SHACL.ttl"}; + String[] shaclValidation = new String[]{ + "shacl/eu-dcat-ap-3.0.0/shapes.ttl", + "shacl/eu-dcat-ap-3.0.0/range.ttl", + "shacl/eu-dcat-ap-3.0.0/shapes_recommended.ttl", + "shacl/eu-dcat-ap-3.0.0/imports.ttl", + "shacl/eu-dcat-ap-3.0.0/deprecateduris.ttl" + }; +// String[] shaclValidation = new String[]{"dcat-ap-hvd-2.2.0-SHACL.ttl"}; +// String[] shaclValidation = new String[]{"geodcat-ap-2.0.1-SHACL.ttl"}; + for (String shaclShapes : shaclValidation) { + applyShaclValidation(formatter, schema, checkfile, "", shaclShapes); + } + } + + private static void applyShaclValidation(String formatter, String schema, String checkfile, String url, String shaclShapes) { + String SHAPES = FormatterApiTest.class.getResource(shaclShapes).getFile(); + if (SHAPES.startsWith("/")) { + SHAPES.replaceFirst("/", ""); + } + + //Load document to validate. 
+ String DATA = FormatterApiTest.class.getResource( + String.format("%s-%s-%s", + schema, formatter, checkfile) + ).getFile(); + if (DATA.startsWith("/")) { + DATA.replaceFirst("/", ""); + } + Graph shapesGraph; + Shapes shapes; + try { + shapesGraph = RDFDataMgr.loadGraph(SHAPES); + shapes = Shapes.parse(shapesGraph); + } catch (Exception e) { + fail(String.format( + "%s. Checked with %s [%s]. SHACL graph error. Error is: %s", + url, checkfile, shaclShapes, e.getMessage())); + return; + } + + Graph dataGraph = RDFDataMgr.loadGraph(DATA); + + ValidationReport report = ShaclValidator.get().validate(shapes, dataGraph); + + if (!report.conforms()) { + long count = report.getEntries().stream() + .filter(e -> e.severity().level().getURI().equals("http://www.w3.org/ns/shacl#Violation")) + .count(); + + ShLib.printReport(report); + System.out.println(); + RDFDataMgr.write(System.out, report.getModel(), Lang.TTL); + fail(String.format("%s. Checked with %s [%s]. Invalid DCAT-AP document. %d violations found. 
See report in the test console output.", + url, checkfile, shaclShapes, count)); + } + } + private void createTestData() throws Exception { loginAsAdmin(context); - loadFile(getSampleISO19139MetadataXml()); - loadFile(getSampleISO19115MetadataXml()); + + Set testFiles = new HashSet<>(); + for (String[] testParameter : data()) { + testFiles.add(testParameter[0]); + } + for (String file : testFiles) { + if (file.equals("iso19139")) { + loadFile("iso19139", getSampleISO19139MetadataXml()); + } else if (file.equals("iso19115-3.2018")) { + loadFile("iso19115-3.2018", getSampleISO19115MetadataXml()); + } else { + loadFile(file, + Xml.loadStream( + FormatterApiTest.class.getResourceAsStream(file))); + } + } } - private void loadFile(Element sampleMetadataXml) throws Exception { + private void loadFile(String key, Element sampleMetadataXml) throws Exception { AbstractMetadata metadata = injectMetadataInDbDoNotRefreshHeader(sampleMetadataXml, context); - testDataUuidBySchema.put(metadata.getDataInfo().getSchemaId(), metadata.getUuid()); + testDataUuidBySchema.put(key, metadata.getUuid()); } } diff --git a/services/src/test/java/org/fao/geonet/api/registries/vocabularies/KeywordsApiTest.java b/services/src/test/java/org/fao/geonet/api/registries/vocabularies/KeywordsApiTest.java index 01871cb85a04..2545a8978829 100644 --- a/services/src/test/java/org/fao/geonet/api/registries/vocabularies/KeywordsApiTest.java +++ b/services/src/test/java/org/fao/geonet/api/registries/vocabularies/KeywordsApiTest.java @@ -249,6 +249,6 @@ public void testImportOntologyToSkos() throws Exception { "Mobility Theme", scheme.getChildText("title", NAMESPACE_DCT)); List concepts = thesaurus.getChildren("Concept", SKOS_NAMESPACE); - assertEquals(123, concepts.size()); + assertEquals(121, concepts.size()); } } diff --git a/services/src/test/java/org/fao/geonet/api/site/SiteApiTest.java b/services/src/test/java/org/fao/geonet/api/site/SiteApiTest.java index 87f3852d8de3..7af3772df0d3 100644 --- 
a/services/src/test/java/org/fao/geonet/api/site/SiteApiTest.java +++ b/services/src/test/java/org/fao/geonet/api/site/SiteApiTest.java @@ -62,7 +62,7 @@ public void getSettingsSet() throws Exception { @Test - public void updateSettings() throws Exception { + public void updateSettingsFormUrlEncoded() throws Exception { this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build(); this.mockHttpSession = loginAsAdmin(); @@ -78,10 +78,42 @@ public void updateSettings() throws Exception { String newName = "DataHub"; this.mockMvc.perform(post("/srv/api/site/settings") + .contentType(MediaType.APPLICATION_FORM_URLENCODED_VALUE) .param("system/site/name", newName) + .session(this.mockHttpSession)) + .andExpect(status().is(204)) + .andExpect(content().string("")); // No content should be returned. + + this.mockMvc.perform(get("/srv/api/site/settings") .session(this.mockHttpSession) .accept(MediaType.parseMediaType("application/json"))) - .andExpect(status().is(204)); + .andExpect(status().isOk()) + .andExpect(content().contentType(API_JSON_EXPECTED_ENCODING)) + .andExpect(jsonPath("$['system/site/name']", is(newName))); + } + + @Test + public void updateSettingsJson() throws Exception { + this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build(); + + this.mockHttpSession = loginAsAdmin(); + + encryptor.initialize(); + + this.mockMvc.perform(get("/srv/api/site/settings") + .session(this.mockHttpSession) + .accept(MediaType.parseMediaType("application/json"))) + .andExpect(status().isOk()) + .andExpect(content().contentType(API_JSON_EXPECTED_ENCODING)) + .andExpect(jsonPath("$['system/site/name']", is("My GeoNetwork catalogue"))); + + String newName = "JsonDataHub"; + this.mockMvc.perform(post("/srv/api/site/settings") + .contentType(MediaType.APPLICATION_JSON_VALUE) + .content("{\"system/site/name\": \"" + newName + "\"}") + .session(this.mockHttpSession)) + .andExpect(status().is(204)) + .andExpect(content().string("")); // No content should be returned. 
this.mockMvc.perform(get("/srv/api/site/settings") .session(this.mockHttpSession) diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-dcat-dataset-core.rdf b/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-dcat-dataset-core.rdf new file mode 100644 index 000000000000..0b412821b496 --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-dcat-dataset-core.rdf @@ -0,0 +1,588 @@ + + + + + + + Dataset + + + + + dataset + + + + + + urn:uuid:{uuid} + 2023-12-08T12:26:19.337626Z + 2019-04-02T12:33:24 + Plan de secteur en vigueur (version coordonnée vectorielle) + Le plan de secteur est un outil réglementaire d'aménagement du territoire et d'urbanisme + régional wallon constitué de plusieurs couches de données spatiales. + + Le plan de secteur organise l'espace territorial wallon et en définit les différentes affectations afin + d'assurer le développement des activités humaines de manière harmonieuse et d'éviter la consommation abusive + d'espace. Il dispose d'une pleine valeur réglementaire et constitue ainsi la colonne vertébrale d’un + développement territorial efficace, cohérent et concerté. Cet aspect est renforcé par la réforme engendrée par + l'entrée en vigueur du Code du Développement Territorial (CoDT). + + La Région wallonne est couverte par 23 plans de secteur, adoptés entre 1977 et 1987. + + Le plan de secteur est divisé en zones destinées à l'urbanisation (zone d'habitat, de loisirs, d'activité + économique, etc.) et en zones non destinées à l'urbanisation (zones agricoles, forestières, espaces verts, + etc.). Plusieurs couches de données spatiales constituent le plan de secteur. Elles sont définies dans le + CoDT. 
Outre la détermination des différentes zones d'affectation du territoire wallon, il contient : + - les limites communales du PdS; + - les révisions (infrastructures en révision, périmètres de révisions partielles du PdS, mesures + d'aménagement, prescriptions supplémentaires); + - les infrastructures (réseau routier, ferroviaire, voies navigables, lignes électriques haute tension, + canalisations); + - les périmètres de protection (périmètres de liaison écologique, d'intérêt paysager, d'intérêt culture, + historique ou esthétique, les points de vue remarquable et leur périmètre, les réservations d'infrastructure + principale, les extension de zone d'extraction); + - la référence au Plan de Secteur d'origine; + - les étiquettes des secteurs d'aménagement de 1978. + + Ces différentes couches de données sont présentées sous format vectoriel (point, ligne ou polygone). + + Si le plan de secteur a valeur réglementaire, il n’est pas figé pour autant. Les modalités de révision sont + formalisées dans des procédures qui ont été simplifiées et rationalisées dans le CoDT. Cette version constitue + la version la plus récente des couches de données et intègre les mises à jour faisant suite à la mise en œuvre + du CoDT. + + A ce jour, la gestion du plan de secteur relève de la Direction de l’Aménagement régional (DAR) qui est en + charge de l'outil "plan de secteur" : évolution au regard des objectifs régionaux, notamment du développement + économique dans une perspective durable, information, sensibilisation, lien avec la planification stratégique + régionale et avec les outils communaux. Les révisions sont instruites par la DAR, à l'exception de celles qui + ont été attribuées à la cellule de développement territorial (CDT), également dénommée "ESPACE", dont la + création a été décidée par le Gouvernement wallon le 19 septembre 2005. 
+ + + + + + Complete metadata + All information about the resource + + + Plan de secteur en vigueur (version coordonnée vectorielle) + 2023-03-31 + 2023-02-21 + 1.0 + http://geodata.wallonie.be/id/7fe2f305-1302-4297-b67e-792f55acd834 + BE.SPW.INFRASIG.CARTON/DGATLPE__PDS + Le plan de secteur est un outil réglementaire d'aménagement du territoire et d'urbanisme + régional wallon constitué de plusieurs couches de données spatiales. + + Le plan de secteur organise l'espace territorial wallon et en définit les différentes affectations afin + d'assurer le développement des activités humaines de manière harmonieuse et d'éviter la consommation abusive + d'espace. Il dispose d'une pleine valeur réglementaire et constitue ainsi la colonne vertébrale d’un + développement territorial efficace, cohérent et concerté. Cet aspect est renforcé par la réforme engendrée par + l'entrée en vigueur du Code du Développement Territorial (CoDT). + + La Région wallonne est couverte par 23 plans de secteur, adoptés entre 1977 et 1987. + + Le plan de secteur est divisé en zones destinées à l'urbanisation (zone d'habitat, de loisirs, d'activité + économique, etc.) et en zones non destinées à l'urbanisation (zones agricoles, forestières, espaces verts, + etc.). Plusieurs couches de données spatiales constituent le plan de secteur. Elles sont définies dans le + CoDT. 
Outre la détermination des différentes zones d'affectation du territoire wallon, il contient : + - les limites communales du PdS; + - les révisions (infrastructures en révision, périmètres de révisions partielles du PdS, mesures + d'aménagement, prescriptions supplémentaires); + - les infrastructures (réseau routier, ferroviaire, voies navigables, lignes électriques haute tension, + canalisations); + - les périmètres de protection (périmètres de liaison écologique, d'intérêt paysager, d'intérêt culture, + historique ou esthétique, les points de vue remarquable et leur périmètre, les réservations d'infrastructure + principale, les extension de zone d'extraction); + - la référence au Plan de Secteur d'origine; + - les étiquettes des secteurs d'aménagement de 1978. + + Ces différentes couches de données sont présentées sous format vectoriel (point, ligne ou polygone). + + Si le plan de secteur a valeur réglementaire, il n’est pas figé pour autant. Les modalités de révision sont + formalisées dans des procédures qui ont été simplifiées et rationalisées dans le CoDT. Cette version constitue + la version la plus récente des couches de données et intègre les mises à jour faisant suite à la mise en œuvre + du CoDT. + + A ce jour, la gestion du plan de secteur relève de la Direction de l’Aménagement régional (DAR) qui est en + charge de l'outil "plan de secteur" : évolution au regard des objectifs régionaux, notamment du développement + économique dans une perspective durable, information, sensibilisation, lien avec la planification stratégique + régionale et avec les outils communaux. Les révisions sont instruites par la DAR, à l'exception de celles qui + ont été attribuées à la cellule de développement territorial (CDT), également dénommée "ESPACE", dont la + création a été décidée par le Gouvernement wallon le 19 septembre 2005. 
+ + + Mis à jour continue + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + + + + + Thierry Berthet + + + Direction du Développement territorial (SPW - Territoire, Logement, Patrimoine, + Énergie - Département de l'Aménagement du territoire et de l'Urbanisme - Direction du Développement + territorial) + + + + + + + custodian + + + + + + + + + + + Jean Berthet + + + Direction du Développement territorial (SPW - Territoire, Logement, Patrimoine, + Énergie - Département de l'Aménagement du territoire et de l'Urbanisme - Direction du Développement + territorial) + + + + + + + + custodian + + + + + + + + + Service public de Wallonie (SPW) + + https://geoportail.wallonie.be + + + + + Agriculture + + + + + Société et activités + + + + + Aménagement du territoire + + + + + Plans et règlements + + + espace + zones naturelles, paysages, écosystèmes + législation + géographie + agriculture + aménagement du paysage + réseau ferroviaire + planification écologique + plan d'aménagement + extraction + habitat rural + gestion et planification rurale + secteur d'activité + infrastructure + plan de gestion + planification rurale + planification économique + plan + développement du territoire + infrastructure routière + plan d'occupation des sols + activité économique + réseau routier + planification urbaine + loisirs + canalisation + habitat urbain + mesure d'aménagement du territoire + territoire + planification régionale + habitat + PanierTelechargementGeoportail + Open Data + WalOnMap + Extraction_DIG + BDInfraSIGNO + aménagement du territoire + plan 
de secteur + point remarquable + PDS + CoDT + Point de vue + centre d'enfouissement + servitude + Code du Développement Territorial + + + Altitude + + + + + Caractéristiques géographiques + météorologiques + + + + + Caractéristiques géographiques + océanographiques + + + + + Conditions atmosphériques + + + + + Dénominations géographiques + + + + + Géologie + + + + + Hydrographie + + + + + Installations agricoles et aquacoles + + + + + Régions maritimes + + + + + Répartition des espèces + + + + + Ressources minérales + + + + + Santé et sécurité des personnes + + + Mobilité + Observation de la terre et environnement + + + 1143/2014 + + + + + 2023/138 + + + + + + + + No limitations to public access + + + + + + + + + + + + + + + + + + + + + + + Conditions d'accès et d'utilisation spécifiques + + + + + + + + + + + + + + + + + + + + + + + + INSPIRE Data Specification on Transport Networks – Technical Guidelines, + version 3.2 + 2014-04-17 + + + La version numérique vectorielle du plan de secteur se base sur la version papier originale + digitalisée par l'Institut Wallon en juin 1994 (fond de plan au 1/10.000) qui a été complétée en mai 2001 par + ce même institut. La donnée intègre la légende actuellement en vigueur et est mise à jour en continu par la + DGO4 depuis 2001. + + L'intégration des nouveaux dossiers, la correction d'erreurs et la suppression des dossiers abrogés se font au + fur et à mesure de la réception des informations. Les données publiées sont mises à jour mensuellement sur + base des données de travail. + + Depuis leur adoption, les plans de secteur ont fait l’objet de nombreuses révisions. Le Gouvernement wallon a + en effet estimé nécessaire de les adapter pour y inscrire de nouveaux projets: routes, lignes électriques à + haute tension, tracé TGV, nouvelles zones d'activité économique, zones d’extraction, etc. + + La procédure de révision et la légende ont été modifiées à plusieurs reprises. 
+ + Suite à l'entrée en vigueur du CoDT, des changements sont à noter : + - Trois nouvelles zones destinées à l'urbanisation : Zone de dépendance d’extraction destinée à accueillir les + dépôts et dépendances industrielles (transformation des matières) à l’activité d’extraction, la zone d'enjeu + communal (ZEC) et la zone d'enjeu régional (ZER). Les ZEC et ZER sont toutes deux accompagnées d'une carte + d'affectation des sols à valeur indicative + - une nouvelle zone non destinée à l'urbanisation : zone d'extraction (ZE). + 0.01 + 30 + 0.3048 + P0Y2M0DT0H0M0S + + + + + + + + + + + + + Région wallonne + + + + + 2023-12-06 + 2023-12-08 + + + + + + + + + + + + + + + + + pds_codt_pic + + + + + 2023-12-08T00:00:00 + + + 10485760 + + + ESRI Shapefile (.shp) + + + + + + + + + + Application de consultation des données de la DGO4 - Plan de secteur + Application dédiée à la consultation des couches de données relatives au Plan de + secteur. Cette application constitue un thème de l'application de consultation des données de la DGO4. + + + + + Application de consultation des données de la DGO4 - Plan de secteur + + + + + + + + Application WalOnMap - Toute la Wallonie à la carte + Application cartographique du Geoportail (WalOnMap) qui permet de découvrir les + données géographiques de la Wallonie. 
+ + + + + Application WalOnMap - Toute la Wallonie à la carte + + + + + + + + Service de visualisation ESRI-REST + Ce service ESRI-REST permet de visualiser la série de couches de données "Plan de + secteur" + + + + + Service de visualisation ESRI-REST + + + + + + + + Service de visualisation WMS + Ce service WMS permet de visualiser la série de couches de données "Plan de + secteur" + + + + + Service de visualisation WMS + + + + + + + + + + + Base de données du Plan de secteur + Site permettant la recherche de Plans de secteur et des modifications dans la base + de données + + + + + Inventaire des données géographiques de la DGO4 + Inventaire des données géographiques produites ou exploitées à la DGO4. + + + + + La Direction de l'Aménagement Régional + Site de la Direction de l'Aménagement Régional (DAR) + + + + + Plan de Secteur au format SHP + Dossier compressé contenant le jeu de données du Plan de Secteur au format + shapefile en coordonnées Lambert 72 + + + + + Légende associée au plan de secteur (sur base du service de visualisation) + + + + + diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-dcat-dataset.xml b/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-dcat-dataset.xml new file mode 100644 index 000000000000..20316b0d6b51 --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-dcat-dataset.xml @@ -0,0 +1,1910 @@ + + + + + 7fe2f305-1302-4297-b67e-792f55acd834 + + + urn:uuid + + + + + + + + + + + + + + + + + + + + Collection de données thématiques + + + + + + + + + + + + Direction de la gestion des informations territoriales (SPW - Territoire, Logement, + Patrimoine, Énergie - Département de l'Aménagement du territoire et de l'Urbanisme - Direction de la + gestion des informations territoriales) + + + + + + + + donnees.dgo4@spw.wallonie.be + + + + + + + + + + + + + 2023-12-08T12:26:19.337626Z + + + + + + + + + + 2019-04-02T12:33:24 
+ + + + + + + + + + ISO 19115 + + + 2003/Cor 1:2006 + + + + + + + + https://metawal.wallonie.be/geonetwork/srv/api/records/7fe2f305-1302-4297-b67e-792f55acd834 + + + + Complete metadata + + + All information about the resource + + + + + + + + + + + + EPSG:31370 + + + Belge 1972 / Belgian Lambert 72 (EPSG:31370) + + + + + + + + + + + + + + Plan de secteur en vigueur (version coordonnée vectorielle) + + + PDS + + + + + 2023-03-31 + + + + + + + + + + 2023-02-21 + + + + + + + + 1.0 + + + + + 7fe2f305-1302-4297-b67e-792f55acd834 + + + http://geodata.wallonie.be/id/ + + + + + + + DGATLPE__PDS + + + BE.SPW.INFRASIG.CARTON + + + + + + + Le plan de secteur est un outil réglementaire d'aménagement du territoire et d'urbanisme + régional wallon constitué de plusieurs couches de données spatiales. + + Le plan de secteur organise l'espace territorial wallon et en définit les différentes affectations afin + d'assurer le développement des activités humaines de manière harmonieuse et d'éviter la consommation abusive + d'espace. Il dispose d'une pleine valeur réglementaire et constitue ainsi la colonne vertébrale d’un + développement territorial efficace, cohérent et concerté. Cet aspect est renforcé par la réforme engendrée par + l'entrée en vigueur du Code du Développement Territorial (CoDT). + + La Région wallonne est couverte par 23 plans de secteur, adoptés entre 1977 et 1987. + + Le plan de secteur est divisé en zones destinées à l'urbanisation (zone d'habitat, de loisirs, d'activité + économique, etc.) et en zones non destinées à l'urbanisation (zones agricoles, forestières, espaces verts, + etc.). Plusieurs couches de données spatiales constituent le plan de secteur. Elles sont définies dans le + CoDT. 
Outre la détermination des différentes zones d'affectation du territoire wallon, il contient : + - les limites communales du PdS; + - les révisions (infrastructures en révision, périmètres de révisions partielles du PdS, mesures + d'aménagement, prescriptions supplémentaires); + - les infrastructures (réseau routier, ferroviaire, voies navigables, lignes électriques haute tension, + canalisations); + - les périmètres de protection (périmètres de liaison écologique, d'intérêt paysager, d'intérêt culture, + historique ou esthétique, les points de vue remarquable et leur périmètre, les réservations d'infrastructure + principale, les extension de zone d'extraction); + - la référence au Plan de Secteur d'origine; + - les étiquettes des secteurs d'aménagement de 1978. + + Ces différentes couches de données sont présentées sous format vectoriel (point, ligne ou polygone). + + Si le plan de secteur a valeur réglementaire, il n’est pas figé pour autant. Les modalités de révision sont + formalisées dans des procédures qui ont été simplifiées et rationalisées dans le CoDT. Cette version constitue + la version la plus récente des couches de données et intègre les mises à jour faisant suite à la mise en œuvre + du CoDT. + + A ce jour, la gestion du plan de secteur relève de la Direction de l’Aménagement régional (DAR) qui est en + charge de l'outil "plan de secteur" : évolution au regard des objectifs régionaux, notamment du développement + économique dans une perspective durable, information, sensibilisation, lien avec la planification stratégique + régionale et avec les outils communaux. Les révisions sont instruites par la DAR, à l'exception de celles qui + ont été attribuées à la cellule de développement territorial (CDT), également dénommée "ESPACE", dont la + création a été décidée par le Gouvernement wallon le 19 septembre 2005. 
+ + + + + + + + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + helpdesk.carto@spw.wallonie.be + + + + + + + + + + + + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + helpdesk.carto@spw.wallonie.be + + + + + + + + + + + + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + helpdesk.carto@spw.wallonie.be + + + + + + + + + + + + + + + + + + Direction du Développement territorial (SPW - Territoire, Logement, Patrimoine, + Énergie - Département de l'Aménagement du territoire et de l'Urbanisme - Direction du Développement + territorial) + + + + + + + + developpement.territorial@spw.wallonie.be + + + + + + + + + Thierry Berthet + + + + + + + Jean Berthet + + + + + + + jean.b@spw.org + + + + + + + + + https://orcid.org/jb98765 + + + + + + + + + + + + + + + + + + + + + Service public de Wallonie (SPW) + + + + + + + helpdesk.carto@spw.wallonie.be + + + + + + + https://geoportail.wallonie.be + + + WWW:LINK + + + Géoportail de la Wallonie + + + Géoportail de la Wallonie + + + + + + + + + + + + + + + + + + + + + 10000 + + + + + + + + + 1 + + + + + + + 30 + + + + + + + 1 + + + + + P0Y2M0DT0H0M0S + + + planningCadastre + + + imageryBaseMapsEarthCover + + + location + + + + + Région wallonne + + + + + 2.75 + + + 6.50 + + + 49.45 + + + 50.85 + + + + + + + + + + https://en.wikipedia.org/wiki/Wallonia + + + + + + + + + + + + Région wallonne + + + + + + + + + + + + 2023-12-06 + + + + + 2023-12-08 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + P0Y0M0DT0H15M0S + + + L'intégration des nouveaux dossiers, la correction d'erreurs et la suppression des + dossiers abrogés se font au fur et à mesure de la réception des 
informations. + + + + + + + + + https://metawal.wallonie.be/geonetwork/srv/api/records/7fe2f305-1302-4297-b67e-792f55acd834/attachments/pds_codt.png + + + + pds_codt_pic + + + png + + + + + + + + Agriculture + + + + + Société et activités + + + + + Aménagement du territoire + + + + + Plans et règlements + + + + + + + + + Thèmes du + géoportail wallon + + + + + + 2014-06-26 + + + + + + + + + + + geonetwork.thesaurus.external.theme.Themes_geoportail_wallon_hierarchy + + + + + + + + + + + + espace + + + zones naturelles, paysages, écosystèmes + + + législation + + + géographie + + + agriculture + + + + + + + + GEMET themes + + + + + 2009-09-22 + + + + + + + + + + + geonetwork.thesaurus.external.theme.gemet-theme + + + + + + + + + + + + aménagement du paysage + + + réseau ferroviaire + + + planification écologique + + + plan d'aménagement + + + extraction + + + habitat rural + + + gestion et planification rurale + + + secteur d'activité + + + infrastructure + + + plan de gestion + + + planification rurale + + + planification économique + + + plan + + + développement du territoire + + + infrastructure routière + + + plan d'occupation des sols + + + activité économique + + + réseau routier + + + planification urbaine + + + loisirs + + + canalisation + + + habitat urbain + + + mesure d'aménagement du territoire + + + territoire + + + planification régionale + + + habitat + + + + + + + + GEMET + + + + + 2009-09-22 + + + + + + + + + + + geonetwork.thesaurus.external.theme.gemet + + + + + + + + + + + + PanierTelechargementGeoportail + + + Open Data + + + WalOnMap + + + Extraction_DIG + + + BDInfraSIGNO + + + + + + + + Mots-clés InfraSIG + + + + + 2022-10-03 + + + + + + + + + + + geonetwork.thesaurus.external.theme.infraSIG + + + + + + + + + + + + aménagement du territoire + + + plan de secteur + + + point remarquable + + + PDS + + + CoDT + + + Point de vue + + + centre d'enfouissement + + + servitude + + + Code du Développement Territorial + + + + + + + + + + Altitude + + + 
Caractéristiques géographiques + météorologiques + + + + Caractéristiques géographiques + océanographiques + + + + Conditions atmosphériques + + + Dénominations géographiques + + + Géologie + + + Hydrographie + + + Installations agricoles et aquacoles + + + + Régions maritimes + + + Répartition des espèces + + + Ressources minérales + + + Santé et sécurité des personnes + + + + + + + + GEMET - INSPIRE themes, version 1.0 + + + + + + 2008-01-01 + + + + + + + + + + 2008-06-01 + + + + + + + + + + + geonetwork.thesaurus.external.theme.httpinspireeceuropaeutheme-theme + + + + + + + + + + + + Mobilité + + + Observation de la terre et environnement + + + + + + + + High-value dataset categories + + + + + 2023-10-05 + + + + + + + + + + 2023-10-05 + + + + + + + + + + + geonetwork.thesaurus.external.theme.high-value-dataset-category + + + + + + + + + + + + 1143/2014 + + + 2023/138 + + + + + + + + Applicable + legislations + + + + + + 2024-04-04 + + + + + + + + + + 2024-04-04 + + + + + + + + + + + geonetwork.thesaurus.external.theme.applicable-legislation + + + + + + + + + + + + + + + No + limitations to public access + + + + + + + + + + + No limitations to public access + + + + + + + Conditions d'accès et d'utilisation spécifiques + + + + + + Les + conditions générales d'utilisation s'appliquent. + + + + Les + conditions générales d'accès s’appliquent. + + + + + Les conditions générales d'utilisation s'appliquent et sont étendues par les conditions particulières de + type A. + + + + BSL + + + https://opensource.org/licenses/CATOSL-1.1 + + + Ces données sont disponibles sous licence CC-BY 4.0. 
+ + + + + + + + + Commission Implementing + Regulation (EU) 2023/138 of 21 December 2022 laying down a list of specific high-value datasets and + the arrangements for their publication and re-use + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ESRI Shapefile + (.shp) + + + + + - + + + + + + + + + + + ESRI File + Geodatabase (.fgdb) + + + + + 10.x + + + + + + + + + + + + + + + + Service public de Wallonie (SPW) + + + + + + + helpdesk.carto@spw.wallonie.be + + + + + + + + + + + + + + + + 2023-12-08T00:00:00 + + + Il est conseillé d'utiliser les liens référencés dans les ressources associés dans + le cas où la demande de téléchargement porte sur l'entièreté du territoire concerné par le jeu de + données. + + Si votre demande porte sur un format spécifique de donnée ou une partie spécifique du territoire, + veuillez suivre les instructions d'obtention d'une copie physique d’une donnée détaillées sur + https://geoportail.wallonie.be/telecharger. L’utilisation des géoservices est à privilégier. + + Cette ressource est une série de couches de données. En la commandant, l'ensemble des couches + constitutives de cette série vous sera automatiquement fourni. + + + + + + + + + + + + + + + + + 10 + + + + + https://data.monde.org/secteur.shp + + + WWW:DOWNLOAD:ESRI Shapefile (.shp) + + + + + + + + + + + + + + + + + + GeoPackage + + + + + ZIP + + + + + + + + + + + + + http://geoapps.wallonie.be/webgisdgo4/#CTX=PDS + + + WWW:LINK + + + Application de consultation des données de la DGO4 - Plan de secteur + + + + Application dédiée à la consultation des couches de données relatives au Plan de + secteur. Cette application constitue un thème de l'application de consultation des données de la DGO4. 
+ + + + + + + + + + + + https://geoportail.wallonie.be/walonmap/#ADU=https://geoservices.wallonie.be/arcgis/rest/services/AMENAGEMENT_TERRITOIRE/PDS/MapServer + + + + WWW:LINK + + + WalOnMap + + + Application WalOnMap - Toute la Wallonie à la carte + + + Application cartographique du Geoportail (WalOnMap) qui permet de découvrir les + données géographiques de la Wallonie. + + + + + + + + + + + + https://geoservices.wallonie.be/arcgis/rest/services/AMENAGEMENT_TERRITOIRE/PDS/MapServer + + + + ESRI:REST + + + Service de visualisation ESRI-REST + + + Ce service ESRI-REST permet de visualiser la série de couches de données "Plan de + secteur" + + + + + + + + + + + + https://geoservices.wallonie.be/arcgis/services/AMENAGEMENT_TERRITOIRE/PDS/MapServer/WMSServer?request=GetCapabilities&service=WMS + + + + OGC:WMS + + + Service de visualisation WMS + + + Ce service WMS permet de visualiser la série de couches de données "Plan de + secteur" + + + + + + + + + + + http://spw.wallonie.be/dgo4/site_thema/index.php?thema=modif_ps + + + + WWW:LINK + + + Base de données du Plan de secteur + + + Site permettant la recherche de Plans de secteur et des modifications dans la base + de données + + + + + + + + + + + https://lampspw.wallonie.be/dgo4/site_thema/index.php/synthese + + + + WWW:LINK + + + Inventaire des données géographiques de la DGO4 + + + Inventaire des données géographiques produites ou exploitées à la DGO4. 
+ + + + + + + + + + + http://spw.wallonie.be/dgo4/site_amenagement/site/directions/dar + + + + WWW:LINK + + + La Direction de l'Aménagement Régional + + + Site de la Direction de l'Aménagement Régional (DAR) + + + + + + + + + + + http://geoservices.wallonie.be/geotraitement/spwdatadownload/get/7fe2f305-1302-4297-b67e-792f55acd834/PDS_SHAPE_31370.zip + + + + WWW:DOWNLOAD-1.0-http--download + + + Plan de Secteur au format SHP + + + Dossier compressé contenant le jeu de données du Plan de Secteur au format + shapefile en coordonnées Lambert 72 + + + + + + + + + + + + + + + + + + + + + + Série de couches de données thématiques + + + + + + + + + + + + + RÈGLEMENT (UE) N o 1089/2010 + DE LA COMMISSION du 23 novembre 2010 portant modalités d'application de la directive 2007/2/CE du + Parlement européen et du Conseil en ce qui concerne l'interopérabilité des séries et des services + de données géographiques + + + + + + 2010-12-08 + + + + + + + + + + Voir la spécification référencée + + + false + + + + + + + + + + + + + RÈGLEMENT (UE) N o 1089/2010 DE LA + COMMISSION du 23 novembre 2010 portant modalités d'application de la directive 2007/2/CE du + Parlement européen et du Conseil en ce qui concerne l'interopérabilité des séries et des services + de données géographiques + + + + + + 2010-12-08 + + + + + + + + + + Voir la spécification référencée + + + true + + + + + + + + + + + + + INSPIRE Data Specification + on Transport Networks – Technical Guidelines, version 3.2 + + + + + + 2014-04-17 + + + + + + + + + + Voir la spécification référencée + + + true + + + + + + + + + + + + + INSPIRE Data Specification on Transport Networks – Technical Guidelines, + version 3.2 + + + + + + 2014-04-17 + + + + + + + + + + Voir la spécification référencée + + + true + + + + + + + + + + + La version numérique vectorielle du plan de secteur se base sur la version papier originale + digitalisée par l'Institut Wallon en juin 1994 (fond de plan au 1/10.000) qui a été complétée en mai 2001 par + 
ce même institut. La donnée intègre la légende actuellement en vigueur et est mise à jour en continu par la + DGO4 depuis 2001. + + L'intégration des nouveaux dossiers, la correction d'erreurs et la suppression des dossiers abrogés se font au + fur et à mesure de la réception des informations. Les données publiées sont mises à jour mensuellement sur + base des données de travail. + + Depuis leur adoption, les plans de secteur ont fait l’objet de nombreuses révisions. Le Gouvernement wallon a + en effet estimé nécessaire de les adapter pour y inscrire de nouveaux projets: routes, lignes électriques à + haute tension, tracé TGV, nouvelles zones d'activité économique, zones d’extraction, etc. + + La procédure de révision et la légende ont été modifiées à plusieurs reprises. + + Suite à l'entrée en vigueur du CoDT, des changements sont à noter : + - Trois nouvelles zones destinées à l'urbanisation : Zone de dépendance d’extraction destinée à accueillir les + dépôts et dépendances industrielles (transformation des matières) à l’activité d’extraction, la zone d'enjeu + communal (ZEC) et la zone d'enjeu régional (ZER). Les ZEC et ZER sont toutes deux accompagnées d'une carte + d'affectation des sols à valeur indicative + - une nouvelle zone non destinée à l'urbanisation : zone d'extraction (ZE). 
+ + + + + + + + + + + + + + + + + Légende du Plan de secteur + + + + + + https://geoservices.wallonie.be/arcgis/rest/services/AMENAGEMENT_TERRITOIRE/PDS/MapServer/legend + + + + WWW:LINK + + + Légende associée au plan de secteur (sur base du service de visualisation) + + + + + + + + + + + + diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-dcat-service-core.rdf b/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-dcat-service-core.rdf new file mode 100644 index 000000000000..e4e3941c31f8 --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-dcat-service-core.rdf @@ -0,0 +1,399 @@ + + + + + + + Service + + + + + service + + + + + + urn:uuid:{uuid} + 2023-12-11T07:25:51.082626Z + 2019-04-02T12:35:21 + INSPIRE - Sites protégés en Wallonie (BE) - Service de téléchargement + + + + INSPIRE - Protected site in Walloon region (BE) - Download + service + Ce service de téléchargement ATOM Feed donne accès aux couches de données du thème INSPIRE + "Sites protégés" au sein du territoire wallon (Belgique). + + Ce service de téléchargement simple est fourni par le Service public de Wallonie (SPW) et permet le + téléchargement direct des couches de données géographiques constitutives du thème "sites protégés" de la + Directive INSPIRE (Annexe 1.9) sur l'ensemble du territoire wallon. Il utilise la technologie de flux de + données ATOM Feed. + + Le service est conforme aux spécifications de la Directive INSPIRE en la matière. + + Ce service de téléchargement simple permet d’accéder en téléchargement aux couches de données présentes dans + le thème "Sites protégés". 
Via ce service, les informations suivantes sont téléchargeables : + - "INSPIRE - Sites protégés en Wallonie" : série de couches de données regroupant l'ensemble des sites + protégés en Wallonie; + - "INSPIRE - Sites protégés Natura 2000 en Wallonie" : série de couches de données présentant uniquement les + sites classés selon le mécanisme de désignation Natura 2000; + - "INSPIRE - Sites protégés par type IUCN en Wallonie" : série de couches de données présentant uniquement les + sites classés selon le mécanisme de désignation IUCN; + - "INSPIRE - Sites protégés UNESCO en Wallonie" : série de couches de données présentant uniquement les sites + classés selon le mécanisme de désignation UNESCO; + - "INSPIRE - Sites protégés Monument National en Wallonie" : série de couches de données présentant uniquement + les sites classés selon le mécanisme de désignation Monument National + + Le service propose les opérations suivantes : + - Accéder aux métadonnées du service de téléchargement; + - Décrire la série de couche de données géographiques relative au thème "Sites protégés" ainsi que les séries + dérivées selon le mécanisme de désignation; + - Accéder aux séries de couches de données géographiques relatives au thème "Sites protégés" et aux sites de + désignation par site protégé. + + + + + + + + + INSPIRE - Sites protégés en Wallonie (BE) - Service de téléchargement + + + + INSPIRE - Protected site in Walloon region (BE) - Download + service + 2017-11-15 + http://geodata.wallonie.be/id/3dbe0017-a71f-4923-9b44-fdb5afef5778 + Ce service de téléchargement ATOM Feed donne accès aux couches de données du thème INSPIRE + "Sites protégés" au sein du territoire wallon (Belgique). + + Ce service de téléchargement simple est fourni par le Service public de Wallonie (SPW) et permet le + téléchargement direct des couches de données géographiques constitutives du thème "sites protégés" de la + Directive INSPIRE (Annexe 1.9) sur l'ensemble du territoire wallon. 
Il utilise la technologie de flux de + données ATOM Feed. + + Le service est conforme aux spécifications de la Directive INSPIRE en la matière. + + Ce service de téléchargement simple permet d’accéder en téléchargement aux couches de données présentes dans + le thème "Sites protégés". Via ce service, les informations suivantes sont téléchargeables : + - "INSPIRE - Sites protégés en Wallonie" : série de couches de données regroupant l'ensemble des sites + protégés en Wallonie; + - "INSPIRE - Sites protégés Natura 2000 en Wallonie" : série de couches de données présentant uniquement les + sites classés selon le mécanisme de désignation Natura 2000; + - "INSPIRE - Sites protégés par type IUCN en Wallonie" : série de couches de données présentant uniquement les + sites classés selon le mécanisme de désignation IUCN; + - "INSPIRE - Sites protégés UNESCO en Wallonie" : série de couches de données présentant uniquement les sites + classés selon le mécanisme de désignation UNESCO; + - "INSPIRE - Sites protégés Monument National en Wallonie" : série de couches de données présentant uniquement + les sites classés selon le mécanisme de désignation Monument National + + Le service propose les opérations suivantes : + - Accéder aux métadonnées du service de téléchargement; + - Décrire la série de couche de données géographiques relative au thème "Sites protégés" ainsi que les séries + dérivées selon le mécanisme de désignation; + - Accéder aux séries de couches de données géographiques relatives au thème "Sites protégés" et aux sites de + désignation par site protégé. 
+ + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + + + + + + + Direction de l'Intégration des géodonnées (SPW - Secrétariat général - SPW Digital + - Département de la Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + custodian + + + + + + + + + Service public de Wallonie (SPW) + + + + https://geoportail.wallonie.be + + + + + + + Nature et environnement + + + + + + + Faune et flore + + + + + + + Sites protégés + + + + + zones naturelles, paysages, écosystèmes + + + politique environnementale + + + biologie + + + site naturel + + + écologie + + + évaluation du patrimoine naturel + + + politique de conservation de la nature + + + monument historique + + + parc naturel + + + espace naturel + + + législation en matière de préservation de la nature + + + conservation des ressources naturelles + + + site naturel protégé + + + archéologie + + + milieu naturel + + + patrimoine naturel + + + paysage + + + géologie + + + monument + + + conservation + + + espace protégé + + + patrimoine culturel + + + Reporting INSPIRE + + + natura2000 + + + N2K + + + biodiversité + + + protected sites + + + site protégé + + + aire protégée + + + inspire + + + téléchargement + + + Feed + + + ATOM + + + IUCN + + + ProtectedSite + + + + + Service d’accès aux produits + + + + + Location of sites (Habitats Directive) + + + + + Régional + + + + + + + + + + + + + Conditions d'utilisation spécifiques + + + + + + + + + + + Ce service de téléchargement simple INSPIRE basé sur ATOM est au standard Atom RFC 4287, à + la spécification GeoRSS Simple et à la spécification OpenSearch (pour les éléments concernés). + + + + + + + + + + + INSPIRE_PS_DS_PIC + + + + + + + INSPIRE Sites Protégés - Service de téléchargement + + + Adresse de connexion au service de téléchargement ATOM Feed des couches de données + du thème "Sites protégés". 
+ + + + + + + INSPIRE Sites Protégés - Service de téléchargement + + + + + + + + + + Statistiques de disponibilité WMS + Statistiques de disponibilité du service WMS fournies par Spatineo + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-dcat-service.xml b/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-dcat-service.xml new file mode 100644 index 000000000000..46357e918400 --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-dcat-service.xml @@ -0,0 +1,1803 @@ + + + + + + 3dbe0017-a71f-4923-9b44-fdb5afef5778 + + + urn:uuid + + + + + + + + + + + + + + + + + + + + Service + + + Service + + + + + + + + + + + + + + Direction de l'Intégration des géodonnées (SPW - Secrétariat général - SPW Digital - + Département de la Géomatique - Direction de l'Intégration des géodonnées) + + + + Direction de l'Intégration des géodonnées (SPW - Secrétariat + général - SPW Digital - Département de la Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + + + helpdesk.carto@spw.wallonie.be + + + + + + + + + + + + + 2023-12-11T07:25:51.082626Z + + + + + + + + + + 2019-04-02T12:35:21 + + + + + + + + + + ISO 19119 + + + ISO 19119 + + + + + 2005/Amd.1:2008 + + + + + + + + + + + + + + + + + + https://metawal.wallonie.be/geonetwork/srv/api/records/3dbe0017-a71f-4923-9b44-fdb5afef5778 + + + + + + + + + + + + + EPSG:31370 + + + Belge 1972 / Belgian Lambert 72 (EPSG:31370) + + + Belge 1972 / Belgian Lambert 72 (EPSG:31370) + + + + + + + + + + + + + + + + + EPSG:4258 + + + ETRS89 (EPSG:4258) + + + ETRS89 (EPSG:4258) + + + + + + + + + + + + + + + + EPSG:3035 + + + ETRS89 / LAEA Europe (EPSG:3035) + + + ETRS89 / LAEA Europe (EPSG:3035) + + + + + + + + + + + + + + + + + EPSG:3812 + + + ETRS89 / Belgian Lambert 2008 (EPSG:3812) + + + ETRS89 / Belgian Lambert 2008 (EPSG:3812) + + + + + + + 
+ + + + + + + + + + INSPIRE - Sites protégés en Wallonie (BE) - Service de téléchargement + + + + INSPIRE - Sites protégés en Wallonie (BE) - Service de + téléchargement + + + + INSPIRE - Protected site in Walloon region (BE) - Download + service + + + + + + + + 2017-11-15 + + + + + + + + + + 3dbe0017-a71f-4923-9b44-fdb5afef5778 + + + http://geodata.wallonie.be/id/ + + + + + + + Ce service de téléchargement ATOM Feed donne accès aux couches de données du thème INSPIRE + "Sites protégés" au sein du territoire wallon (Belgique). + + Ce service de téléchargement simple est fourni par le Service public de Wallonie (SPW) et permet le + téléchargement direct des couches de données géographiques constitutives du thème "sites protégés" de la + Directive INSPIRE (Annexe 1.9) sur l'ensemble du territoire wallon. Il utilise la technologie de flux de + données ATOM Feed. + + Le service est conforme aux spécifications de la Directive INSPIRE en la matière. + + Ce service de téléchargement simple permet d’accéder en téléchargement aux couches de données présentes dans + le thème "Sites protégés". 
Via ce service, les informations suivantes sont téléchargeables : + - "INSPIRE - Sites protégés en Wallonie" : série de couches de données regroupant l'ensemble des sites + protégés en Wallonie; + - "INSPIRE - Sites protégés Natura 2000 en Wallonie" : série de couches de données présentant uniquement les + sites classés selon le mécanisme de désignation Natura 2000; + - "INSPIRE - Sites protégés par type IUCN en Wallonie" : série de couches de données présentant uniquement les + sites classés selon le mécanisme de désignation IUCN; + - "INSPIRE - Sites protégés UNESCO en Wallonie" : série de couches de données présentant uniquement les sites + classés selon le mécanisme de désignation UNESCO; + - "INSPIRE - Sites protégés Monument National en Wallonie" : série de couches de données présentant uniquement + les sites classés selon le mécanisme de désignation Monument National + + Le service propose les opérations suivantes : + - Accéder aux métadonnées du service de téléchargement; + - Décrire la série de couche de données géographiques relative au thème "Sites protégés" ainsi que les séries + dérivées selon le mécanisme de désignation; + - Accéder aux séries de couches de données géographiques relatives au thème "Sites protégés" et aux sites de + désignation par site protégé. + + + + Ce service de téléchargement ATOM Feed donne accès aux couches de + données du thème INSPIRE + "Sites protégés" au sein du territoire wallon (Belgique). + + Ce service de téléchargement simple est fourni par le Service public de Wallonie (SPW) et permet le + téléchargement direct des couches de données géographiques constitutives du thème "sites protégés" de la + Directive INSPIRE (Annexe 1.9) sur l'ensemble du territoire wallon. Il utilise la technologie de flux de + données ATOM Feed. + + Le service est conforme aux spécifications de la Directive INSPIRE en la matière. 
+ + Ce service de téléchargement simple permet d’accéder en téléchargement aux couches de données présentes + dans + le thème "Sites protégés". Via ce service, les informations suivantes sont téléchargeables : + - "INSPIRE - Sites protégés en Wallonie" : série de couches de données regroupant l'ensemble des sites + protégés en Wallonie; + - "INSPIRE - Sites protégés Natura 2000 en Wallonie" : série de couches de données présentant uniquement + les + sites classés selon le mécanisme de désignation Natura 2000; + - "INSPIRE - Sites protégés par type IUCN en Wallonie" : série de couches de données présentant uniquement + les + sites classés selon le mécanisme de désignation IUCN; + - "INSPIRE - Sites protégés UNESCO en Wallonie" : série de couches de données présentant uniquement les + sites + classés selon le mécanisme de désignation UNESCO; + - "INSPIRE - Sites protégés Monument National en Wallonie" : série de couches de données présentant + uniquement + les sites classés selon le mécanisme de désignation Monument National + + Le service propose les opérations suivantes : + - Accéder aux métadonnées du service de téléchargement; + - Décrire la série de couche de données géographiques relative au thème "Sites protégés" ainsi que les + séries + dérivées selon le mécanisme de désignation; + - Accéder aux séries de couches de données géographiques relatives au thème "Sites protégés" et aux sites + de + désignation par site protégé. 
+ + + + + + + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW + Digital - Département de la Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + + + helpdesk.carto@spw.wallonie.be + + + + + + + + + + + + + + + + + + Direction de l'Intégration des géodonnées (SPW - Secrétariat général - SPW Digital + - Département de la Géomatique - Direction de l'Intégration des géodonnées) + + + + Direction de l'Intégration des géodonnées (SPW - + Secrétariat général - SPW Digital - Département de la Géomatique - Direction de l'Intégration des + géodonnées) + + + + + + + + + + helpdesk.carto@spw.wallonie.be + + + + + + + + + + + + + + + + + + Service public de Wallonie (SPW) + + + Service public de Wallonie (SPW) + + + + + + + + + + helpdesk.carto@spw.wallonie.be + + + + + + + https://geoportail.wallonie.be + + + https://geoportail.wallonie.be + + + + + + WWW:LINK + + + Géoportail de la Wallonie + + + Géoportail de la Wallonie + + + + + + Géoportail de la Wallonie + + + Géoportail de la Wallonie + + + + + + + + + + + + + + + + + + + Région wallonne + + + Région wallonne + + + + + + + 2.75 + + + 6.50 + + + 49.45 + + + 50.85 + + + + + + + + + + https://metawal.wallonie.be/geonetwork/srv/api/records/3dbe0017-a71f-4923-9b44-fdb5afef5778/attachments/download_Inspire_20190430.png + + + + INSPIRE_PS_DS_PIC + + + INSPIRE_PS_DS_PIC + + + + + PNG + + + + + + + + Nature et environnement + + + + Nature et environnement + + + + + + Faune et flore + + + + Faune et flore + + + + + + + + + + Thèmes du + géoportail wallon + + + + Thèmes du géoportail wallon + + + + + + + + 2014-06-26 + + + + + + + + + + + geonetwork.thesaurus.external.theme.Themes_geoportail_wallon_hierarchy + + + + + + + + + + + + Sites protégés + + + Sites protégés + + + + + + + + + + GEMET - INSPIRE themes, version 1.0 + + + + GEMET - INSPIRE 
themes, version 1.0 + + + + + + + + 2008-06-01 + + + + + + + + + + + geonetwork.thesaurus.external.theme.httpinspireeceuropaeutheme-theme + + + + + + + + + + + + zones naturelles, paysages, écosystèmes + + + zones naturelles, paysages, écosystèmes + + + + + + politique environnementale + + + politique environnementale + + + + + biologie + + + biologie + + + + + + + + + + GEMET themes + + + GEMET themes + + + + + + + 2009-09-22 + + + + + + + + + + + geonetwork.thesaurus.external.theme.gemet-theme + + + + + + + + + + + + site naturel + + + site naturel + + + + + écologie + + + écologie + + + + + évaluation du patrimoine naturel + + + évaluation du patrimoine naturel + + + + + + politique de conservation de la nature + + + politique de conservation de la nature + + + + + + monument historique + + + monument historique + + + + + parc naturel + + + parc naturel + + + + + espace naturel + + + espace naturel + + + + + législation en matière de préservation de la nature + + + législation en matière de préservation de la nature + + + + + + conservation des ressources naturelles + + + conservation des ressources naturelles + + + + + + site naturel protégé + + + site naturel protégé + + + + + archéologie + + + archéologie + + + + + milieu naturel + + + milieu naturel + + + + + patrimoine naturel + + + patrimoine naturel + + + + + paysage + + + paysage + + + + + géologie + + + géologie + + + + + monument + + + monument + + + + + conservation + + + conservation + + + + + espace protégé + + + espace protégé + + + + + patrimoine culturel + + + patrimoine culturel + + + + + + + + + + GEMET + + + GEMET + + + + + + + 2009-09-22 + + + + + + + + + + + geonetwork.thesaurus.external.theme.gemet + + + + + + + + + + + + Reporting INSPIRE + + + Reporting INSPIRE + + + + + + + + + + Mots-clés InfraSIG + + + Mots-clés InfraSIG + + + + + + + 2022-10-03 + + + + + + + + + + + geonetwork.thesaurus.external.theme.infraSIG + + + + + + + + + + + + natura2000 + + + natura2000 + + + + + N2K + + + N2K 
+ + + + + biodiversité + + + biodiversité + + + + + protected sites + + + protected sites + + + + + site protégé + + + site protégé + + + + + aire protégée + + + aire protégée + + + + + inspire + + + inspire + + + + + téléchargement + + + téléchargement + + + + + Feed + + + Feed + + + + + ATOM + + + ATOM + + + + + IUCN + + + IUCN + + + + + ProtectedSite + + + ProtectedSite + + + + + + + + + + + + + Service d’accès aux produits + + + + Service d’accès aux produits + + + + + + + + + + + Classification of spatial data services + + + + Classification of spatial data services + + + + + + + + 2008-12-03 + + + + + + + + + + + geonetwork.thesaurus.external.theme.httpinspireeceuropaeumetadatacodelistSpatialDataServiceCategory-SpatialDataServiceCategory + + + + + + + + + + + + Location of sites (Habitats Directive) + + + Location of sites (Habitats Directive) + + + + + + + + INSPIRE priority data set + + + INSPIRE priority data set + + + + + + + 2017-11-16 + + + + + + + + + + + + + + Régional + + + + Régional + + + + + + + + + + Champ géographique + + + + Champ géographique + + + + + + + 2019-05-22 + + + + + + + + + + + geonetwork.thesaurus.external.theme.httpinspireeceuropaeumetadatacodelistSpatialScope-SpatialScope + + + + + + + + + + + + + + + No + limitations to public access + + + + No + limitations to public access + + + + + + + + + + Conditions d'utilisation spécifiques + + + Conditions d'utilisation spécifiques + + + + + + + + + Les + conditions d'utilisation du service sont régies par les conditions d’accès et d’utilisation des services + web géographiques de visualisation du Service public de Wallonie. + + + + Les + conditions d'utilisation du service sont régies par les conditions d’accès et d’utilisation des + services + web géographiques de visualisation du Service public de Wallonie. 
+ + + + + + + + download + + + + + + + + + GetOpenSearchDescription + + + + + + + + https://geoservices.wallonie.be/inspire/atom/PS_Opensearch.xml + + + + + https://geoservices.wallonie.be/inspire/atom/PS_Opensearch.xml + + + + + + INSPIRE Atom + + + Point de connection GetOpenSearchDescription + + + Point de connection GetOpenSearchDescription + + + + + + download, operation: GetOpenSearchDescription + + + download, operation: GetOpenSearchDescription + + + + + + + + + + + + + + + GetServiceATOMFeed + + + + + + + + https://geoservices.wallonie.be/inspire/atom/PS_Service.xml + + + + https://geoservices.wallonie.be/inspire/atom/PS_Service.xml + + + + + + INSPIRE Atom + + + Point de connection GetServiceATOMFeed + + + Point de connection GetServiceATOMFeed + + + + + + download, operation: GetServiceATOMFeed + + + download, operation: GetServiceATOMFeed + + + + + + + + + + + + + + + GetCapabilities + + + + + + + + https://geoservices.wallonie.be/wms/PS_Service?ProtocolIsGetCapabilities + + + https://geoservices.wallonie.be/wms/PS_Service + + + + + + GetCapabilities + + + + + + + + + GetCapabilities + + + + + + + + https://geoservices.wallonie.be/wms/PS_Service + + + https://geoservices.wallonie.be/wms/PS_Service?protocolIsWms + + + + + + OGC:WMS + + + + + + + + + + + + + + + + + + + + + + + + + + + Service public de Wallonie (SPW) + + + Service public de Wallonie (SPW) + + + + + + + + + + helpdesk.carto@spw.wallonie.be + + + + + + + + + + + + + + + + + https://geoservices.wallonie.be/inspire/atom/PS_Service.xml + + + atom:feed + + + INSPIRE Sites Protégés - Service de téléchargement + + + INSPIRE Sites Protégés - Service de téléchargement + + + + + + Adresse de connexion au service de téléchargement ATOM Feed des couches de données + du thème "Sites protégés". + + + + Adresse de connexion au service de téléchargement ATOM + Feed des couches de données + du thème "Sites protégés". 
+ + + + + + + + + + + + + https://directory.spatineo.com/service/164564 + + + WWW:LINK + + + Statistiques de disponibilité WMS + + + Statistiques de disponibilité du service WMS fournies par Spatineo + + + + + + + + + + https://docs.wallonie.be/services?url=https://geoservices.wallonie.be/arcgis/services/SOL_SOUS_SOL/WAL_OCS_IA__CHA_18_20/MapServer/WMSServer + + + + + + + + + + + + + + + + + + + + + Service + + + Service + + + + + + + + + + + + + + + Règlement (CE) n o 976/2009 de la + Commission du 19 octobre 2009 portant modalités d’application de la directive 2007/2/CE du + Parlement européen et du Conseil en ce qui concerne les services en réseau + + + + Règlement (CE) n o 976/2009 de la Commission du 19 + octobre 2009 portant modalités d’application de la directive 2007/2/CE du Parlement européen + et du Conseil en ce qui concerne les services en réseau + + + + + + + + 2009-10-19 + + + + + + + + + + Voir la spécification référencée + + + Voir la spécification référencée + + + + + + true + + + + + + + + + + + + + RÈGLEMENT (UE) N o 1089/2010 DE LA + COMMISSION du 23 novembre 2010 portant modalités d'application de la directive 2007/2/CE du + Parlement européen et du Conseil en ce qui concerne l'interopérabilité des séries et des services + de données géographiques + + + + RÈGLEMENT (UE) N o 1089/2010 DE LA COMMISSION du 23 + novembre 2010 portant modalités d'application de la directive 2007/2/CE du Parlement européen + et du Conseil en ce qui concerne l'interopérabilité des séries et des services de données + géographiques + + + + + + + + 2010-12-08 + + + + + + + + + + Voir la spécification référencée + + + Voir la spécification référencée + + + + + + true + + + + + + + + + + + Ce service de téléchargement simple INSPIRE basé sur ATOM est au standard Atom RFC 4287, à + la spécification GeoRSS Simple et à la spécification OpenSearch (pour les éléments concernés). 
+ + + + Ce service de téléchargement simple INSPIRE basé sur ATOM est au + standard Atom RFC 4287, à + la spécification GeoRSS Simple et à la spécification OpenSearch (pour les éléments concernés). + + + + + + + + + + + + + Service + + + Service + + + + + + + + + + + Service quality report + + + + + + + + https://stats.wallonie.be/services?url=https://geoservices.wallonie.be/arcgis/services/SOL_SOUS_SOL/WAL_OCS_IA__CHA_18_20/MapServer/WMSServer + + + + + + + + + diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-eu-dcat-ap-dataset-core-multipleAccrualPeriodicityAllowed.rdf b/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-eu-dcat-ap-dataset-core-multipleAccrualPeriodicityAllowed.rdf new file mode 100644 index 000000000000..5d1c6834518f --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-eu-dcat-ap-dataset-core-multipleAccrualPeriodicityAllowed.rdf @@ -0,0 +1,682 @@ + + + + + + + Dataset + + + + + + urn:uuid:{uuid} + 2023-12-08T12:26:19.337626Z + + 2019-04-02T12:33:24 + Plan de secteur en vigueur (version coordonnée vectorielle) + Le plan de secteur est un outil réglementaire d'aménagement du territoire et + d'urbanisme + régional wallon constitué de plusieurs couches de données spatiales. + + Le plan de secteur organise l'espace territorial wallon et en définit les différentes affectations afin + d'assurer le développement des activités humaines de manière harmonieuse et d'éviter la consommation abusive + d'espace. Il dispose d'une pleine valeur réglementaire et constitue ainsi la colonne vertébrale d’un + développement territorial efficace, cohérent et concerté. Cet aspect est renforcé par la réforme engendrée par + l'entrée en vigueur du Code du Développement Territorial (CoDT). + + La Région wallonne est couverte par 23 plans de secteur, adoptés entre 1977 et 1987. 
+ + Le plan de secteur est divisé en zones destinées à l'urbanisation (zone d'habitat, de loisirs, d'activité + économique, etc.) et en zones non destinées à l'urbanisation (zones agricoles, forestières, espaces verts, + etc.). Plusieurs couches de données spatiales constituent le plan de secteur. Elles sont définies dans le + CoDT. Outre la détermination des différentes zones d'affectation du territoire wallon, il contient : + - les limites communales du PdS; + - les révisions (infrastructures en révision, périmètres de révisions partielles du PdS, mesures + d'aménagement, prescriptions supplémentaires); + - les infrastructures (réseau routier, ferroviaire, voies navigables, lignes électriques haute tension, + canalisations); + - les périmètres de protection (périmètres de liaison écologique, d'intérêt paysager, d'intérêt culture, + historique ou esthétique, les points de vue remarquable et leur périmètre, les réservations d'infrastructure + principale, les extension de zone d'extraction); + - la référence au Plan de Secteur d'origine; + - les étiquettes des secteurs d'aménagement de 1978. + + Ces différentes couches de données sont présentées sous format vectoriel (point, ligne ou polygone). + + Si le plan de secteur a valeur réglementaire, il n’est pas figé pour autant. Les modalités de révision sont + formalisées dans des procédures qui ont été simplifiées et rationalisées dans le CoDT. Cette version constitue + la version la plus récente des couches de données et intègre les mises à jour faisant suite à la mise en œuvre + du CoDT. + + A ce jour, la gestion du plan de secteur relève de la Direction de l’Aménagement régional (DAR) qui est en + charge de l'outil "plan de secteur" : évolution au regard des objectifs régionaux, notamment du développement + économique dans une perspective durable, information, sensibilisation, lien avec la planification stratégique + régionale et avec les outils communaux. 
Les révisions sont instruites par la DAR, à l'exception de celles qui + ont été attribuées à la cellule de développement territorial (CDT), également dénommée "ESPACE", dont la + création a été décidée par le Gouvernement wallon le 19 septembre 2005. + + + + + + + + + + + ISO 19115 + 2003/Cor 1:2006 + + + + + + + + + + + + + Complete metadata + All information about the resource + + + Plan de secteur en vigueur (version coordonnée vectorielle) + 2023-03-31 + 2023-02-21 + 1.0 + + http://geodata.wallonie.be/id/7fe2f305-1302-4297-b67e-792f55acd834 + + + + + DGATLPE__PDS + BE.SPW.INFRASIG.CARTON + + + Le plan de secteur est un outil réglementaire d'aménagement du territoire et + d'urbanisme + régional wallon constitué de plusieurs couches de données spatiales. + + Le plan de secteur organise l'espace territorial wallon et en définit les différentes affectations afin + d'assurer le développement des activités humaines de manière harmonieuse et d'éviter la consommation abusive + d'espace. Il dispose d'une pleine valeur réglementaire et constitue ainsi la colonne vertébrale d’un + développement territorial efficace, cohérent et concerté. Cet aspect est renforcé par la réforme engendrée par + l'entrée en vigueur du Code du Développement Territorial (CoDT). + + La Région wallonne est couverte par 23 plans de secteur, adoptés entre 1977 et 1987. + + Le plan de secteur est divisé en zones destinées à l'urbanisation (zone d'habitat, de loisirs, d'activité + économique, etc.) et en zones non destinées à l'urbanisation (zones agricoles, forestières, espaces verts, + etc.). Plusieurs couches de données spatiales constituent le plan de secteur. Elles sont définies dans le + CoDT. 
Outre la détermination des différentes zones d'affectation du territoire wallon, il contient : + - les limites communales du PdS; + - les révisions (infrastructures en révision, périmètres de révisions partielles du PdS, mesures + d'aménagement, prescriptions supplémentaires); + - les infrastructures (réseau routier, ferroviaire, voies navigables, lignes électriques haute tension, + canalisations); + - les périmètres de protection (périmètres de liaison écologique, d'intérêt paysager, d'intérêt culture, + historique ou esthétique, les points de vue remarquable et leur périmètre, les réservations d'infrastructure + principale, les extension de zone d'extraction); + - la référence au Plan de Secteur d'origine; + - les étiquettes des secteurs d'aménagement de 1978. + + Ces différentes couches de données sont présentées sous format vectoriel (point, ligne ou polygone). + + Si le plan de secteur a valeur réglementaire, il n’est pas figé pour autant. Les modalités de révision sont + formalisées dans des procédures qui ont été simplifiées et rationalisées dans le CoDT. Cette version constitue + la version la plus récente des couches de données et intègre les mises à jour faisant suite à la mise en œuvre + du CoDT. + + A ce jour, la gestion du plan de secteur relève de la Direction de l’Aménagement régional (DAR) qui est en + charge de l'outil "plan de secteur" : évolution au regard des objectifs régionaux, notamment du développement + économique dans une perspective durable, information, sensibilisation, lien avec la planification stratégique + régionale et avec les outils communaux. Les révisions sont instruites par la DAR, à l'exception de celles qui + ont été attribuées à la cellule de développement territorial (CDT), également dénommée "ESPACE", dont la + création a été décidée par le Gouvernement wallon le 19 septembre 2005. 
+ + + + Mis à jour continue + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - + Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + + + + + + Thierry Berthet + + + Direction du Développement territorial (SPW - Territoire, Logement, + Patrimoine, + Énergie - Département de l'Aménagement du territoire et de l'Urbanisme - Direction du Développement + territorial) + + + + + + + + custodian + + + + + + + + + + + Jean Berthet + + + Direction du Développement territorial (SPW - Territoire, Logement, + Patrimoine, + Énergie - Département de l'Aménagement du territoire et de l'Urbanisme - Direction du Développement + territorial) + + + + + + + + + custodian + + + + + + + + + Service public de Wallonie (SPW) + + https://geoportail.wallonie.be + + + + + Agriculture + + + + + Société et activités + + + + + Aménagement du territoire + + + + + Plans et règlements + + + espace + zones naturelles, paysages, écosystèmes + législation + géographie + agriculture + aménagement du paysage + réseau ferroviaire + planification écologique + plan d'aménagement + extraction + habitat rural + gestion et planification rurale + secteur d'activité + infrastructure + plan de gestion + planification rurale + planification économique + plan + développement du territoire + infrastructure routière + plan d'occupation des sols + activité économique + réseau routier + planification urbaine + loisirs + canalisation + habitat urbain + mesure d'aménagement du territoire + territoire + planification régionale + habitat + PanierTelechargementGeoportail + Open Data + WalOnMap + Extraction_DIG + BDInfraSIGNO + aménagement du 
territoire + plan de secteur + point remarquable + PDS + CoDT + Point de vue + centre d'enfouissement + servitude + Code du Développement Territorial + + + Altitude + + + + + Caractéristiques géographiques + météorologiques + + + + + + Caractéristiques géographiques + océanographiques + + + + + + Conditions atmosphériques + + + + + Dénominations géographiques + + + + + Géologie + + + + + Hydrographie + + + + + Installations agricoles et aquacoles + + + + + Régions maritimes + + + + + Répartition des espèces + + + + + Ressources minérales + + + + + Santé et sécurité des personnes + + + Mobilité + Observation de la terre et environnement + + + + + + + + + + + + + + + + + Conditions d'accès et d'utilisation spécifiques + + + + + + + + + + + + + + + + + + + + + + + + INSPIRE Data Specification on Transport Networks – Technical Guidelines, + version 3.2 + + 2014-04-17 + + + + + La version numérique vectorielle du plan de secteur se base sur la version + papier originale + digitalisée par l'Institut Wallon en juin 1994 (fond de plan au 1/10.000) qui a été complétée en mai 2001 par + ce même institut. La donnée intègre la légende actuellement en vigueur et est mise à jour en continu par la + DGO4 depuis 2001. + + L'intégration des nouveaux dossiers, la correction d'erreurs et la suppression des dossiers abrogés se font au + fur et à mesure de la réception des informations. Les données publiées sont mises à jour mensuellement sur + base des données de travail. + + Depuis leur adoption, les plans de secteur ont fait l’objet de nombreuses révisions. Le Gouvernement wallon a + en effet estimé nécessaire de les adapter pour y inscrire de nouveaux projets: routes, lignes électriques à + haute tension, tracé TGV, nouvelles zones d'activité économique, zones d’extraction, etc. + + La procédure de révision et la légende ont été modifiées à plusieurs reprises. 
+ + Suite à l'entrée en vigueur du CoDT, des changements sont à noter : + - Trois nouvelles zones destinées à l'urbanisation : Zone de dépendance d’extraction destinée à accueillir les + dépôts et dépendances industrielles (transformation des matières) à l’activité d’extraction, la zone d'enjeu + communal (ZEC) et la zone d'enjeu régional (ZER). Les ZEC et ZER sont toutes deux accompagnées d'une carte + d'affectation des sols à valeur indicative + - une nouvelle zone non destinée à l'urbanisation : zone d'extraction (ZE). + + + + + + Agriculture, pêche, sylviculture et alimentation + + + + + Économie et finances + + + + + Énergie + + + + + Environnement + + + + + Santé + + + + + Régions et villes + + + + + Population et société + + + + + Science et technologie + + + 0.01 + + P0Y2M0DT0H0M0S + + + + + + + + + + + + + + + Région wallonne + + + + + 2023-12-06 + 2023-12-08 + + + + + + + + + + + + + + + + + pds_codt_pic + + + + + 2023-12-08T00:00:00 + + + 10485760 + + + ESRI Shapefile (.shp) + + + + + + + + + + Application de consultation des données de la DGO4 - Plan de secteur + Application dédiée à la consultation des couches de données relatives au Plan de + secteur. Cette application constitue un thème de l'application de consultation des données de la DGO4. + + + + + + Application de consultation des données de la DGO4 - Plan de secteur + + + + + + + + Application WalOnMap - Toute la Wallonie à la carte + Application cartographique du Geoportail (WalOnMap) qui permet de découvrir les + données géographiques de la Wallonie. 
+ + + + + + Application WalOnMap - Toute la Wallonie à la carte + + + + + + + + Service de visualisation ESRI-REST + Ce service ESRI-REST permet de visualiser la série de couches de données "Plan + de + secteur" + + + + + + Service de visualisation ESRI-REST + + + + + + + + Service de visualisation WMS + Ce service WMS permet de visualiser la série de couches de données "Plan de + secteur" + + + + + + Service de visualisation WMS + + + + + + + + + + + Base de données du Plan de secteur + Site permettant la recherche de Plans de secteur et des modifications dans la + base + de données + + + + + + Inventaire des données géographiques de la DGO4 + Inventaire des données géographiques produites ou exploitées à la DGO4. + + + + + + La Direction de l'Aménagement Régional + Site de la Direction de l'Aménagement Régional (DAR) + + + + + Plan de Secteur au format SHP + Dossier compressé contenant le jeu de données du Plan de Secteur au format + shapefile en coordonnées Lambert 72 + + + + + + Légende associée au plan de secteur (sur base du service de visualisation) + + + + + + diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-eu-dcat-ap-dataset-core.rdf b/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-eu-dcat-ap-dataset-core.rdf new file mode 100644 index 000000000000..f18a44ea9d82 --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-eu-dcat-ap-dataset-core.rdf @@ -0,0 +1,620 @@ + + + + + + + Dataset + + + + + + urn:uuid:{uuid} + 2023-12-08T12:26:19.337626Z + 2019-04-02T12:33:24 + Plan de secteur en vigueur (version coordonnée vectorielle) + Le plan de secteur est un outil réglementaire d'aménagement du territoire et d'urbanisme + régional wallon constitué de plusieurs couches de données spatiales. 
+ + Le plan de secteur organise l'espace territorial wallon et en définit les différentes affectations afin + d'assurer le développement des activités humaines de manière harmonieuse et d'éviter la consommation abusive + d'espace. Il dispose d'une pleine valeur réglementaire et constitue ainsi la colonne vertébrale d’un + développement territorial efficace, cohérent et concerté. Cet aspect est renforcé par la réforme engendrée par + l'entrée en vigueur du Code du Développement Territorial (CoDT). + + La Région wallonne est couverte par 23 plans de secteur, adoptés entre 1977 et 1987. + + Le plan de secteur est divisé en zones destinées à l'urbanisation (zone d'habitat, de loisirs, d'activité + économique, etc.) et en zones non destinées à l'urbanisation (zones agricoles, forestières, espaces verts, + etc.). Plusieurs couches de données spatiales constituent le plan de secteur. Elles sont définies dans le + CoDT. Outre la détermination des différentes zones d'affectation du territoire wallon, il contient : + - les limites communales du PdS; + - les révisions (infrastructures en révision, périmètres de révisions partielles du PdS, mesures + d'aménagement, prescriptions supplémentaires); + - les infrastructures (réseau routier, ferroviaire, voies navigables, lignes électriques haute tension, + canalisations); + - les périmètres de protection (périmètres de liaison écologique, d'intérêt paysager, d'intérêt culture, + historique ou esthétique, les points de vue remarquable et leur périmètre, les réservations d'infrastructure + principale, les extension de zone d'extraction); + - la référence au Plan de Secteur d'origine; + - les étiquettes des secteurs d'aménagement de 1978. + + Ces différentes couches de données sont présentées sous format vectoriel (point, ligne ou polygone). + + Si le plan de secteur a valeur réglementaire, il n’est pas figé pour autant. 
Les modalités de révision sont + formalisées dans des procédures qui ont été simplifiées et rationalisées dans le CoDT. Cette version constitue + la version la plus récente des couches de données et intègre les mises à jour faisant suite à la mise en œuvre + du CoDT. + + A ce jour, la gestion du plan de secteur relève de la Direction de l’Aménagement régional (DAR) qui est en + charge de l'outil "plan de secteur" : évolution au regard des objectifs régionaux, notamment du développement + économique dans une perspective durable, information, sensibilisation, lien avec la planification stratégique + régionale et avec les outils communaux. Les révisions sont instruites par la DAR, à l'exception de celles qui + ont été attribuées à la cellule de développement territorial (CDT), également dénommée "ESPACE", dont la + création a été décidée par le Gouvernement wallon le 19 septembre 2005. + + + + + + + + + + ISO 19115 + 2003/Cor 1:2006 + + + + + + + + + + + + + Complete metadata + All information about the resource + + + Plan de secteur en vigueur (version coordonnée vectorielle) + 2023-03-31 + 2023-02-21 + 1.0 + http://geodata.wallonie.be/id/7fe2f305-1302-4297-b67e-792f55acd834 + + + + DGATLPE__PDS + BE.SPW.INFRASIG.CARTON + + + Le plan de secteur est un outil réglementaire d'aménagement du territoire et d'urbanisme + régional wallon constitué de plusieurs couches de données spatiales. + + Le plan de secteur organise l'espace territorial wallon et en définit les différentes affectations afin + d'assurer le développement des activités humaines de manière harmonieuse et d'éviter la consommation abusive + d'espace. Il dispose d'une pleine valeur réglementaire et constitue ainsi la colonne vertébrale d’un + développement territorial efficace, cohérent et concerté. Cet aspect est renforcé par la réforme engendrée par + l'entrée en vigueur du Code du Développement Territorial (CoDT). + + La Région wallonne est couverte par 23 plans de secteur, adoptés entre 1977 et 1987. 
+ + Le plan de secteur est divisé en zones destinées à l'urbanisation (zone d'habitat, de loisirs, d'activité + économique, etc.) et en zones non destinées à l'urbanisation (zones agricoles, forestières, espaces verts, + etc.). Plusieurs couches de données spatiales constituent le plan de secteur. Elles sont définies dans le + CoDT. Outre la détermination des différentes zones d'affectation du territoire wallon, il contient : + - les limites communales du PdS; + - les révisions (infrastructures en révision, périmètres de révisions partielles du PdS, mesures + d'aménagement, prescriptions supplémentaires); + - les infrastructures (réseau routier, ferroviaire, voies navigables, lignes électriques haute tension, + canalisations); + - les périmètres de protection (périmètres de liaison écologique, d'intérêt paysager, d'intérêt culture, + historique ou esthétique, les points de vue remarquable et leur périmètre, les réservations d'infrastructure + principale, les extension de zone d'extraction); + - la référence au Plan de Secteur d'origine; + - les étiquettes des secteurs d'aménagement de 1978. + + Ces différentes couches de données sont présentées sous format vectoriel (point, ligne ou polygone). + + Si le plan de secteur a valeur réglementaire, il n’est pas figé pour autant. Les modalités de révision sont + formalisées dans des procédures qui ont été simplifiées et rationalisées dans le CoDT. Cette version constitue + la version la plus récente des couches de données et intègre les mises à jour faisant suite à la mise en œuvre + du CoDT. + + A ce jour, la gestion du plan de secteur relève de la Direction de l’Aménagement régional (DAR) qui est en + charge de l'outil "plan de secteur" : évolution au regard des objectifs régionaux, notamment du développement + économique dans une perspective durable, information, sensibilisation, lien avec la planification stratégique + régionale et avec les outils communaux. 
Les révisions sont instruites par la DAR, à l'exception de celles qui + ont été attribuées à la cellule de développement territorial (CDT), également dénommée "ESPACE", dont la + création a été décidée par le Gouvernement wallon le 19 septembre 2005. + + + Mis à jour continue + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + + + + + Thierry Berthet + + + Direction du Développement territorial (SPW - Territoire, Logement, Patrimoine, + Énergie - Département de l'Aménagement du territoire et de l'Urbanisme - Direction du Développement + territorial) + + + + + + + custodian + + + + + + + + + + + Jean Berthet + + + Direction du Développement territorial (SPW - Territoire, Logement, Patrimoine, + Énergie - Département de l'Aménagement du territoire et de l'Urbanisme - Direction du Développement + territorial) + + + + + + + + custodian + + + + + + + + + Service public de Wallonie (SPW) + + https://geoportail.wallonie.be + + + + + Agriculture + + + + + Société et activités + + + + + Aménagement du territoire + + + + + Plans et règlements + + + espace + zones naturelles, paysages, écosystèmes + législation + géographie + agriculture + aménagement du paysage + réseau ferroviaire + planification écologique + plan d'aménagement + extraction + habitat rural + gestion et planification rurale + secteur d'activité + infrastructure + plan de gestion + planification rurale + planification économique + plan + développement du territoire + infrastructure routière + plan d'occupation des sols + activité économique + réseau routier + planification urbaine + 
loisirs + canalisation + habitat urbain + mesure d'aménagement du territoire + territoire + planification régionale + habitat + PanierTelechargementGeoportail + Open Data + WalOnMap + Extraction_DIG + BDInfraSIGNO + aménagement du territoire + plan de secteur + point remarquable + PDS + CoDT + Point de vue + centre d'enfouissement + servitude + Code du Développement Territorial + + + Altitude + + + + + Caractéristiques géographiques + météorologiques + + + + + Caractéristiques géographiques + océanographiques + + + + + Conditions atmosphériques + + + + + Dénominations géographiques + + + + + Géologie + + + + + Hydrographie + + + + + Installations agricoles et aquacoles + + + + + Régions maritimes + + + + + Répartition des espèces + + + + + Ressources minérales + + + + + Santé et sécurité des personnes + + + Mobilité + Observation de la terre et environnement + + + + + + + + + + + + + + + + + Conditions d'accès et d'utilisation spécifiques + + + + + + + + + + + + + + + + + + + + + + + + INSPIRE Data Specification on Transport Networks – Technical Guidelines, + version 3.2 + 2014-04-17 + + + + + La version numérique vectorielle du plan de secteur se base sur la version papier originale + digitalisée par l'Institut Wallon en juin 1994 (fond de plan au 1/10.000) qui a été complétée en mai 2001 par + ce même institut. La donnée intègre la légende actuellement en vigueur et est mise à jour en continu par la + DGO4 depuis 2001. + + L'intégration des nouveaux dossiers, la correction d'erreurs et la suppression des dossiers abrogés se font au + fur et à mesure de la réception des informations. Les données publiées sont mises à jour mensuellement sur + base des données de travail. + + Depuis leur adoption, les plans de secteur ont fait l’objet de nombreuses révisions. 
Le Gouvernement wallon a + en effet estimé nécessaire de les adapter pour y inscrire de nouveaux projets: routes, lignes électriques à + haute tension, tracé TGV, nouvelles zones d'activité économique, zones d’extraction, etc. + + La procédure de révision et la légende ont été modifiées à plusieurs reprises. + + Suite à l'entrée en vigueur du CoDT, des changements sont à noter : + - Trois nouvelles zones destinées à l'urbanisation : Zone de dépendance d’extraction destinée à accueillir les + dépôts et dépendances industrielles (transformation des matières) à l’activité d’extraction, la zone d'enjeu + communal (ZEC) et la zone d'enjeu régional (ZER). Les ZEC et ZER sont toutes deux accompagnées d'une carte + d'affectation des sols à valeur indicative + - une nouvelle zone non destinée à l'urbanisation : zone d'extraction (ZE). + + + + + Agriculture, pêche, sylviculture et alimentation + + + + + Économie et finances + + + + + Énergie + + + + + Environnement + + + + + Santé + + + + + Régions et villes + + + + + Population et société + + + + + Science et technologie + + + 0.01 + P0Y2M0DT0H0M0S + + + + + + + + + + + + + Région wallonne + + + + + 2023-12-06 + 2023-12-08 + + + + + + + + pds_codt_pic + + + + + 2023-12-08T00:00:00 + + + 10485760 + + + ESRI Shapefile (.shp) + + + + + + + + + + Application de consultation des données de la DGO4 - Plan de secteur + Application dédiée à la consultation des couches de données relatives au Plan de + secteur. Cette application constitue un thème de l'application de consultation des données de la DGO4. + + + + + Application de consultation des données de la DGO4 - Plan de secteur + + + + + + + + Application WalOnMap - Toute la Wallonie à la carte + Application cartographique du Geoportail (WalOnMap) qui permet de découvrir les + données géographiques de la Wallonie. 
+ + + + + Application WalOnMap - Toute la Wallonie à la carte + + + + + + + + Service de visualisation ESRI-REST + Ce service ESRI-REST permet de visualiser la série de couches de données "Plan de + secteur" + + + + + Service de visualisation ESRI-REST + + + + + + + + Service de visualisation WMS + Ce service WMS permet de visualiser la série de couches de données "Plan de + secteur" + + + + + Service de visualisation WMS + + + + + + + + + + + Base de données du Plan de secteur + Site permettant la recherche de Plans de secteur et des modifications dans la base + de données + + + + + Inventaire des données géographiques de la DGO4 + Inventaire des données géographiques produites ou exploitées à la DGO4. + + + + + La Direction de l'Aménagement Régional + Site de la Direction de l'Aménagement Régional (DAR) + + + + + Plan de Secteur au format SHP + Dossier compressé contenant le jeu de données du Plan de Secteur au format + shapefile en coordonnées Lambert 72 + + + + + Légende associée au plan de secteur (sur base du service de visualisation) + + + + + diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-eu-dcat-ap-hvd-dataset-core.rdf b/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-eu-dcat-ap-hvd-dataset-core.rdf new file mode 100644 index 000000000000..18f4b33dfbf0 --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-eu-dcat-ap-hvd-dataset-core.rdf @@ -0,0 +1,630 @@ + + + + + + + Dataset + + + + + + urn:uuid:{uuid} + 2023-12-08T12:26:19.337626Z + 2019-04-02T12:33:24 + Plan de secteur en vigueur (version coordonnée vectorielle) + Le plan de secteur est un outil réglementaire d'aménagement du territoire et d'urbanisme + régional wallon constitué de plusieurs couches de données spatiales. 
+ + Le plan de secteur organise l'espace territorial wallon et en définit les différentes affectations afin + d'assurer le développement des activités humaines de manière harmonieuse et d'éviter la consommation abusive + d'espace. Il dispose d'une pleine valeur réglementaire et constitue ainsi la colonne vertébrale d’un + développement territorial efficace, cohérent et concerté. Cet aspect est renforcé par la réforme engendrée par + l'entrée en vigueur du Code du Développement Territorial (CoDT). + + La Région wallonne est couverte par 23 plans de secteur, adoptés entre 1977 et 1987. + + Le plan de secteur est divisé en zones destinées à l'urbanisation (zone d'habitat, de loisirs, d'activité + économique, etc.) et en zones non destinées à l'urbanisation (zones agricoles, forestières, espaces verts, + etc.). Plusieurs couches de données spatiales constituent le plan de secteur. Elles sont définies dans le + CoDT. Outre la détermination des différentes zones d'affectation du territoire wallon, il contient : + - les limites communales du PdS; + - les révisions (infrastructures en révision, périmètres de révisions partielles du PdS, mesures + d'aménagement, prescriptions supplémentaires); + - les infrastructures (réseau routier, ferroviaire, voies navigables, lignes électriques haute tension, + canalisations); + - les périmètres de protection (périmètres de liaison écologique, d'intérêt paysager, d'intérêt culture, + historique ou esthétique, les points de vue remarquable et leur périmètre, les réservations d'infrastructure + principale, les extension de zone d'extraction); + - la référence au Plan de Secteur d'origine; + - les étiquettes des secteurs d'aménagement de 1978. + + Ces différentes couches de données sont présentées sous format vectoriel (point, ligne ou polygone). + + Si le plan de secteur a valeur réglementaire, il n’est pas figé pour autant. 
Les modalités de révision sont + formalisées dans des procédures qui ont été simplifiées et rationalisées dans le CoDT. Cette version constitue + la version la plus récente des couches de données et intègre les mises à jour faisant suite à la mise en œuvre + du CoDT. + + A ce jour, la gestion du plan de secteur relève de la Direction de l’Aménagement régional (DAR) qui est en + charge de l'outil "plan de secteur" : évolution au regard des objectifs régionaux, notamment du développement + économique dans une perspective durable, information, sensibilisation, lien avec la planification stratégique + régionale et avec les outils communaux. Les révisions sont instruites par la DAR, à l'exception de celles qui + ont été attribuées à la cellule de développement territorial (CDT), également dénommée "ESPACE", dont la + création a été décidée par le Gouvernement wallon le 19 septembre 2005. + + + + + + + + + + ISO 19115 + 2003/Cor 1:2006 + + + + + + + + + + + + + Complete metadata + All information about the resource + + + Plan de secteur en vigueur (version coordonnée vectorielle) + 2023-03-31 + 2023-02-21 + 1.0 + http://geodata.wallonie.be/id/7fe2f305-1302-4297-b67e-792f55acd834 + + + + DGATLPE__PDS + BE.SPW.INFRASIG.CARTON + + + Le plan de secteur est un outil réglementaire d'aménagement du territoire et d'urbanisme + régional wallon constitué de plusieurs couches de données spatiales. + + Le plan de secteur organise l'espace territorial wallon et en définit les différentes affectations afin + d'assurer le développement des activités humaines de manière harmonieuse et d'éviter la consommation abusive + d'espace. Il dispose d'une pleine valeur réglementaire et constitue ainsi la colonne vertébrale d’un + développement territorial efficace, cohérent et concerté. Cet aspect est renforcé par la réforme engendrée par + l'entrée en vigueur du Code du Développement Territorial (CoDT). + + La Région wallonne est couverte par 23 plans de secteur, adoptés entre 1977 et 1987. 
+ + Le plan de secteur est divisé en zones destinées à l'urbanisation (zone d'habitat, de loisirs, d'activité + économique, etc.) et en zones non destinées à l'urbanisation (zones agricoles, forestières, espaces verts, + etc.). Plusieurs couches de données spatiales constituent le plan de secteur. Elles sont définies dans le + CoDT. Outre la détermination des différentes zones d'affectation du territoire wallon, il contient : + - les limites communales du PdS; + - les révisions (infrastructures en révision, périmètres de révisions partielles du PdS, mesures + d'aménagement, prescriptions supplémentaires); + - les infrastructures (réseau routier, ferroviaire, voies navigables, lignes électriques haute tension, + canalisations); + - les périmètres de protection (périmètres de liaison écologique, d'intérêt paysager, d'intérêt culture, + historique ou esthétique, les points de vue remarquable et leur périmètre, les réservations d'infrastructure + principale, les extension de zone d'extraction); + - la référence au Plan de Secteur d'origine; + - les étiquettes des secteurs d'aménagement de 1978. + + Ces différentes couches de données sont présentées sous format vectoriel (point, ligne ou polygone). + + Si le plan de secteur a valeur réglementaire, il n’est pas figé pour autant. Les modalités de révision sont + formalisées dans des procédures qui ont été simplifiées et rationalisées dans le CoDT. Cette version constitue + la version la plus récente des couches de données et intègre les mises à jour faisant suite à la mise en œuvre + du CoDT. + + A ce jour, la gestion du plan de secteur relève de la Direction de l’Aménagement régional (DAR) qui est en + charge de l'outil "plan de secteur" : évolution au regard des objectifs régionaux, notamment du développement + économique dans une perspective durable, information, sensibilisation, lien avec la planification stratégique + régionale et avec les outils communaux. 
Les révisions sont instruites par la DAR, à l'exception de celles qui + ont été attribuées à la cellule de développement territorial (CDT), également dénommée "ESPACE", dont la + création a été décidée par le Gouvernement wallon le 19 septembre 2005. + + + Mis à jour continue + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + + + + + Thierry Berthet + + + Direction du Développement territorial (SPW - Territoire, Logement, Patrimoine, + Énergie - Département de l'Aménagement du territoire et de l'Urbanisme - Direction du Développement + territorial) + + + + + + + custodian + + + + + + + + + + + Jean Berthet + + + Direction du Développement territorial (SPW - Territoire, Logement, Patrimoine, + Énergie - Département de l'Aménagement du territoire et de l'Urbanisme - Direction du Développement + territorial) + + + + + + + + custodian + + + + + + + + + Service public de Wallonie (SPW) + + https://geoportail.wallonie.be + + + + + Agriculture + + + + + Société et activités + + + + + Aménagement du territoire + + + + + Plans et règlements + + + espace + zones naturelles, paysages, écosystèmes + législation + géographie + agriculture + aménagement du paysage + réseau ferroviaire + planification écologique + plan d'aménagement + extraction + habitat rural + gestion et planification rurale + secteur d'activité + infrastructure + plan de gestion + planification rurale + planification économique + plan + développement du territoire + infrastructure routière + plan d'occupation des sols + activité économique + réseau routier + planification urbaine + 
loisirs + canalisation + habitat urbain + mesure d'aménagement du territoire + territoire + planification régionale + habitat + PanierTelechargementGeoportail + Open Data + WalOnMap + Extraction_DIG + BDInfraSIGNO + aménagement du territoire + plan de secteur + point remarquable + PDS + CoDT + Point de vue + centre d'enfouissement + servitude + Code du Développement Territorial + + + Altitude + + + + + Caractéristiques géographiques + météorologiques + + + + + Caractéristiques géographiques + océanographiques + + + + + Conditions atmosphériques + + + + + Dénominations géographiques + + + + + Géologie + + + + + Hydrographie + + + + + Installations agricoles et aquacoles + + + + + Régions maritimes + + + + + Répartition des espèces + + + + + Ressources minérales + + + + + Santé et sécurité des personnes + + + + + Mobilité + + + + + Observation de la terre et environnement + + + + + + + + + + + + + + + + + + + + Conditions d'accès et d'utilisation spécifiques + + + + + + + + + + + + + + + + + + + + + + + + INSPIRE Data Specification on Transport Networks – Technical Guidelines, + version 3.2 + 2014-04-17 + + + + + La version numérique vectorielle du plan de secteur se base sur la version papier originale + digitalisée par l'Institut Wallon en juin 1994 (fond de plan au 1/10.000) qui a été complétée en mai 2001 par + ce même institut. La donnée intègre la légende actuellement en vigueur et est mise à jour en continu par la + DGO4 depuis 2001. + + L'intégration des nouveaux dossiers, la correction d'erreurs et la suppression des dossiers abrogés se font au + fur et à mesure de la réception des informations. Les données publiées sont mises à jour mensuellement sur + base des données de travail. + + Depuis leur adoption, les plans de secteur ont fait l’objet de nombreuses révisions. 
Le Gouvernement wallon a + en effet estimé nécessaire de les adapter pour y inscrire de nouveaux projets: routes, lignes électriques à + haute tension, tracé TGV, nouvelles zones d'activité économique, zones d’extraction, etc. + + La procédure de révision et la légende ont été modifiées à plusieurs reprises. + + Suite à l'entrée en vigueur du CoDT, des changements sont à noter : + - Trois nouvelles zones destinées à l'urbanisation : Zone de dépendance d’extraction destinée à accueillir les + dépôts et dépendances industrielles (transformation des matières) à l’activité d’extraction, la zone d'enjeu + communal (ZEC) et la zone d'enjeu régional (ZER). Les ZEC et ZER sont toutes deux accompagnées d'une carte + d'affectation des sols à valeur indicative + - une nouvelle zone non destinée à l'urbanisation : zone d'extraction (ZE). + + + + + Agriculture, pêche, sylviculture et alimentation + + + + + Économie et finances + + + + + Énergie + + + + + Environnement + + + + + Santé + + + + + Régions et villes + + + + + Population et société + + + + + Science et technologie + + + + 0.01 + P0Y2M0DT0H0M0S + + + + + + + + + + + + + Région wallonne + + + + + 2023-12-06 + 2023-12-08 + + + + + + + + pds_codt_pic + + + + + 2023-12-08T00:00:00 + + + 10485760 + + + ESRI Shapefile (.shp) + + + + + + + + + + Application de consultation des données de la DGO4 - Plan de secteur + Application dédiée à la consultation des couches de données relatives au Plan de + secteur. Cette application constitue un thème de l'application de consultation des données de la DGO4. + + + + + Application de consultation des données de la DGO4 - Plan de secteur + + + + + + + + Application WalOnMap - Toute la Wallonie à la carte + Application cartographique du Geoportail (WalOnMap) qui permet de découvrir les + données géographiques de la Wallonie. 
+ + + + + Application WalOnMap - Toute la Wallonie à la carte + + + + + + + + Service de visualisation ESRI-REST + Ce service ESRI-REST permet de visualiser la série de couches de données "Plan de + secteur" + + + + + Service de visualisation ESRI-REST + + + + + + + + Service de visualisation WMS + Ce service WMS permet de visualiser la série de couches de données "Plan de + secteur" + + + + + Service de visualisation WMS + + + + + + + + + + + Base de données du Plan de secteur + Site permettant la recherche de Plans de secteur et des modifications dans la base + de données + + + + + Inventaire des données géographiques de la DGO4 + Inventaire des données géographiques produites ou exploitées à la DGO4. + + + + + La Direction de l'Aménagement Régional + Site de la Direction de l'Aménagement Régional (DAR) + + + + + Plan de Secteur au format SHP + Dossier compressé contenant le jeu de données du Plan de Secteur au format + shapefile en coordonnées Lambert 72 + + + + + Légende associée au plan de secteur (sur base du service de visualisation) + + + + + diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-eu-dcat-ap-mobility-dataset-core.rdf b/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-eu-dcat-ap-mobility-dataset-core.rdf new file mode 100644 index 000000000000..d91e6abe9bc8 --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-eu-dcat-ap-mobility-dataset-core.rdf @@ -0,0 +1,624 @@ + + + + + + + Dataset + + + + + + urn:uuid:{uuid} + 2023-12-08T12:26:19.337626Z + 2019-04-02T12:33:24 + Plan de secteur en vigueur (version coordonnée vectorielle) + Le plan de secteur est un outil réglementaire d'aménagement du territoire et d'urbanisme + régional wallon constitué de plusieurs couches de données spatiales. 
+ + Le plan de secteur organise l'espace territorial wallon et en définit les différentes affectations afin + d'assurer le développement des activités humaines de manière harmonieuse et d'éviter la consommation abusive + d'espace. Il dispose d'une pleine valeur réglementaire et constitue ainsi la colonne vertébrale d’un + développement territorial efficace, cohérent et concerté. Cet aspect est renforcé par la réforme engendrée par + l'entrée en vigueur du Code du Développement Territorial (CoDT). + + La Région wallonne est couverte par 23 plans de secteur, adoptés entre 1977 et 1987. + + Le plan de secteur est divisé en zones destinées à l'urbanisation (zone d'habitat, de loisirs, d'activité + économique, etc.) et en zones non destinées à l'urbanisation (zones agricoles, forestières, espaces verts, + etc.). Plusieurs couches de données spatiales constituent le plan de secteur. Elles sont définies dans le + CoDT. Outre la détermination des différentes zones d'affectation du territoire wallon, il contient : + - les limites communales du PdS; + - les révisions (infrastructures en révision, périmètres de révisions partielles du PdS, mesures + d'aménagement, prescriptions supplémentaires); + - les infrastructures (réseau routier, ferroviaire, voies navigables, lignes électriques haute tension, + canalisations); + - les périmètres de protection (périmètres de liaison écologique, d'intérêt paysager, d'intérêt culture, + historique ou esthétique, les points de vue remarquable et leur périmètre, les réservations d'infrastructure + principale, les extension de zone d'extraction); + - la référence au Plan de Secteur d'origine; + - les étiquettes des secteurs d'aménagement de 1978. + + Ces différentes couches de données sont présentées sous format vectoriel (point, ligne ou polygone). + + Si le plan de secteur a valeur réglementaire, il n’est pas figé pour autant. 
Les modalités de révision sont + formalisées dans des procédures qui ont été simplifiées et rationalisées dans le CoDT. Cette version constitue + la version la plus récente des couches de données et intègre les mises à jour faisant suite à la mise en œuvre + du CoDT. + + A ce jour, la gestion du plan de secteur relève de la Direction de l’Aménagement régional (DAR) qui est en + charge de l'outil "plan de secteur" : évolution au regard des objectifs régionaux, notamment du développement + économique dans une perspective durable, information, sensibilisation, lien avec la planification stratégique + régionale et avec les outils communaux. Les révisions sont instruites par la DAR, à l'exception de celles qui + ont été attribuées à la cellule de développement territorial (CDT), également dénommée "ESPACE", dont la + création a été décidée par le Gouvernement wallon le 19 septembre 2005. + + + + + + + + + + ISO 19115 + 2003/Cor 1:2006 + + + + + + + + + + + + + Complete metadata + All information about the resource + + + Plan de secteur en vigueur (version coordonnée vectorielle) + 2023-03-31 + 2023-02-21 + 1.0 + http://geodata.wallonie.be/id/7fe2f305-1302-4297-b67e-792f55acd834 + + + + DGATLPE__PDS + BE.SPW.INFRASIG.CARTON + + + Le plan de secteur est un outil réglementaire d'aménagement du territoire et d'urbanisme + régional wallon constitué de plusieurs couches de données spatiales. + + Le plan de secteur organise l'espace territorial wallon et en définit les différentes affectations afin + d'assurer le développement des activités humaines de manière harmonieuse et d'éviter la consommation abusive + d'espace. Il dispose d'une pleine valeur réglementaire et constitue ainsi la colonne vertébrale d’un + développement territorial efficace, cohérent et concerté. Cet aspect est renforcé par la réforme engendrée par + l'entrée en vigueur du Code du Développement Territorial (CoDT). + + La Région wallonne est couverte par 23 plans de secteur, adoptés entre 1977 et 1987. 
+ + Le plan de secteur est divisé en zones destinées à l'urbanisation (zone d'habitat, de loisirs, d'activité + économique, etc.) et en zones non destinées à l'urbanisation (zones agricoles, forestières, espaces verts, + etc.). Plusieurs couches de données spatiales constituent le plan de secteur. Elles sont définies dans le + CoDT. Outre la détermination des différentes zones d'affectation du territoire wallon, il contient : + - les limites communales du PdS; + - les révisions (infrastructures en révision, périmètres de révisions partielles du PdS, mesures + d'aménagement, prescriptions supplémentaires); + - les infrastructures (réseau routier, ferroviaire, voies navigables, lignes électriques haute tension, + canalisations); + - les périmètres de protection (périmètres de liaison écologique, d'intérêt paysager, d'intérêt culture, + historique ou esthétique, les points de vue remarquable et leur périmètre, les réservations d'infrastructure + principale, les extension de zone d'extraction); + - la référence au Plan de Secteur d'origine; + - les étiquettes des secteurs d'aménagement de 1978. + + Ces différentes couches de données sont présentées sous format vectoriel (point, ligne ou polygone). + + Si le plan de secteur a valeur réglementaire, il n’est pas figé pour autant. Les modalités de révision sont + formalisées dans des procédures qui ont été simplifiées et rationalisées dans le CoDT. Cette version constitue + la version la plus récente des couches de données et intègre les mises à jour faisant suite à la mise en œuvre + du CoDT. + + A ce jour, la gestion du plan de secteur relève de la Direction de l’Aménagement régional (DAR) qui est en + charge de l'outil "plan de secteur" : évolution au regard des objectifs régionaux, notamment du développement + économique dans une perspective durable, information, sensibilisation, lien avec la planification stratégique + régionale et avec les outils communaux. 
Les révisions sont instruites par la DAR, à l'exception de celles qui + ont été attribuées à la cellule de développement territorial (CDT), également dénommée "ESPACE", dont la + création a été décidée par le Gouvernement wallon le 19 septembre 2005. + + + Mis à jour continue + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + + + + + Thierry Berthet + + + Direction du Développement territorial (SPW - Territoire, Logement, Patrimoine, + Énergie - Département de l'Aménagement du territoire et de l'Urbanisme - Direction du Développement + territorial) + + + + + + + custodian + + + + + + + + + + + Jean Berthet + + + Direction du Développement territorial (SPW - Territoire, Logement, Patrimoine, + Énergie - Département de l'Aménagement du territoire et de l'Urbanisme - Direction du Développement + territorial) + + + + + + + + custodian + + + + + + + + + Service public de Wallonie (SPW) + + https://geoportail.wallonie.be + + + + + Agriculture + + + + + Société et activités + + + + + Aménagement du territoire + + + + + Plans et règlements + + + espace + zones naturelles, paysages, écosystèmes + législation + géographie + agriculture + aménagement du paysage + réseau ferroviaire + planification écologique + plan d'aménagement + extraction + habitat rural + gestion et planification rurale + secteur d'activité + infrastructure + plan de gestion + planification rurale + planification économique + plan + développement du territoire + infrastructure routière + plan d'occupation des sols + activité économique + réseau routier + planification urbaine + 
loisirs + canalisation + habitat urbain + mesure d'aménagement du territoire + territoire + planification régionale + habitat + PanierTelechargementGeoportail + Open Data + WalOnMap + Extraction_DIG + BDInfraSIGNO + aménagement du territoire + plan de secteur + point remarquable + PDS + CoDT + Point de vue + centre d'enfouissement + servitude + Code du Développement Territorial + + + Altitude + + + + + Caractéristiques géographiques + météorologiques + + + + + Caractéristiques géographiques + océanographiques + + + + + Conditions atmosphériques + + + + + Dénominations géographiques + + + + + Géologie + + + + + Hydrographie + + + + + Installations agricoles et aquacoles + + + + + Régions maritimes + + + + + Répartition des espèces + + + + + Ressources minérales + + + + + Santé et sécurité des personnes + + + Mobilité + Observation de la terre et environnement + + + + + + + + + + + + + + + + + Conditions d'accès et d'utilisation spécifiques + + + + + + + + + + + + + + + + + + + + + + + + INSPIRE Data Specification on Transport Networks – Technical Guidelines, + version 3.2 + 2014-04-17 + + + + + La version numérique vectorielle du plan de secteur se base sur la version papier originale + digitalisée par l'Institut Wallon en juin 1994 (fond de plan au 1/10.000) qui a été complétée en mai 2001 par + ce même institut. La donnée intègre la légende actuellement en vigueur et est mise à jour en continu par la + DGO4 depuis 2001. + + L'intégration des nouveaux dossiers, la correction d'erreurs et la suppression des dossiers abrogés se font au + fur et à mesure de la réception des informations. Les données publiées sont mises à jour mensuellement sur + base des données de travail. + + Depuis leur adoption, les plans de secteur ont fait l’objet de nombreuses révisions. 
Le Gouvernement wallon a + en effet estimé nécessaire de les adapter pour y inscrire de nouveaux projets: routes, lignes électriques à + haute tension, tracé TGV, nouvelles zones d'activité économique, zones d’extraction, etc. + + La procédure de révision et la légende ont été modifiées à plusieurs reprises. + + Suite à l'entrée en vigueur du CoDT, des changements sont à noter : + - Trois nouvelles zones destinées à l'urbanisation : Zone de dépendance d’extraction destinée à accueillir les + dépôts et dépendances industrielles (transformation des matières) à l’activité d’extraction, la zone d'enjeu + communal (ZEC) et la zone d'enjeu régional (ZER). Les ZEC et ZER sont toutes deux accompagnées d'une carte + d'affectation des sols à valeur indicative + - une nouvelle zone non destinée à l'urbanisation : zone d'extraction (ZE). + + + + + Agriculture, pêche, sylviculture et alimentation + + + + + Économie et finances + + + + + Énergie + + + + + Environnement + + + + + Santé + + + + + Régions et villes + + + + + Population et société + + + + + Science et technologie + + + + + + 0.01 + P0Y2M0DT0H0M0S + + + + + + + + + + + + + Région wallonne + + + + + 2023-12-06 + 2023-12-08 + + + + + + + + pds_codt_pic + + + + + 2023-12-08T00:00:00 + + + 10485760 + + + ESRI Shapefile (.shp) + + + + + + + + + + Application de consultation des données de la DGO4 - Plan de secteur + Application dédiée à la consultation des couches de données relatives au Plan de + secteur. Cette application constitue un thème de l'application de consultation des données de la DGO4. + + + + + Application de consultation des données de la DGO4 - Plan de secteur + + + + + + + + Application WalOnMap - Toute la Wallonie à la carte + Application cartographique du Geoportail (WalOnMap) qui permet de découvrir les + données géographiques de la Wallonie. 
+ + + + + Application WalOnMap - Toute la Wallonie à la carte + + + + + + + + Service de visualisation ESRI-REST + Ce service ESRI-REST permet de visualiser la série de couches de données "Plan de + secteur" + + + + + Service de visualisation ESRI-REST + + + + + + + + Service de visualisation WMS + Ce service WMS permet de visualiser la série de couches de données "Plan de + secteur" + + + + + Service de visualisation WMS + + + + + + + + + + + Base de données du Plan de secteur + Site permettant la recherche de Plans de secteur et des modifications dans la base + de données + + + + + Inventaire des données géographiques de la DGO4 + Inventaire des données géographiques produites ou exploitées à la DGO4. + + + + + La Direction de l'Aménagement Régional + Site de la Direction de l'Aménagement Régional (DAR) + + + + + Plan de Secteur au format SHP + Dossier compressé contenant le jeu de données du Plan de Secteur au format + shapefile en coordonnées Lambert 72 + + + + + Légende associée au plan de secteur (sur base du service de visualisation) + + + + + diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-eu-geodcat-ap-dataset-core.rdf b/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-eu-geodcat-ap-dataset-core.rdf new file mode 100644 index 000000000000..070e01fb18aa --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/iso19115-3.2018-eu-geodcat-ap-dataset-core.rdf @@ -0,0 +1,640 @@ + + + + + + + Dataset + + + + + + + urn:uuid:{uuid} + 2023-12-08T12:26:19.337626Z + 2019-04-02T12:33:24 + Plan de secteur en vigueur (version coordonnée vectorielle) + Le plan de secteur est un outil réglementaire d'aménagement du territoire et d'urbanisme + régional wallon constitué de plusieurs couches de données spatiales. 
+ + Le plan de secteur organise l'espace territorial wallon et en définit les différentes affectations afin + d'assurer le développement des activités humaines de manière harmonieuse et d'éviter la consommation abusive + d'espace. Il dispose d'une pleine valeur réglementaire et constitue ainsi la colonne vertébrale d’un + développement territorial efficace, cohérent et concerté. Cet aspect est renforcé par la réforme engendrée par + l'entrée en vigueur du Code du Développement Territorial (CoDT). + + La Région wallonne est couverte par 23 plans de secteur, adoptés entre 1977 et 1987. + + Le plan de secteur est divisé en zones destinées à l'urbanisation (zone d'habitat, de loisirs, d'activité + économique, etc.) et en zones non destinées à l'urbanisation (zones agricoles, forestières, espaces verts, + etc.). Plusieurs couches de données spatiales constituent le plan de secteur. Elles sont définies dans le + CoDT. Outre la détermination des différentes zones d'affectation du territoire wallon, il contient : + - les limites communales du PdS; + - les révisions (infrastructures en révision, périmètres de révisions partielles du PdS, mesures + d'aménagement, prescriptions supplémentaires); + - les infrastructures (réseau routier, ferroviaire, voies navigables, lignes électriques haute tension, + canalisations); + - les périmètres de protection (périmètres de liaison écologique, d'intérêt paysager, d'intérêt culture, + historique ou esthétique, les points de vue remarquable et leur périmètre, les réservations d'infrastructure + principale, les extension de zone d'extraction); + - la référence au Plan de Secteur d'origine; + - les étiquettes des secteurs d'aménagement de 1978. + + Ces différentes couches de données sont présentées sous format vectoriel (point, ligne ou polygone). + + Si le plan de secteur a valeur réglementaire, il n’est pas figé pour autant. 
Les modalités de révision sont + formalisées dans des procédures qui ont été simplifiées et rationalisées dans le CoDT. Cette version constitue + la version la plus récente des couches de données et intègre les mises à jour faisant suite à la mise en œuvre + du CoDT. + + A ce jour, la gestion du plan de secteur relève de la Direction de l’Aménagement régional (DAR) qui est en + charge de l'outil "plan de secteur" : évolution au regard des objectifs régionaux, notamment du développement + économique dans une perspective durable, information, sensibilisation, lien avec la planification stratégique + régionale et avec les outils communaux. Les révisions sont instruites par la DAR, à l'exception de celles qui + ont été attribuées à la cellule de développement territorial (CDT), également dénommée "ESPACE", dont la + création a été décidée par le Gouvernement wallon le 19 septembre 2005. + + + + + + + UTF-8 + + + + ISO 19115 + 2003/Cor 1:2006 + + + + + + + + + + Direction de la gestion des informations territoriales (SPW - Territoire, Logement, + Patrimoine, Énergie - Département de l'Aménagement du territoire et de l'Urbanisme - Direction de la + gestion des informations territoriales) + + + + + + + + + + + + + + + + + Complete metadata + All information about the resource + + + Plan de secteur en vigueur (version coordonnée vectorielle) + 2023-03-31 + 2023-02-21 + 1.0 + http://geodata.wallonie.be/id/7fe2f305-1302-4297-b67e-792f55acd834 + + + + DGATLPE__PDS + BE.SPW.INFRASIG.CARTON + + + Le plan de secteur est un outil réglementaire d'aménagement du territoire et d'urbanisme + régional wallon constitué de plusieurs couches de données spatiales. + + Le plan de secteur organise l'espace territorial wallon et en définit les différentes affectations afin + d'assurer le développement des activités humaines de manière harmonieuse et d'éviter la consommation abusive + d'espace. 
Il dispose d'une pleine valeur réglementaire et constitue ainsi la colonne vertébrale d’un + développement territorial efficace, cohérent et concerté. Cet aspect est renforcé par la réforme engendrée par + l'entrée en vigueur du Code du Développement Territorial (CoDT). + + La Région wallonne est couverte par 23 plans de secteur, adoptés entre 1977 et 1987. + + Le plan de secteur est divisé en zones destinées à l'urbanisation (zone d'habitat, de loisirs, d'activité + économique, etc.) et en zones non destinées à l'urbanisation (zones agricoles, forestières, espaces verts, + etc.). Plusieurs couches de données spatiales constituent le plan de secteur. Elles sont définies dans le + CoDT. Outre la détermination des différentes zones d'affectation du territoire wallon, il contient : + - les limites communales du PdS; + - les révisions (infrastructures en révision, périmètres de révisions partielles du PdS, mesures + d'aménagement, prescriptions supplémentaires); + - les infrastructures (réseau routier, ferroviaire, voies navigables, lignes électriques haute tension, + canalisations); + - les périmètres de protection (périmètres de liaison écologique, d'intérêt paysager, d'intérêt culture, + historique ou esthétique, les points de vue remarquable et leur périmètre, les réservations d'infrastructure + principale, les extension de zone d'extraction); + - la référence au Plan de Secteur d'origine; + - les étiquettes des secteurs d'aménagement de 1978. + + Ces différentes couches de données sont présentées sous format vectoriel (point, ligne ou polygone). + + Si le plan de secteur a valeur réglementaire, il n’est pas figé pour autant. Les modalités de révision sont + formalisées dans des procédures qui ont été simplifiées et rationalisées dans le CoDT. Cette version constitue + la version la plus récente des couches de données et intègre les mises à jour faisant suite à la mise en œuvre + du CoDT. 
+ + A ce jour, la gestion du plan de secteur relève de la Direction de l’Aménagement régional (DAR) qui est en + charge de l'outil "plan de secteur" : évolution au regard des objectifs régionaux, notamment du développement + économique dans une perspective durable, information, sensibilisation, lien avec la planification stratégique + régionale et avec les outils communaux. Les révisions sont instruites par la DAR, à l'exception de celles qui + ont été attribuées à la cellule de développement territorial (CDT), également dénommée "ESPACE", dont la + création a été décidée par le Gouvernement wallon le 19 septembre 2005. + + + Mis à jour continue + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + + Thierry Berthet + + + Direction du Développement territorial (SPW - Territoire, Logement, Patrimoine, + Énergie - Département de l'Aménagement du territoire et de l'Urbanisme - Direction du Développement + territorial) + + + + + + + + + Jean Berthet + + + Direction du Développement territorial (SPW - Territoire, Logement, Patrimoine, + Énergie - Département de l'Aménagement du territoire et de l'Urbanisme - Direction du Développement + territorial) + + + + + + + + + + Service public de Wallonie (SPW) + + https://geoportail.wallonie.be + + + + + Agriculture + + + + + Société et activités + + + + + Aménagement du territoire + + + + + Plans et règlements + + + espace + zones naturelles, paysages, écosystèmes + législation + géographie + agriculture + aménagement du paysage + réseau ferroviaire + planification écologique + plan d'aménagement + 
extraction + habitat rural + gestion et planification rurale + secteur d'activité + infrastructure + plan de gestion + planification rurale + planification économique + plan + développement du territoire + infrastructure routière + plan d'occupation des sols + activité économique + réseau routier + planification urbaine + loisirs + canalisation + habitat urbain + mesure d'aménagement du territoire + territoire + planification régionale + habitat + PanierTelechargementGeoportail + Open Data + WalOnMap + Extraction_DIG + BDInfraSIGNO + aménagement du territoire + plan de secteur + point remarquable + PDS + CoDT + Point de vue + centre d'enfouissement + servitude + Code du Développement Territorial + + + Altitude + + + + + Caractéristiques géographiques + météorologiques + + + + + Caractéristiques géographiques + océanographiques + + + + + Conditions atmosphériques + + + + + Dénominations géographiques + + + + + Géologie + + + + + Hydrographie + + + + + Installations agricoles et aquacoles + + + + + Régions maritimes + + + + + Répartition des espèces + + + + + Ressources minérales + + + + + Santé et sécurité des personnes + + + + + Mobilité + + + + + Observation de la terre et environnement + + + + + + + + + + + + + + + + + + + Conditions d'accès et d'utilisation spécifiques + + + + + + + + + + + + + + + + + + + + + + + + INSPIRE Data Specification on Transport Networks – Technical Guidelines, + version 3.2 + 2014-04-17 + + + + + La version numérique vectorielle du plan de secteur se base sur la version papier originale + digitalisée par l'Institut Wallon en juin 1994 (fond de plan au 1/10.000) qui a été complétée en mai 2001 par + ce même institut. La donnée intègre la légende actuellement en vigueur et est mise à jour en continu par la + DGO4 depuis 2001. + + L'intégration des nouveaux dossiers, la correction d'erreurs et la suppression des dossiers abrogés se font au + fur et à mesure de la réception des informations. 
Les données publiées sont mises à jour mensuellement sur + base des données de travail. + + Depuis leur adoption, les plans de secteur ont fait l’objet de nombreuses révisions. Le Gouvernement wallon a + en effet estimé nécessaire de les adapter pour y inscrire de nouveaux projets: routes, lignes électriques à + haute tension, tracé TGV, nouvelles zones d'activité économique, zones d’extraction, etc. + + La procédure de révision et la légende ont été modifiées à plusieurs reprises. + + Suite à l'entrée en vigueur du CoDT, des changements sont à noter : + - Trois nouvelles zones destinées à l'urbanisation : Zone de dépendance d’extraction destinée à accueillir les + dépôts et dépendances industrielles (transformation des matières) à l’activité d’extraction, la zone d'enjeu + communal (ZEC) et la zone d'enjeu régional (ZER). Les ZEC et ZER sont toutes deux accompagnées d'une carte + d'affectation des sols à valeur indicative + - une nouvelle zone non destinée à l'urbanisation : zone d'extraction (ZE). + + + + + Agriculture, pêche, sylviculture et alimentation + + + + + Économie et finances + + + + + Énergie + + + + + Environnement + + + + + Santé + + + + + Régions et villes + + + + + Population et société + + + + + Science et technologie + + + + + + + + + + + + + + 0.01 + P0Y2M0DT0H0M0S + + + + + + + + + + + + + Région wallonne + + + + + 2023-12-06 + 2023-12-08 + + + + + + + + pds_codt_pic + + + + + 2023-12-08T00:00:00 + + + 10485760 + + + ESRI Shapefile (.shp) + + + + + + + + + + Application de consultation des données de la DGO4 - Plan de secteur + Application dédiée à la consultation des couches de données relatives au Plan de + secteur. Cette application constitue un thème de l'application de consultation des données de la DGO4. 
+ + + + + Application de consultation des données de la DGO4 - Plan de secteur + + + + + + + + Application WalOnMap - Toute la Wallonie à la carte + Application cartographique du Geoportail (WalOnMap) qui permet de découvrir les + données géographiques de la Wallonie. + + + + + Application WalOnMap - Toute la Wallonie à la carte + + + + + + + + Service de visualisation ESRI-REST + Ce service ESRI-REST permet de visualiser la série de couches de données "Plan de + secteur" + + + + + Service de visualisation ESRI-REST + + + + + + + + Service de visualisation WMS + Ce service WMS permet de visualiser la série de couches de données "Plan de + secteur" + + + + + Service de visualisation WMS + + + + + + + + + + + Base de données du Plan de secteur + Site permettant la recherche de Plans de secteur et des modifications dans la base + de données + + + + + Inventaire des données géographiques de la DGO4 + Inventaire des données géographiques produites ou exploitées à la DGO4. + + + + + La Direction de l'Aménagement Régional + Site de la Direction de l'Aménagement Régional (DAR) + + + + + Plan de Secteur au format SHP + Dossier compressé contenant le jeu de données du Plan de Secteur au format + shapefile en coordonnées Lambert 72 + + + + + Légende associée au plan de secteur (sur base du service de visualisation) + + + + + diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/mobilitydcat-ap_shacl_shapes.ttl b/services/src/test/resources/org/fao/geonet/api/records/formatters/mobilitydcat-ap_shacl_shapes.ttl new file mode 100644 index 000000000000..7fc10e5f229f --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/mobilitydcat-ap_shacl_shapes.ttl @@ -0,0 +1,411 @@ +@prefix : . +@prefix mobilitydcatap: . +@prefix adms: . +@prefix bibo: . +@prefix dcat: . +@prefix dcatap: . +@prefix dct: . +@prefix dqv: . +@prefix foaf: . +@prefix locn: . +@prefix vcard: . +@prefix owl: . +@prefix rdf: . +@prefix oa: . +@prefix skos: . 
+@prefix rdfs: . +@prefix sh: . +@prefix xsd: . + + + a owl:Ontology , adms:Asset ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:versionIRI ; + adms:status ; + dcatap:availability dcatap:stable ; + dct:conformsTo ; + rdfs:isDefinedBy ; + dct:license ; + dct:created "2023-08-14"^^xsd:date ; + dct:issued "2023-08-14"^^xsd:date ; + dct:modified "2023-10-19"^^xsd:date ; + dct:dateCopyrighted "2023"^^xsd:gYear ; + dct:title "The constraints of mobilityDCAT-AP Application Profile for Data Portals in Europe"@en ; + owl:versionInfo "1.0.0" ; + dct:description "This document specifies the constraints on properties and classes expressed by mobilityDCAT-AP in SHACL."@en ; + bibo:editor [ + a foaf:Person ; + owl:sameAs ; + owl:sameAs ; + foaf:name "Lina Molinas Comet" + ] ; + dct:creator [ a foaf:Group ; + foaf:name "NAPCORE SWG 4.4" ; + foaf:page ] ; + dct:publisher ; + dct:rightsHolder ; + dcat:distribution [ a adms:AssetDistribution ; + dct:format , + ; + dct:title "SHACL (Turtle)"@en ; + dcat:downloadURL ; + dcat:mediaType "text/turtle"^^dct:IMT + ] ; + . + +#------------------------------------------------------------------------- +# The shapes in this file complement the DCAT-AP ones to cover all classes +# in mobilityDCAT-AP 1.0.0. +#------------------------------------------------------------------------- + +:Address_Agent_Shape + a sh:NodeShape ; + sh:name "Address (Agent)"@en ; + sh:property [ + sh:maxCount 1 ; + sh:nodeKind sh:Literal ; + sh:path locn:adminUnitL2 ; + sh:name "administrative area" ; + sh:description "The administrative area of an Address of the Agent. Depending on the country, this corresponds to a province, a county, a region, or a state." 
; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:Literal ; + sh:path locn:postName ; + sh:name "city" ; + sh:description "The city of an Address of the Agent." ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:Literal ; + sh:path locn:adminUnitL1 ; + sh:name "country" ; + sh:description "The country of an Address of the Agent." ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:Literal ; + sh:path locn:postCode ; + sh:name "postal code" ; + sh:description "The postal code of an Address of the Agent." ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:Literal ; + sh:path locn:thoroughfare ; + sh:name "street address" ; + sh:description "In mobilityDCAT-AP, this is a recommended property to be used for Address (Agent)" ; + sh:severity sh:Violation + ] ; + sh:targetClass locn:Address . + +:Agent_Shape + a sh:NodeShape ; + sh:name "Agent"@en ; + sh:property [ + sh:maxCount 1 ; + sh:class owl:Thing ; + sh:path foaf:mbox ; + sh:name "email" ; + sh:description "This property SHOULD be used to provide the email address of the Agent, specified using fully qualified mailto: URI scheme [RFC6068]. The email SHOULD be used to establish a communication channel to the agent." ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:class rdfs:Resource ; + sh:path foaf:workplaceHomepage ; + sh:name "URL" ; + sh:description "This property MAY be used to specify the Web site of the Agent." ; + sh:severity sh:Violation + ]; + sh:targetClass foaf:Agent . + +:CatalogRecord_Shape + a sh:NodeShape ; + sh:name "Catalogue Record"@en ; + sh:property [ + sh:minCount 1 ; + sh:maxCount 1 ; + sh:path dct:created ; + sh:or ( + [ + sh:datatype xsd:date ; + ] + [ + sh:datatype xsd:dateTime ; + ] + ); + sh:name "creation date" ; + sh:description "This property contains the date stamp (date and time) when the metadata entry was created for the first time. 
It SHOULD be generated by the system, whenever a platform user enters the metadata entry. " ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:CatalogRecord . + +:Dataset_Shape + a sh:NodeShape ; + sh:name "Dataset"@en ; + sh:property [ + sh:minCount 1 ; + sh:class skos:Concept ; + sh:path mobilitydcatap:mobilityTheme ; + sh:name "mobility theme" ; + sh:description "This property refers to the mobility-related theme (i.e., a specific subject, category, or type) of the delivered content. A dataset may be associated with multiple themes. A theme is important for data seekers who are interested in a particular type of data content. " ; + sh:severity sh:Violation + ], [ + sh:class skos:Concept ; + sh:path mobilitydcatap:georeferencingMethod ; + sh:name "georeferencing method" ; + sh:description "This property SHOULD be used to specify the georeferencing method used in the dataset." ; + sh:severity sh:Violation + ], [ + sh:class skos:Concept ; + sh:path mobilitydcatap:networkCoverage ; + sh:name "network coverage" ; + sh:description "This property describes the part of the transport network that is covered by the delivered content. For road traffic, the property SHOULD refer to the network classification for which the data is provided. As a minimum, an international or higher-level classification, e.g., via functional road classes, is recommended to allow data search across different countries. In addition, national classifications are allowed." ; + sh:severity sh:Violation + ], [ + sh:class dct:Standard ; + sh:path dct:conformsTo ; + sh:name "reference system" ; + sh:description "This property SHOULD be used to specify the spatial reference system used in the dataset. Spatial reference systems SHOULD be specified by using the corresponding URIs from the “EPSG coordinate reference systems” register operated by OGC." 
; + sh:severity sh:Violation + ], [ + sh:class foaf:Agent ; + sh:path dct:rightsHolder ; + sh:name "rights holder" ; + sh:description "This property refers to an entity that legally owns or holds the rights of the data provided in a dataset. This entity is legally responsible for the content of the data. It is also responsible for any statements about the data quality (if applicable, see property dqv:hasQualityAnnotation) and/or the relevance to legal frameworks (if applicable, see property dcatap:applicableLegislation)." ; + sh:severity sh:Violation + ], [ + sh:class skos:Concept ; + sh:path mobilitydcatap:transportMode ; + sh:name "transport mode" ; + sh:description "This property describes the transport mode that is covered by the delivered content. Data can be valid for more than one mode, so a multiple choice should be applied. " ; + sh:severity sh:Violation + ]; + sh:targetClass dcat:Dataset . + +:Distribution_Shape + a sh:NodeShape ; + sh:name "Distribution"@en ; + sh:property [ + sh:class skos:Concept ; + sh:minCount 1 ; + sh:maxCount 1 ; + sh:path mobilitydcatap:mobilityDataStandard ; + sh:name "mobility data standard" ; + sh:description "This property describes the mobility data standard, as applied for the delivered content within the Distribution. A mobility data standard, e.g., DATEX II, combines syntax and semantic definitions of entities in a certain domain (e.g., for DATEX II: road traffic information), and optionally adds technical rules for data exchange. " ; + sh:severity sh:Violation + ], [ + sh:class skos:Concept ; + sh:path mobilitydcatap:applicationLayerProtocol ; + sh:name "application layer protocol" ; + sh:description "This property describes the transmitting channel, i.e., the Application Layer Protocol, of the distribution." ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:Distribution . 
+ +:Kind_Shape + a sh:NodeShape ; + sh:name "Kind"@en ; + sh:property [ + sh:class owl:Thing ; + sh:minCount 1 ; + sh:path vcard:hasEmail ; + sh:name "email" ; + sh:description "This property contains an email address of the Kind, specified using fully qualified mailto: URI scheme [RFC6068]. " ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:Literal ; + sh:path vcard:fn ; + sh:minCount 1 ; + sh:name "name" ; + sh:description "This property contains a name of the Kind. This property can be repeated for different versions of the name (e.g., the name in different languages) - see § 8. Accessibility and Multilingual Aspects." ; + sh:severity sh:Violation + ], [ + sh:class owl:Thing ; + sh:path vcard:hasURL ; + sh:maxCount 1 ; + sh:name "URL" ; + sh:description "This property points to a Web site of the Kind." ; + sh:severity sh:Violation + ]; + sh:targetClass vcard:Kind . + +:LicenseDocument_Shape + a sh:NodeShape ; + sh:name "License Document"@en ; + sh:property [ + sh:class skos:Concept ; + sh:maxCount 1 ; + sh:path dct:identifier ; + sh:name "Standard licence" ; + sh:description "This property MAY be be used to link to a concrete standard license. A controlled vocabulary § 5.2 Controlled vocabularies to be used is provided. " ; + sh:severity sh:Violation + ]; + sh:targetClass dct:LicenseDocument . + +:Location_Shape + a sh:NodeShape ; + sh:name "Location"@en ; + sh:property [ + sh:class skos:ConceptScheme ; + sh:path skos:inScheme ; + sh:maxCount 1 ; + sh:name "gazetteer" ; + sh:description "This property MAY be used to specify the gazetteer to which the Location belongs. " ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:Literal ; + sh:path dct:identifier ; + sh:name "geographic identifier" ; + sh:description "This property contains the geographic identifier for the Location, e.g., the URI or other unique identifier in the context of the relevant gazetteer." ; + sh:severity sh:Violation + ]; + sh:targetClass dct:Location . 
+ +:RightsStatement_Shape + a sh:NodeShape ; + sh:name "Rights Statement"@en ; + sh:property [ + sh:class skos:Concept ; + sh:path dct:type ; + sh:maxCount 1 ; + sh:name "conditions for access and usage" ; + sh:description "This property SHOULD be used to indicate the conditions if any contracts, licences and/or are applied for the use of the dataset. The conditions are declared on an aggregated level: whether a free and unrestricted use is possible, a contract has to be concluded and/or a licence has to be agreed on to use a dataset. " ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:Literal ; + sh:path rdfs:label ; + sh:name "Additional information for access and usage" ; + sh:description "This property MAY describes in a textual form any additional access, usage or licensing information, besides other information under classes dct:RightsStatement and dct:LicenseDocument. " ; + sh:severity sh:Violation + ]; + sh:targetClass dct:RightsStatement . + +#------------------------------------------------------------------------- +# Concepts from controlled vocabularies defined and used in mobilityDCAT-AP. +#------------------------------------------------------------------------- + + a skos:ConceptScheme ; + skos:prefLabel "Data model"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "Data content category"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "Update frequency"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "Georeferencing method"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "Network coverage"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "Transport mode"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "Service category"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "Grammar"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "Application layer protocol"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "Communication method"@en ; +. 
+ + a skos:ConceptScheme ; + skos:prefLabel "Conditions for access and usage"@en ; +. + +#------------------------------------------------------------------------- +# Concepts from additional controlled vocabularies used in mobilityDCAT-AP. +#------------------------------------------------------------------------- + + a skos:ConceptScheme ; + skos:prefLabel "Data Themes"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "Dataset Theme Vocabulary"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "Access right"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "Frequency"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "OGC EPSG Coordinate Reference Systems Register"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "File Type"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "Language"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "Corporate body"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "Continents"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "Countries"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "Places"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "GeoNames"@en ; +. + + a skos:ConceptScheme; + skos:prefLabel "NUTS (Nomenclature of Territorial Units for Statistics)"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "ADMS publisher type"@en ; +. + + a skos:ConceptScheme ; + skos:prefLabel "European Legislation Identifier (ELI)"@en ; +. \ No newline at end of file diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/dcat-ap-2.1.1-base-SHACL.ttl b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/dcat-ap-2.1.1-base-SHACL.ttl new file mode 100644 index 000000000000..1e5ab9d194f4 --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/dcat-ap-2.1.1-base-SHACL.ttl @@ -0,0 +1,7771 @@ +# FROM https://www.itb.ec.europa.eu/shacl/dcat-ap/upload +@prefix adms: . +@prefix cc: . +@prefix dc: . +@prefix dcam: . +@prefix dcat: . 
+@prefix dcatap: . +@prefix dcterms: . +@prefix dctype: . +@prefix doap: . +@prefix foaf: . +@prefix geo: . +@prefix gsp: . +@prefix j.0: . +@prefix locn: . +@prefix odrl: . +@prefix org: . +@prefix owl: . +@prefix prov: . +@prefix rdf: . +@prefix rdfs: . +@prefix rec: . +@prefix sdo: . +@prefix sf: . +@prefix sh: . +@prefix sioc: . +@prefix skos: . +@prefix spdx: . +@prefix time: . +@prefix vann: . +@prefix vcard: . +@prefix voaf: . +@prefix vs: . +@prefix wdsr: . +@prefix xsd: . + +prov:entity rdf:type owl:ObjectProperty ; + rdfs:domain prov:EntityInfluence ; + rdfs:isDefinedBy ; + rdfs:label "entity" ; + rdfs:range prov:Entity ; + rdfs:subPropertyOf prov:influencer ; + prov:category "qualified" ; + prov:editorialNote "This property behaves in spirit like rdf:object; it references the object of a prov:wasInfluencedBy triple."@en ; + prov:editorsDefinition "The prov:entity property references an prov:Entity which influenced a resource. This property applies to an prov:EntityInfluence, which is given by a subproperty of prov:qualifiedInfluence from the influenced prov:Entity, prov:Activity or prov:Agent." ; + prov:inverse "entityOfInfluence" . + +spdx:relationshipType_packageOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "To be used when SPDXRef-A is used as a package as part of SPDXRef-B."@en ; + vs:term_status "stable"@en . 
+ +dcat:DataService rdf:type owl:Class ; + rdfs:comment "A site or end-point providing operations related to the discovery of, access to, or processing functions on, data or related resources."@en , "Umístění či přístupový bod poskytující operace související s hledáním, přistupem k, či výkonem funkcí na datech či souvisejících zdrojích."@cs , "Et websted eller endpoint der udstiller operationer relateret til opdagelse af, adgang til eller behandlende funktioner på data eller relaterede ressourcer."@da , "Un sitio o end-point que provee operaciones relacionadas a funciones de descubrimiento, acceso, o procesamiento de datos o recursos relacionados."@es , "Un sito o end-point che fornisce operazioni relative alla scoperta, all'accesso o all'elaborazione di funzioni su dati o risorse correlate."@it ; + rdfs:label "Servizio di dati"@it , "Data service"@en , "Servicio de datos"@es , "Datatjeneste"@da ; + rdfs:subClassOf dctype:Service , dcat:Resource ; + skos:altLabel "Dataservice"@da ; + skos:changeNote "New class added in DCAT 2.0."@en , "Nová třída přidaná ve verzi DCAT 2.0."@cs , "Ny klasse tilføjet i DCAT 2.0."@da , "Nueva clase añadida en DCAT 2.0."@es , "Nuova classe aggiunta in DCAT 2.0."@it ; + skos:definition "Umístění či přístupový bod poskytující operace související s hledáním, přistupem k, či výkonem funkcí na datech či souvisejících zdrojích."@cs , "Un sitio o end-point que provee operaciones relacionadas a funciones de descubrimiento, acceso, o procesamiento de datos o recursos relacionados."@es , "A site or end-point providing operations related to the discovery of, access to, or processing functions on, data or related resources."@en , "Et site eller endpoint der udstiller operationer relateret til opdagelse af, adgang til eller behandlende funktioner på data eller relaterede ressourcer."@da , "Un sito o end-point che fornisce operazioni relative alla scoperta, all'accesso o all'elaborazione di funzioni su dati o risorse correlate."@it ; + 
skos:scopeNote "Pokud je dcat:DataService navázána na jednu či více Datových sad, jsou tyto indikovány vlstností dcat:servesDataset."@cs , "El tipo de servicio puede indicarse usando la propiedad dct:type. Su valor puede provenir de un vocabulario controlado, como por ejemplo el vocabulario de servicios de datos espaciales de INSPIRE."@es , "Hvis en dcat:DataService er bundet til en eller flere specifikke datasæt kan dette indikeres ved hjælp af egenskaben dcat:servesDataset. "@da , "Druh služby může být indikován vlastností dct:type. Její hodnota může být z řízeného slovníku, kterým je například slovník typů prostorových datových služeb INSPIRE."@cs , "If a dcat:DataService is bound to one or more specified Datasets, they are indicated by the dcat:servesDataset property."@en , "Si un dcat:DataService está asociado con uno o más conjuntos de datos especificados, dichos conjuntos de datos pueden indicarse con la propiedad dcat:servesDataset."@es , "Il tipo di servizio può essere indicato usando la proprietà dct:type. Il suo valore può essere preso da un vocabolario controllato come il vocabolario dei tipi di servizi per dati spaziali di INSPIRE."@it , "Se un dcat:DataService è associato a uno o più Dataset specificati, questi sono indicati dalla proprietà dcat:serveDataset."@it , "The kind of service can be indicated using the dct:type property. Its value may be taken from a controlled vocabulary such as the INSPIRE spatial data service type vocabulary."@en , "Datatjenestetypen kan indikeres ved hjælp af egenskaben dct:type. Værdien kan tages fra kontrollerede udfaldsrum såsom INSPIRE spatial data service vocabulary."@da . 
+ +spdx:relationshipType + rdf:type owl:ObjectProperty ; + rdfs:comment "Describes the type of relationship between two SPDX elements."@en ; + rdfs:domain spdx:Relationship ; + rdfs:range [ rdf:type owl:Class ; + owl:unionOf ( [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_amendment ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_ancestorOf ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_buildToolOf ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_containedBy ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_contains ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_copyOf ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_dataFile ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_dataFileOf ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_descendantOf ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_describedBy ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_describes ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_distributionArtifact ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_documentation ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_dynamicLink ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction 
; + owl:hasValue spdx:relationshipType_expandedFromArchive ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_fileAdded ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_fileDeleted ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_fileModified ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_generatedFrom ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_generates ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_hasPrerequisite ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_metafileOf ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_optionalComponentOf ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_other ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_packageOf ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_patchApplied ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_patchFor ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_prerequisiteFor ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_staticLink ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:relationshipType_testcaseOf ; + owl:onProperty spdx:relationshipType + ] + [ rdf:type 
owl:Restriction ; + owl:hasValue spdx:relationshipType_variantOf ; + owl:onProperty spdx:relationshipType + ] + ) + ] ; + vs:term_status "stable"@en . + +spdx:packageName rdf:type owl:DatatypeProperty ; + rdfs:comment "Identify the full name of the package as given by Package Originator."@en ; + rdfs:domain spdx:Package ; + rdfs:range xsd:string ; + rdfs:subPropertyOf spdx:name ; + vs:term_status "stable"@en . + +spdx:licenseInfoFromFiles + rdf:type owl:ObjectProperty ; + rdfs:comment "The licensing information that was discovered directly within the package. There will be an instance of this property for each distinct value of all licenseInfoInFile properties of all files contained in the package.\n\nIf the licenseInfoFromFiles field is not present for a package and filesAnalyzed property for that same package is true or omitted, it implies an equivalent meaning to NOASSERTION."@en ; + rdfs:domain spdx:Package ; + rdfs:range [ rdf:type owl:Class ; + owl:unionOf ( spdx:AnyLicenseInfo + [ rdf:type owl:Restriction ; + owl:hasValue spdx:noassertion ; + owl:onProperty spdx:licenseInfoFromFiles + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:none ; + owl:onProperty spdx:licenseInfoFromFiles + ] + ) + ] ; + vs:term_status "stable"@en . + +vcard:Msg rdf:type owl:Class ; + rdfs:comment "This class is deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "Msg"@en ; + rdfs:subClassOf vcard:TelephoneType ; + owl:deprecated true . + +[ rdf:type owl:AllDifferent ; + owl:distinctMembers ( spdx:annotationType_other spdx:annotationType_review ) +] . + +vcard:Intl rdf:type owl:Class ; + rdfs:comment "This class is deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "Intl"@en ; + rdfs:subClassOf vcard:Type ; + owl:deprecated true . 
+ +spdx:licenseComments rdf:type owl:DatatypeProperty ; + rdfs:comment "The licenseComments property allows the preparer of the SPDX document to describe why the licensing in spdx:licenseConcluded was chosen."@en ; + rdfs:domain spdx:SpdxItem ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +spdx:packageVerificationCodeValue + rdf:type owl:DatatypeProperty ; + rdfs:comment "The actual package verification code as a hex encoded value."@en ; + rdfs:domain spdx:PackageVerificationCode ; + rdfs:range xsd:hexBinary ; + vs:term_status "stable"@en . + +dcterms:creator rdf:type rdf:Property ; + rdfs:comment "An entity responsible for making the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Creator"@en ; + rdfs:subPropertyOf dcterms:contributor , dc:creator ; + dcam:rangeIncludes dcterms:Agent ; + dcterms:description "Recommended practice is to identify the creator with a URI. If this is not possible or feasible, a literal value that identifies the creator may be provided."@en ; + dcterms:issued "2008-01-14"^^xsd:date ; + owl:equivalentProperty foaf:maker . + +dcterms:contributor rdf:type rdf:Property ; + rdfs:comment "An entity responsible for making contributions to the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Contributor"@en ; + rdfs:subPropertyOf dc:contributor ; + dcam:rangeIncludes dcterms:Agent ; + dcterms:description "The guidelines for using names of persons or organizations as creators apply to contributors."@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +vcard:Car rdf:type owl:Class ; + rdfs:comment "This class is deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "Car"@en ; + rdfs:subClassOf vcard:TelephoneType ; + owl:deprecated true . 
+ +spdx:ListedLicense rdf:type owl:Class ; + rdfs:comment "A license which is included in the SPDX License List (http://spdx.org/licenses)."@en ; + rdfs:subClassOf spdx:License ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:deprecatedVersion + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:boolean ; + owl:onProperty spdx:isDeprecatedLicenseId + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:licenseTextHtml + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:standardLicenseHeaderHtml + ] ; + vs:term_status "stable"@en . + +vcard:hasValue rdf:type owl:ObjectProperty ; + rdfs:comment "Used to indicate the resource value of an object property that requires property parameters"@en ; + rdfs:isDefinedBy ; + rdfs:label "has value"@en . + +vcard:n rdf:type owl:ObjectProperty ; + rdfs:comment "This object property has been mapped"@en ; + rdfs:isDefinedBy ; + rdfs:label "name"@en ; + owl:equivalentProperty vcard:hasName . + +vcard:honorific-suffix + rdf:type owl:DatatypeProperty ; + rdfs:comment "The honorific suffix of the name associated with the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "honorific suffix"@en ; + rdfs:range xsd:string . + +dcterms:dateSubmitted + rdf:type rdf:Property ; + rdfs:comment "Date of submission of the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Date Submitted"@en ; + rdfs:range rdfs:Literal ; + rdfs:subPropertyOf dc:date , dcterms:date ; + dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty. 
Examples of resources to which a 'Date Submitted' may be relevant include a thesis (submitted to a university department) or an article (submitted to a journal)."@en ; + dcterms:issued "2002-07-13"^^xsd:date . + +prov:editorsDefinition + rdf:type owl:AnnotationProperty ; + rdfs:comment "When the prov-o term does not have a definition drawn from prov-dm, and the prov-o editor provides one."@en ; + rdfs:isDefinedBy ; + rdfs:subPropertyOf prov:definition . + +dcat:compressFormat rdf:type rdf:Property , owl:ObjectProperty ; + rdfs:comment "Il formato di compressione della distribuzione nel quale i dati sono in forma compressa, ad es. per ridurre le dimensioni del file da scaricare."@it , "Formát komprese souboru, ve kterém jsou data poskytována v komprimované podobě, např. ke snížení velikosti souboru ke stažení."@cs , "The compression format of the distribution in which the data is contained in a compressed form, e.g. to reduce the size of the downloadable file."@en , "Kompressionsformatet for distributionen som indeholder data i et komprimeret format, fx for at reducere størrelsen af downloadfilen."@da , "El formato de la distribución en el que los datos están en forma comprimida, e.g. 
para reducir el tamaño del archivo a bajar."@es ; + rdfs:domain dcat:Distribution ; + rdfs:isDefinedBy ; + rdfs:label "compression format"@en , "kompressionsformat"@da , "formato de compresión"@es , "formato di compressione"@it , "formát komprese"@cs ; + rdfs:range dcterms:MediaType ; + rdfs:subPropertyOf dcterms:format ; + skos:changeNote "Ny egenskab tilføjet i DCAT 2.0."@da , "Nueva propiedad agregada en DCAT 2.0."@es , "Nuova proprietà aggiunta in DCAT 2.0."@it , "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs , "New property added in DCAT 2.0."@en ; + skos:definition "Kompressionsformatet for distributionen som indeholder data i et komprimeret format, fx for at reducere størrelsen af downloadfilen."@da , "Formát komprese souboru, ve kterém jsou data poskytována v komprimované podobě, např. ke snížení velikosti souboru ke stažení."@cs , "El formato de la distribución en el que los datos están en forma comprimida, e.g. para reducir el tamaño del archivo a bajar."@es , "Il formato di compressione della distribuzione nel quale i dati sono in forma compressa, ad es. per ridurre le dimensioni del file da scaricare."@it , "The compression format of the distribution in which the data is contained in a compressed form, e.g. to reduce the size of the downloadable file."@en ; + skos:scopeNote "Questa proprietà deve essere utilizzata quando i file nella distribuzione sono compressi, ad es. in un file ZIP. Il formato DOVREBBE essere espresso usando un tipo di media come definito dal registro dei tipi di media IANA https://www.iana.org/assignments/media-types/, se disponibile."@it , "Denne egenskab kan anvendes når filerne i en distribution er blevet komprimeret, fx i en ZIP-fil. Formatet BØR udtrykkes ved en medietype som defineret i 'IANA media types registry', hvis der optræder en relevant medietype dér: https://www.iana.org/assignments/media-types/."@da , "This property is to be used when the files in the distribution are compressed, e.g. in a ZIP file. 
The format SHOULD be expressed using a media type as defined by IANA media types registry https://www.iana.org/assignments/media-types/, if available."@en , "Tato vlastnost se použije, když jsou soubory v distribuci komprimovány, např. v ZIP souboru. Formát BY MĚL být vyjádřen pomocí typu média definovaného v registru IANA https://www.iana.org/assignments/media-types/, pokud existuje."@cs , "Esta propiedad se debe usar cuando los archivos de la distribución están comprimidos, por ejemplo en un archivo ZIP. El formato DEBERÍA expresarse usando un 'media type', tales como los definidos en el registro IANA de 'media types' https://www.iana.org/assignments/media-types/, si está disponibles."@es . + +dcat:keyword rdf:type rdf:Property , owl:DatatypeProperty ; + rdfs:comment "Una parola chiave o un'etichetta per descrivere la risorsa."@it , "Et nøgleord eller tag til beskrivelse af en ressource."@da , "Μία λέξη-κλειδί ή μία ετικέτα που περιγράφει το σύνολο δεδομένων."@el , "Un mot-clé ou étiquette décrivant une ressource."@fr , "Una palabra clave o etiqueta que describe un recurso."@es , "Klíčové slovo nebo značka popisující zdroj."@cs , "A keyword or tag describing a resource."@en , "データセットを記述しているキーワードまたはタグ。"@ja , "كلمة مفتاحيه توصف قائمة البيانات"@ar ; + rdfs:isDefinedBy ; + rdfs:label "كلمة مفتاحية "@ar , "mot-clés "@fr , "λέξη-κλειδί"@el , "キーワード/タグ"@ja , "nøgleord"@da , "palabra clave"@es , "keyword"@en , "parola chiave"@it , "klíčové slovo"@cs ; + rdfs:range rdfs:Literal ; + rdfs:subPropertyOf dcterms:subject ; + skos:definition "Klíčové slovo nebo značka popisující zdroj."@cs , "データセットを記述しているキーワードまたはタグ。"@ja , "Un mot-clé ou étiquette décrivant une ressource."@fr , "A keyword or tag describing a resource."@en , "Una palabra clave o etiqueta que describe un recurso."@es , "Et nøgleord eller tag til beskrivelse af en ressource."@da , "كلمة مفتاحيه توصف قائمة البيانات"@ar , "Μία λέξη-κλειδί ή μία ετικέτα που περιγράφει το σύνολο δεδομένων."@el , "Una parola chiave o 
un'etichetta per descrivere la risorsa."@it . + +[ rdf:type owl:Axiom ; + owl:annotatedProperty rdfs:range ; + owl:annotatedSource prov:wasInfluencedBy ; + owl:annotatedTarget [ rdf:type owl:Class ; + owl:unionOf ( prov:Activity prov:Agent prov:Entity ) + ] ; + prov:definition "influencer: an identifier (o1) for an ancestor entity, activity, or agent that the former depends on;" ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-influence" +] . + +vcard:sort-string rdf:type owl:DatatypeProperty ; + rdfs:comment "To specify the string to be used for national-language-specific sorting. Used as a property parameter only."@en ; + rdfs:isDefinedBy ; + rdfs:label "sort as"@en ; + rdfs:range xsd:string . + +dcat:centroid rdf:type owl:DatatypeProperty , rdf:Property ; + rdfs:domain dcterms:Location ; + rdfs:label "centroid"@cs , "centroide"@it , "centroide"@es , "centroid"@en , "geometrisk tyngdepunkt"@da ; + rdfs:range rdfs:Literal ; + skos:altLabel "centroide"@da ; + skos:changeNote "Ny egenskab tilføjet i DCAT 2.0."@da , "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs , "Nuova proprietà aggiunta in DCAT 2.0."@it , "Nueva propiedad agregada en DCAT 2.0."@es , "New property added in DCAT 2.0."@en ; + skos:definition "Il centro geografico (centroide) di una risorsa."@it , "Geografický střed (centroid) zdroje."@cs , "Det geometrisk tyngdepunkt (centroid) for en ressource."@da , "El centro geográfico (centroide) de un recurso."@es , "The geographic center (centroid) of a resource."@en ; + skos:scopeNote "Rækkevidden for denne egenskab er bevidst generisk definere med det formål at tillade forskellige geokodninger. Geometrien kan eksempelvis repræsenteres som WKT (geosparql:asWKT [GeoSPARQL]) eller [GML] (geosparql:asGML [GeoSPARQL])."@da , "The range of this property is intentionally generic, with the purpose of allowing different geometry encodings. 
E.g., the geometry could be encoded with as WKT (geosparql:wktLiteral [GeoSPARQL]) or [GML] (geosparql:asGML [GeoSPARQL])."@en , "Obor hodnot této vlastnosti je úmyslně obecný, aby umožnil různé kódování geometrií. Geometrie by kupříkladu mohla být kódována jako WKT (geosparql:wktLiteral [GeoSPARQL]) či [GML] (geosparql:asGML [GeoSPARQL])."@cs , "El rango de esta propiedad es intencionalmente genérico con el objetivo de permitir distintas codificaciones geométricas. Por ejemplo, la geometría puede codificarse como WKT (geosparql:wktLiteral [GeoSPARQL]) o [GML] (geosparql:asGML [GeoSPARQL])."@es , "Il range di questa proprietà è volutamente generica, con lo scopo di consentire diverse codifiche geometriche. Ad esempio, la geometria potrebbe essere codificata con WKT (geosparql:wktLiteral [GeoSPARQL]) o [GML] (geosparql:asGML [GeoSPARQL])."@it . + +spdx:relationshipType_dataFile + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A is a data file used in SPDXRef-B. Replaced by relationshipType_dataFileOf"@en ; + owl:deprecated true ; + vs:term_status "deprecated"@en . + +dcterms:hasVersion rdf:type rdf:Property ; + rdfs:comment "A related resource that is a version, edition, or adaptation of the described resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Has Version"@en ; + rdfs:subPropertyOf dc:relation , dcterms:relation ; + dcterms:description "Changes in version imply substantive changes in content rather than differences in format. This property is intended to be used with non-literal values. This property is an inverse property of Is Version Of."@en ; + dcterms:issued "2000-07-11"^^xsd:date . + +skos:changeNote rdf:type owl:AnnotationProperty , rdf:Property ; + rdfs:isDefinedBy ; + rdfs:label "change note"@en ; + rdfs:subPropertyOf skos:note ; + skos:definition "A note about a modification to a concept."@en . 
+ +spdx:relationshipType_ancestorOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "A Relationship of relationshipType_ancestorOf expresses that an SPDXElement is an ancestor of (same lineage but pre-dates) the relatedSPDXElement. For example, an upstream File is an ancestor of a modified downstream File"@en ; + vs:term_status "stable"@en . + +vcard:hasUID rdf:type owl:ObjectProperty ; + rdfs:comment "To specify a value that represents a globally unique identifier corresponding to the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "has uid"@en . + +prov:Usage rdf:type owl:Class ; + rdfs:comment "An instance of prov:Usage provides additional descriptions about the binary prov:used relation from some prov:Activity to an prov:Entity that it used. For example, :keynote prov:used :podium; prov:qualifiedUsage [ a prov:Usage; prov:entity :podium; :foo :bar ]."@en ; + rdfs:isDefinedBy ; + rdfs:label "Usage" ; + rdfs:subClassOf prov:InstantaneousEvent , prov:EntityInfluence ; + prov:category "qualified" ; + prov:component "entities-activities" ; + prov:constraints "http://www.w3.org/TR/2013/REC-prov-constraints-20130430/#prov-dm-constraints-fig"^^xsd:anyURI ; + prov:definition "Usage is the beginning of utilizing an entity by an activity. Before usage, the activity had not begun to utilize this entity and could not have been affected by the entity."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-Usage"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-Usage"^^xsd:anyURI ; + prov:unqualifiedForm prov:used . + +time:TRS rdf:type owl:Class ; + rdfs:comment "A temporal reference system, such as a temporal coordinate system (with an origin, direction, and scale), a calendar-clock combination, or a (possibly hierarchical) ordinal system. 
\n\nThis is a stub class, representing the set of all temporal reference systems."@en , "Un sistema de referencia temporal, tal como un sistema de coordenadas temporales (con un origen, una dirección y una escala), una combinación calendario-reloj, o un sistema ordinal (posiblemente jerárquico).\n Esta clase comodín representa el conjunto de todos los sistemas de referencia temporal."@es ; + rdfs:label "Temporal Reference System"@en , "sistema de referencia temporal"@es ; + skos:definition "A temporal reference system, such as a temporal coordinate system (with an origin, direction, and scale), a calendar-clock combination, or a (possibly hierarchical) ordinal system. \n\nThis is a stub class, representing the set of all temporal reference systems."@en , "Un sistema de referencia temporal, tal como un sistema de coordenadas temporales (con un origen, una dirección y una escala), una combinación calendario-reloj, o un sistema ordinal (posiblemente jerárquico).\n Esta clase comodín representa el conjunto de todos los sistemas de referencia temporal."@es ; + skos:note "A taxonomy of temporal reference systems is provided in ISO 19108:2002 [ISO19108], including (a) calendar + clock systems; (b) temporal coordinate systems (i.e. numeric offset from an epoch); (c) temporal ordinal reference systems (i.e. ordered sequence of named intervals, not necessarily of equal duration)."@en , "En el ISO 19108:2002 [ISO19108] se proporciona una taxonomía de sistemas de referencia temporal, incluyendo (a) sistemas de calendario + reloj; (b) sistemas de coordenadas temporales (es decir, desplazamiento numérico a partir de una época); (c) sistemas de referencia ordinales temporales (es decir, secuencia ordenada de intervalos nombrados, no necesariamente de igual duración)."@es . + +vcard:given-name rdf:type owl:DatatypeProperty ; + rdfs:comment "The given name associated with the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "given name"@en ; + rdfs:range xsd:string . 
+ +dcterms:source rdf:type rdf:Property ; + rdfs:comment "A related resource from which the described resource is derived."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Source"@en ; + rdfs:subPropertyOf dc:source , dcterms:relation ; + dcterms:description "This property is intended to be used with non-literal values. The described resource may be derived from the related resource in whole or in part. Best practice is to identify the related resource by means of a URI or a string conforming to a formal identification system."@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +spdx:checksumAlgorithm_blake2b384 + rdf:type owl:NamedIndividual , spdx:ChecksumAlgorithm ; + rdfs:comment "Indicates the algorithm used was BLAKE2b-384."@en ; + vs:term_status "stable"@en . + +spdx:licenseExceptionText + rdf:type owl:DatatypeProperty ; + rdfs:comment "Full text of the license exception."@en ; + rdfs:domain spdx:LicenseException ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +dcterms:PhysicalMedium + rdf:type rdfs:Class ; + rdfs:comment "A physical material or carrier."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Physical Medium"@en ; + rdfs:subClassOf dcterms:MediaType ; + dcterms:description "Examples include paper, canvas, or DVD."@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +spdx:relationshipType_testOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A is used for testing SPDXRef-B."@en ; + vs:term_status "stable"@en . + +vcard:value rdf:type owl:DatatypeProperty ; + rdfs:comment "Used to indicate the literal value of a data property that requires property parameters"@en ; + rdfs:isDefinedBy ; + rdfs:label "value"@en . + +spdx:relationshipType_generates + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "A Relationship of relationshipType_generates expresses that an SPDXElement generates the relatedSPDXElement. 
For example, a source File generates a binary File."@en ; + vs:term_status "stable"@en . + +spdx:referenceCategory + rdf:type owl:ObjectProperty ; + rdfs:comment "Category for the external reference"@en ; + rdfs:domain spdx:ExternalRef ; + rdfs:range spdx:ReferenceCategory ; + vs:term_status "stable"@en . + +spdx:relationshipType_fileDeleted + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "A Relationship of relationshipType_fileDeleted expresses that the SPDXElement is a package where the relatedSPDXElement file has been removed. For example, a package has been patched to remove a file (the relatedSPDXElement), resulting in the patched package (the SPDXElement). This relationship is typically used to express the result of a patched package when the actual patchfile is not present."@en ; + vs:term_status "stable"@en . + +spdx:checksumAlgorithm_md6 + rdf:type owl:NamedIndividual , spdx:ChecksumAlgorithm ; + rdfs:comment "Indicates the algorithm used was MD6"@en ; + vs:term_status "stable" . 
+ +dcat:temporalResolution + rdf:type owl:DatatypeProperty ; + rdfs:comment "minimum time period resolvable in a dataset."@en , "minimální doba trvání rozlišitelná v datové sadě."@cs , "período de tiempo mínimo en el conjunto de datos."@es , "mindste tidsperiode der kan resolveres i datasættet."@da , "periodo di tempo minimo risolvibile in un set di dati."@it ; + rdfs:label "resolución temporal"@es , "tidslig opløsning"@da , "temporal resolution"@en , "časové rozlišení"@cs , "risoluzione temporale"@it ; + rdfs:range xsd:duration ; + skos:changeNote "Nueva propiedad añadida en DCAT 2.0."@es , "New property added in DCAT 2.0."@en , "Nuova proprietà aggiunta in DCAT 2.0."@it , "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs ; + skos:definition "minimální doba trvání rozlišitelná v datové sadě."@cs , "período de tiempo mínimo en el conjunto de datos."@es , "minimum time period resolvable in a dataset."@en , "periodo di tempo minimo risolvibile in un set di dati."@it , "mindste tidsperiode der kan resolveres i datasættet."@da ; + skos:editorialNote "Může se vyskytnout v popisu Datové sady nebo Distribuce, takže nebyl specifikován definiční obor."@cs , "Might appear in the description of a Dataset or a Distribution, so no domain is specified."@en , "Kan optræde i forbindelse med beskrivelse af datasættet eller datasætditributionen, så der er ikke angivet et domæne for egenskaben."@da ; + skos:scopeNote "Alternative temporal resolutions might be provided as different dataset distributions."@en , "Různá časová rozlišení mohou být poskytována jako různé distribuce datové sady."@cs , "Si el conjunto de datos es una serie temporal, debe corresponder al espaciado de los elementos de la serie. 
Para otro tipo de conjuntos de datos, esta propiedad indicará usualmente la menor diferencia de tiempo entre elementos en el dataset."@es , "Alternative tidslige opløsninger kan leveres som forskellige datasætdistributioner."@da , "Pokud je datová sada časovou řadou, měla by tato vlastnost odpovídat rozestupu položek v řadě. Pro ostatní druhy datových sad bude tato vlastnost obvykle indikovat nejmenší časovou vzdálenost mezi položkami této datové sady."@cs , "Distintas distribuciones del conjunto de datos pueden tener resoluciones temporales diferentes."@es , "If the dataset is a time-series this should correspond to the spacing of items in the series. For other kinds of dataset, this property will usually indicate the smallest time difference between items in the dataset."@en , "Se il set di dati è una serie temporale, questo dovrebbe corrispondere alla spaziatura degli elementi della serie. Per altri tipi di set di dati, questa proprietà di solito indica la più piccola differenza di tempo tra gli elementi nel set di dati."@it , "Hvis datasættet er en tidsserie, så bør denne egenskab svare til afstanden mellem elementerne i tidsserien. For andre typer af datasæt indikerer denne egenskab den mindste tidsforskel mellem elementer i datasættet."@da , "Risoluzioni temporali alternative potrebbero essere fornite come diverse distribuzioni di set di dati."@it . + +spdx:externalReferenceSite + rdf:type owl:DatatypeProperty ; + rdfs:comment "Website for the maintainers of the external reference site"@en ; + rdfs:domain spdx:ReferenceType ; + rdfs:range xsd:anyURI ; + vs:term_status "stable"@en . + +spdx:relationshipType_testcaseOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A is a test case used in testing SPDXRef-B."@en ; + vs:term_status "stable"@en . 
+ +time:intervalAfter rdf:type owl:ObjectProperty ; + rdfs:comment "If a proper interval T1 is intervalAfter another proper interval T2, then the beginning of T1 is after the end of T2."@en , "Si un intervalo propio T1 es posterior a otro intervalo propio T2, entonces el principio de T1 está después que el final de T2." ; + rdfs:domain time:ProperInterval ; + rdfs:label "interval after"@en , "intervalo posterior"@es ; + rdfs:range time:ProperInterval ; + rdfs:subPropertyOf time:after , time:intervalDisjoint ; + owl:inverseOf time:intervalBefore ; + skos:definition "If a proper interval T1 is intervalAfter another proper interval T2, then the beginning of T1 is after the end of T2."@en , "Si un intervalo propio T1 es posterior a otro intervalo propio T2, entonces el principio de T1 está después que el final de T2."@es . + +spdx:none rdf:type owl:NamedIndividual ; + rdfs:comment "Individual to indicate that no value is applicable for the Object." . + +locn:postCode rdf:type rdf:Property ; + rdfs:comment "The post code (a.k.a postal code, zip code etc.). Post codes are common elements in many countries' postal address systems. The domain of locn:postCode is locn:Address."@en ; + rdfs:domain locn:Address ; + rdfs:isDefinedBy ; + rdfs:label "post code"@en ; + rdfs:range rdfs:Literal ; + dcterms:identifier "locn:postCode" ; + vs:term_status "testing"@en . + +vcard:X400 rdf:type owl:Class ; + rdfs:comment "This class is deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "X400"@en ; + rdfs:subClassOf vcard:Type ; + owl:deprecated true . + +spdx:checksumAlgorithm_sha3_256 + rdf:type owl:NamedIndividual , spdx:ChecksumAlgorithm ; + rdfs:comment "Indicates the algorithm used was SHA3-256."@en ; + vs:term_status "stable"@en . 
+ +spdx:relationshipType_distributionArtifact + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "A Relationship of relationshipType_distributionArtifact expresses that distributing the SPDXElement requires that the relatedSPDXElement also be distributed. For example, distributing a binary File may require that a source tarball (another File) be made available with the distribution. "@en ; + vs:term_status "stable"@en . + +vcard:hasLanguage rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the language data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has language"@en . + +vcard:Work rdf:type owl:Class ; + rdfs:comment "This implies that the property is related to an individual's work place"@en ; + rdfs:isDefinedBy ; + rdfs:label "Work"@en ; + rdfs:subClassOf vcard:Type . + +time:DateTimeInterval + rdf:type owl:Class ; + rdfs:comment "DateTimeInterval is a subclass of ProperInterval, defined using the multi-element DateTimeDescription."@en , "'intervalo de fecha-hora' es una subclase de 'intervalo propio', definida utilizando el multi-elemento 'descripción de fecha-hora'."@es ; + rdfs:label "intervalo de fecha-hora"@es , "Date-time interval"@en ; + rdfs:subClassOf time:ProperInterval ; + skos:definition "DateTimeInterval is a subclass of ProperInterval, defined using the multi-element DateTimeDescription."@en , "'intervalo de fecha-hora' es una subclase de 'intervalo propio', definida utilizando el multi-elemento 'descripción de fecha-hora'."@es ; + skos:note ":DateTimeInterval can only be used for an interval whose limits coincide with a date-time element aligned to the calendar and timezone indicated. 
For example, while both have a duration of one day, the 24-hour interval beginning at midnight at the beginning of 8 May in Central Europe can be expressed as a :DateTimeInterval, but the 24-hour interval starting at 1:30pm cannot."@en , "'intervalo de fecha-hora' se puede utilizar sólo para un intervalo cuyos límites coinciden con un elemento de fecha-hora alineados con el calendario y la zona horaria indicados. Por ejemplo, aunque ambos tienen una duración de un día, el intervalo de 24 horas que empieza en la media noche del comienzo del 8 mayo en Europa Central se puede expresar como un 'intervalo de fecha-hora', el intervalo de 24 horas que empieza a las 1:30pm no."@es . + +vcard:org rdf:type owl:ObjectProperty ; + rdfs:comment "This object property has been mapped. Use the organization-name data property."@en ; + rdfs:isDefinedBy ; + rdfs:label "organization"@en ; + owl:equivalentProperty vcard:organization-name . + +rdfs:isDefinedBy rdf:type owl:AnnotationProperty . + +vcard:Tel rdf:type owl:Class ; + rdfs:comment "This class is deprecated. Use the hasTelephone object property."@en ; + rdfs:isDefinedBy ; + rdfs:label "Tel"@en ; + owl:deprecated true . + +prov:wasAssociatedWith + rdf:type owl:ObjectProperty ; + rdfs:comment "An prov:Agent that had some (unspecified) responsibility for the occurrence of this prov:Activity."@en ; + rdfs:domain prov:Activity ; + rdfs:isDefinedBy ; + rdfs:label "wasAssociatedWith" ; + rdfs:range prov:Agent ; + rdfs:subPropertyOf prov:wasInfluencedBy ; + owl:propertyChainAxiom ( prov:qualifiedAssociation prov:agent ) ; + owl:propertyChainAxiom ( prov:qualifiedAssociation prov:agent ) ; + prov:category "starting-point" ; + prov:component "agents-responsibility" ; + prov:inverse "wasAssociateFor" ; + prov:qualifiedForm prov:Association , prov:qualifiedAssociation . 
+ +vcard:PCS rdf:type owl:Class ; + rdfs:comment "This class is deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "PCS"@en ; + rdfs:subClassOf vcard:TelephoneType ; + owl:deprecated true . + +prov:Generation rdf:type owl:Class ; + rdfs:comment "An instance of prov:Generation provides additional descriptions about the binary prov:wasGeneratedBy relation from a generated prov:Entity to the prov:Activity that generated it. For example, :cake prov:wasGeneratedBy :baking; prov:qualifiedGeneration [ a prov:Generation; prov:activity :baking; :foo :bar ]."@en ; + rdfs:isDefinedBy ; + rdfs:label "Generation" ; + rdfs:subClassOf prov:ActivityInfluence , prov:InstantaneousEvent ; + prov:category "qualified" ; + prov:component "entities-activities" ; + prov:constraints "http://www.w3.org/TR/2013/REC-prov-constraints-20130430/#prov-dm-constraints-fig"^^xsd:anyURI ; + prov:definition "Generation is the completion of production of a new entity by an activity. This entity did not exist before generation and becomes available for usage after this generation."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-Generation"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-Generation"^^xsd:anyURI ; + prov:unqualifiedForm prov:wasGeneratedBy . + +dcterms:isRequiredBy rdf:type rdf:Property ; + rdfs:comment "A related resource that requires the described resource to support its function, delivery, or coherence."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Is Required By"@en ; + rdfs:subPropertyOf dc:relation , dcterms:relation ; + dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Requires."@en ; + dcterms:issued "2000-07-11"^^xsd:date . 
+ +dcterms:mediator rdf:type rdf:Property ; + rdfs:comment "An entity that mediates access to the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Mediator"@en ; + rdfs:subPropertyOf dcterms:audience ; + dcam:rangeIncludes dcterms:AgentClass ; + dcterms:description "In an educational context, a mediator might be a parent, teacher, teaching assistant, or care-giver."@en ; + dcterms:issued "2001-05-21"^^xsd:date . + +adms:Identifier rdf:type owl:Class ; + rdfs:comment "This is based on the UN/CEFACT Identifier class."@en ; + rdfs:isDefinedBy ; + rdfs:label "Identifier"@en . + +dcterms:URI rdf:type rdfs:Datatype ; + rdfs:comment "The set of identifiers constructed according to the generic syntax for Uniform Resource Identifiers as specified by the Internet Engineering Task Force."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "URI"@en ; + rdfs:seeAlso ; + dcterms:issued "2000-07-11"^^xsd:date . + +locn:adminUnitL2 rdf:type rdf:Property ; + rdfs:comment "The region of the address, usually a county, state or other such area that typically encompasses several localities. The domain of locn:adminUnitL2 is locn:Address and the range is a literal, conceptually defined by the INSPIRE Geographical Name data type."@en ; + rdfs:domain locn:Address ; + rdfs:isDefinedBy ; + rdfs:label "admin unit level 2"@en ; + dcterms:identifier "locn:adminUnitL2" ; + vs:term_status "testing"@en . + +dcterms:rights rdf:type rdf:Property ; + rdfs:comment "Information about rights held in and over the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Rights"@en ; + rdfs:subPropertyOf dc:rights ; + dcam:rangeIncludes dcterms:RightsStatement ; + dcterms:description "Typically, rights information includes a statement about various property rights associated with the resource, including intellectual property rights. Recommended practice is to refer to a rights statement with a URI. 
If this is not possible or feasible, a literal value (name, label, or short text) may be provided."@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +vcard:nickname rdf:type owl:DatatypeProperty ; + rdfs:comment "The nick name associated with the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "nickname"@en ; + rdfs:range xsd:string . + +spdx:relationshipType_fileModified + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "A Relationship of relationshipType_fileModified expresses that the SPDXElement is a file which is a modified version of the relatedSPDXElement file. For example, a file (the SPDXElement) has been patched to modify the contents of the original file (the SPDXElement). This relationship is typically used to express the result of a patched package when the actual patchfile is not present."@en ; + vs:term_status "stable"@en . + +adms:status rdf:type owl:ObjectProperty ; + rdfs:comment "The status of the Asset in the context of a particular workflow process."@en ; + rdfs:domain rdfs:Resource ; + rdfs:isDefinedBy ; + rdfs:label "status"@en ; + rdfs:range skos:Concept . + +prov:Agent rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Agent" ; + owl:disjointWith prov:InstantaneousEvent ; + prov:category "starting-point" ; + prov:component "agents-responsibility" ; + prov:definition "An agent is something that bears some form of responsibility for an activity taking place, for the existence of an entity, or for another agent's activity. "@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-agent"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-Agent"^^xsd:anyURI . + +spdx:ChecksumAlgorithm + rdf:type owl:Class ; + rdfs:comment "Algorithm for Checksums."@en ; + vs:term_status "stable"@en . 
+ +skos:narrowMatch rdf:type owl:ObjectProperty , rdf:Property ; + rdfs:isDefinedBy ; + rdfs:label "has narrower match"@en ; + rdfs:subPropertyOf skos:mappingRelation , skos:narrower ; + owl:inverseOf skos:broadMatch ; + skos:definition "skos:narrowMatch is used to state a hierarchical mapping link between two conceptual resources in different concept schemes."@en . + +dcat:themeTaxonomy rdf:type owl:ObjectProperty , rdf:Property ; + rdfs:comment "Il sistema di organizzazione della conoscenza (KOS) usato per classificare i dataset del catalogo."@it , "The knowledge organization system (KOS) used to classify catalog's datasets."@en , "Le systhème d'ogranisation de connaissances utilisé pour classifier les jeux de données du catalogue."@fr , "El sistema de organización del conocimiento utilizado para clasificar conjuntos de datos de catálogos."@es , "Vidensorganiseringssystem (KOS) som anvendes til at klassificere datasæt i kataloget."@da , "Systém organizace znalostí (KOS) použitý pro klasifikaci datových sad v katalogu."@cs , "カタログのデータセットを分類するために用いられる知識組織化体系(KOS;knowledge organization system)。"@ja , "لائحة التصنيفات المستخدمه لتصنيف قوائم البيانات ضمن الفهرس"@ar , "Το σύστημα οργάνωσης γνώσης που χρησιμοποιείται για την κατηγοριοποίηση των συνόλων δεδομένων του καταλόγου."@el ; + rdfs:domain dcat:Catalog ; + rdfs:isDefinedBy ; + rdfs:label "taxonomie de thèmes"@fr , "tassonomia dei temi"@it , "テーマ"@ja , "theme taxonomy"@en , "قائمة التصنيفات"@ar , "Ταξινομία θεματικών κατηγοριών."@el , "emnetaksonomi"@da , "taxonomie témat"@cs , "taxonomía de temas"@es ; + rdfs:range rdfs:Resource ; + sdo:rangeIncludes skos:ConceptScheme , owl:Ontology , skos:Collection ; + skos:altLabel "temataksonomi"@da ; + skos:definition "Vidensorganiseringssystem (KOS) som anvendes til at klassificere datasæt i kataloget."@da , "لائحة التصنيفات المستخدمه لتصنيف قوائم البيانات ضمن الفهرس"@ar , "El sistema de organización del conocimiento utilizado para clasificar conjuntos de datos de 
catálogos."@es , "Systém organizace znalostí (KOS) použitý pro klasifikaci datových sad v katalogu."@cs , "カタログのデータセットを分類するために用いられる知識組織化体系(KOS;knowledge organization system)。"@ja , "The knowledge organization system (KOS) used to classify catalog's datasets."@en , "Il sistema di organizzazione della conoscenza (KOS) usato per classificare i dataset del catalogo."@it , "Το σύστημα οργάνωσης γνώσης που χρησιμοποιείται για την κατηγοριοποίηση των συνόλων δεδομένων του καταλόγου."@el , "Le systhème d'ogranisation de connaissances utilisé pour classifier les jeux de données du catalogue."@fr ; + skos:scopeNote "Det anbefales at taksonomien organiseres i et skos:ConceptScheme, skos:Collection, owl:Ontology eller lignende, som giver mulighed for at ethvert medlem af taksonomien kan forsynes med en IRI og udgives som linked-data."@da , "It is recommended that the taxonomy is organized in a skos:ConceptScheme, skos:Collection, owl:Ontology or similar, which allows each member to be denoted by an IRI and published as linked-data."@en , "Je doporučeno, aby byla taxonomie vyjádřena jako skos:ConceptScheme, skos:Collection, owl:Ontology nebo podobné, aby mohla být každá položka identifikována pomocí IRI a publikována jako propojená data."@cs , "Se recomienda que la taxonomía se organice como un skos:ConceptScheme, skos:Collection, owl:Ontology o similar, los cuáles permiten que cada miembro se denote con una IRI y se publique como datos enlazados."@es , "Si raccomanda che la tassonomia sia organizzata in uno skos:ConceptScheme, skos:Collection, owl:Ontology o simili, che permette ad ogni membro di essere indicato da un IRI e pubblicato come linked-data."@it . + +spdx:packageFileName rdf:type owl:DatatypeProperty ; + rdfs:comment "The base name of the package file name. For example, zlib-1.2.5.tar.gz."@en ; + rdfs:domain spdx:Package ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . 
+ +prov:influenced rdf:type owl:ObjectProperty ; + rdfs:isDefinedBy ; + rdfs:label "influenced" ; + owl:inverseOf prov:wasInfluencedBy ; + prov:category "expanded" ; + prov:component "agents-responsibility" ; + prov:inverse "wasInfluencedBy" ; + prov:sharesDefinitionWith prov:Influence . + +[ rdf:type owl:Axiom ; + rdfs:comment "Attribution is a particular case of trace (see http://www.w3.org/TR/prov-dm/#concept-trace), in the sense that it links an entity to the agent that ascribed it." ; + owl:annotatedProperty rdfs:subPropertyOf ; + owl:annotatedSource prov:wasAttributedTo ; + owl:annotatedTarget prov:wasInfluencedBy ; + prov:definition "IF wasAttributedTo(e2,ag1,aAttr) holds, THEN wasInfluencedBy(e2,ag1) also holds. " +] . + +dcterms:accessRights rdf:type rdf:Property ; + rdfs:comment "Information about who can access the resource or an indication of its security status."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Access Rights"@en ; + rdfs:subPropertyOf dc:rights , dcterms:rights ; + dcam:rangeIncludes dcterms:RightsStatement ; + dcterms:description "Access Rights may include information regarding access or restrictions based on privacy, security, or other policies."@en ; + dcterms:issued "2003-02-15"^^xsd:date . + +spdx:checksumAlgorithm_sha384 + rdf:type owl:NamedIndividual , spdx:ChecksumAlgorithm ; + rdfs:comment "Indicates the algorithm used was SHA384"@en ; + vs:term_status "stable"@en . + +[ rdf:type owl:Axiom ; + rdfs:comment "Quotation is a particular case of derivation (see http://www.w3.org/TR/prov-dm/#term-quotation) in which an entity is derived from an original entity by copying, or \"quoting\", some or all of it. " ; + owl:annotatedProperty rdfs:subPropertyOf ; + owl:annotatedSource prov:wasQuotedFrom ; + owl:annotatedTarget prov:wasDerivedFrom +] . 
+ +skos:hiddenLabel rdf:type owl:AnnotationProperty , rdf:Property ; + rdfs:comment "The range of skos:hiddenLabel is the class of RDF plain literals."@en , "skos:prefLabel, skos:altLabel and skos:hiddenLabel are pairwise disjoint properties."@en ; + rdfs:isDefinedBy ; + rdfs:label "hidden label"@en ; + rdfs:subPropertyOf rdfs:label ; + skos:definition "A lexical label for a resource that should be hidden when generating visual displays of the resource, but should still be accessible to free text search operations."@en . + +prov:Plan rdf:type owl:Class ; + rdfs:comment "There exist no prescriptive requirement on the nature of plans, their representation, the actions or steps they consist of, or their intended goals. Since plans may evolve over time, it may become necessary to track their provenance, so plans themselves are entities. Representing the plan explicitly in the provenance can be useful for various tasks: for example, to validate the execution as represented in the provenance record, to manage expectation failures, or to provide explanations."@en ; + rdfs:isDefinedBy ; + rdfs:label "Plan" ; + rdfs:subClassOf prov:Entity ; + prov:category "expanded" , "qualified" ; + prov:component "agents-responsibility" ; + prov:definition "A plan is an entity that represents a set of actions or steps intended by one or more agents to achieve some goals." ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-Association"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-Association"^^xsd:anyURI . + +vcard:anniversary rdf:type owl:DatatypeProperty ; + rdfs:comment "The date of marriage, or equivalent, of the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "anniversary"@en ; + rdfs:range [ rdf:type rdfs:Datatype ; + owl:unionOf ( xsd:dateTime xsd:gYear ) + ] . + +prov:inverse rdf:type owl:AnnotationProperty ; + rdfs:comment "PROV-O does not define all property inverses. 
The directionalities defined in PROV-O should be given preference over those not defined. However, if users wish to name the inverse of a PROV-O property, the local name given by prov:inverse should be used."@en ; + rdfs:isDefinedBy ; + rdfs:seeAlso . + +dcterms:accrualPeriodicity + rdf:type rdf:Property ; + rdfs:comment "The frequency with which items are added to a collection."@en ; + rdfs:domain dctype:Collection ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Accrual Periodicity"@en ; + dcam:rangeIncludes dcterms:Frequency ; + dcterms:description "Recommended practice is to use a value from the Collection Description Frequency Vocabulary [[DCMI-COLLFREQ](https://dublincore.org/groups/collections/frequency/)]."@en ; + dcterms:issued "2005-06-13"^^xsd:date . + +vs:term_status rdf:type owl:AnnotationProperty . + +skos:ConceptScheme rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Concept Scheme"@en ; + owl:disjointWith skos:Concept ; + skos:definition "A set of concepts, optionally including statements about semantic relationships between those concepts."@en ; + skos:example "Thesauri, classification schemes, subject heading lists, taxonomies, 'folksonomies', and other types of controlled vocabulary are all examples of concept schemes. Concept schemes are also embedded in glossaries and terminologies."@en ; + skos:scopeNote "A concept scheme may be defined to include concepts from different sources."@en . + +dcterms:ProvenanceStatement + rdf:type rdfs:Class ; + rdfs:comment "Any changes in ownership and custody of a resource since its creation that are significant for its authenticity, integrity, and interpretation."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Provenance Statement"@en ; + dcterms:issued "2008-01-14"^^xsd:date . 
+ +dcterms:identifier rdf:type rdf:Property ; + rdfs:comment "An unambiguous reference to the resource within a given context."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Identifier"@en ; + rdfs:range rdfs:Literal ; + rdfs:subPropertyOf dc:identifier ; + dcterms:description "Recommended practice is to identify the resource by means of a string conforming to an identification system. Examples include International Standard Book Number (ISBN), Digital Object Identifier (DOI), and Uniform Resource Name (URN). Persistent identifiers should be provided as HTTP URIs."@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +time:hasTRS rdf:type owl:FunctionalProperty , owl:ObjectProperty ; + rdfs:comment "El sistema de referencia temporal utilizado por una posición temporal o descripción de extensión."@es , "The temporal reference system used by a temporal position or extent description. "@en ; + rdfs:domain [ rdf:type owl:Class ; + owl:unionOf ( time:TemporalPosition time:GeneralDurationDescription ) + ] ; + rdfs:label "sistema de referencia temporal utilizado"@es , "Temporal reference system used"@en ; + rdfs:range time:TRS ; + skos:definition "The temporal reference system used by a temporal position or extent description. "@en , "El sistema de referencia temporal utilizado por una posición temporal o descripción de extensión."@es . + +spdx:timestamp rdf:type owl:DatatypeProperty ; + rdfs:comment "Timestamp"@en ; + rdfs:domain spdx:CrossRef ; + rdfs:range xsd:dateTime . + +spdx:standardLicenseHeaderHtml + rdf:type owl:DatatypeProperty ; + rdfs:comment "HTML representation of the standard license header"@en ; + rdfs:domain spdx:ListedLicense ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +vcard:Organization rdf:type owl:Class ; + rdfs:comment "An object representing an organization. 
An organization is a single entity, and might represent a business or government, a department or division within a business or government, a club, an association, or the like.\n"@en ; + rdfs:isDefinedBy ; + rdfs:label "Organization"@en ; + rdfs:subClassOf vcard:Kind . + +spdx:Review rdf:type owl:Class ; + rdfs:comment "This class has been deprecated in favor of an Annotation with an Annotation type of review."@en ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:dateTime ; + owl:onProperty spdx:reviewDate ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:reviewer + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty rdfs:comment + ] ; + owl:deprecated true ; + vs:term_status "deprecated"@en . + +spdx:checksumAlgorithm_sha224 + rdf:type owl:NamedIndividual , spdx:ChecksumAlgorithm ; + rdfs:comment "Indicates the algorithm used was SHA224"@en ; + vs:term_status "stable"@en . + +spdx:checksumAlgorithm_sha1 + rdf:type owl:NamedIndividual , spdx:ChecksumAlgorithm ; + rdfs:comment "Indicates the algorithm used was SHA-1" ; + vs:term_status "stable" . + +prov:aq rdf:type owl:AnnotationProperty ; + rdfs:isDefinedBy ; + rdfs:subPropertyOf rdfs:seeAlso . + +spdx:Snippet rdf:type owl:Class ; + rdfs:comment "The set of bytes in a file. 
The name of the snippet is the name of the file appended with the byte range in parenthesis (ie: \"./file/name(2145:5532)\")"@en ; + rdfs:subClassOf spdx:SpdxItem ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onClass spdx:AnyLicenseInfo ; + owl:onProperty spdx:licenseInfoInSnippet + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onClass ; + owl:onProperty spdx:range + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onClass spdx:File ; + owl:onProperty spdx:snippetFromFile ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + vs:term_status "stable"@en . + +time:inXSDDateTimeStamp + rdf:type owl:DatatypeProperty ; + rdfs:comment "Position of an instant, expressed using xsd:dateTimeStamp"@en , "Posición de un instante, expresado utilizando xsd:dateTimeStamp."@es ; + rdfs:domain time:Instant ; + rdfs:label "in XSD Date-Time-Stamp"@en , "en fecha-sello de tiempo XSD"@es ; + rdfs:range xsd:dateTimeStamp ; + skos:definition "Position of an instant, expressed using xsd:dateTimeStamp"@en , "Posición de un instante, expresado utilizando xsd:dateTimeStamp."@es . 
+ +dcat:bbox rdf:type rdf:Property , owl:DatatypeProperty ; + rdfs:domain dcterms:Location ; + rdfs:label "bounding box"@da , "bounding box"@en , "cuadro delimitador"@es , "ohraničení oblasti"@cs , "quadro di delimitazione"@it ; + rdfs:range rdfs:Literal ; + skos:changeNote "Ny egenskab tilføjet i DCAT 2.0."@da , "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs , "Propiedad nueva agregada en DCAT 2.0."@es , "New property added in DCAT 2.0."@en , "Nuova proprietà aggiunta in DCAT 2.0."@it ; + skos:definition "Den geografiske omskrevne firkant af en ressource."@da , "Il riquadro di delimitazione geografica di una risorsa."@it , "Ohraničení geografické oblasti zdroje."@cs , "El cuadro delimitador geográfico para un recurso."@es , "The geographic bounding box of a resource."@en ; + skos:scopeNote "El rango de esta propiedad es intencionalmente genérico con el propósito de permitir distintas codificaciones geométricas. Por ejemplo, la geometría puede ser codificada como WKT (geosparql:wktLiteral [GeoSPARQL]) o [GML] (geosparql:asGML [GeoSPARQL])."@es , "The range of this property is intentionally generic, with the purpose of allowing different geometry encodings. E.g., the geometry could be encoded with as WKT (geosparql:wktLiteral [GeoSPARQL]) or [GML] (geosparql:asGML [GeoSPARQL])."@en , "Il range di questa proprietà è volutamente generica, con lo scopo di consentire diverse codifiche geometriche. Ad esempio, la geometria potrebbe essere codificata con WKT (geosparql:wktLiteral [GeoSPARQL]) o [GML] (geosparql:asGML [GeoSPARQL])."@it , "Obor hodnot této vlastnosti je úmyslně obecný, aby umožnil různé kódování geometrií. Geometrie by kupříkladu mohla být kódována jako WKT (geosparql:wktLiteral [GeoSPARQL]) či [GML] (geosparql:asGML [GeoSPARQL])."@cs , "Rækkevidden for denne egenskab er bevidst generisk defineret med det formål at tillade forskellige kodninger af geometrier. 
Geometrien kan eksempelvis repræsenteres som WKT (geosparql:asWKT [GeoSPARQL]) eller [GML] (geosparql:asGML [GeoSPARQL])."@da . + +spdx:SimpleLicensingInfo + rdf:type owl:Class ; + rdfs:comment "The SimpleLicenseInfo class includes all resources that represent simple, atomic, licensing information."@en ; + rdfs:subClassOf spdx:AnyLicenseInfo ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onClass spdx:CrossRef ; + owl:onProperty spdx:crossRef + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:anyURI ; + owl:onProperty rdfs:seeAlso + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:licenseId ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:name + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty rdfs:comment + ] ; + vs:term_status "stable"@en . + +locn:Geometry rdf:type rdfs:Class ; + rdfs:comment "The locn:Geometry class provides the means to identify a location as a point, line, polygon, etc. expressed using coordinates in some coordinate reference system."@en ; + rdfs:isDefinedBy ; + rdfs:label "Geometry"@en ; + dcterms:identifier "locn:Geometry" ; + vann:usageNote "This class defines the notion of \"geometry\" at the conceptual level, and it shall be encoded by using different formats (see usage note of the locn:geometry property)."@en ; + vs:term_status "unstable"@en . 
+ +time:intervalDuring rdf:type owl:ObjectProperty ; + rdfs:comment "Si un intervalo propio T1 está durante otro intervalo propio T2, entonces del principio de T1 está después del principio de T2, y el final de T1 está antes que el final de T2."@es , "If a proper interval T1 is intervalDuring another proper interval T2, then the beginning of T1 is after the beginning of T2, and the end of T1 is before the end of T2."@en ; + rdfs:domain time:ProperInterval ; + rdfs:label "intervalo durante"@es , "interval during"@en ; + rdfs:range time:ProperInterval ; + rdfs:subPropertyOf time:intervalIn ; + owl:inverseOf time:intervalContains ; + skos:definition "Si un intervalo propio T1 está durante otro intervalo propio T2, entonces del principio de T1 está después del principio de T2, y el final de T1 está antes que el final de T2."@es , "If a proper interval T1 is intervalDuring another proper interval T2, then the beginning of T1 is after the beginning of T2, and the end of T1 is before the end of T2."@en . + +spdx:relationshipType_contains + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "A Relationship of relationshipType_contains expresses that an SPDXElement contains the relatedSPDXElement. For example, a Package contains a File. (relationshipType_contains introduced in SPDX 2.0 deprecates property 'hasFile' from SPDX 1.2)"@en ; + vs:term_status "stable"@en . 
+ +dcat:catalog rdf:type owl:ObjectProperty ; + rdfs:comment "Un catálogo cuyo contenido es de interés en el contexto del catálogo que está siendo descripto."@es , "Un catalogo i cui contenuti sono di interesse nel contesto di questo catalogo."@it , "Et katalog hvis indhold er relevant i forhold til det aktuelle katalog."@da , "Katalog, jehož obsah je v kontextu tohoto katalogu zajímavý."@cs , "A catalog whose contents are of interest in the context of this catalog."@en ; + rdfs:domain dcat:Catalog ; + rdfs:label "catálogo"@es , "catalogo"@it , "katalog"@cs , "katalog"@da , "catalog"@en ; + rdfs:range dcat:Catalog ; + rdfs:subPropertyOf rdfs:member , dcterms:hasPart ; + skos:altLabel "har delkatalog"@da ; + skos:changeNote "Nuova proprietà aggiunta in DCAT 2.0."@it , "New property added in DCAT 2.0."@en , "Nueva propiedad agregada en DCAT 2.0."@es , "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs ; + skos:definition "Un catalogo i cui contenuti sono di interesse nel contesto di questo catalogo."@it , "Katalog, jehož obsah je v kontextu tohoto katalogu zajímavý."@cs , "Et katalog hvis indhold er relevant i forhold til det aktuelle katalog."@da , "Un catálogo cuyo contenido es de interés en el contexto del catálogo que está siendo descripto."@es , "A catalog whose contents are of interest in the context of this catalog."@en . + +time:weeks rdf:type owl:DatatypeProperty ; + rdfs:comment "length of, or element of the length of, a temporal extent expressed in weeks"@en , "Longitud de, o elemento de la longitud de, una extensión temporal expresada en semanas."@es ; + rdfs:domain time:GeneralDurationDescription ; + rdfs:label "weeks duration"@en , "duración en semanas"@es ; + rdfs:range xsd:decimal . 
+ + + rdf:type sh:NodeShape ; + sh:name "Catalog Record"@en ; + sh:property [ sh:maxCount 1 ; + sh:minCount 1 ; + sh:node ; + sh:path foaf:primaryTopic ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path dcterms:source ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcterms:language ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path adms:status ; + sh:severity sh:Violation + ] ; + sh:property [ sh:nodeKind sh:Literal ; + sh:path dcterms:description ; + sh:severity sh:Violation + ] ; + sh:property [ sh:nodeKind sh:Literal ; + sh:path dcterms:title ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:minCount 1 ; + sh:node ; + sh:path dcterms:modified ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path dcterms:conformsTo ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:node ; + sh:path dcterms:issued ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:CatalogRecord . + +spdx:SpdxElement rdf:type owl:Class ; + rdfs:comment "An SpdxElement is any thing described in SPDX, either a document or an SpdxItem. SpdxElements can be related to other SpdxElements."@en ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onClass spdx:Annotation ; + owl:onProperty spdx:annotation + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onClass spdx:Relationship ; + owl:onProperty spdx:relationship + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:name ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty rdfs:comment + ] ; + vs:term_status "stable" . 
+ +time:monthOfYear rdf:type owl:ObjectProperty ; + rdfs:comment "The month of the year, whose value is a member of the class time:MonthOfYear"@en , "El mes del año, cuyo valor es un miembro de la clase 'mes del año'."@es ; + rdfs:domain time:GeneralDateTimeDescription ; + rdfs:label "month of year"@en , "mes del año"@es ; + rdfs:range time:MonthOfYear ; + skos:definition "The month of the year, whose value is a member of the class time:MonthOfYear"@en , "El mes del año, cuyo valor es un miembro de la clase 'mes del año'."@es ; + skos:editorialNote "Característica arriesgada - añadida en la revisión de 2017, y todavía no ampliamente utilizada."@es , "Feature at risk - added in 2017 revision, and not yet widely used. "@en . + +skos:member rdf:type owl:ObjectProperty , rdf:Property ; + rdfs:domain skos:Collection ; + rdfs:isDefinedBy ; + rdfs:label "has member"@en ; + rdfs:range [ rdf:type owl:Class ; + owl:unionOf ( skos:Concept skos:Collection ) + ] ; + skos:definition "Relates a collection to one of its members."@en . + +vcard:organization-name + rdf:type owl:DatatypeProperty ; + rdfs:comment "To specify the organizational name associated with the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "organization name"@en ; + rdfs:range xsd:string . + +dcterms:hasPart rdf:type rdf:Property ; + rdfs:comment "A related resource that is included either physically or logically in the described resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Has Part"@en ; + rdfs:subPropertyOf dc:relation , dcterms:relation ; + dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Is Part Of."@en ; + dcterms:issued "2000-07-11"^^xsd:date . 
+ +prov:qualifiedPrimarySource + rdf:type owl:ObjectProperty ; + rdfs:comment "If this Entity prov:hadPrimarySource Entity :e, then it can qualify how using prov:qualifiedPrimarySource [ a prov:PrimarySource; prov:entity :e; :foo :bar ]."@en ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "qualifiedPrimarySource" ; + rdfs:range prov:PrimarySource ; + rdfs:subPropertyOf prov:qualifiedInfluence ; + prov:category "qualified" ; + prov:component "derivations" ; + prov:inverse "qualifiedSourceOf" ; + prov:sharesDefinitionWith prov:PrimarySource ; + prov:unqualifiedForm prov:hadPrimarySource . + +time:hour rdf:type owl:DatatypeProperty ; + rdfs:comment "Hour position in a calendar-clock system."@en , "Posición de hora en un sistema calendario-reloj."@es ; + rdfs:domain time:GeneralDateTimeDescription ; + rdfs:label "hour"@en , "hora"@es ; + rdfs:range xsd:nonNegativeInteger ; + skos:definition "Hour position in a calendar-clock system."@en , "Posición de hora en un sistema calendario-reloj."@es . + +dcat:endDate rdf:type rdf:Property , owl:DatatypeProperty ; + rdfs:domain dcterms:PeriodOfTime ; + rdfs:label "data di fine"@it , "datum konce"@cs , "slutdato"@da , "end date"@en , "fecha final"@es ; + rdfs:range rdfs:Literal ; + skos:altLabel "sluttidspunkt"@da ; + skos:changeNote "New property added in DCAT 2.0."@en , "Nueva propiedad agregada en DCAT 2.0."@es , "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs , "Nuova proprietà aggiunta in DCAT 2.0."@it , "Ny egenskab i DCAT 2.0."@da ; + skos:definition "El fin del período."@es , "Slutningen på perioden."@da , "La fine del periodo."@it , "Konec doby trvání."@cs , "The end of the period."@en ; + skos:scopeNote "La range di questa proprietà è volutamente generico, con lo scopo di consentire diversi livelli di precisione temporale per specificare la fine di un periodo. 
Ad esempio, può essere espresso con una data (xsd:date), una data e un'ora (xsd:dateTime), o un anno (xsd:gYear)."@it , "Obor hodnot této vlastnosti je úmyslně obecný, aby umožnil různé úrovně časového rozlišení pro specifikaci konce doby trvání. Ten může být kupříkladu vyjádřen datumem (xsd:date), datumem a časem (xsd:dateTime) či rokem (xsd:gYear)."@cs , "Rækkeviden for denne egenskab er bevidst generisk defineret med det formål at tillade forskellige niveauer af tidslig præcision ifm. angivelse af slutdatoen for en periode. Den kan eksempelvis udtrykkes som en dato (xsd:date), en dato og et tidspunkt (xsd:dateTime), eller et årstal (xsd:gYear)."@da , "El rango de esta propiedad es intencionalmente genérico con el propósito de permitir distintos niveles de precisión temporal para especificar el fin del período. Por ejemplo, puede expresarse como una fecha (xsd:date), una fecha y un tiempo (xsd:dateTime), o un año (xsd:gYear)."@es , "The range of this property is intentionally generic, with the purpose of allowing different level of temporal precision for specifying the end of a period. E.g., it can be expressed with a date (xsd:date), a date and time (xsd:dateTime), or a year (xsd:gYear)."@en . + +prov:End rdf:type owl:Class ; + rdfs:comment "An instance of prov:End provides additional descriptions about the binary prov:wasEndedBy relation from some ended prov:Activity to an prov:Entity that ended it. 
For example, :ball_game prov:wasEndedBy :buzzer; prov:qualifiedEnd [ a prov:End; prov:entity :buzzer; :foo :bar; prov:atTime '2012-03-09T08:05:08-05:00'^^xsd:dateTime ]."@en ; + rdfs:isDefinedBy ; + rdfs:label "End" ; + rdfs:subClassOf prov:EntityInfluence , prov:InstantaneousEvent ; + prov:category "qualified" ; + prov:component "entities-activities" ; + prov:constraints "http://www.w3.org/TR/2013/REC-prov-constraints-20130430/#prov-dm-constraints-fig"^^xsd:anyURI ; + prov:definition "End is when an activity is deemed to have been ended by an entity, known as trigger. The activity no longer exists after its end. Any usage, generation, or invalidation involving an activity precedes the activity's end. An end may refer to a trigger entity that terminated the activity, or to an activity, known as ender that generated the trigger."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-End"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-End"^^xsd:anyURI ; + prov:unqualifiedForm prov:wasEndedBy . + +prov:invalidatedAtTime + rdf:type owl:DatatypeProperty ; + rdfs:comment "The time at which an entity was invalidated (i.e., no longer usable)."@en ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "invalidatedAtTime" ; + rdfs:range xsd:dateTime ; + prov:category "expanded" ; + prov:component "entities-activities" ; + prov:editorialNote "It is the intent that the property chain holds: (prov:qualifiedInvalidation o prov:atTime) rdfs:subPropertyOf prov:invalidatedAtTime."@en ; + prov:qualifiedForm prov:atTime , prov:Invalidation . + +time:after rdf:type owl:ObjectProperty ; + rdfs:comment "Gives directionality to time. If a temporal entity T1 is after another temporal entity T2, then the beginning of T1 is after the end of T2."@en , "Asume una dirección en el tiempo. 
Si una entidad temporal T1 está después de otra entidad temporal T2, entonces el principio de T1 está después del final de T2."@es ; + rdfs:domain time:TemporalEntity ; + rdfs:label "después"@es , "after"@en ; + rdfs:range time:TemporalEntity ; + owl:inverseOf time:before ; + skos:definition "Asume una dirección en el tiempo. Si una entidad temporal T1 está después de otra entidad temporal T2, entonces el principio de T1 está después del final de T2."@es , "Gives directionality to time. If a temporal entity T1 is after another temporal entity T2, then the beginning of T1 is after the end of T2."@en . + +prov:agent rdf:type owl:ObjectProperty ; + rdfs:domain prov:AgentInfluence ; + rdfs:isDefinedBy ; + rdfs:label "agent" ; + rdfs:range prov:Agent ; + rdfs:subPropertyOf prov:influencer ; + prov:category "qualified" ; + prov:editorialNote "This property behaves in spirit like rdf:object; it references the object of a prov:wasInfluencedBy triple."@en ; + prov:editorsDefinition "The prov:agent property references an prov:Agent which influenced a resource. This property applies to an prov:AgentInfluence, which is given by a subproperty of prov:qualifiedInfluence from the influenced prov:Entity, prov:Activity or prov:Agent."@en ; + prov:inverse "agentOfInfluence" . + +rdfs:seeAlso rdf:type owl:AnnotationProperty , owl:DatatypeProperty ; + rdfs:comment ""@en , "rdfs:seeAlso fully represents the ISA Programme Location Core Vocabulary concept of a geographic identifier."@en ; + rdfs:isDefinedBy rdfs: ; + rdfs:label "geographic identifier"@en ; + dcterms:identifier "rdfs:seeAlso" ; + vann:usageNote "Used in the ISA Programme Location Core Vocabulary to provide a URI that identifies the location. This should be expressed using the rdfs:seeAlso property unless the identifier is already the subject of the description. 
Examples include URIs from GeoNames.org and DBpedia such as http://dbpedia.org/resource/ISO_3166-2:XX where XX is the ISO 3166 two character code for a country."@en ; + vs:term_status "unstable"@en . + +vcard:fn rdf:type owl:DatatypeProperty ; + rdfs:comment "The formatted text corresponding to the name of the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "formatted name"@en ; + rdfs:range xsd:string . + +skos:definition rdf:type owl:AnnotationProperty , rdf:Property ; + rdfs:isDefinedBy ; + rdfs:label "definition"@en ; + rdfs:subPropertyOf skos:note ; + skos:definition "A statement or formal explanation of the meaning of a concept."@en . + +time:TemporalPosition + rdf:type owl:Class ; + rdfs:comment "A position on a time-line"@en , "Una posición sobre una línea de tiempo."@es ; + rdfs:label "Temporal position"@en , "posición temporal"@es ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:hasTRS + ] ; + skos:definition "A position on a time-line"@en , "Una posición sobre una línea de tiempo."@es . + +spdx:DisjunctiveLicenseSet + rdf:type owl:Class ; + rdfs:comment "A DisjunctiveLicenseSet represents a set of licensing information where only one license applies at a time. This class implies that the recipient gets to choose one of these licenses they would prefer to use."@en ; + rdfs:subClassOf spdx:AnyLicenseInfo , rdfs:Container ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "2"^^xsd:nonNegativeInteger ; + owl:onClass spdx:AnyLicenseInfo ; + owl:onProperty spdx:member + ] ; + vs:term_status "stable"@en . + + + rdf:type sh:NodeShape ; + sh:name "Licence Document"@en ; + sh:property [ sh:path dcterms:type ; + sh:severity sh:Violation + ] ; + sh:targetClass dcterms:LicenseDocument . 
+ +spdx:relationshipType_buildToolOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "To be used when SPDXRef-A is used to to build SPDXRef-B."@en ; + vs:term_status "stable"@en . + +spdx:originator rdf:type owl:DatatypeProperty ; + rdfs:comment "The name and, optionally, contact information of the person or organization that originally created the package. Values of this property must conform to the agent and tool syntax."@en ; + rdfs:domain spdx:Package ; + rdfs:range xsd:string . + +dcterms:MediaType rdf:type rdfs:Class ; + rdfs:comment "A file format or physical medium."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Media Type"@en ; + rdfs:subClassOf dcterms:MediaTypeOrExtent ; + dcterms:issued "2008-01-14"^^xsd:date . + +spdx:SpdxDocument rdf:type owl:Class ; + rdfs:comment "An SpdxDocument is a summary of the contents, provenance, ownership and licensing analysis of a specific software package. This is, effectively, the top level of SPDX information."@en ; + rdfs:subClassOf spdx:SpdxElement ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onClass spdx:ExternalDocumentRef ; + owl:onProperty spdx:externalDocumentRef + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onClass spdx:Package ; + owl:onProperty spdx:describesPackage + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:specVersion ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onClass spdx:Review ; + owl:onProperty spdx:reviewed + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onClass spdx:CreationInfo ; + owl:onProperty spdx:creationInfo ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + 
owl:hasValue ; + owl:onProperty spdx:dataLicense + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onClass spdx:AnyLicenseInfo ; + owl:onProperty spdx:dataLicense ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onClass spdx:ExtractedLicensingInfo ; + owl:onProperty spdx:hasExtractedLicensingInfo + ] ; + vs:term_status "stable" . + +skos:related rdf:type owl:ObjectProperty , owl:SymmetricProperty , rdf:Property ; + rdfs:comment "skos:related is disjoint with skos:broaderTransitive"@en ; + rdfs:isDefinedBy ; + rdfs:label "has related"@en ; + rdfs:subPropertyOf skos:semanticRelation ; + skos:definition "Relates a concept to a concept with which there is an associative semantic relationship."@en . + +vcard:category rdf:type owl:DatatypeProperty ; + rdfs:comment "The category information about the object, also known as tags"@en ; + rdfs:isDefinedBy ; + rdfs:label "category"@en ; + rdfs:range xsd:string . + +[ rdf:type owl:Axiom ; + rdfs:comment "A collection is an entity that provides a structure to some constituents, which are themselves entities. These constituents are said to be member of the collections."@en ; + owl:annotatedProperty rdfs:range ; + owl:annotatedSource prov:hadMember ; + owl:annotatedTarget prov:Entity ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-collection" +] . + +vcard:Child rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Child"@en ; + rdfs:subClassOf vcard:RelatedType . + +spdx:licenseId rdf:type owl:DatatypeProperty ; + rdfs:comment "A human readable short form license identifier for a license. The license ID is either on the standard license list or the form \"LicenseRef-[idString]\" where [idString] is a unique string containing letters, numbers, \".\" or \"-\". 
When used within a license expression, the license ID can optionally include a reference to an external document in the form \"DocumentRef-[docrefIdString]:LicenseRef-[idString]\" where docRefIdString is an ID for an external document reference."@en ; + rdfs:domain spdx:SimpleLicensingInfo ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +spdx:annotator rdf:type owl:DatatypeProperty ; + rdfs:comment "This field identifies the person, organization, or tool that has commented on a file, package, snippet, or the entire document." ; + rdfs:domain spdx:Annotation ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +time:intervalDisjoint + rdf:type owl:ObjectProperty ; + rdfs:comment "Si un intervalo propio T1 es disjunto con otro intervalo propio T2, entonces el principio de T1 está después del final de T2, o el final de T1 está antes que el principio de T2, es decir, los intervalos no se solapan de ninguna forma, aunque su relación de orden no se conozca."@es , "If a proper interval T1 is intervalDisjoint another proper interval T2, then the beginning of T1 is after the end of T2, or the end of T1 is before the beginning of T2, i.e. the intervals do not overlap in any way, but their ordering relationship is not known."@en ; + rdfs:domain time:ProperInterval ; + rdfs:label "interval disjoint"@en , "intervalo disjunto"@es ; + rdfs:range time:ProperInterval ; + skos:definition "If a proper interval T1 is intervalDisjoint another proper interval T2, then the beginning of T1 is after the end of T2, or the end of T1 is before the beginning of T2, i.e. 
the intervals do not overlap in any way, but their ordering relationship is not known."@en , "Si un intervalo propio T1 es disjunto con otro intervalo propio T2, entonces el principio de T1 está después del final de T2, o el final de T1 está antes que el principio de T2, es decir, los intervalos no se solapan de ninguna forma, aunque su relación de orden no se conozca."@es ; + skos:note "This interval relation is not included in the 13 basic relationships defined in Allen (1984), but is defined in (T.3) as the union of :intervalBefore v :intervalAfter . However, that is outside OWL2 expressivity, so is implemented as an explicit property, with :intervalBefore , :intervalAfter as sub-properties"@en , "Esta relación entre intervalos no estaba incluida en las 13 relaciones básicas definidas por Allen (1984), pero está definida en T.3 como la unión de 'intervalo anterior' con 'intervalo posterior'. Sin embargo, esto está fuera de la expresividad de OWL2, por tanto, está implementado como una propiedad explícita, con 'intervalo anterior' e 'intervalo posterior' como sub-propiedades."@es . + + + rdf:type owl:DatatypeProperty ; + rdfs:domain ; + rdfs:range xsd:positiveInteger ; + vs:term_status "stable"@en . 
+ +time:Duration rdf:type owl:Class ; + rdfs:comment "Duration of a temporal extent expressed as a number scaled by a temporal unit"@en , "Duración de una extensión temporal expresada como un número escalado por una unidad temporal."@es ; + rdfs:label "duración de tiempo" , "Time duration"@en ; + rdfs:subClassOf time:TemporalDuration ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:unitType + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:numericDuration + ] ; + skos:definition "Duration of a temporal extent expressed as a number scaled by a temporal unit"@en , "Duración de una extensión temporal expresada como un número escalado por una unidad temporal."@es ; + skos:note "Alternative to time:DurationDescription to support description of a temporal duration other than using a calendar/clock system."@en , "Alternativa a 'descripción de tiempo' para proporcionar descripción soporte a una duración temporal diferente a utilizar un sistema de calendario/reloj."@es . + +locn:address rdf:type rdf:Property ; + rdfs:comment "The locn:address property relationship associates any resource with the locn:Address class "@en ; + rdfs:isDefinedBy ; + rdfs:label "address"@en ; + rdfs:range locn:Address ; + dcterms:identifier "locn:address" ; + vs:term_status "testing"@en . + +dcterms:ISO639-3 rdf:type rdfs:Datatype ; + rdfs:comment "The set of three-letter codes listed in ISO 639-3 for the representation of names of languages."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "ISO 639-3"@en ; + rdfs:seeAlso ; + dcterms:issued "2008-01-14"^^xsd:date . + +spdx:fileName rdf:type owl:DatatypeProperty ; + rdfs:comment "The name of the file relative to the root of the package."@en ; + rdfs:domain spdx:File ; + rdfs:range xsd:string ; + rdfs:subPropertyOf spdx:name ; + vs:term_status "stable"@en . 
+ +time:days rdf:type owl:DatatypeProperty ; + rdfs:comment "length of, or element of the length of, a temporal extent expressed in days"@en , "Longitud de, o elemento de la longitud de, una extensión temporal expresada en días."@es ; + rdfs:domain time:GeneralDurationDescription ; + rdfs:label "days duration"@en , "duración en días"@es ; + rdfs:range xsd:decimal ; + skos:definition "length of, or element of the length of, a temporal extent expressed in days"@en , "Longitud de, o elemento de la longitud de, una extensión temporal expresada en días."@es . + +dcterms:isPartOf rdf:type rdf:Property ; + rdfs:comment "A related resource in which the described resource is physically or logically included."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Is Part Of"@en ; + rdfs:subPropertyOf dc:relation , dcterms:relation ; + dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Has Part."@en ; + dcterms:issued "2000-07-11"^^xsd:date . + +vcard:hasOrganizationUnit + rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the organization unit name data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has organization unit name"@en . + +spdx:versionInfo rdf:type owl:DatatypeProperty ; + rdfs:comment "Provides an indication of the version of the package that is described by this SpdxDocument."@en ; + rdfs:domain spdx:Package ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +vcard:hasAddress rdf:type owl:ObjectProperty ; + rdfs:comment "To specify the components of the delivery address for the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "has address"@en ; + rdfs:range vcard:Address . + +prov:Bundle rdf:type owl:Class ; + rdfs:comment "Note that there are kinds of bundles (e.g. handwritten letters, audio recordings, etc.) 
that are not expressed in PROV-O, but can be still be described by PROV-O."@en ; + rdfs:isDefinedBy ; + rdfs:label "Bundle" ; + rdfs:subClassOf prov:Entity ; + prov:category "expanded" ; + prov:definition "A bundle is a named set of provenance descriptions, and is itself an Entity, so allowing provenance of provenance to be expressed."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-bundle-entity"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-bundle-declaration"^^xsd:anyURI . + + + rdf:type owl:ObjectProperty ; + rdfs:domain ; + vs:term_status "stable"@en . + +time:intervalOverlaps + rdf:type owl:ObjectProperty ; + rdfs:comment "If a proper interval T1 is intervalOverlaps another proper interval T2, then the beginning of T1 is before the beginning of T2, the end of T1 is after the beginning of T2, and the end of T1 is before the end of T2."@en , "Si un intervalo propio T1 se solapa con otro intervalo propio T2, entonces el principio de T1 es anterior al principio de T2, el final de T1 es posterior al principio de T2, y el final de T1 es anterior al final de T2."@es , "Asume una dirección en el tiempo. Si una entidad temporal T1 está después de otra entidad temporal T2, entonces el principio de T1 está después del final de T2."@es ; + rdfs:domain time:ProperInterval ; + rdfs:label "interval overlaps"@en , "intervalo se solapa"@es ; + rdfs:range time:ProperInterval ; + owl:inverseOf time:intervalOverlappedBy ; + skos:definition "If a proper interval T1 is intervalOverlaps another proper interval T2, then the beginning of T1 is before the beginning of T2, the end of T1 is after the beginning of T2, and the end of T1 is before the end of T2."@en , "Si un intervalo propio T1 se solapa con otro intervalo propio T2, entonces el principio de T1 es anterior al principio de T2, el final de T1 es posterior al principio de T2, y el final de T1 es anterior al final de T2."@es . 
+ +time:hasEnd rdf:type owl:ObjectProperty ; + rdfs:comment "End of a temporal entity."@en , "Final de una entidad temporal."@es ; + rdfs:domain time:TemporalEntity ; + rdfs:label "tiene fin"@es , "has end"@en ; + rdfs:range time:Instant ; + rdfs:subPropertyOf time:hasTime ; + skos:definition "Final de una entidad temporal."@es , "End of a temporal entity."@en . + +spdx:checksumAlgorithm_sha3_512 + rdf:type owl:NamedIndividual , spdx:ChecksumAlgorithm ; + rdfs:comment "Indicates the algorithm used was SHA3-512."@en ; + vs:term_status "stable"@en . + +skos:altLabel rdf:type owl:AnnotationProperty , rdf:Property ; + rdfs:comment "The range of skos:altLabel is the class of RDF plain literals."@en , "skos:prefLabel, skos:altLabel and skos:hiddenLabel are pairwise disjoint properties."@en ; + rdfs:isDefinedBy ; + rdfs:label "alternative label"@en ; + rdfs:subPropertyOf rdfs:label ; + skos:definition "An alternative lexical label for a resource."@en ; + skos:example "Acronyms, abbreviations, spelling variants, and irregular plural/singular forms may be included among the alternative labels for a concept. Mis-spelled terms are normally included as hidden labels (see skos:hiddenLabel)."@en . + +spdx:purpose_device rdf:type owl:NamedIndividual , spdx:Purpose ; + rdfs:comment "The package refers to a chipset, processor, or electronic board."@en ; + vs:term_status "stable"@en . + +spdx:standardLicenseHeader + rdf:type owl:DatatypeProperty ; + rdfs:comment "License author's preferred text to indicated that a file is covered by the license."@en ; + rdfs:domain spdx:License ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +vcard:locality rdf:type owl:DatatypeProperty ; + rdfs:comment "The locality (e.g. city or town) associated with the address of the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "locality"@en ; + rdfs:range xsd:string . 
+ +time:unitMonth rdf:type time:TemporalUnit ; + rdfs:label "Month (unit of temporal duration)"@en ; + skos:prefLabel "month"@en , "mese"@it , "mois"@fr , "mes"@es , "한달"@kr , "один месяц"@ru , "Monat"@de , "maand"@nl , "miesiąc"@pl , "一個月"@zh , "شهر واحد"@ar , "一か月"@jp ; + time:days "0"^^xsd:decimal ; + time:hours "0"^^xsd:decimal ; + time:minutes "0"^^xsd:decimal ; + time:months "1"^^xsd:decimal ; + time:seconds "0"^^xsd:decimal ; + time:weeks "0"^^xsd:decimal ; + time:years "0"^^xsd:decimal . + +vcard:Coresident rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Coresident"@en ; + rdfs:subClassOf vcard:RelatedType . + + + rdf:type sh:NodeShape ; + sh:name "Agent"@en ; + sh:property [ sh:maxCount 1 ; + sh:path dcterms:type ; + sh:severity sh:Violation + ] ; + sh:property [ sh:minCount 1 ; + sh:nodeKind sh:Literal ; + sh:path foaf:name ; + sh:severity sh:Violation + ] ; + sh:targetClass foaf:Agent . + +prov:invalidated rdf:type owl:ObjectProperty ; + rdfs:domain prov:Activity ; + rdfs:isDefinedBy ; + rdfs:label "invalidated" ; + rdfs:range prov:Entity ; + rdfs:subPropertyOf prov:influenced ; + owl:inverseOf prov:wasInvalidatedBy ; + prov:category "expanded" ; + prov:component "entities-activities" ; + prov:editorialNote "prov:invalidated is one of few inverse property defined, to allow Activity-oriented assertions in addition to Entity-oriented assertions."@en ; + prov:inverse "wasInvalidatedBy" ; + prov:sharesDefinitionWith prov:Invalidation . + +vcard:Group rdf:type owl:Class ; + rdfs:comment "Object representing a group of persons or entities. 
A group object will usually contain hasMember properties to specify the members of the group."@en ; + rdfs:isDefinedBy ; + rdfs:label "Group"@en ; + rdfs:subClassOf vcard:Kind ; + owl:disjointWith vcard:Individual , vcard:Location , vcard:Organization ; + owl:equivalentClass [ rdf:type owl:Class ; + owl:intersectionOf ( [ rdf:type owl:Restriction ; + owl:onProperty vcard:hasMember ; + owl:someValuesFrom vcard:Kind + ] + [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onClass vcard:Kind ; + owl:onProperty vcard:hasMember + ] + ) + ] . + +spdx:reviewed rdf:type owl:ObjectProperty , owl:NamedIndividual ; + rdfs:comment "This property has been deprecated since SPDX version 2.0. It has been replaced by an Annotation with an annotation type review."@en , "Reviewed" ; + rdfs:domain spdx:SpdxDocument ; + rdfs:range spdx:Review ; + owl:deprecated true ; + vs:term_status "deprecated"@en . + +skos:scopeNote rdf:type owl:AnnotationProperty , rdf:Property ; + rdfs:isDefinedBy ; + rdfs:label "scope note"@en ; + rdfs:subPropertyOf skos:note ; + skos:definition "A note that helps to clarify the meaning and/or the use of a concept."@en . + +prov:ActivityInfluence + rdf:type owl:Class ; + rdfs:comment "It is not recommended that the type ActivityInfluence be asserted without also asserting one of its more specific subclasses."@en , "ActivityInfluence provides additional descriptions of an Activity's binary influence upon any other kind of resource. 
Instances of ActivityInfluence use the prov:activity property to cite the influencing Activity."@en ; + rdfs:isDefinedBy ; + rdfs:label "ActivityInfluence" ; + rdfs:seeAlso prov:activity ; + rdfs:subClassOf prov:Influence ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality "0"^^xsd:nonNegativeInteger ; + owl:onProperty prov:hadActivity + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality "0"^^xsd:nonNegativeInteger ; + owl:onProperty prov:hadActivity + ] ; + owl:disjointWith prov:EntityInfluence ; + prov:category "qualified" ; + prov:editorsDefinition "ActivitiyInfluence is the capacity of an activity to have an effect on the character, development, or behavior of another by means of generation, invalidation, communication, or other."@en . + +spdx:annotationType_review + rdf:type owl:NamedIndividual , spdx:AnnotationType ; + rdfs:comment "A Review represents an audit and signoff by an individual, organization or tool on the information for an SpdxElement."@en ; + vs:term_status "stable"@en . + +spdx:order rdf:type owl:DatatypeProperty ; + rdfs:comment "The ordinal order of this element within a list"@en ; + rdfs:domain spdx:CrossRef ; + rdfs:range xsd:nonNegativeInteger . + +spdx:filesAnalyzed rdf:type owl:DatatypeProperty ; + rdfs:comment "Indicates whether the file content of this package has been available for or subjected to analysis when creating the SPDX document. If false indicates packages that represent metadata or URI references to a project, product, artifact, distribution or a component. If set to false, the package must not contain any files."@en ; + rdfs:domain spdx:Package ; + rdfs:range xsd:boolean ; + vs:term_status "stable"@en . + +spdx:relationshipType_requirementDescriptionFor + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A describes, illustrates, or specifies a requirement statement for SPDXRef-B."@en ; + vs:term_status "stable"@en . 
+ +vcard:RelatedType rdf:type owl:Class ; + rdfs:comment "Used for relation type codes. The URI of the relation type code must be used as the value for the Relation Type."@en ; + rdfs:isDefinedBy ; + rdfs:label "Relation Type"@en . + +prov:Invalidation rdf:type owl:Class ; + rdfs:comment "An instance of prov:Invalidation provides additional descriptions about the binary prov:wasInvalidatedBy relation from an invalidated prov:Entity to the prov:Activity that invalidated it. For example, :uncracked_egg prov:wasInvalidatedBy :baking; prov:qualifiedInvalidation [ a prov:Invalidation; prov:activity :baking; :foo :bar ]."@en ; + rdfs:isDefinedBy ; + rdfs:label "Invalidation" ; + rdfs:subClassOf prov:ActivityInfluence , prov:InstantaneousEvent ; + prov:category "qualified" ; + prov:component "entities-activities" ; + prov:constraints "http://www.w3.org/TR/2013/REC-prov-constraints-20130430/#prov-dm-constraints-fig"^^xsd:anyURI ; + prov:definition "Invalidation is the start of the destruction, cessation, or expiry of an existing entity by an activity. The entity is no longer available for use (or further invalidation) after invalidation. Any generation or usage of an entity precedes its invalidation." ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-Invalidation"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-Invalidation"^^xsd:anyURI ; + prov:unqualifiedForm prov:wasInvalidatedBy . + +[ owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger ] . + +vcard:Parcel rdf:type owl:Class ; + rdfs:comment "This class is deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "Parcel"@en ; + rdfs:subClassOf vcard:Type ; + owl:deprecated true . + + + rdf:type owl:Class ; + rdfs:subClassOf ; + vs:term_status "stable" . + +vcard:Female rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Female"@en ; + rdfs:subClassOf vcard:Gender . 
+ +skos:example rdf:type owl:AnnotationProperty , rdf:Property ; + rdfs:isDefinedBy ; + rdfs:label "example"@en ; + rdfs:subPropertyOf skos:note ; + skos:definition "An example of the use of a concept."@en . + +spdx:annotationType rdf:type owl:ObjectProperty ; + rdfs:comment "Type of the annotation."@en ; + rdfs:domain spdx:Annotation ; + rdfs:range [ rdf:type owl:Class ; + owl:unionOf ( [ rdf:type owl:Restriction ; + owl:hasValue spdx:annotationType_other ; + owl:onProperty spdx:annotationType + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:annotationType_review ; + owl:onProperty spdx:annotationType + ] + ) + ] ; + vs:term_status "stable"@en . + +prov:hadPrimarySource + rdf:type owl:ObjectProperty ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "hadPrimarySource" ; + rdfs:range prov:Entity ; + rdfs:subPropertyOf prov:wasDerivedFrom ; + owl:propertyChainAxiom ( prov:qualifiedPrimarySource prov:entity ) ; + owl:propertyChainAxiom ( prov:qualifiedPrimarySource prov:entity ) ; + prov:category "expanded" ; + prov:component "derivations" ; + prov:inverse "wasPrimarySourceOf" ; + prov:qualifiedForm prov:PrimarySource , prov:qualifiedPrimarySource . + +prov:wasQuotedFrom rdf:type owl:ObjectProperty ; + rdfs:comment "An entity is derived from an original entity by copying, or 'quoting', some or all of it."@en ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "wasQuotedFrom" ; + rdfs:range prov:Entity ; + rdfs:subPropertyOf prov:wasDerivedFrom ; + owl:propertyChainAxiom ( prov:qualifiedQuotation prov:entity ) ; + owl:propertyChainAxiom ( prov:qualifiedQuotation prov:entity ) ; + prov:category "expanded" ; + prov:component "derivations" ; + prov:inverse "quotedAs" ; + prov:qualifiedForm prov:qualifiedQuotation , prov:Quotation . 
+ +dcterms:instructionalMethod + rdf:type rdf:Property ; + rdfs:comment "A process, used to engender knowledge, attitudes and skills, that the described resource is designed to support."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Instructional Method"@en ; + dcam:rangeIncludes dcterms:MethodOfInstruction ; + dcterms:description "Instructional Method typically includes ways of presenting instructional materials or conducting instructional activities, patterns of learner-to-learner and learner-to-instructor interactions, and mechanisms by which group and individual levels of learning are measured. Instructional methods include all aspects of the instruction and learning processes from planning and implementation through evaluation and feedback."@en ; + dcterms:issued "2005-06-13"^^xsd:date . + +skos:hasTopConcept rdf:type owl:ObjectProperty , rdf:Property ; + rdfs:domain skos:ConceptScheme ; + rdfs:isDefinedBy ; + rdfs:label "has top concept"@en ; + rdfs:range skos:Concept ; + owl:inverseOf skos:topConceptOf ; + skos:definition "Relates, by convention, a concept scheme to a concept which is topmost in the broader/narrower concept hierarchies for that scheme, providing an entry point to these hierarchies."@en . + +prov:influencer rdf:type owl:ObjectProperty ; + rdfs:comment "Subproperties of prov:influencer are used to cite the object of an unqualified PROV-O triple whose predicate is a subproperty of prov:wasInfluencedBy (e.g. prov:used, prov:wasGeneratedBy). prov:influencer is used much like rdf:object is used."@en ; + rdfs:domain prov:Influence ; + rdfs:isDefinedBy ; + rdfs:label "influencer" ; + rdfs:range owl:Thing ; + prov:category "qualified" ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-influence"^^xsd:anyURI ; + prov:editorialNote "This property and its subproperties are used in the same way as the rdf:object property, i.e. 
to reference the object of an unqualified prov:wasInfluencedBy or prov:influenced triple."@en ; + prov:editorsDefinition "This property is used as part of the qualified influence pattern. Subclasses of prov:Influence use these subproperties to reference the resource (Entity, Agent, or Activity) whose influence is being qualified."@en ; + prov:inverse "hadInfluence" . + +time:timeZone rdf:type owl:ObjectProperty ; + rdfs:comment "The time zone for clock elements in the temporal position"@en ; + rdfs:domain time:GeneralDateTimeDescription ; + rdfs:label "in time zone"@en , "en huso horario"@es ; + rdfs:range time:TimeZone ; + skos:historyNote "En la versión original de OWL-Time de 2006, el rango de 'en huso horario' se definió en un espacio de nombres diferente \"http://www.w3.org/2006/timezone#\".\n Un axioma de alineación permite que los datos codificados de acuerdo con la versión anterior sean consistentes con la ontología actualizada."@es , "In the original 2006 version of OWL-Time, the range of time:timeZone was a TimeZone class in a separate namespace \"http://www.w3.org/2006/timezone#\". \nAn alignment axiom \n\ttzont:TimeZone rdfs:subClassOf time:TimeZone . \nallows data encoded according to the previous version to be consistent with the updated ontology. " ; + skos:note "IANA maintains a database of timezones. These are well maintained and generally considered authoritative, but individual items are not available at individual URIs, so cannot be used directly in data expressed using OWL-Time.\n\nDBPedia provides a set of resources corresponding to the IANA timezones, with a URI for each (e.g. http://dbpedia.org/resource/Australia/Eucla). The World Clock service also provides a list of time zones with the description of each available as an individual webpage with a convenient individual URI (e.g. https://www.timeanddate.com/time/zones/acwst). These or other, similar, resources might be used as a value of the time:timeZone property." 
, "IANA mantiene una base de datos de husos horarios. Éstas están bien mantenidas y generalmente se consideran autorizadas, pero los ítems individuales no están disponibles en URIs individuales, por tanto, no se pueden utilizar directamente en datos expresados utilizando OWL-Time.\n La BDPedia proporciona un conjunto de recursos correspondientes a los husos horarios de IANA, con una URI para cada uno (por ejemplo, http://dbpedia.org/resource/Australia/Eucla). El Servicio de Reloj Mundial también proporciona una lista de husos horarios con la descripción de cada uno de los disponibles como una página Web individual con una URI adecuada individual (por ejemplo, https://www.timeanddate.com/time/zones/acwst). Éstos, y otros recursos similares, se puden usar como un valor de la propiedad 'huso horario'."@es . + +locn:locatorName rdf:type rdf:Property ; + rdfs:comment "Proper noun(s) applied to the real world entity identified by the locator. The locator name could be the name of the property or complex, of the building or part of the building, or it could be the name of a room inside a building. \n "@en ; + rdfs:isDefinedBy ; + rdfs:label "locator name"@en ; + dcterms:identifier "locn:locatorName" ; + vs:term_status "testing"@en . + +spdx:referenceCategory_persistentId + rdf:type owl:NamedIndividual , spdx:ReferenceCategory ; + rdfs:comment "These point to objects present in the Software Heritage archive by the means of persistent identifiers that are guaranteed to remain stable (persistent) over time."@en ; + vs:term_status "stable"@en . + +dcterms:references rdf:type rdf:Property ; + rdfs:comment "A related resource that is referenced, cited, or otherwise pointed to by the described resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "References"@en ; + rdfs:subPropertyOf dc:relation , dcterms:relation ; + dcterms:description "This property is intended to be used with non-literal values. 
This property is an inverse property of Is Referenced By."@en ; + dcterms:issued "2000-07-11"^^xsd:date . + +spdx:relatedSpdxElement + rdf:type owl:ObjectProperty ; + rdfs:comment "A related SpdxElement."@en ; + rdfs:domain spdx:Relationship ; + rdfs:range spdx:SpdxElement ; + vs:term_status "stable"@en . + +prov:hadRole rdf:type owl:ObjectProperty ; + rdfs:comment "The _optional_ Role that an Entity assumed in the context of an Activity. For example, :baking prov:used :spoon; prov:qualified [ a prov:Usage; prov:entity :spoon; prov:hadRole roles:mixing_implement ]."@en , "This property has multiple RDFS domains to suit multiple OWL Profiles. See PROV-O OWL Profile." ; + rdfs:domain prov:Influence ; + rdfs:domain [ rdf:type owl:Class ; + owl:unionOf ( prov:Association prov:InstantaneousEvent ) + ] ; + rdfs:domain [ rdf:type owl:Class ; + owl:unionOf ( prov:Association prov:InstantaneousEvent ) + ] ; + rdfs:isDefinedBy ; + rdfs:label "hadRole" ; + rdfs:range prov:Role ; + prov:category "qualified" ; + prov:component "agents-responsibility" ; + prov:editorsDefinition "prov:hadRole references the Role (i.e. the function of an entity with respect to an activity), in the context of an instantaneous usage, generation, association, start, and end."@en ; + prov:inverse "wasRoleIn" ; + prov:sharesDefinitionWith prov:Role . + +dcterms:MethodOfAccrual + rdf:type rdfs:Class ; + rdfs:comment "A method by which resources are added to a collection."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Method of Accrual"@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +vcard:Sweetheart rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Sweetheart"@en ; + rdfs:subClassOf vcard:RelatedType . + +prov:wasInfluencedBy rdf:type owl:ObjectProperty ; + rdfs:comment "This property has multiple RDFS domains to suit multiple OWL Profiles. See PROV-O OWL Profile." , "Because prov:wasInfluencedBy is a broad relation, its more specific subproperties (e.g. 
prov:wasInformedBy, prov:actedOnBehalfOf, prov:wasEndedBy, etc.) should be used when applicable."@en ; + rdfs:domain [ rdf:type owl:Class ; + owl:unionOf ( prov:Activity prov:Agent prov:Entity ) + ] ; + rdfs:domain [ rdf:type owl:Class ; + owl:unionOf ( prov:Activity prov:Agent prov:Entity ) + ] ; + rdfs:isDefinedBy ; + rdfs:label "wasInfluencedBy" ; + rdfs:range [ rdf:type owl:Class ; + owl:unionOf ( prov:Activity prov:Agent prov:Entity ) + ] ; + rdfs:range [ rdf:type owl:Class ; + owl:unionOf ( prov:Activity prov:Agent prov:Entity ) + ] ; + prov:category "qualified" ; + prov:component "agents-responsibility" ; + prov:editorialNote "The sub-properties of prov:wasInfluencedBy can be elaborated in more detail using the Qualification Pattern. For example, the binary relation :baking prov:used :spoon can be qualified by asserting :baking prov:qualifiedUsage [ a prov:Usage; prov:entity :spoon; prov:atLocation :kitchen ] .\n\nSubproperties of prov:wasInfluencedBy may also be asserted directly without being qualified.\n\nprov:wasInfluencedBy should not be used without also using one of its subproperties. \n"@en ; + prov:inverse "influenced" ; + prov:qualifiedForm prov:Influence , prov:qualifiedInfluence ; + prov:sharesDefinitionWith prov:Influence . + +time:Thursday rdf:type time:DayOfWeek ; + rdfs:label "Thursday"@en ; + skos:prefLabel "Четверг"@ru , "الخميس"@ar , "Donnerstag"@de , "Czwartek"@pl , "Donderdag"@nl , "Jeudi"@fr , "Quinta-feira"@pt , "Jueves"@es , "星期四"@zh , "Thursday"@en , "木曜日"@ja , "Giovedì"@it . 
+ +time:inXSDDateTime rdf:type owl:DeprecatedProperty , owl:DatatypeProperty ; + rdfs:comment "Posición de un instante, expresado utilizando xsd:dateTime."@es , "Position of an instant, expressed using xsd:dateTime"@en ; + rdfs:domain time:Instant ; + rdfs:label "en fecha-tiempo XSD"@es , "in XSD Date-Time"@en ; + rdfs:range xsd:dateTime ; + owl:deprecated true ; + skos:definition "Posición de un instante, expresado utilizando xsd:dateTime."@es , "Position of an instant, expressed using xsd:dateTime"@en ; + skos:note "La propiedad 'en fecha-hora XSD' ha sido reemplazada por 'en fecha-sello de tiempo XSD' que hace obligatorio el campo 'huso horario'."@es , "The property :inXSDDateTime is replaced by :inXSDDateTimeStamp which makes the time-zone field mandatory."@en . + +dcterms:LinguisticSystem + rdf:type rdfs:Class ; + rdfs:comment "A system of signs, symbols, sounds, gestures, or rules used in communication."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Linguistic System"@en ; + dcterms:description "Written, spoken, sign, and computer languages are linguistic systems."@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +spdx:ConjunctiveLicenseSet + rdf:type owl:Class ; + rdfs:comment "A ConjunctiveLicenseSet represents a set of licensing information all of which apply."@en ; + rdfs:subClassOf spdx:AnyLicenseInfo , rdfs:Container ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "2"^^xsd:nonNegativeInteger ; + owl:onClass spdx:AnyLicenseInfo ; + owl:onProperty spdx:member + ] ; + vs:term_status "stable"@en . + +vcard:Label rdf:type owl:Class ; + rdfs:comment "This class is deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "Label"@en ; + rdfs:subClassOf vcard:Type ; + owl:deprecated true . + +vcard:logo rdf:type owl:ObjectProperty ; + rdfs:comment "This object property has been mapped"@en ; + rdfs:isDefinedBy ; + rdfs:label "logo"@en ; + owl:equivalentProperty vcard:hasLogo . 
+ +spdx:snippetName rdf:type owl:DatatypeProperty ; + rdfs:comment "Identify a specific snippet in a human convenient manner."@en ; + rdfs:domain spdx:Snippet ; + rdfs:range xsd:string ; + rdfs:subPropertyOf spdx:name ; + vs:term_status "stable"@en . + +spdx:relationshipType_exampleOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A is an example of SPDXRef-B."@en ; + vs:term_status "stable"@en . + +dcat:downloadURL rdf:type owl:ObjectProperty , rdf:Property ; + rdfs:comment "URL til fil der kan downloades i et bestemt format. Fx en CSV-fil eller en RDF-fil. Formatet for distributionen angives ved hjælp af egenskaberne dct:format og/eller dcat:mediaType."@da , "The URL of the downloadable file in a given format. E.g. CSV file or RDF file. The format is indicated by the distribution's dct:format and/or dcat:mediaType."@en , "Ceci est un lien direct à un fichier téléchargeable en un format donnée. Exple fichier CSV ou RDF. Le format est décrit par les propriétés de distribution dct:format et/ou dcat:mediaType."@fr , "La URL de un archivo descargable en el formato dato. Por ejemplo, archivo CSV o archivo RDF. El formato se describe con las propiedades de la distribución dct:format y/o dcat:mediaType."@es , "Questo è un link diretto al file scaricabile in un dato formato. E.g. un file CSV o un file RDF. Il formato è descritto dal dct:format e/o dal dcat:mediaType della distribuzione."@it , "URL souboru ke stažení v daném formátu, například CSV nebo RDF soubor. Formát je popsán vlastností distribuce dct:format a/nebo dcat:mediaType."@cs , "dcat:downloadURLはdcat:accessURLの特定の形式です。しかし、DCATプロファイルが非ダウンロード・ロケーションに対してのみaccessURLを用いる場合には、より強い分離を課すことを望む可能性があるため、この含意を強化しないように、DCATは、dcat:downloadURLをdcat:accessURLのサブプロパティーであると定義しません。"@ja , "Είναι ένας σύνδεσμος άμεσης μεταφόρτωσης ενός αρχείου σε μια δεδομένη μορφή. Π.χ. ένα αρχείο CSV ή RDF. 
Η μορφη αρχείου περιγράφεται από τις ιδιότητες dct:format ή/και dcat:mediaType της διανομής."@el , "رابط مباشر لملف يمكن تحميله. نوع الملف يتم توصيفه باستخدام الخاصية dct:format dcat:mediaType "@ar ; + rdfs:domain dcat:Distribution ; + rdfs:isDefinedBy ; + rdfs:label "downloadURL"@da , "URL de descarga"@es , "رابط تحميل"@ar , "ダウンロードURL"@ja , "URL di scarico"@it , "download URL"@en , "URL souboru ke stažení"@cs , "URL μεταφόρτωσης"@el , "URL de téléchargement"@fr ; + rdfs:range rdfs:Resource ; + skos:definition "Ceci est un lien direct à un fichier téléchargeable en un format donnée. Exple fichier CSV ou RDF. Le format est décrit par les propriétés de distribution dct:format et/ou dcat:mediaType."@fr , "dcat:downloadURLはdcat:accessURLの特定の形式です。しかし、DCATプロファイルが非ダウンロード・ロケーションに対してのみaccessURLを用いる場合には、より強い分離を課すことを望む可能性があるため、この含意を強化しないように、DCATは、dcat:downloadURLをdcat:accessURLのサブプロパティーであると定義しません。"@ja , "Questo è un link diretto al file scaricabile in un dato formato. E.g. un file CSV o un file RDF. Il formato è descritto dal dct:format e/o dal dcat:mediaType della distribuzione."@it , "The URL of the downloadable file in a given format. E.g. CSV file or RDF file. The format is indicated by the distribution's dct:format and/or dcat:mediaType."@en , "رابط مباشر لملف يمكن تحميله. نوع الملف يتم توصيفه باستخدام الخاصية dct:format dcat:mediaType "@ar , "URL souboru ke stažení v daném formátu, například CSV nebo RDF soubor. Formát je popsán vlastností distribuce dct:format a/nebo dcat:mediaType."@cs , "Είναι ένας σύνδεσμος άμεσης μεταφόρτωσης ενός αρχείου σε μια δεδομένη μορφή. Π.χ. ένα αρχείο CSV ή RDF. Η μορφη αρχείου περιγράφεται από τις ιδιότητες dct:format ή/και dcat:mediaType της διανομής."@el , "URL til fil der kan downloades i et bestemt format. Fx en CSV-fil eller en RDF-fil. Formatet for distributionen angives ved hjælp af egenskaberne dct:format og/eller dcat:mediaType."@da , "La URL de un archivo descargable en el formato dato. Por ejemplo, archivo CSV o archivo RDF. 
El formato se describe con las propiedades de la distribución dct:format y/o dcat:mediaType."@es ; + skos:editorialNote "Status: English Definition text modified by DCAT revision team, Italian, Spanish and Czech translation updated, other translations pending."@en , "rdfs:label, rdfs:comment and/or skos:scopeNote have been modified. Non-english versions must be updated."@en ; + skos:scopeNote "La valeur est une URL."@fr , "dcat:downloadURL SHOULD be used for the address at which this distribution is available directly, typically through a HTTP Get request."@en , "dcat:downloadURL BY MĚLA být použita pro adresu, ze které je distribuce přímo přístupná, typicky skrze požadavek HTTP Get."@cs , "dcat:downloadURL DOVREBBE essere utilizzato per l'indirizzo a cui questa distribuzione è disponibile direttamente, in genere attraverso una richiesta Get HTTP."@it , "El valor es una URL."@es , "dcat:downloadURL BØR anvendes til angivelse af den adresse hvor distributionen er tilgængelig direkte, typisk gennem et HTTP Get request."@da , "Η τιμή είναι ένα URL."@el . + +spdx:referenceType rdf:type owl:ObjectProperty ; + rdfs:comment "Type of the external reference. These are definined in an appendix in the SPDX specification."@en ; + rdfs:domain spdx:ExternalRef ; + rdfs:range spdx:ReferenceType ; + vs:term_status "stable"@en . + +adms:translation rdf:type owl:ObjectProperty ; + rdfs:comment "Links Assets that are translations of each other."@en ; + rdfs:domain rdfs:Resource ; + rdfs:isDefinedBy ; + rdfs:label "translation"@en ; + rdfs:range rdfs:Resource . + +vcard:tel rdf:type owl:ObjectProperty ; + rdfs:comment "This object property has been mapped"@en ; + rdfs:isDefinedBy ; + rdfs:label "telephone"@en ; + owl:equivalentProperty vcard:hasTelephone . + +spdx:referenceCategory_packageManager + rdf:type owl:NamedIndividual , spdx:ReferenceCategory ; + vs:term_status "stable"@en . 
+ +spdx:RelationshipType + rdf:type owl:Class ; + rdfs:comment "Type of relationship."@en ; + vs:term_status "stable"@en . + +vcard:hasRole rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the role data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has role"@en . + +dcterms:relation rdf:type rdf:Property ; + rdfs:comment "A related resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Relation"@en ; + rdfs:subPropertyOf dc:relation ; + dcterms:description "Recommended practice is to identify the related resource by means of a URI. If this is not possible or feasible, a string conforming to a formal identification system may be provided."@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +dcat:landingPage rdf:type owl:ObjectProperty , rdf:Property ; + rdfs:comment "Una página web que puede ser visitada en un explorador Web para tener acceso el catálogo, un conjunto de datos, sus distribuciones y/o información adicional."@es , "A Web page that can be navigated to in a Web browser to gain access to the catalog, a dataset, its distributions and/or additional information."@en , "Μία ιστοσελίδα πλοηγίσιμη μέσω ενός φυλλομετρητή (Web browser) που δίνει πρόσβαση στο σύνολο δεδομένων, τις διανομές αυτού ή/και επιπρόσθετες πληροφορίες."@el , "Una pagina web che può essere navigata per ottenere l'accesso al catalogo, ad un dataset, alle distribuzioni del dataset e/o ad informazioni addizionali."@it , "データセット、その配信および(または)追加情報にアクセスするためにウエブ・ブラウザでナビゲートできるウェブページ。"@ja , "صفحة وب يمكن من خلالها الوصول الى قائمة البيانات أو إلى معلومات إضافية متعلقة بها "@ar , "En webside som der kan navigeres til i en webbrowser for at få adgang til kataloget, et datasæt, dets distributioner og/eller yderligere information."@da , "Webová stránka, na kterou lze pro získání přístupu ke katalogu, datové sadě, jejím distribucím a/nebo dalším informacím přistoupit webovým prohlížečem."@cs , "Une page Web accessible par un navigateur Web donnant accès au 
catalogue, un jeu de données, ses distributions et/ou des informations additionnelles."@fr ; + rdfs:isDefinedBy ; + rdfs:label "página de destino"@es , "landing page"@en , "vstupní stránka"@cs , "destinationsside"@da , "ランディング・ページ"@ja , "page d'atterrissage"@fr , "صفحة وصول"@ar , "pagina di destinazione"@it , "ιστοσελίδα αρχικής πρόσβασης"@el ; + rdfs:range foaf:Document ; + rdfs:subPropertyOf foaf:page ; + skos:definition "Una pagina web che può essere navigata per ottenere l'accesso al catalogo, ad un dataset, alle distribuzioni del dataset e/o ad informazioni addizionali."@it , "A Web page that can be navigated to in a Web browser to gain access to the catalog, a dataset, its distributions and/or additional information."@en , "Una página web que puede ser visitada en un explorador Web para tener acceso el catálogo, un conjunto de datos, sus distribuciones y/o información adicional."@es , "صفحة وب يمكن من خلالها الوصول الى قائمة البيانات أو إلى معلومات إضافية متعلقة بها "@ar , "En webside som en webbrowser kan navigeres til for at få adgang til kataloget, et datasæt, dets distritbutioner og/eller yderligere information."@da , "Webová stránka, na kterou lze pro získání přístupu ke katalogu, datové sadě, jejím distribucím a/nebo dalším informacím přistoupit webovým prohlížečem."@cs , "Μία ιστοσελίδα πλοηγίσιμη μέσω ενός φυλλομετρητή (Web browser) που δίνει πρόσβαση στο σύνολο δεδομένων, τις διανομές αυτού ή/και επιπρόσθετες πληροφορίες."@el , "Une page Web accessible par un navigateur Web donnant accès au catalogue, un jeu de données, ses distributions et/ou des informations additionnelles."@fr , "データセット、その配信および(または)追加情報にアクセスするためにウエブ・ブラウザでナビゲートできるウェブページ。"@ja ; + skos:scopeNote "Hvis en eller flere distributioner kun er tilgængelige via en destinationsside (dvs. 
en URL til direkte download er ikke kendt), så bør destinationssidelinket gentages som adgangsadresse for en distribution."@da , "Si la distribución es accesible solamente través de una página de aterrizaje (i.e., no se conoce una URL de descarga directa), entonces el enlance a la página de aterrizaje debe ser duplicado como accessURL sobre la distribución."@es , "ランディング・ページを通じてしか配信にアクセスできない場合(つまり、直接的なダウンロードURLが不明)には、配信におけるaccessURLとしてランディング・ページのリンクをコピーすべきです(SHOULD)。"@ja , "If the distribution(s) are accessible only through a landing page (i.e. direct download URLs are not known), then the landing page link should be duplicated as accessURL on a distribution."@en , "Αν η/οι διανομή/ές είναι προσβάσιμη/ες μόνο μέσω μίας ιστοσελίδας αρχικής πρόσβασης (δηλαδή αν δεν υπάρχουν γνωστές διευθύνσεις άμεσης μεταφόρτωσης), τότε ο σύνδεσμος της ιστοσελίδας αρχικής πρόσβασης πρέπει να αναπαραχθεί ως accessURL σε μία διανομή."@el , "Pokud je distribuce dostupná pouze přes vstupní stránku, t.j. přímý URL odkaz ke stažení není znám, URL přístupové stránky by mělo být duplikováno ve vlastnosti distribuce accessURL."@cs , "Se la distribuzione è accessibile solo attraverso una pagina di destinazione (cioè, un URL di download diretto non è noto), il link alla pagina di destinazione deve essere duplicato come accessURL sulla distribuzione."@it , "Si la distribution est seulement accessible à travers une page d'atterrissage (exple. pas de connaissance d'URLS de téléchargement direct ), alors le lien de la page d'atterrissage doit être dupliqué comme accessURL sur la distribution."@fr . + +dcterms:RFC1766 rdf:type rdfs:Datatype ; + rdfs:comment "The set of tags, constructed according to RFC 1766, for the identification of languages."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "RFC 1766"@en ; + rdfs:seeAlso ; + dcterms:issued "2000-07-11"^^xsd:date . 
+ +dcat:Role rdf:type owl:Class ; + rdfs:comment "Role je funkce zdroje či agenta ve vztahu k jinému zdroji, v kontextu přiřazení zdrojů či vztahů mezi zdroji."@cs , "A role is the function of a resource or agent with respect to another resource, in the context of resource attribution or resource relationships."@en , "Un rol es la función de un recurso o agente con respecto a otro recuros, en el contexto de atribución del recurso o de las relaciones entre recursos."@es , "En rolle er den funktion en ressource eller aktør har i forhold til en anden ressource, i forbindelse med ressourcekreditering eller ressourcerelationer."@da , "Un ruolo è la funzione di una risorsa o di un agente rispetto ad un'altra risorsa, nel contesto dell'attribuzione delle risorse o delle relazioni tra risorse."@it ; + rdfs:label "Rolle"@da , "Rol"@es , "Ruolo"@it , "Role"@cs , "Role"@en ; + rdfs:seeAlso dcat:hadRole ; + rdfs:subClassOf skos:Concept ; + skos:changeNote "Nuova classe aggiunta in DCAT 2.0."@it , "Nueva clase agregada en DCAT 2.0."@es , "Ny klasse tilføjet i DCAT 2.0."@en , "Nová třída přidaná ve verzi DCAT 2.0."@cs , "New class added in DCAT 2.0."@en ; + skos:definition "Role je funkce zdroje či agenta ve vztahu k jinému zdroji, v kontextu přiřazení zdrojů či vztahů mezi zdroji."@cs , "A role is the function of a resource or agent with respect to another resource, in the context of resource attribution or resource relationships."@en , "Un ruolo è la funzione di una risorsa o di un agente rispetto ad un'altra risorsa, nel contesto dell'attribuzione delle risorse o delle relazioni tra risorse."@it , "Un rol es la función de un recurso o agente con respecto a otro recuros, en el contexto de atribución del recurso o de las relaciones entre recursos."@es , "En rolle er den funktion en ressource eller aktør har i forhold til en anden ressource, i forbindelse med ressourcekreditering eller ressourcerelationer."@da ; + skos:editorialNote "Introduced into DCAT to complement prov:Role 
(whose use is limited to roles in the context of an activity, as the range of prov:hadRole)."@en , "Introduceret i DCAT for at supplere prov:Role (hvis anvendelse er begrænset til roller i forbindelse med en aktivitet, som er rækkevidde for prov:hadRole)."@da , "Přidáno do DCAT pro doplnění třídy prov:Role (jejíž užití je omezeno na role v kontextu aktivit, jakožto obor hodnot vlastnosti prov:hadRole)."@cs , "Introdotta in DCAT per completare prov:Role (il cui uso è limitato ai ruoli nel contesto di un'attività, in conseguenza alla definizione del codominio di prov:hadRole)."@it , "Incluída en DCAT para complementar prov:Role (cuyo uso está limitado a roles en el contexto de una actividad, ya que es el rango es prov:hadRole)."@es ; + skos:scopeNote "Se usa en una relación cualificada para especificar el rol de una Entidad con respecto a otra Entidad. Se recomienda que los valores se administren como los valores de un vocabulario controlado de roles de entidad como por ejemplo: ISO 19115 DS_AssociationTypeCode http://registry.it.csiro.au/def/isotc211/DS_AssociationTypeCode; IANA Registry of Link Relations https://www.iana.org/assignments/link-relation; el esquema de metadatos de DataCite; MARC relators https://id.loc.gov/vocabulary/relators."@es , "Used in a qualified-attribution to specify the role of an Agent with respect to an Entity. It is recommended that the values be managed as a controlled vocabulary of agent roles, such as http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@en , "Anvendes i forbindelse med kvalificerede krediteringer til at angive aktørens rolle i forhold til en entitet. Det anbefales at værdierne styres som et kontrolleret udfaldsrum med aktørroller, såsom http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@da , "Used in a qualified-relation to specify the role of an Entity with respect to another Entity. 
It is recommended that the values be managed as a controlled vocabulary of entity roles such as: ISO 19115 DS_AssociationTypeCode http://registry.it.csiro.au/def/isotc211/DS_AssociationTypeCode; IANA Registry of Link Relations https://www.iana.org/assignments/link-relation; DataCite metadata schema; MARC relators https://id.loc.gov/vocabulary/relators."@en , "Použito v kvalifikovaném přiřazení pro specifikaci role Agenta ve vztahu k Entitě. Je doporučeno množinu hodnot spravovat jako řízený slovník rolí agentů, jako například http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@cs , "Se usa en una atribución cualificada para especificar el rol de un Agente con respecto a una Entidad. Se recomienda que los valores se administren como un vocabulario controlado de roles de agente, como por ejemplo http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@es , "Utilizzato in un'attribuzione qualificata per specificare il ruolo di un agente rispetto a un'entità. Si consiglia di attribuire i valori considerando un vocabolario controllato dei ruoli dell'agente, ad esempio http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@it , "Utilizzato in una relazione qualificata per specificare il ruolo di un'entità rispetto a un'altra entità. Si raccomanda che il valore sia preso da un vocabolario controllato di ruoli di entità come ISO 19115 DS_AssociationTypeCode http://registry.it.csiro.au/def/isotc211/DS_AssociationTypeCode, IANA Registry of Link Relations https://www.iana.org/assignments/link-relation, DataCite metadata schema, o MARC relators https://id.loc.gov/vocabulary/relators."@it , "Anvendes i forbindelse med kvalificerede relationer til at specificere en entitets rolle i forhold til en anden entitet. 
Det anbefales at værdierne styres med et kontrolleret udfaldsrum for for entitetsroller såsom: ISO 19115 DS_AssociationTypeCode http://registry.it.csiro.au/def/isotc211/DS_AssociationTypeCode; IANA Registry of Link Relations https://www.iana.org/assignments/link-relation; DataCite metadata schema; MARC relators https://id.loc.gov/vocabulary/relators."@da , "Použito v kvalifikovaném vztahu pro specifikaci role Entity ve vztahu k jiné Entitě. Je doporučeno množinu hodnot spravovat jako řízený slovník rolí entit, jako například ISO 19115 DS_AssociationTypeCode http://registry.it.csiro.au/def/isotc211/DS_AssociationTypeCode, IANA Registry of Link Relations https://www.iana.org/assignments/link-relation, DataCite metadata schema, či MARC relators https://id.loc.gov/vocabulary/relators."@cs . + +dcterms:available rdf:type rdf:Property ; + rdfs:comment "Date that the resource became or will become available."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Date Available"@en ; + rdfs:range rdfs:Literal ; + rdfs:subPropertyOf dc:date , dcterms:date ; + dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en ; + dcterms:issued "2000-07-11"^^xsd:date . + +time:hours rdf:type owl:DatatypeProperty ; + rdfs:comment "length of, or element of the length of, a temporal extent expressed in hours"@en , "Longitud de, o elemento de la longitud de, una extensión temporal expresada en horas."@es ; + rdfs:domain time:GeneralDurationDescription ; + rdfs:label "hours duration"@en , "duración en horas"@es ; + rdfs:range xsd:decimal ; + skos:definition "length of, or element of the length of, a temporal extent expressed in hours"@en , "Longitud de, o elemento de la longitud de, una extensión temporal expresada en horas."@es . 
+ +spdx:checksumAlgorithm_md5 + rdf:type owl:NamedIndividual , spdx:ChecksumAlgorithm ; + rdfs:comment "Indicates the algorithm used was MD5"@en ; + vs:term_status "stable"@en . + +spdx:member rdf:type owl:ObjectProperty ; + rdfs:comment "A license, or other licensing information, that is a member of the subject license set."@en ; + rdfs:domain [ rdf:type owl:Class ; + owl:unionOf ( spdx:ConjunctiveLicenseSet spdx:DisjunctiveLicenseSet spdx:WithExceptionOperator ) + ] ; + rdfs:range spdx:AnyLicenseInfo ; + vs:term_status "stable"@en . + +spdx:checksumAlgorithm_blake3 + rdf:type owl:NamedIndividual , spdx:ChecksumAlgorithm ; + rdfs:comment "Indicates the algorithm used was BLAKE3."@en ; + vs:term_status "stable"@en . + +doap:homepage rdf:type owl:DatatypeProperty ; + rdfs:domain spdx:Package ; + rdfs:range xsd:anyURI ; + vs:term_status "stable"@en . + +time:TemporalEntity rdf:type owl:Class ; + rdfs:comment "A temporal interval or instant."@en , "Un intervalo temporal o un instante."@es ; + rdfs:label "Temporal entity"@en , "entidad temporal"@es ; + rdfs:subClassOf owl:Thing ; + owl:unionOf ( time:Instant time:Interval ) ; + skos:definition "A temporal interval or instant."@en , "Un intervalo temporal o un instante."@es . + +vcard:Spouse rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Spouse"@en ; + rdfs:subClassOf vcard:RelatedType . + +spdx:validUntilDate rdf:type owl:DatatypeProperty ; + rdfs:comment "This field provides a place for recording the end of the support period for a package from the supplier."@en ; + rdfs:domain spdx:Package ; + rdfs:range xsd:dateTime ; + rdfs:subPropertyOf spdx:date ; + vs:term_status "stable"@en . 
+ +time:minute rdf:type owl:DatatypeProperty ; + rdfs:comment "Minute position in a calendar-clock system."@en , "Posición de minuto en un sistema calendario-reloj."@es ; + rdfs:domain time:GeneralDateTimeDescription ; + rdfs:label "minute"@en , "minuto"@es ; + rdfs:range xsd:nonNegativeInteger ; + skos:definition "Minute position in a calendar-clock system."@en , "Posición de minuto en un sistema calendario-reloj."@es . + +spdx:isLive rdf:type owl:DatatypeProperty ; + rdfs:comment "Indicate a URL is still a live accessible location on the public internet"@en ; + rdfs:domain spdx:CrossRef ; + rdfs:range xsd:boolean . + +dcterms:educationLevel + rdf:type rdf:Property ; + rdfs:comment "A class of agents, defined in terms of progression through an educational or training context, for which the described resource is intended."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Audience Education Level"@en ; + rdfs:subPropertyOf dcterms:audience ; + dcam:rangeIncludes dcterms:AgentClass ; + dcterms:issued "2002-07-13"^^xsd:date . + +vcard:key rdf:type owl:ObjectProperty ; + rdfs:comment "This object property has been mapped"@en ; + rdfs:isDefinedBy ; + rdfs:label "key"@en ; + owl:equivalentProperty vcard:hasKey . + +skos:prefLabel rdf:type owl:AnnotationProperty , rdf:Property ; + rdfs:comment "A resource has no more than one value of skos:prefLabel per language tag, and no more than one value of skos:prefLabel without language tag."@en , "The range of skos:prefLabel is the class of RDF plain literals."@en , "skos:prefLabel, skos:altLabel and skos:hiddenLabel are pairwise\n disjoint properties."@en ; + rdfs:isDefinedBy ; + rdfs:label "preferred label"@en ; + rdfs:subPropertyOf rdfs:label ; + skos:definition "The preferred lexical label for a resource, in a given language."@en . 
+ +dcterms:valid rdf:type rdf:Property ; + rdfs:comment "Date (often a range) of validity of a resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Date Valid"@en ; + rdfs:range rdfs:Literal ; + rdfs:subPropertyOf dc:date , dcterms:date ; + dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en ; + dcterms:issued "2000-07-11"^^xsd:date . + +vcard:hasGender rdf:type owl:ObjectProperty ; + rdfs:comment "To specify the sex or gender identity of the object. URIs are recommended to enable interoperable sex and gender codes to be used."@en ; + rdfs:isDefinedBy ; + rdfs:label "has gender"@en . + +vcard:Individual rdf:type owl:Class ; + rdfs:comment "An object representing a single person or entity"@en ; + rdfs:isDefinedBy ; + rdfs:label "Individual"@en ; + rdfs:subClassOf vcard:Kind ; + owl:disjointWith vcard:Location , vcard:Organization . + +dcterms:DDC rdf:type dcam:VocabularyEncodingScheme ; + rdfs:comment "The set of conceptual resources specified by the Dewey Decimal Classification."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "DDC"@en ; + rdfs:seeAlso ; + dcterms:issued "2000-07-11"^^xsd:date . + +spdx:standardLicenseHeaderTemplate + rdf:type owl:DatatypeProperty ; + rdfs:comment "License template which describes sections of the license header which can be varied. See License Template section of the specification for format information."@en ; + rdfs:domain spdx:License ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +vcard:label rdf:type owl:DatatypeProperty ; + rdfs:comment "This data property has been deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "label"@en ; + owl:deprecated true . + +vcard:Muse rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Muse"@en ; + rdfs:subClassOf vcard:RelatedType . 
+ +vcard:Email rdf:type owl:Class ; + rdfs:comment "To specify the electronic mail address for communication with the object the vCard represents. Use the hasEmail object property."@en ; + rdfs:isDefinedBy ; + rdfs:label "Email"@en ; + owl:deprecated true . + +[ rdf:type owl:Axiom ; + rdfs:comment "Revision is a derivation (see http://www.w3.org/TR/prov-dm/#term-Revision). Moreover, according to \nhttp://www.w3.org/TR/2013/REC-prov-constraints-20130430/#term-Revision 23 April 2012 'wasRevisionOf is a strict sub-relation of wasDerivedFrom since two entities e2 and e1 may satisfy wasDerivedFrom(e2,e1) without being a variant of each other.'" ; + owl:annotatedProperty rdfs:subPropertyOf ; + owl:annotatedSource prov:wasRevisionOf ; + owl:annotatedTarget prov:wasDerivedFrom +] . + +spdx:packageVerificationCode + rdf:type owl:ObjectProperty ; + rdfs:comment "A manifest based verification code (the algorithm is defined in section 3.9.4 of the full specification) of the package. This allows consumers of this data and/or database to determine if a package they have in hand is identical to the package from which the data was produced. This algorithm works even if the SPDX document is included in the package."@en ; + rdfs:domain spdx:Package ; + rdfs:range spdx:PackageVerificationCode ; + vs:term_status "stable"@en . + +locn:adminUnitL1 rdf:type rdf:Property ; + rdfs:comment "The uppermost administrative unit for the address, almost always a country. The domain of locn:adminUnitL1 is locn:Address and the range is a literal, conceptually defined by the INSPIRE Geographical Name data type."@en ; + rdfs:domain locn:Address ; + rdfs:isDefinedBy ; + rdfs:label "admin unit level 1"@en ; + dcterms:identifier "locn:adminUnitL1" ; + vann:usageNote "Best practice is to use the ISO 3166-1 code but if this is inappropriate for the context, country names should be provided in a consistent manner to reduce ambiguity. 
For example, either write 'United Kingdom' or 'UK' consistently throughout the data set and avoid mixing the two."@en ; + vs:term_status "testing"@en . + +dcat:Dataset rdf:type rdfs:Class , owl:Class ; + rdfs:comment "1つのエージェントによって公開またはキュレートされ、1つ以上の形式でアクセスまたはダウンロードできるデータの集合。"@ja , "Raccolta di dati, pubblicati o curati da un'unica fonte, disponibili per l'accesso o il download in uno o più formati."@it , "Μία συλλογή από δεδομένα, δημοσιευμένη ή επιμελημένη από μία και μόνο πηγή, διαθέσιμη δε προς πρόσβαση ή μεταφόρτωση σε μία ή περισσότερες μορφές."@el , "قائمة بيانات منشورة أو مجموعة من قبل مصدر ما و متاح الوصول إليها أو تحميلها"@ar , "A collection of data, published or curated by a single source, and available for access or download in one or more representations."@en , "Une collection de données, publiée ou élaborée par une seule source, et disponible pour accès ou téléchargement dans un ou plusieurs formats."@fr , "Kolekce dat poskytovaná či řízená jedním zdrojem, která je k dispozici pro přístup či stažení v jednom či více formátech."@cs , "Una colección de datos, publicados o conservados por una única fuente, y disponibles para ser accedidos o descargados en uno o más formatos."@es , "En samling af data, udgivet eller udvalgt og arrangeret af en enkelt kilde og som er til råde for adgang til eller download af i en eller flere repræsentationer."@da ; + rdfs:isDefinedBy ; + rdfs:label "データセット"@ja , "Dataset"@en , "Dataset"@it , "قائمة بيانات"@ar , "Conjunto de datos"@es , "Σύνολο Δεδομένων"@el , "Jeu de données"@fr , "Datová sada"@cs , "Datasæt"@da ; + rdfs:subClassOf dcat:Resource ; + skos:altLabel "Datasamling"@da ; + skos:changeNote "2018-02 - subklasse af dctype:Dataset fjernet da scope af dcat:Dataset omfatter flere forskellige typer fra dctype-vokabularet."@da , "2018-02 - odstraněno tvrzení o podtřídě dctype:Dataset, jelikož rozsah dcat:Dataset zahrnuje několik dalších typů ze slovníku dctype."@cs , "2018-02 - subclass of dctype:Dataset removed because 
scope of dcat:Dataset includes several other types from the dctype vocabulary."@en , "2018-02 - se eliminó el axioma de subclase con dctype:Dataset porque el alcance de dcat:Dataset incluye muchos otros tipos del vocabulario dctype."@es , "2018-02 - sottoclasse di dctype:Dataset rimosso perché l'ambito di dcat:Dataset include diversi altri tipi dal vocabolario dctype."@it ; + skos:definition "Une collection de données, publiée ou élaborée par une seule source, et disponible pour accès ou téléchargement dans un ou plusieurs formats."@fr , "قائمة بيانات منشورة أو مجموعة من قبل مصدر ما و متاح الوصول إليها أو تحميلها"@ar , "Una colección de datos, publicados o conservados por una única fuente, y disponibles para ser accedidos o descargados en uno o más formatos."@es , "Raccolta di dati, pubblicati o curati da un'unica fonte, disponibili per l'accesso o il download in uno o più formati."@it , "Kolekce dat poskytovaná či řízená jedním zdrojem, která je k dispozici pro přístup či stažení v jednom či více formátech."@cs , "En samling a data, udgivet eller udvalgt og arrangeret af en enkelt kilde og som der er adgang til i en eller flere repræsentationer."@da , "1つのエージェントによって公開またはキュレートされ、1つ以上の形式でアクセスまたはダウンロードできるデータの集合。"@ja , "Μία συλλογή από δεδομένα, δημοσιευμένη ή επιμελημένη από μία και μόνο πηγή, διαθέσιμη δε προς πρόσβαση ή μεταφόρτωση σε μία ή περισσότερες μορφές."@el , "A collection of data, published or curated by a single source, and available for access or download in one or more represenations."@en ; + skos:editorialNote "2020-03-16 A new scopenote added and need to be translated"@en ; + skos:scopeNote "Questa classe rappresenta il dataset come pubblicato dall’editore. 
Nel caso in cui sia necessario operare una distinzione fra i metadati originali del dataset e il record dei metadati ad esso associato nel catalogo (ad esempio, per distinguere la data di modifica del dataset da quella del dataset nel catalogo) si può impiegare la classe catalog record."@it , "Cette classe représente le jeu de données publié par le fournisseur de données. Dans les cas où une distinction est nécessaire entre le jeu de donénes et son entrée dans le catalogue, la classe registre de données peut être utilisée pour ce dernier."@fr , "Esta clase representa el conjunto de datos publicados. En los casos donde es necesario distinguir entre el conjunto de datos y su entrada en el catálogo de datos, se debe utilizar la clase 'registro del catálogo'."@es , "Η κλάση αυτή αναπαριστά το σύνολο δεδομένων αυτό καθ'εαυτό, όπως έχει δημοσιευθεί από τον εκδότη. Σε περιπτώσεις όπου είναι απαραίτητος ο διαχωρισμός μεταξύ του συνόλου δεδομένων και της καταγραφής αυτού στον κατάλογο (γιατί μεταδεδομένα όπως η ημερομηνία αλλαγής και ο συντηρητής μπορεί να διαφέρουν) η κλάση της καταγραφής καταλόγου μπορεί να χρησιμοποιηθεί για το τελευταίο."@el , "This class represents the actual dataset as published by the dataset provider. In cases where a distinction between the actual dataset and its entry in the catalog is necessary (because metadata such as modification date and maintainer might differ), the catalog record class can be used for the latter."@en , "Tato třída reprezentuje datovou sadu tak, jak je publikována poskytovatelem dat. V případě potřeby rozlišení datové sady a jejího katalogizačního záznamu (jelikož metadata jako datum modifikace se mohou lišit) pro něj může být použita třída \"katalogizační záznam\"."@cs , "Questa classe descrive il dataset dal punto di vista concettuale. Possono essere disponibili una o più rappresentazioni, con diversi layout e formati schematici o serializzazioni."@it , "This class describes the conceptual dataset. 
One or more representations might be available, with differing schematic layouts and formats or serializations."@en , "このクラスは、データセットの公開者が公開する実際のデータセットを表わします。カタログ内の実際のデータセットとそのエントリーとの区別が必要な場合(修正日と維持者などのメタデータが異なるかもしれないので)は、後者にcatalog recordというクラスを使用できます。"@ja , "The notion of dataset in DCAT is broad and inclusive, with the intention of accommodating resource types arising from all communities. Data comes in many forms including numbers, text, pixels, imagery, sound and other multi-media, and potentially other types, any of which might be collected into a dataset."@en , "Denne klasse beskriver det konceptuelle datasæt. En eller flere repræsentationer kan være tilgængelige med forskellige skematiske opsætninger, formater eller serialiseringer."@da , "Denne klasse repræsenterer det konkrete datasæt som det udgives af datasætleverandøren. I de tilfælde hvor det er nødvendigt at skelne mellem det konkrete datasæt og dets registrering i kataloget (fordi metadata såsom ændringsdato og vedligeholder er forskellige), så kan klassen katalogpost anvendes. "@da . + +dcterms:description rdf:type rdf:Property ; + rdfs:comment "An account of the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Description"@en ; + rdfs:subPropertyOf dc:description ; + dcterms:description "Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource."@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +vcard:Contact rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Contact"@en ; + rdfs:subClassOf vcard:RelatedType . 
+ +dcterms:issued rdf:type rdf:Property ; + rdfs:comment "Date of formal issuance of the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Date Issued"@en ; + rdfs:range rdfs:Literal ; + rdfs:subPropertyOf dc:date , dcterms:date ; + dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en ; + dcterms:issued "2000-07-11"^^xsd:date . + +[ rdf:type owl:Axiom ; + owl:annotatedProperty rdfs:domain ; + owl:annotatedSource prov:wasInfluencedBy ; + owl:annotatedTarget [ rdf:type owl:Class ; + owl:unionOf ( prov:Activity prov:Agent prov:Entity ) + ] ; + prov:definition "influencee: an identifier (o2) for an entity, activity, or agent; " ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-influence" +] . + +skos:mappingRelation rdf:type owl:ObjectProperty , rdf:Property ; + rdfs:comment "These concept mapping relations mirror semantic relations, and the data model defined below is similar (with the exception of skos:exactMatch) to the data model defined for semantic relations. A distinct vocabulary is provided for concept mapping relations, to provide a convenient way to differentiate links within a concept scheme from links between concept schemes. However, this pattern of usage is not a formal requirement of the SKOS data model, and relies on informal definitions of best practice."@en ; + rdfs:isDefinedBy ; + rdfs:label "is in mapping relation with"@en ; + rdfs:subPropertyOf skos:semanticRelation ; + skos:definition "Relates two concepts coming, by convention, from different schemes, and that have comparable meanings"@en . + +vcard:title rdf:type owl:DatatypeProperty ; + rdfs:comment "To specify the position or job of the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "title"@en ; + rdfs:range xsd:string . 
+ +vcard:organization-unit + rdf:type owl:DatatypeProperty ; + rdfs:comment "To specify the organizational unit name associated with the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "organizational unit name"@en ; + rdfs:range xsd:string ; + rdfs:subPropertyOf vcard:organization-name . + +vcard:hasEmail rdf:type owl:ObjectProperty ; + rdfs:comment "To specify the electronic mail address for communication with the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "has email"@en ; + rdfs:range vcard:Email . + +spdx:annotation rdf:type owl:ObjectProperty ; + rdfs:comment "Provide additional information about an SpdxElement."@en ; + rdfs:domain spdx:SpdxElement ; + rdfs:range spdx:Annotation ; + vs:term_status "stable" . + +spdx:relationshipType_dataFileOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A is a data file used in SPDXRef-B."@en ; + vs:term_status "stable"@en . + +dcterms:RFC3066 rdf:type rdfs:Datatype ; + rdfs:comment "The set of tags constructed according to RFC 3066 for the identification of languages."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "RFC 3066"@en ; + rdfs:seeAlso ; + dcterms:description "RFC 3066 has been obsoleted by RFC 4646."@en ; + dcterms:issued "2002-07-13"^^xsd:date . + +[ rdf:type owl:Axiom ; + rdfs:comment "hadPrimarySource property is a particular case of wasDerivedFrom (see http://www.w3.org/TR/prov-dm/#term-original-source) that aims to give credit to the source that originated some information." ; + owl:annotatedProperty rdfs:subPropertyOf ; + owl:annotatedSource prov:hadPrimarySource ; + owl:annotatedTarget prov:wasDerivedFrom +] . + +spdx:reviewer rdf:type owl:DatatypeProperty ; + rdfs:comment "The name and, optionally, contact information of the person who performed the review. Values of this property must conform to the agent and tool syntax. 
The reviewer property is deprecated in favor of Annotation with an annotationType review."@en ; + rdfs:domain spdx:Review ; + rdfs:range xsd:string ; + owl:deprecated true ; + vs:term_status "deprecated"@en . + +dcterms:spatial rdf:type rdf:Property ; + rdfs:comment "Spatial characteristics of the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Spatial Coverage"@en ; + rdfs:subPropertyOf dc:coverage , dcterms:coverage ; + dcam:rangeIncludes dcterms:Location ; + dcterms:issued "2000-07-11"^^xsd:date . + +vcard:Address rdf:type owl:Class ; + rdfs:comment "To specify the components of the delivery address for the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "Address"@en ; + owl:equivalentClass [ rdf:type owl:Class ; + owl:unionOf ( [ rdf:type owl:Class ; + owl:intersectionOf ( [ rdf:type owl:Restriction ; + owl:onProperty vcard:country-name ; + owl:someValuesFrom xsd:string + ] + [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty vcard:country-name + ] + ) + ] + [ rdf:type owl:Class ; + owl:intersectionOf ( [ rdf:type owl:Restriction ; + owl:onProperty vcard:locality ; + owl:someValuesFrom xsd:string + ] + [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty vcard:locality + ] + ) + ] + [ rdf:type owl:Class ; + owl:intersectionOf ( [ rdf:type owl:Restriction ; + owl:onProperty vcard:postal-code ; + owl:someValuesFrom xsd:string + ] + [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty vcard:postal-code + ] + ) + ] + [ rdf:type owl:Class ; + owl:intersectionOf ( [ rdf:type owl:Restriction ; + owl:onProperty vcard:region ; + owl:someValuesFrom xsd:string + ] + [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty vcard:region + ] + ) + ] + [ rdf:type owl:Class ; + owl:intersectionOf ( [ rdf:type owl:Restriction ; + owl:onProperty vcard:street-address ; + owl:someValuesFrom 
xsd:string + ] + [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty vcard:street-address + ] + ) + ] + ) + ] . + +time:year rdf:type owl:DatatypeProperty ; + rdfs:comment "Year position in a calendar-clock system.\n\nThe range of this property is not specified, so can be replaced by any specific representation of a calendar year from any calendar. "@en , "Posición de año en un sistema calendario-reloj.\n\nl rango de esta propiedad no está especificado, por tanto, se puede reemplazar por cualquier representación específica de un año de calendario de un calendario cualquiera."@es ; + rdfs:domain time:GeneralDateTimeDescription ; + rdfs:label "year"@en . + +dcterms:MethodOfInstruction + rdf:type rdfs:Class ; + rdfs:comment "A process that is used to engender knowledge, attitudes, and skills."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Method of Instruction"@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +dcterms:PhysicalResource + rdf:type rdfs:Class ; + rdfs:comment "A material thing."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Physical Resource"@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +vcard:geo rdf:type owl:ObjectProperty ; + rdfs:comment "This object property has been mapped"@en ; + rdfs:isDefinedBy ; + rdfs:label "geo"@en ; + owl:equivalentProperty vcard:hasGeo . + +time:DurationDescription + rdf:type owl:Class ; + rdfs:comment "Description of temporal extent structured with separate values for the various elements of a calendar-clock system. The temporal reference system is fixed to Gregorian Calendar, and the range of each of the numeric properties is restricted to xsd:decimal"@en , "Descripción de extensión temporal estructurada con valores separados para los distintos elementos de un sistema de horario-calendario. 
El sistema de referencia temporal se fija al calendario gregoriano, y el intervalo de cada una de las propiedades numéricas se restringe a xsd:decimal."@es ; + rdfs:label "descripción de duración"@es , "Duration description"@en ; + rdfs:subClassOf time:GeneralDurationDescription ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:allValuesFrom xsd:decimal ; + owl:onProperty time:minutes + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:allValuesFrom xsd:decimal ; + owl:onProperty time:days + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:allValuesFrom xsd:decimal ; + owl:onProperty time:seconds + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:hasValue ; + owl:onProperty time:hasTRS + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:allValuesFrom xsd:decimal ; + owl:onProperty time:weeks + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:allValuesFrom xsd:decimal ; + owl:onProperty time:hours + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:allValuesFrom xsd:decimal ; + owl:onProperty time:years + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:allValuesFrom xsd:decimal ; + owl:onProperty time:months + ] ; + skos:definition "Description of temporal extent structured with separate values for the various elements of a calendar-clock system. The temporal reference system is fixed to Gregorian Calendar, and the range of each of the numeric properties is restricted to xsd:decimal"@en , "Descripción de extensión temporal estructurada con valores separados para los distintos elementos de un sistema de horario-calendario. El sistema de referencia temporal se fija al calendario gregoriano, y el intervalo de cada una de las propiedades numéricas se restringe a xsd:decimal."@es ; + skos:note "In the Gregorian calendar the length of the month is not fixed. 
Therefore, a value like \"2.5 months\" cannot be exactly compared with a similar duration expressed in terms of weeks or days."@en , "En el calendario gregoriano la longitud de los meses no es fija. Por lo tanto, un valor como \"2,5 meses\" no se puede comparar exactamente con una duración similar expresada en términos de semanas o días."@es . + +vcard:Unknown rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Unknown"@en ; + rdfs:subClassOf vcard:Gender . + +[ rdf:type owl:Axiom ; + owl:annotatedProperty rdfs:range ; + owl:annotatedSource prov:wasInfluencedBy ; + owl:annotatedTarget [ rdf:type owl:Class ; + owl:unionOf ( prov:Activity prov:Agent prov:Entity ) + ] ; + prov:definition "influencer: an identifier (o1) for an ancestor entity, activity, or agent that the former depends on;" ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-influence" +] . + +dcterms:MediaTypeOrExtent + rdf:type rdfs:Class ; + rdfs:comment "A media type or extent."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Media Type or Extent"@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +skos:broaderTransitive + rdf:type owl:ObjectProperty , owl:TransitiveProperty , rdf:Property ; + rdfs:isDefinedBy ; + rdfs:label "has broader transitive"@en ; + rdfs:subPropertyOf skos:semanticRelation ; + owl:inverseOf skos:narrowerTransitive ; + skos:definition "skos:broaderTransitive is a transitive superproperty of skos:broader." ; + skos:scopeNote "By convention, skos:broaderTransitive is not used to make assertions. Rather, the properties can be used to draw inferences about the transitive closure of the hierarchical relation, which is useful e.g. when implementing a simple query expansion algorithm in a search application."@en . + + + rdfs:label "Class and property diagram of the LOCN vocabulary" . 
+ +dcterms:DCMIType rdf:type dcam:VocabularyEncodingScheme ; + rdfs:comment "The set of classes specified by the DCMI Type Vocabulary, used to categorize the nature or genre of the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "DCMI Type Vocabulary"@en ; + rdfs:seeAlso dctype: ; + dcterms:issued "2000-07-11"^^xsd:date . + +time:unitWeek rdf:type time:TemporalUnit ; + rdfs:label "Week (unit of temporal duration)"@en ; + skos:prefLabel "一週間"@jp , "week"@nl , "week"@en , "settimana"@it , "semana"@es , "semana"@pt , "одна неделя"@ru , "tydzień"@pl , "일주일"@kr , "一周"@zh , "semaine"@fr , "Woche"@de , "سبوع واحد"@ar ; + time:days "0"^^xsd:decimal ; + time:hours "0"^^xsd:decimal ; + time:minutes "0"^^xsd:decimal ; + time:months "0"^^xsd:decimal ; + time:seconds "0"^^xsd:decimal ; + time:weeks "1"^^xsd:decimal ; + time:years "0"^^xsd:decimal . + +prov:qualifiedInvalidation + rdf:type owl:ObjectProperty ; + rdfs:comment "If this Entity prov:wasInvalidatedBy Activity :a, then it can qualify how it was invalidated using prov:qualifiedInvalidation [ a prov:Invalidation; prov:activity :a; :foo :bar ]."@en ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "qualifiedInvalidation" ; + rdfs:range prov:Invalidation ; + rdfs:subPropertyOf prov:qualifiedInfluence ; + prov:category "qualified" ; + prov:component "entities-activities" ; + prov:inverse "qualifiedInvalidationOf" ; + prov:sharesDefinitionWith prov:Invalidation ; + prov:unqualifiedForm prov:wasInvalidatedBy . + +prov:qualifiedForm rdf:type owl:AnnotationProperty ; + rdfs:comment "This annotation property links a subproperty of prov:wasInfluencedBy with the subclass of prov:Influence and the qualifying property that are used to qualify it. 
\n\nExample annotation:\n\n prov:wasGeneratedBy prov:qualifiedForm prov:qualifiedGeneration, prov:Generation .\n\nThen this unqualified assertion:\n\n :entity1 prov:wasGeneratedBy :activity1 .\n\ncan be qualified by adding:\n\n :entity1 prov:qualifiedGeneration :entity1Gen .\n :entity1Gen \n a prov:Generation, prov:Influence;\n prov:activity :activity1;\n :customValue 1337 .\n\nNote how the value of the unqualified influence (prov:wasGeneratedBy :activity1) is mirrored as the value of the prov:activity (or prov:entity, or prov:agent) property on the influence class."@en ; + rdfs:isDefinedBy ; + rdfs:subPropertyOf rdfs:seeAlso . + +[ rdf:type owl:Axiom ; + rdfs:comment "Attribution is a particular case of trace (see http://www.w3.org/TR/prov-dm/#concept-trace), in the sense that it links an entity to the agent that ascribed it." ; + owl:annotatedProperty rdfs:subPropertyOf ; + owl:annotatedSource prov:wasAttributedTo ; + owl:annotatedTarget prov:wasInfluencedBy ; + prov:definition "IF wasAttributedTo(e2,ag1,aAttr) holds, THEN wasInfluencedBy(e2,ag1) also holds. " +] . + +vcard:Neighbor rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Neighbor"@en ; + rdfs:subClassOf vcard:RelatedType . + +vcard:language rdf:type owl:DatatypeProperty ; + rdfs:comment "To specify the language that may be used for contacting the object. May also be used as a property parameter."@en ; + rdfs:isDefinedBy ; + rdfs:label "language"@en . + +prov:Delegation rdf:type owl:Class ; + rdfs:comment "An instance of prov:Delegation provides additional descriptions about the binary prov:actedOnBehalfOf relation from a performing prov:Agent to some prov:Agent for whom it was performed. For example, :mixing prov:wasAssociatedWith :toddler . 
:toddler prov:actedOnBehalfOf :mother; prov:qualifiedDelegation [ a prov:Delegation; prov:entity :mother; :foo :bar ]."@en ; + rdfs:isDefinedBy ; + rdfs:label "Delegation" ; + rdfs:subClassOf prov:AgentInfluence ; + prov:category "qualified" ; + prov:component "agents-responsibility" ; + prov:definition "Delegation is the assignment of authority and responsibility to an agent (by itself or by another agent) to carry out a specific activity as a delegate or representative, while the agent it acts on behalf of retains some responsibility for the outcome of the delegated work.\n\nFor example, a student acted on behalf of his supervisor, who acted on behalf of the department chair, who acted on behalf of the university; all those agents are responsible in some way for the activity that took place but we do not say explicitly who bears responsibility and to what degree."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-delegation"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-delegation"^^xsd:anyURI ; + prov:unqualifiedForm prov:actedOnBehalfOf . + +dcterms:ISO3166 rdf:type rdfs:Datatype ; + rdfs:comment "The set of codes listed in ISO 3166-1 for the representation of names of countries."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "ISO 3166"@en ; + rdfs:seeAlso , ; + dcterms:issued "2000-07-11"^^xsd:date . + +spdx:annotationDate rdf:type owl:DatatypeProperty ; + rdfs:comment "Identify when the comment was made. This is to be specified according to the combined date and time in the UTC format, as specified in the ISO 8601 standard."@en ; + rdfs:domain spdx:Annotation ; + rdfs:range xsd:dateTime ; + rdfs:subPropertyOf spdx:date ; + vs:term_status "stable" . 
+ +adms:AssetRepository rdf:type owl:Class ; + rdfs:comment "A system or service that provides facilities for storage and maintenance of descriptions of Assets and Asset Distributions, and functionality that allows users to search and access these descriptions. An Asset Repository will typically contain descriptions of several Assets and related Asset Distributions."@en ; + rdfs:isDefinedBy ; + rdfs:label "Asset repository"@en . + +skos:broader rdf:type owl:ObjectProperty , rdf:Property ; + rdfs:comment "Broader concepts are typically rendered as parents in a concept hierarchy (tree)."@en ; + rdfs:isDefinedBy ; + rdfs:label "has broader"@en ; + rdfs:subPropertyOf skos:broaderTransitive ; + owl:inverseOf skos:narrower ; + skos:definition "Relates a concept to a concept that is more general in meaning."@en ; + skos:scopeNote "By convention, skos:broader is only used to assert an immediate (i.e. direct) hierarchical link between two conceptual resources."@en . + +spdx:relationshipType_providedDependencyOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A is a to be provided dependency of SPDXRef-B."@en ; + vs:term_status "stable"@en . + +spdx:exceptionTextHtml + rdf:type owl:DatatypeProperty ; + rdfs:comment "HTML representation of the License Exception Text"@en ; + rdfs:domain spdx:ListedLicenseException ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + + + rdf:type sh:NodeShape ; + sh:name "Location"@en ; + sh:property [ sh:maxCount 1 ; + sh:nodeKind sh:Literal ; + sh:path locn:geometry ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:nodeKind sh:Literal ; + sh:path dcat:centroid ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:nodeKind sh:Literal ; + sh:path dcat:bbox ; + sh:severity sh:Violation + ] ; + sh:targetClass dcterms:Location . 
+ +dcat:accessURL rdf:type rdf:Property , owl:ObjectProperty ; + rdfs:comment "A URL of a resource that gives access to a distribution of the dataset. E.g. landing page, feed, SPARQL endpoint. Use for all cases except a simple download link, in which case downloadURL is preferred."@en , "Puede ser cualquier tipo de URL que de acceso a una distribución del conjunto de datos, e.g., página de destino, descarga, URL feed, punto de acceso SPARQL. Esta propriedad se debe usar cuando su catálogo de datos no tiene información sobre donde está o cuando no se puede descargar."@es , "En URL for en ressource som giver adgang til en repræsentation af datsættet. Fx destinationsside, feed, SPARQL-endpoint. Anvendes i alle sammenhænge undtagen til angivelse af et simpelt download link hvor anvendelse af egenskaben downloadURL foretrækkes."@da , "URL zdroje, přes které je přístupná distribuce datové sady. Příkladem může být vstupní stránka, RSS kanál či SPARQL endpoint. Použijte ve všech případech kromě URL souboru ke stažení, pro které je lepší použít dcat:downloadURL."@cs , "Μπορεί να είναι οποιουδήποτε είδους URL που δίνει πρόσβαση στη διανομή ενός συνόλου δεδομένων. Π.χ. ιστοσελίδα αρχικής πρόσβασης, μεταφόρτωση, feed URL, σημείο διάθεσης SPARQL. Να χρησιμοποιείται όταν ο κατάλογος δεν περιέχει πληροφορίες εαν πρόκειται ή όχι για μεταφορτώσιμο αρχείο."@el , "Un URL di una risorsa che consente di accedere a una distribuzione del set di dati. Per esempio, pagina di destinazione, feed, endpoint SPARQL. Da utilizzare per tutti i casi, tranne quando si tratta di un semplice link per il download nel qual caso è preferito downloadURL."@it , "データセットの配信にアクセス権を与えるランディング・ページ、フィード、SPARQLエンドポイント、その他の種類の資源。"@ja , "Ceci peut être tout type d'URL qui donne accès à une distribution du jeu de données. Par exemple, un lien à une page HTML contenant un lien au jeu de données, un Flux RSS, un point d'accès SPARQL. 
Utilisez le lorsque votre catalogue ne contient pas d'information sur quoi il est ou quand ce n'est pas téléchargeable."@fr , "أي رابط يتيح الوصول إلى البيانات. إذا كان الرابط هو ربط مباشر لملف يمكن تحميله استخدم الخاصية downloadURL"@ar ; + rdfs:domain dcat:Distribution ; + rdfs:isDefinedBy ; + rdfs:label "přístupová adresa"@cs , "URL d'accès"@fr , "アクセスURL"@ja , "access address"@en , "URL πρόσβασης"@el , "رابط وصول"@ar , "URL de acceso"@es , "indirizzo di accesso"@it , "adgangsadresse"@da ; + rdfs:range rdfs:Resource ; + owl:propertyChainAxiom ( dcat:accessService dcat:endpointURL ) ; + skos:altLabel "adgangsURL"@da ; + skos:definition "En URL for en ressource som giver adgang til en repræsentation af datsættet. Fx destinationsside, feed, SPARQL-endpoint. Anvendes i alle sammenhænge undtagen til angivelse af et simpelt download link hvor anvendelse af egenskaben downloadURL foretrækkes."@da , "データセットの配信にアクセス権を与えるランディング・ページ、フィード、SPARQLエンドポイント、その他の種類の資源。"@ja , "A URL of a resource that gives access to a distribution of the dataset. E.g. landing page, feed, SPARQL endpoint. Use for all cases except a simple download link, in which case downloadURL is preferred."@en , "Ceci peut être tout type d'URL qui donne accès à une distribution du jeu de données. Par exemple, un lien à une page HTML contenant un lien au jeu de données, un Flux RSS, un point d'accès SPARQL. Utilisez le lorsque votre catalogue ne contient pas d'information sur quoi il est ou quand ce n'est pas téléchargeable."@fr , "Un URL di una risorsa che consente di accedere a una distribuzione del set di dati. Per esempio, pagina di destinazione, feed, endpoint SPARQL. Da utilizzare per tutti i casi, tranne quando si tratta di un semplice link per il download nel qual caso è preferito downloadURL."@it , "Μπορεί να είναι οποιουδήποτε είδους URL που δίνει πρόσβαση στη διανομή ενός συνόλου δεδομένων. Π.χ. ιστοσελίδα αρχικής πρόσβασης, μεταφόρτωση, feed URL, σημείο διάθεσης SPARQL. 
Να χρησιμοποιείται όταν ο κατάλογος δεν περιέχει πληροφορίες εαν πρόκειται ή όχι για μεταφορτώσιμο αρχείο."@el , "Puede ser cualquier tipo de URL que de acceso a una distribución del conjunto de datos, e.g., página de destino, descarga, URL feed, punto de acceso SPARQL. Esta propriedad se debe usar cuando su catálogo de datos no tiene información sobre donde está o cuando no se puede descargar."@es , "URL zdroje, přes které je přístupná distribuce datové sady. Příkladem může být vstupní stránka, RSS kanál či SPARQL endpoint. Použijte ve všech případech kromě URL souboru ke stažení, pro které je lepší použít dcat:downloadURL."@cs , "أي رابط يتيح الوصول إلى البيانات. إذا كان الرابط هو ربط مباشر لملف يمكن تحميله استخدم الخاصية downloadURL"@ar ; + skos:editorialNote "rdfs:label, rdfs:comment and skos:scopeNote have been modified. Non-english versions except for Italian must be updated."@en , "Status: English Definition text modified by DCAT revision team, updated Italian and Czech translation provided, translations for other languages pending."@en ; + skos:scopeNote "Se le distribuzioni sono accessibili solo attraverso una pagina web (ad esempio, gli URL per il download diretto non sono noti), allora il link della pagina web deve essere duplicato come accessURL sulla distribuzione."@it , "Hvis en eller flere distributioner kun er tilgængelige via en destinationsside (dvs. en URL til direkte download er ikke kendt), så bør destinationssidelinket gentages som adgangsadresse for distributionen."@da , "Pokud jsou distribuce přístupné pouze přes vstupní stránku (tj. URL pro přímé stažení nejsou známa), pak by URL přístupové stránky mělo být duplikováno ve vlastnosti distribuce accessURL."@cs , "El rango es una URL. Si la distribución es accesible solamente través de una página de destino (es decir, si no se conoce una URL de descarga directa), entonces el enlance a la página de destino debe ser duplicado como accessURL en la distribución."@es , "Η τιμή είναι ένα URL. 
Αν η/οι διανομή/ές είναι προσβάσιμη/ες μόνο μέσω μίας ιστοσελίδας αρχικής πρόσβασης (δηλαδή αν δεν υπάρχουν γνωστές διευθύνσεις άμεσης μεταφόρτωσης), τότε ο σύνδεσμος της ιστοσελίδας αρχικής πρόσβασης πρέπει να αναπαραχθεί ως accessURL σε μία διανομή."@el , "La valeur est une URL. Si la distribution est accessible seulement au travers d'une page d'atterrissage (c-à-dire on n'ignore une URL de téléchargement direct), alors le lien à la page d'atterrissage doit être dupliqué comee accessURL sur la distribution."@fr , "確実にダウンロードでない場合や、ダウンロードかどうかが不明である場合は、downloadURLではなく、accessURLを用いてください。ランディング・ページを通じてしか配信にアクセスできない場合(つまり、直接的なダウンロードURLが不明)は、配信におけるaccessURLとしてランディング・ページのリンクをコピーすべきです(SHOULD)。"@ja , "If the distribution(s) are accessible only through a landing page (i.e. direct download URLs are not known), then the landing page link should be duplicated as accessURL on a distribution."@en . + +spdx:snippetFromFile rdf:type owl:ObjectProperty ; + rdfs:comment "File containing the SPDX element (e.g. the file contaning a snippet)."@en ; + rdfs:domain spdx:Snippet ; + rdfs:range spdx:File ; + vs:term_status "stable"@en . + +prov:Communication rdf:type owl:Class ; + rdfs:comment "An instance of prov:Communication provides additional descriptions about the binary prov:wasInformedBy relation from an informed prov:Activity to the prov:Activity that informed it. For example, :you_jumping_off_bridge prov:wasInformedBy :everyone_else_jumping_off_bridge; prov:qualifiedCommunication [ a prov:Communication; prov:activity :everyone_else_jumping_off_bridge; :foo :bar ]."@en ; + rdfs:isDefinedBy ; + rdfs:label "Communication" ; + rdfs:subClassOf prov:ActivityInfluence ; + prov:category "qualified" ; + prov:component "entities-activities" ; + prov:constraints "http://www.w3.org/TR/2013/REC-prov-constraints-20130430/#prov-dm-constraints-fig"^^xsd:anyURI ; + prov:definition "Communication is the exchange of an entity by two activities, one activity using the entity generated by the other." 
; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-Communication"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-wasInformedBy"^^xsd:anyURI ; + prov:unqualifiedForm prov:wasInformedBy . + +vcard:email rdf:type owl:ObjectProperty ; + rdfs:comment "This object property has been mapped"@en ; + rdfs:isDefinedBy ; + rdfs:label "email"@en ; + owl:equivalentProperty vcard:hasEmail . + +prov:qualifiedGeneration + rdf:type owl:ObjectProperty ; + rdfs:comment "If this Activity prov:generated Entity :e, then it can qualify how it performed the Generation using prov:qualifiedGeneration [ a prov:Generation; prov:entity :e; :foo :bar ]."@en ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "qualifiedGeneration" ; + rdfs:range prov:Generation ; + rdfs:subPropertyOf prov:qualifiedInfluence ; + prov:category "qualified" ; + prov:component "entities-activities" ; + prov:inverse "qualifiedGenerationOf" ; + prov:sharesDefinitionWith prov:Generation ; + prov:unqualifiedForm prov:wasGeneratedBy . + +dcterms:BibliographicResource + rdf:type rdfs:Class ; + rdfs:comment "A book, article, or other documentary resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Bibliographic Resource"@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +spdx:fileType_audio rdf:type owl:NamedIndividual , spdx:FileType ; + rdfs:comment "The file is associated with an audio file (MIME type of audio/* , ie. .mp3 ); \nIMAGE if the file is assoicated with an picture image file (MIME type of image/*, ie. .jpg, .gif )"@en ; + vs:term_status "stable"@en . 
+ +prov:wasInformedBy rdf:type owl:ObjectProperty ; + rdfs:comment "An activity a2 is dependent on or informed by another activity a1, by way of some unspecified entity that is generated by a1 and used by a2."@en ; + rdfs:domain prov:Activity ; + rdfs:isDefinedBy ; + rdfs:label "wasInformedBy" ; + rdfs:range prov:Activity ; + rdfs:subPropertyOf prov:wasInfluencedBy ; + owl:propertyChainAxiom ( prov:qualifiedCommunication prov:activity ) ; + prov:category "starting-point" ; + prov:component "entities-activities" ; + prov:inverse "informed" ; + prov:qualifiedForm prov:qualifiedCommunication , prov:Communication . + +dcat:CatalogRecord rdf:type owl:Class , rdfs:Class ; + rdfs:comment "A record in a data catalog, describing the registration of a single dataset or data service."@en , "1つのデータセットを記述したデータ・カタログ内のレコード。"@ja , "En post i et datakatalog der beskriver registreringen af et enkelt datasæt eller en datatjeneste."@da , "Záznam v datovém katalogu popisující jednu datovou sadu či datovou službu."@cs , "Un registre du catalogue ou une entrée du catalogue, décrivant un seul jeu de données."@fr , "Un registro en un catálogo de datos que describe un solo conjunto de datos o un servicio de datos."@es , "Un record in un catalogo di dati che descrive un singolo dataset o servizio di dati."@it , "Μία καταγραφή ενός καταλόγου, η οποία περιγράφει ένα συγκεκριμένο σύνολο δεδομένων."@el ; + rdfs:isDefinedBy ; + rdfs:label "カタログ・レコード"@ja , "Καταγραφή καταλόγου"@el , "سجل"@ar , "Registro del catálogo"@es , "Record di catalogo"@it , "Catalog Record"@en , "Katalogizační záznam"@cs , "Katalogpost"@da , "Registre du catalogue"@fr ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:allValuesFrom dcat:Resource ; + owl:onProperty foaf:primaryTopic + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty foaf:primaryTopic + ] ; + skos:definition "Un 
record in un catalogo di dati che descrive un singolo dataset o servizio di dati."@it , "Záznam v datovém katalogu popisující jednu datovou sadu či datovou službu."@cs , "Un registro en un catálogo de datos que describe un solo conjunto de datos o un servicio de datos."@es , "En post i et datakatalog der beskriver registreringen af et enkelt datasæt eller en datatjeneste."@da , "1つのデータセットを記述したデータ・カタログ内のレコード。"@ja , "A record in a data catalog, describing the registration of a single dataset or data service."@en , "Μία καταγραφή ενός καταλόγου, η οποία περιγράφει ένα συγκεκριμένο σύνολο δεδομένων."@el , "Un registre du catalogue ou une entrée du catalogue, décrivant un seul jeu de données."@fr ; + skos:editorialNote "English definition updated in this revision. Multilingual text not yet updated except the Spanish one and the Czech one and Italian one."@en ; + skos:scopeNote "このクラスはオプションで、すべてのカタログがそれを用いるとは限りません。これは、データセットに関するメタデータとカタログ内のデータセットのエントリーに関するメタデータとで区別が行われるカタログのために存在しています。例えば、データセットの公開日プロパティーは、公開機関が情報を最初に利用可能とした日付を示しますが、カタログ・レコードの公開日は、データセットがカタログに追加された日付です。両方の日付が異っていたり、後者だけが分かっている場合は、カタログ・レコードに対してのみ公開日を指定すべきです。W3CのPROVオントロジー[prov-o]を用いれば、データセットに対する特定の変更に関連するプロセスやエージェントの詳細などの、さらに詳しい来歴情報の記述が可能となることに注意してください。"@ja , "Questa classe è opzionale e non tutti i cataloghi la utilizzeranno. Esiste per cataloghi in cui si opera una distinzione tra i metadati relativi al dataset ed i metadati relativi alla gestione del dataset nel catalogo. Ad esempio, la proprietà per indicare la data di pubblicazione del dataset rifletterà la data in cui l'informazione è stata originariamente messa a disposizione dalla casa editrice, mentre la data di pubblicazione per il record nel catalogo rifletterà la data in cui il dataset è stato aggiunto al catalogo. Nei casi dove solo quest'ultima sia nota, si utilizzerà esclusivamente la data di pubblicazione relativa al record del catalogo. 
Si noti che l'Ontologia W3C PROV permette di descrivere ulteriori informazioni sulla provenienza, quali i dettagli del processo, la procedura e l'agente coinvolto in una particolare modifica di un dataset."@it , "Tato třída je volitelná a ne všechny katalogy ji využijí. Existuje pro katalogy, ve kterých se rozlišují metadata datové sady či datové služby a metadata o záznamu o datové sadě či datové službě v katalogu. Například datum publikace datové sady odráží datum, kdy byla datová sada původně zveřejněna poskytovatelem dat, zatímco datum publikace katalogizačního záznamu je datum zanesení datové sady do katalogu. V případech kdy se obě data liší, nebo je známo jen to druhé, by mělo být specifikováno jen datum publikace katalogizačního záznamu. Všimněte si, že ontologie W3C PROV umožňuje popsat další informace o původu jako například podrobnosti o procesu konkrétní změny datové sady a jeho účastnících."@cs , "Esta clase es opcional y no todos los catálogos la utilizarán. Esta clase existe para catálogos que hacen una distinción entre los metadatos acerca de un conjunto de datos o un servicio de datos y los metadatos acerca de una entrada en ese conjunto de datos en el catálogo. Por ejemplo, la propiedad sobre la fecha de la publicación de los datos refleja la fecha en que la información fue originalmente publicada, mientras que la fecha de publicación del registro del catálogo es la fecha en que los datos se agregaron al mismo. En caso en que ambas fechas fueran diferentes, o en que sólo la fecha de publicación del registro del catálogo estuviera disponible, sólo debe especificarse en el registro del catálogo. Tengan en cuenta que la ontología PROV de W3C permite describir otra información sobre la proveniencia de los datos, como por ejemplo detalles del proceso y de los agentes involucrados en algún cambio específico a los datos."@es , "This class is optional and not all catalogs will use it. 
It exists for catalogs where a distinction is made between metadata about a dataset or data service and metadata about the entry for the dataset or data service in the catalog. For example, the publication date property of the dataset reflects the date when the information was originally made available by the publishing agency, while the publication date of the catalog record is the date when the dataset was added to the catalog. In cases where both dates differ, or where only the latter is known, the publication date should only be specified for the catalog record. Notice that the W3C PROV Ontology allows describing further provenance information such as the details of the process and the agent involved in a particular change to a dataset."@en , "Αυτή η κλάση είναι προαιρετική και δεν χρησιμοποιείται από όλους τους καταλόγους. Υπάρχει για τις περιπτώσεις καταλόγων όπου γίνεται διαχωρισμός μεταξύ των μεταδεδομένων για το σύνολο των δεδομένων και των μεταδεδομένων για την καταγραφή του συνόλου δεδομένων εντός του καταλόγου. Για παράδειγμα, η ιδιότητα της ημερομηνίας δημοσίευσης του συνόλου δεδομένων δείχνει την ημερομηνία κατά την οποία οι πληροφορίες έγιναν διαθέσιμες από τον φορέα δημοσίευσης, ενώ η ημερομηνία δημοσίευσης της καταγραφής του καταλόγου δείχνει την ημερομηνία που το σύνολο δεδομένων προστέθηκε στον κατάλογο. Σε περιπτώσεις που οι δύο ημερομηνίες διαφέρουν, ή που μόνο η τελευταία είναι γνωστή, η ημερομηνία δημοσίευσης θα πρέπει να δίνεται για την καταγραφή του καταλόγου. Να σημειωθεί πως η οντολογία W3C PROV επιτρέπει την περιγραφή επιπλέον πληροφοριών ιστορικού όπως λεπτομέρειες για τη διαδικασία και τον δράστη που εμπλέκονται σε μία συγκεκριμένη αλλαγή εντός του συνόλου δεδομένων."@el , "C'est une classe facultative et tous les catalogues ne l'utiliseront pas. 
Cette classe existe pour les catalogues\tayant une distinction entre les métadonnées sur le jeu de données et les métadonnées sur une entrée du jeu de données dans le catalogue."@fr , "Denne klasse er valgfri og ikke alle kataloger vil anvende denne klasse. Den kan anvendes i de kataloger hvor der skelnes mellem metadata om datasættet eller datatjenesten og metadata om selve posten til registreringen af datasættet eller datatjenesten i kataloget. Udgivelsesdatoen for datasættet afspejler for eksempel den dato hvor informationerne oprindeligt blev gjort tilgængelige af udgiveren, hvorimod udgivelsesdatoen for katalogposten er den dato hvor datasættet blev føjet til kataloget. I de tilfælde hvor de to datoer er forskellige eller hvor blot sidstnævnte er kendt, bør udgivelsesdatoen kun angives for katalogposten. Bemærk at W3Cs PROV ontologi gør til muligt at tilføje yderligere proveniensoplysninger eksempelvis om processen eller aktøren involveret i en given ændring af datasættet."@da . + +spdx:isValid rdf:type owl:DatatypeProperty ; + rdfs:comment "True if the URL is a valid well formed URL"@en ; + rdfs:domain spdx:CrossRef ; + rdfs:range xsd:boolean . + + + rdf:type owl:ObjectProperty ; + rdfs:domain ; + rdfs:range ; + vs:term_status "stable"@en . + +skos:Collection rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Collection"@en ; + owl:disjointWith skos:Concept , skos:ConceptScheme ; + skos:definition "A meaningful collection of concepts."@en ; + skos:scopeNote "Labelled collections can be used where you would like a set of concepts to be displayed under a 'node label' in the hierarchy."@en . + +doap:Project rdf:type owl:Class . 
+ +dcterms:modified rdf:type rdf:Property ; + rdfs:comment "Date on which the resource was changed."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Date Modified"@en ; + rdfs:range rdfs:Literal ; + rdfs:subPropertyOf dc:date , dcterms:date ; + dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en ; + dcterms:issued "2000-07-11"^^xsd:date . + +adms:supportedSchema rdf:type owl:ObjectProperty ; + rdfs:comment "A schema according to which the Asset Repository can provide data about its content, e.g. ADMS."@en ; + rdfs:domain rdfs:Resource ; + rdfs:isDefinedBy ; + rdfs:label "supported schema"@en ; + rdfs:range adms:Asset . + +adms:versionNotes rdf:type owl:DatatypeProperty ; + rdfs:comment "A description of changes between this version and the previous version of the Asset."@en ; + rdfs:domain rdfs:Resource ; + rdfs:isDefinedBy ; + rdfs:label "version notes"@en ; + rdfs:range rdfs:Literal . + +spdx:purpose_install rdf:type owl:NamedIndividual , spdx:Purpose ; + rdfs:comment "The package is used to install software on disk."@en ; + vs:term_status "stable"@en . + +prov:hadActivity rdf:type owl:ObjectProperty ; + rdfs:comment "The _optional_ Activity of an Influence, which used, generated, invalidated, or was the responsibility of some Entity. This property is _not_ used by ActivityInfluence (use prov:activity instead)."@en , "This property has multiple RDFS domains to suit multiple OWL Profiles. See PROV-O OWL Profile." 
; + rdfs:domain prov:Influence ; + rdfs:domain [ rdf:type owl:Class ; + owl:unionOf ( prov:Delegation prov:Derivation prov:End prov:Start ) + ] ; + rdfs:isDefinedBy ; + rdfs:label "hadActivity" ; + rdfs:range prov:Activity ; + prov:category "qualified" ; + prov:component "derivations" ; + prov:editorialNote "The multiple rdfs:domain assertions are intended. One is simpler and works for OWL-RL, the union is more specific but is not recognized by OWL-RL."@en ; + prov:inverse "wasActivityOfInfluence" ; + prov:sharesDefinitionWith prov:Activity . + +vcard:hasFamilyName rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the family name data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has family name"@en . + +time:dayOfWeek rdf:type owl:ObjectProperty ; + rdfs:comment "The day of week, whose value is a member of the class time:DayOfWeek"@en , "El día de la semana, cuyo valor es un miembro de la clase 'día de la semana'."@es ; + rdfs:domain time:GeneralDateTimeDescription ; + rdfs:label "day of week"@en , "día de la semana"@es ; + rdfs:range time:DayOfWeek ; + skos:definition "The day of week, whose value is a member of the class time:DayOfWeek"@en , "El día de la semana, cuyo valor es un miembro de la clase 'día de la semana'."@es . + +spdx:relationshipType_devDependencyOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A is a development dependency of SPDXRef-B."@en ; + vs:term_status "stable"@en . 
+ +time:intervalMeets rdf:type owl:ObjectProperty ; + rdfs:comment "Si un intervalo propio T1 se encuentra con otro intervalo propio T2, entonces el final de T1 coincide con el principio de T2."@es , "If a proper interval T1 is intervalMeets another proper interval T2, then the end of T1 is coincident with the beginning of T2."@en ; + rdfs:domain time:ProperInterval ; + rdfs:label "intervalo se encuentra"@es , "interval meets"@en ; + rdfs:range time:ProperInterval ; + owl:inverseOf time:intervalMetBy ; + skos:definition "If a proper interval T1 is intervalMeets another proper interval T2, then the end of T1 is coincident with the beginning of T2."@en , "Si un intervalo propio T1 se encuentra con otro intervalo propio T2, entonces el final de T1 coincide con el principio de T2."@es . + +time:unitSecond rdf:type time:TemporalUnit ; + rdfs:label "Second (unit of temporal duration)"@en ; + skos:prefLabel "segundo"@pt , "segundo"@es , "seconde"@fr , "seconde"@nl , "Sekunde"@de , "second"@en , "ثانية واحدة"@ar , "一秒"@jp , "一秒"@zh , "일초"@kr , "Sekundę"@pl , "secondo"@it ; + time:days "0"^^xsd:decimal ; + time:hours "0"^^xsd:decimal ; + time:minutes "0"^^xsd:decimal ; + time:months "0"^^xsd:decimal ; + time:seconds "1"^^xsd:decimal ; + time:weeks "0"^^xsd:decimal ; + time:years "0"^^xsd:decimal . + +spdx:created rdf:type owl:DatatypeProperty ; + rdfs:comment "Identify when the SPDX document was originally created. The date is to be specified according to combined date and time in UTC format as specified in ISO 8601 standard."@en ; + rdfs:domain spdx:CreationInfo ; + rdfs:range xsd:dateTime ; + rdfs:subPropertyOf spdx:date ; + vs:term_status "stable" . + +dcterms:ISO639-2 rdf:type rdfs:Datatype ; + rdfs:comment "The three-letter alphabetic codes listed in ISO639-2 for the representation of names of languages."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "ISO 639-2"@en ; + rdfs:seeAlso ; + dcterms:issued "2000-07-11"^^xsd:date . 
+ +skos:historyNote rdf:type owl:AnnotationProperty , rdf:Property ; + rdfs:isDefinedBy ; + rdfs:label "history note"@en ; + rdfs:subPropertyOf skos:note ; + skos:definition "A note about the past state/use/meaning of a concept."@en . + + + rdf:type owl:Class ; + rdfs:subClassOf ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onClass ; + owl:onProperty ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onClass ; + owl:onProperty ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + vs:term_status "stable" . + +vcard:Name rdf:type owl:Class ; + rdfs:comment "To specify the components of the name of the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "Name"@en ; + owl:equivalentClass [ rdf:type owl:Class ; + owl:unionOf ( [ rdf:type owl:Class ; + owl:intersectionOf ( [ rdf:type owl:Restriction ; + owl:onProperty vcard:additional-name ; + owl:someValuesFrom xsd:string + ] + [ rdf:type owl:Restriction ; + owl:minCardinality "0"^^xsd:nonNegativeInteger ; + owl:onProperty vcard:additional-name + ] + ) + ] + [ rdf:type owl:Class ; + owl:intersectionOf ( [ rdf:type owl:Restriction ; + owl:onProperty vcard:family-name ; + owl:someValuesFrom xsd:string + ] + [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty vcard:family-name + ] + ) + ] + [ rdf:type owl:Class ; + owl:intersectionOf ( [ rdf:type owl:Restriction ; + owl:onProperty vcard:given-name ; + owl:someValuesFrom xsd:string + ] + [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty vcard:given-name + ] + ) + ] + [ rdf:type owl:Class ; + owl:intersectionOf ( [ rdf:type owl:Restriction ; + owl:onProperty vcard:honorific-prefix ; + owl:someValuesFrom xsd:string + ] + [ rdf:type owl:Restriction ; + owl:minCardinality "0"^^xsd:nonNegativeInteger ; + owl:onProperty vcard:honorific-prefix + ] + ) + ] + [ rdf:type owl:Class ; + owl:intersectionOf ( [ 
rdf:type owl:Restriction ; + owl:onProperty vcard:honorific-suffix ; + owl:someValuesFrom xsd:string + ] + [ rdf:type owl:Restriction ; + owl:minCardinality "0"^^xsd:nonNegativeInteger ; + owl:onProperty vcard:honorific-suffix + ] + ) + ] + ) + ] . + + + rdf:type owl:Ontology ; + rdfs:comment "Ontology for vCard based on RFC6350"@en ; + rdfs:label "Ontology for vCard"@en ; + owl:versionInfo "Final"@en . + +vcard:tz rdf:type owl:DatatypeProperty ; + rdfs:comment "To indicate time zone information that is specific to the object. May also be used as a property parameter."@en ; + rdfs:isDefinedBy ; + rdfs:label "time zone"@en ; + rdfs:range xsd:string . + +vcard:Pref rdf:type owl:Class ; + rdfs:comment "This class is deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "Pref"@en ; + rdfs:subClassOf vcard:Type ; + owl:deprecated true . + +spdx:algorithm rdf:type owl:ObjectProperty ; + rdfs:comment "Identifies the algorithm used to produce the subject Checksum. Currently, SHA-1 is the only supported algorithm. It is anticipated that other algorithms will be supported at a later time."@en ; + rdfs:domain spdx:Checksum ; + vs:term_status "stable" . + + + rdf:type owl:Class ; + vs:term_status "stable" . + +spdx:ListedLicenseException + rdf:type owl:Class ; + rdfs:comment "License exception specific to ListedLicenses" ; + rdfs:subClassOf spdx:LicenseException ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:exceptionTextHtml + ] . + +vcard:family-name rdf:type owl:DatatypeProperty ; + rdfs:comment "The family name associated with the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "family name"@en ; + rdfs:range xsd:string . + +spdx:standardLicenseTemplate + rdf:type owl:DatatypeProperty ; + rdfs:comment "License template which describes sections of the license which can be varied. 
See License Template section of the specification for format information."@en ; + rdfs:domain spdx:License ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +spdx:spdxDocument rdf:type owl:ObjectProperty ; + rdfs:comment "A property containing an SPDX document."@en ; + rdfs:domain spdx:ExternalDocumentRef ; + rdfs:range spdx:SpdxDocument ; + vs:term_status "stable"@en . + +prov:Role rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Role" ; + rdfs:seeAlso prov:hadRole ; + prov:category "qualified" ; + prov:component "agents-responsibility" ; + prov:definition "A role is the function of an entity or agent with respect to an activity, in the context of a usage, generation, invalidation, association, start, and end."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-attribute-role"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-attribute"^^xsd:anyURI . + +spdx:SpdxItem rdf:type owl:Class ; + rdfs:comment "An SpdxItem is a potentially copyrightable work."@en ; + rdfs:subClassOf spdx:SpdxElement ; + rdfs:subClassOf [ rdf:type owl:Class ; + owl:unionOf ( [ rdf:type owl:Restriction ; + owl:hasValue spdx:noassertion ; + owl:onProperty spdx:licenseConcluded + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:none ; + owl:onProperty spdx:licenseConcluded + ] + [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onClass spdx:AnyLicenseInfo ; + owl:onProperty spdx:licenseConcluded + ] + ) + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onClass spdx:AnyLicenseInfo ; + owl:onProperty spdx:licenseInfoFromFiles + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:attributionText + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality 
"1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:copyrightText + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:licenseComments + ] ; + vs:term_status "stable"@en . + +prov:qualifiedAttribution + rdf:type owl:ObjectProperty ; + rdfs:comment "If this Entity prov:wasAttributedTo Agent :ag, then it can qualify how it was influenced using prov:qualifiedAttribution [ a prov:Attribution; prov:agent :ag; :foo :bar ]."@en ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "qualifiedAttribution" ; + rdfs:range prov:Attribution ; + rdfs:subPropertyOf prov:qualifiedInfluence ; + prov:category "qualified" ; + prov:component "agents-responsibility" ; + prov:inverse "qualifiedAttributionOf" ; + prov:sharesDefinitionWith prov:Attribution ; + prov:unqualifiedForm prov:wasAttributedTo . + +adms:includedAsset rdf:type owl:ObjectProperty ; + rdfs:comment "An Asset that is contained in the Asset being described, e.g. when there are several vocabularies defined in a single document."@en ; + rdfs:domain adms:Asset ; + rdfs:isDefinedBy ; + rdfs:label "included asset"@en ; + rdfs:range adms:Asset . + +prov:definition rdf:type owl:AnnotationProperty ; + rdfs:comment "A definition quoted from PROV-DM or PROV-CONSTRAINTS that describes the concept expressed with this OWL term."@en ; + rdfs:isDefinedBy . + +time:week rdf:type owl:DatatypeProperty ; + rdfs:comment "Week number within the year."@en , "Número de semana en el año."@es ; + rdfs:domain time:GeneralDateTimeDescription ; + rdfs:label "week"@en , "semana"@es ; + rdfs:range xsd:nonNegativeInteger ; + skos:note "Weeks are numbered differently depending on the calendar in use and the local language or cultural conventions (locale). ISO-8601 specifies that the first week of the year includes at least four days, and that Monday is the first day of the week. 
In that system, week 1 is the week that contains the first Thursday in the year."@en ; + skos:scopeNote "Las semanas están numeradas de forma diferente dependiendo del calendario en uso y de las convenciones lingüísticas y culturales locales (locale en inglés). El ISO-8601 especifica que la primera semana del año incluye al menos cuatro días, y que el lunes es el primer día de la semana. En ese sistema, la semana 1 es la semana que contiene el primer jueves del año."@es . + +spdx:builtDate rdf:type owl:DatatypeProperty ; + rdfs:comment "This field provides a place for recording the actual date the package was built."@en ; + rdfs:domain spdx:Package ; + rdfs:range xsd:dateTime ; + rdfs:subPropertyOf spdx:date ; + vs:term_status "stable"@en . + +prov:category rdf:type owl:AnnotationProperty ; + rdfs:comment "Classify prov-o terms into three categories, including 'starting-point', 'qualifed', and 'extended'. This classification is used by the prov-o html document to gently introduce prov-o terms to its users. "@en ; + rdfs:isDefinedBy . + +vcard:Acquaintance rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Acquaintance"@en ; + rdfs:subClassOf vcard:RelatedType . + +dcterms:FileFormat rdf:type rdfs:Class ; + rdfs:comment "A digital resource format."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "File Format"@en ; + rdfs:subClassOf dcterms:MediaType ; + dcterms:issued "2008-01-14"^^xsd:date . + +spdx:relationshipType_runtimeDependencyOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A is a dependency required for the execution of SPDXRef-B."@en ; + vs:term_status "stable"@en . 
+ +time:intervalBefore rdf:type owl:ObjectProperty ; + rdfs:comment "Si un intervalo propio T1 está antes que otro intervalo propio T2, entonces el final de T1 está antes que el principio de T2."@es , "If a proper interval T1 is intervalBefore another proper interval T2, then the end of T1 is before the beginning of T2."@en ; + rdfs:domain time:ProperInterval ; + rdfs:label "interval before"@en , "intervalo anterior"@es ; + rdfs:range time:ProperInterval ; + rdfs:subPropertyOf time:intervalDisjoint , time:before ; + owl:inverseOf time:intervalAfter ; + skos:definition "Si un intervalo propio T1 está antes que otro intervalo propio T2, entonces el final de T1 está antes que el principio de T2."@es , "If a proper interval T1 is intervalBefore another proper interval T2, then the end of T1 is before the beginning of T2."@en . + +time:intervalContains + rdf:type owl:ObjectProperty ; + rdfs:comment "Si un intervalo propio T1 contiene otro intervalo propio T2, entonces el principio de T1 está antes que el principio de T2, y el final de T1 está después del final de T2."@es , "If a proper interval T1 is intervalContains another proper interval T2, then the beginning of T1 is before the beginning of T2, and the end of T1 is after the end of T2."@en ; + rdfs:domain time:ProperInterval ; + rdfs:label "intervalo contiene"@es , "interval contains"@en ; + rdfs:range time:ProperInterval ; + owl:inverseOf time:intervalDuring ; + skos:definition "Si un intervalo propio T1 contiene otro intervalo propio T2, entonces el principio de T1 está antes que el principio de T2, y el final de T1 está después del final de T2."@es , "If a proper interval T1 is intervalContains another proper interval T2, then the beginning of T1 is before the beginning of T2, and the end of T1 is after the end of T2."@en . 
+ +time:DateTimeDescription + rdf:type owl:Class ; + rdfs:comment "Descripción de fecha y tiempo estructurada con valores separados para los diferentes elementos de un sistema calendario-reloj. El sistema de referencia temporal está fijado al calendario gregoriano, y el rango de las propiedades año, mes, día restringidas a los correspondientes tipos del XML Schema xsd:gYear, xsd:gMonth y xsd:gDay respectivamente."@es , "Description of date and time structured with separate values for the various elements of a calendar-clock system. The temporal reference system is fixed to Gregorian Calendar, and the range of year, month, day properties restricted to corresponding XML Schema types xsd:gYear, xsd:gMonth and xsd:gDay, respectively."@en ; + rdfs:label "descripción de fecha-tiempo"@es , "Date-Time description"@en ; + rdfs:subClassOf time:GeneralDateTimeDescription ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:allValuesFrom xsd:gMonth ; + owl:onProperty time:month + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:allValuesFrom xsd:gYear ; + owl:onProperty time:year + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:hasValue ; + owl:onProperty time:hasTRS + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:allValuesFrom xsd:gDay ; + owl:onProperty time:day + ] ; + skos:definition "Descripción de fecha y tiempo estructurada con valores separados para los diferentes elementos de un sistema calendario-reloj. El sistema de referencia temporal está fijado al calendario gregoriano, y el rango de las propiedades año, mes, día restringidas a los correspondientes tipos del XML Schema xsd:gYear, xsd:gMonth y xsd:gDay respectivamente."@es , "Description of date and time structured with separate values for the various elements of a calendar-clock system. 
The temporal reference system is fixed to Gregorian Calendar, and the range of year, month, day properties restricted to corresponding XML Schema types xsd:gYear, xsd:gMonth and xsd:gDay, respectively."@en . + + + dcterms:title "Core Vocabularies Specification" . + +spdx:contextualExample + rdf:type owl:DatatypeProperty ; + rdfs:comment "Example for use of the external repository identifier"@en ; + rdfs:domain spdx:ReferenceType ; + rdfs:range xsd:anyURI ; + vs:term_status "stable"@en . + +locn:geographicName rdf:type rdf:Property ; + rdfs:comment "\nA geographic name is a proper noun applied to a spatial object. Taking the example used in the relevant INSPIRE data specification (page 18), the following are all valid geographic names for the Greek capital:\n- Αθήνα (the Greek endonym written in the Greek script)\n- Athína (the standard Romanisation of the endonym)\n- Athens (the English language exonym)\nFor INSPIRE-conformant data, provide the metadata for the geographic name using a skos:Concept as a datatype.\n"@en ; + rdfs:isDefinedBy ; + rdfs:label "geographic name"@en ; + dcterms:identifier "locn:geographicName" ; + vs:term_status "testing"@en ; + wdsr:describedby . + +time:TimeZone rdf:type owl:Class ; + rdfs:comment "Un huso horario especifica la cantidad en que la hora local está desplazada con respecto a UTC.\n Un huso horario normalmente se denota geográficamente (p.ej. el horario de verano del este de Australia), con un valor constante en una región dada.\n La región donde aplica y el desplazamiento desde UTC las especifica una autoridad gubernamental localmente reconocida."@es , "A Time Zone specifies the amount by which the local time is offset from UTC. \n\tA time zone is usually denoted geographically (e.g. Australian Eastern Daylight Time), with a constant value in a given region. 
\nThe region where it applies and the offset from UTC are specified by a locally recognised governing authority."@en ; + rdfs:label "Time Zone"@en , "huso horario"@es ; + skos:definition "Un huso horario especifica la cantidad en que la hora local está desplazada con respecto a UTC.\n Un huso horario normalmente se denota geográficamente (p.ej. el horario de verano del este de Australia), con un valor constante en una región dada.\n La región donde aplica y el desplazamiento desde UTC las especifica una autoridad gubernamental localmente reconocida."@es , "A Time Zone specifies the amount by which the local time is offset from UTC. \n\tA time zone is usually denoted geographically (e.g. Australian Eastern Daylight Time), with a constant value in a given region. \nThe region where it applies and the offset from UTC are specified by a locally recognised governing authority."@en ; + skos:historyNote "En la versión original de OWL-Time de 2006, se definió, en un espacio de nombres diferente \"http://www.w3.org/2006/timezone#\", la clase 'huso horario', con varias propiedades específicas correspondientes a un modelo específico de huso horario.\n En la versión actual hay una clase con el mismo nombre local en el espacio de nombres de OWL-Time, eliminando la dependencia del espacio de nombres externo.\n Un axioma de alineación permite que los datos codificados de acuerdo con la versión anterior sean consistentes con la ontología actualizada."@es , "In the original 2006 version of OWL-Time, the TimeZone class, with several properties corresponding to a specific model of time-zones, was defined in a separate namespace \"http://www.w3.org/2006/timezone#\". \n\nIn the current version a class with same local name is put into the main OWL-Time namespace, removing the dependency on the external namespace. \n\nAn alignment axiom \n\ttzont:TimeZone rdfs:subClassOf time:TimeZone . \nallows data encoded according to the previous version to be consistent with the updated ontology. 
" ; + skos:note "A designated timezone is associated with a geographic region. However, for a particular region the offset from UTC often varies seasonally, and the dates of the changes may vary from year to year. The timezone designation usually changes for the different seasons (e.g. Australian Eastern Standard Time vs. Australian Eastern Daylight Time). Furthermore, the offset for a timezone may change over longer timescales, though its designation might not. \n\nDetailed guidance about working with time zones is given in http://www.w3.org/TR/timezone/ ."@en , "An ontology for time zone descriptions was described in [owl-time-20060927] and provided as RDF in a separate namespace tzont:. However, that ontology was incomplete in scope, and the example datasets were selective. Furthermore, since the use of a class from an external ontology as the range of an ObjectProperty in OWL-Time creates a dependency, reference to the time zone class has been replaced with the 'stub' class in the normative part of this version of OWL-Time."@en , "Un huso horario designado está asociado con una región geográfica. Sin embargo, para una región particular el desplazamiento desde UTC a menudo varía según las estaciones, y las fechas de los cambios pueden variar de un año a otro. La designación de huso horario normalmente cambia de una estación a otra (por ejemplo, el horario estándar frente al horario de verano ambos del este de Australia). Además, del desplazamiento para un huso horario puede cambiar sobre escalas de tiempo mayores, aunque su designación no lo haga.\n Se puede encontrar una guía detallada sobre el funcionamiento de husos horarios en http://www.w3.org/TR/timezone/.\"@es , \"En [owl-time-20060927] se describió una ontología para descripciones de husos horarios, y se proporcionó en un espacio de nombres separado tzont:. Sin embargo, dicha ontología estaba incompleta en su alcance, y el ejemplo de conjuntos de datos (datasets) era selectivo. 
Además, puesto que el uso de una clase de una ontología externa como el rango de una propiedad de objeto en OWL-Time crea una dependencia, la referencia a la clase huso horario se ha reemplazado por una clase que viene a ser un \"cajón de sastre\" en la en la parte normativa de esta versión de OWL-Time."@es ; + skos:scopeNote "En esta implementación 'huso horario' no tiene definidas propiedades. Se debería pensar como una superclase \"abstracta\" de todas las implementaciones de huso horario específicas."@es , "In this implementation TimeZone has no properties defined. It should be thought of as an 'abstract' superclass of all specific timezone implementations." . + +vcard:latitude rdf:type owl:DatatypeProperty ; + rdfs:comment "This data property has been deprecated. See hasGeo"@en ; + rdfs:isDefinedBy ; + rdfs:label "latitude"@en ; + owl:deprecated true . + + + rdf:type owl:Ontology ; + rdfs:comment "This document is published by the Provenance Working Group (http://www.w3.org/2011/prov/wiki/Main_Page). \n\nIf you wish to make comments regarding this document, please send them to public-prov-comments@w3.org (subscribe public-prov-comments-request@w3.org, archives http://lists.w3.org/Archives/Public/public-prov-comments/). All feedback is welcome."@en ; + rdfs:label "W3C PROVenance Interchange Ontology (PROV-O)"@en ; + rdfs:seeAlso , ; + owl:versionIRI ; + owl:versionInfo "Recommendation version 2013-04-30"@en ; + prov:specializationOf ; + prov:wasRevisionOf . + +spdx:FileType rdf:type owl:Class ; + rdfs:comment "Type of file."@en ; + vs:term_status "stable"@en . + +rdfs:comment rdf:type owl:AnnotationProperty , owl:DatatypeProperty ; + rdfs:comment ""@en ; + rdfs:isDefinedBy ; + rdfs:range xsd:string . + +spdx:purpose_framework + rdf:type owl:NamedIndividual , spdx:Purpose ; + rdfs:comment "The package is a software framework."@en ; + vs:term_status "stable"@en . 
+ +spdx:specVersion rdf:type owl:DatatypeProperty ; + rdfs:comment "Provide a reference number that can be used to understand how to parse and interpret the rest of the file. It will enable both future changes to the specification and to support backward compatibility. The version number consists of a major and minor version indicator. The major field will be incremented when incompatible changes between versions are made (one or more sections are created, modified or deleted). The minor field will be incremented when backwards compatible changes are made."@en ; + rdfs:domain spdx:SpdxDocument ; + rdfs:range xsd:string . + +dcterms:RFC5646 rdf:type rdfs:Datatype ; + rdfs:comment "The set of tags constructed according to RFC 5646 for the identification of languages."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "RFC 5646"@en ; + rdfs:seeAlso ; + dcterms:description "RFC 5646 obsoletes RFC 4646."@en ; + dcterms:issued "2010-10-11"^^xsd:date . + +skos:narrower rdf:type owl:ObjectProperty , rdf:Property ; + rdfs:comment "Narrower concepts are typically rendered as children in a concept hierarchy (tree)."@en ; + rdfs:isDefinedBy ; + rdfs:label "has narrower"@en ; + rdfs:subPropertyOf skos:narrowerTransitive ; + owl:inverseOf skos:broader ; + skos:definition "Relates a concept to a concept that is more specific in meaning."@en ; + skos:scopeNote "By convention, skos:broader is only used to assert an immediate (i.e. direct) hierarchical link between two conceptual resources."@en . + +spdx:ExternalDocumentRef + rdf:type owl:Class ; + rdfs:comment "Information about an external SPDX document reference including the checksum. 
This allows for verification of the external references."@en ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onClass spdx:Checksum ; + owl:onProperty spdx:checksum ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onClass spdx:SpdxDocument ; + owl:onProperty spdx:spdxDocument ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:anyURI ; + owl:onProperty spdx:externalDocumentId ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + vs:term_status "stable"@en . + +skos:memberList rdf:type owl:ObjectProperty , owl:FunctionalProperty , rdf:Property ; + rdfs:comment "For any resource, every item in the list given as the value of the\n skos:memberList property is also a value of the skos:member property."@en ; + rdfs:domain skos:OrderedCollection ; + rdfs:isDefinedBy ; + rdfs:label "has member list"@en ; + rdfs:range rdf:List ; + skos:definition "Relates an ordered collection to the RDF list containing its members."@en . + +spdx:purpose_other rdf:type owl:NamedIndividual , spdx:Purpose ; + rdfs:comment "The package doesn't fit into other purpose defined terms."@en ; + vs:term_status "stable"@en . + +prov:atLocation rdf:type owl:ObjectProperty ; + rdfs:comment "The Location of any resource."@en , "This property has multiple RDFS domains to suit multiple OWL Profiles. See PROV-O OWL Profile." 
; + rdfs:domain [ rdf:type owl:Class ; + owl:unionOf ( prov:Activity prov:Agent prov:Entity prov:InstantaneousEvent ) + ] ; + rdfs:domain [ rdf:type owl:Class ; + owl:unionOf ( prov:Activity prov:Agent prov:Entity prov:InstantaneousEvent ) + ] ; + rdfs:isDefinedBy ; + rdfs:label "atLocation" ; + rdfs:range prov:Location ; + prov:category "expanded" ; + prov:editorialNote "This property is not functional because the many values could be at a variety of granularies (In this building, in this room, in that chair)."@en , "The naming of prov:atLocation parallels prov:atTime, and is not named prov:hadLocation to avoid conflicting with the convention that prov:had* properties are used on prov:Influence classes."@en ; + prov:inverse "locationOf" ; + prov:sharesDefinitionWith prov:Location . + +foaf:primaryTopic rdf:type owl:ObjectProperty ; + rdfs:comment "This axiom needed so that Protege loads DCAT2 without errors." . + +adms:last rdf:type owl:ObjectProperty ; + rdfs:comment "A link to the current or latest version of the Asset."@en ; + rdfs:domain rdfs:Resource ; + rdfs:isDefinedBy ; + rdfs:label "last"@en ; + rdfs:range rdfs:Resource ; + rdfs:subPropertyOf . + +dcterms:replaces rdf:type rdf:Property ; + rdfs:comment "A related resource that is supplanted, displaced, or superseded by the described resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Replaces"@en ; + rdfs:subPropertyOf dc:relation , dcterms:relation ; + dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Is Replaced By."@en ; + dcterms:issued "2000-07-11"^^xsd:date . + +vcard:hasSource rdf:type owl:ObjectProperty ; + rdfs:comment "To identify the source of directory information of the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "has source"@en . 
+ +vcard:Location rdf:type owl:Class ; + rdfs:comment "An object representing a named geographical place"@en ; + rdfs:isDefinedBy ; + rdfs:label "Location"@en ; + rdfs:subClassOf vcard:Kind ; + owl:disjointWith vcard:Organization . + +spdx:ReferenceType rdf:type owl:Class ; + rdfs:comment "Types used to external reference identifiers."@en ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:anyURI ; + owl:onProperty spdx:contextualExample ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:anyURI ; + owl:onProperty spdx:documentation ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:anyURI ; + owl:onProperty spdx:externalReferenceSite ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + vs:term_status "stable"@en . + +spdx:artifactOf rdf:type owl:ObjectProperty ; + rdfs:comment "Deprecated as of version 2.1"@en , "Indicates the project in which the SpdxElement originated. Tools must preserve doap:homepage and doap:name properties and the URI (if one is known) of doap:Project resources that are values of this property. All other properties of doap:Projects are not directly supported by SPDX and may be dropped when translating to or from some SPDX formats."@en ; + rdfs:domain spdx:SpdxElement ; + rdfs:range doap:Project ; + owl:deprecated true ; + vs:term_status "deprecated"@en . + +spdx:licenseListVersion + rdf:type owl:DatatypeProperty , owl:FunctionalProperty ; + rdfs:comment "An optional field for creators of the SPDX file to provide the version of the SPDX License List used when the SPDX file was created."@en ; + rdfs:domain spdx:CreationInfo ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . 
+ +spdx:copyrightText rdf:type owl:DatatypeProperty ; + rdfs:comment "The text of copyright declarations recited in the package, file or snippet.\n\nIf the copyrightText field is not present, it implies an equivalent meaning to NOASSERTION."@en ; + rdfs:domain spdx:SpdxItem ; + rdfs:range rdfs:Literal , xsd:string ; + vs:term_status "stable"@en . + + + rdf:type owl:Class ; + rdfs:subClassOf ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:positiveInteger ; + owl:onProperty ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + vs:term_status "stable" . + +spdx:OrLaterOperator rdf:type owl:Class ; + rdfs:comment "A license with an or later operator indicating this license version or any later version of the license"@en ; + rdfs:subClassOf spdx:AnyLicenseInfo ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onClass spdx:SimpleLicensingInfo ; + owl:onProperty spdx:member ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + vs:term_status "stable"@en . + +dcterms:Policy rdf:type rdfs:Class ; + rdfs:comment "A plan or course of action by an authority, intended to influence and determine decisions, actions, and other matters."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Policy"@en ; + dcterms:issued "2008-01-14"^^xsd:date . + + + rdf:type sh:NodeShape ; + sh:name "Relationship"@en ; + sh:property [ sh:minCount 1 ; + sh:path dcat:hadRole ; + sh:severity sh:Violation + ] ; + sh:property [ sh:minCount 1 ; + sh:path dcterms:relation ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:Relationship . 
+ +dcat:servesDataset rdf:type owl:ObjectProperty ; + rdfs:comment "Una colección de datos que este Servicio de Datos puede distribuir."@es , "En samling af data som denne datatjeneste kan distribuere."@da , "Una raccolta di dati che questo DataService può distribuire."@it , "Kolekce dat, kterou je tato Datová služba schopna poskytnout."@cs , "A collection of data that this DataService can distribute."@en ; + rdfs:domain dcat:DataService ; + rdfs:label "serve set di dati"@it , "datatjeneste for datasæt"@da , "poskytuje datovou sadu"@cs , "serves dataset"@en , "provee conjunto de datos"@es ; + rdfs:range dcat:Dataset ; + skos:altLabel "udstiller"@da , "ekspederer"@da , "distribuerer"@da ; + skos:changeNote "New property in DCAT 2.0."@en , "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs , "Nuova proprietà in DCAT 2.0."@it , "Nueva propiedad agregada en DCAT 2.0."@es ; + skos:definition "En samling af data som denne datatjeneste kan distribuere."@da , "Una raccolta di dati che questo DataService può distribuire."@it , "A collection of data that this DataService can distribute."@en , "Una colección de datos que este Servicio de Datos puede distribuir."@es , "Kolekce dat, kterou je tato Datová služba schopna poskytnout."@cs . + +dcterms:Standard rdf:type rdfs:Class ; + rdfs:comment "A reference point against which other things can be evaluated or compared."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Standard"@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +time:Tuesday rdf:type time:DayOfWeek ; + rdfs:label "Tuesday"@en ; + skos:prefLabel "Dienstag"@de , "Terça-feira"@pt , "الثلاثاء"@ar , "Mardi"@fr , "Вторник"@ru , "Dinsdag"@nl , "火曜日"@ja , "Wtorek"@pl , "Tuesday"@en , "Martes"@es , "星期二"@zh , "Martedì"@it . + +spdx:relationshipType_optionalDependencyOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A is an optional dependency of SPDXRef-B."@en ; + vs:term_status "stable"@en . 
+ +time:hasDuration rdf:type owl:ObjectProperty ; + rdfs:comment "Duration of a temporal entity, expressed as a scaled value or nominal value"@en , "Duración de una entidad temporal, expresada como un valor escalado o un valor nominal."@es ; + rdfs:label "has duration"@en , "tiene duración"@es ; + rdfs:range time:Duration ; + rdfs:subPropertyOf time:hasTemporalDuration ; + skos:definition "Duration of a temporal entity, event or activity, or thing, expressed as a scaled value"@en , "Duración de una entidad temporal, evento o actividad, o cosa, expresada como un valor escalado."@es . + +dcterms:PeriodOfTime rdf:type rdfs:Class ; + rdfs:comment "An interval of time that is named or defined by its start and end dates."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Period of Time"@en ; + rdfs:subClassOf dcterms:LocationPeriodOrJurisdiction ; + dcterms:issued "2008-01-14"^^xsd:date . + +spdx:WithExceptionOperator + rdf:type owl:Class ; + rdfs:comment "Sometimes a set of license terms apply except under special circumstances. In this case, use the binary \"WITH\" operator to construct a new license expression to represent the special exception situation. A valid is where the left operand is a value and the right operand is a that represents the special exception terms."@en ; + rdfs:subClassOf spdx:AnyLicenseInfo ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onClass spdx:LicenseException ; + owl:onProperty spdx:licenseException ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onClass spdx:SimpleLicensingInfo ; + owl:onProperty spdx:member ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + vs:term_status "stable"@en . + +vcard:hasRegion rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the region data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has region"@en . 
+ +[ rdf:type owl:AllDisjointClasses ; + owl:members ( spdx:Annotation spdx:Relationship spdx:SpdxElement ) +] . + +spdx:externalRef rdf:type owl:ObjectProperty ; + rdfs:comment "An External Reference allows a Package to reference an external source of additional information, metadata, enumerations, asset identifiers, or downloadable content believed to be relevant to the Package."@en ; + rdfs:domain spdx:Package ; + rdfs:range spdx:ExternalRef ; + vs:term_status "stable"@en . + +vcard:hasGeo rdf:type owl:ObjectProperty ; + rdfs:comment "To specify information related to the global positioning of the object. May also be used as a property parameter."@en ; + rdfs:isDefinedBy ; + rdfs:label "has geo"@en . + +vcard:Postal rdf:type owl:Class ; + rdfs:comment "This class is deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "Postal"@en ; + rdfs:subClassOf vcard:Type ; + owl:deprecated true . + +time:GeneralDurationDescription + rdf:type owl:Class ; + rdfs:comment "Descripción de extensión temporal estructurada con valores separados para los distintos elementos de un sistema de horario-calendario."@es , "Description of temporal extent structured with separate values for the various elements of a calendar-clock system."@en ; + rdfs:label "descripción de duración generalizada"@es , "Generalized duration description"@en ; + rdfs:subClassOf time:TemporalDuration ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:minutes + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:weeks + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:hasTRS + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:hours + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality 
"1"^^xsd:nonNegativeInteger ; + owl:onProperty time:years + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:days + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:seconds + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:months + ] ; + skos:definition "Description of temporal extent structured with separate values for the various elements of a calendar-clock system."@en , "Descripción de extensión temporal estructurada con valores separados para los distintos elementos de un sistema de horario-calendario."@es ; + skos:note "La extensión de una duración de tiempo expresada como una 'descripción de duración general' depende del Sistema de Referencia Temporal. En algunos calendarios la longitud de la semana o del mes no es constante a lo largo del año. Por tanto, un valor como \"25 meses\" puede no ser necesariamente ser comparado con un duración similar expresada en términos de semanas o días. Cuando se consideran calendarios que no están basados en el movimiento de la Tierra, se deben tomar incluso más precauciones en la comparación de duraciones."@es , "The extent of a time duration expressed as a GeneralDurationDescription depends on the Temporal Reference System. In some calendars the length of the week or month is not constant within the year. Therefore, a value like \"2.5 months\" may not necessarily be exactly compared with a similar duration expressed in terms of weeks or days. When non-earth-based calendars are considered even more care must be taken in comparing durations."@en . 
+ +prov:Activity rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Activity" ; + owl:disjointWith prov:Entity ; + prov:category "starting-point" ; + prov:component "entities-activities" ; + prov:constraints "http://www.w3.org/TR/2013/REC-prov-constraints-20130430/#prov-dm-constraints-fig"^^xsd:anyURI ; + prov:definition "An activity is something that occurs over a period of time and acts upon or with entities; it may include consuming, processing, transforming, modifying, relocating, using, or generating entities." ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-Activity"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-Activity"^^xsd:anyURI . + +time:Interval rdf:type owl:Class ; + rdfs:comment "A temporal entity with an extent or duration"@en , "Una entidad temporal con una extensión o duración."@es ; + rdfs:label "Time interval"@en , "intervalo de tiempo"@es ; + rdfs:subClassOf time:TemporalEntity ; + skos:definition "A temporal entity with an extent or duration"@en , "Una entidad temporal con una extensión o duración."@es . + +vcard:Voice rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Voice"@en ; + rdfs:subClassOf vcard:TelephoneType . + +vcard:hasFN rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the formatted name data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has formatted name"@en . + +vcard:street-address rdf:type owl:DatatypeProperty ; + rdfs:comment "The street address associated with the address of the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "street address"@en ; + rdfs:range xsd:string . + +dcterms:LCSH rdf:type dcam:VocabularyEncodingScheme ; + rdfs:comment "The set of labeled concepts specified by the Library of Congress Subject Headings."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "LCSH"@en ; + dcterms:issued "2000-07-11"^^xsd:date . 
+ +time:hasDurationDescription + rdf:type owl:ObjectProperty ; + rdfs:comment "Duration of a temporal entity, expressed using a structured description"@en , "Duración de una entidad temporal, expresada utilizando una descripción estructurada."@es ; + rdfs:label "has duration description"@en , "tiene descripción de duración"@es ; + rdfs:range time:GeneralDurationDescription ; + rdfs:subPropertyOf time:hasTemporalDuration ; + skos:definition "Duration of a temporal entity, expressed using a structured description"@en , "Duración de una entidad temporal, expresada utilizando una descripción estructurada."@es . + +vcard:role rdf:type owl:DatatypeProperty ; + rdfs:comment "To specify the function or part played in a particular situation by the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "role"@en ; + rdfs:range xsd:string . + +time:generalDay rdf:type rdfs:Datatype ; + rdfs:comment "Day of month - formulated as a text string with a pattern constraint to reproduce the same lexical form as gDay, except that values up to 99 are permitted, in order to support calendars with more than 31 days in a month. \nNote that the value-space is not defined, so a generic OWL2 processor cannot compute ordering relationships of values of this type."@en , "Día del mes - formulado como una cadena de texto con una restricción patrón para reproducir la misma forma léxica que gDay, excepto que se permiten valores hasta el 99, con el propósito de proporcionar soporte a calendarios con meses con más de 31 días.\n Nótese que el espacio de valores no está definido, por tanto, un procesador genérico de OWL2 no puede computar relaciones de orden de valores de este tipo."@es ; + rdfs:label "Generalized day"@en , "Día generalizado"@es ; + owl:onDatatype xsd:string ; + owl:withRestrictions ( [ xsd:pattern "---(0[1-9]|[1-9][0-9])(Z|(\\+|-)((0[0-9]|1[0-3]):[0-5][0-9]|14:00))?" 
] + ) ; + skos:definition "Day of month - formulated as a text string with a pattern constraint to reproduce the same lexical form as gDay, except that values up to 99 are permitted, in order to support calendars with more than 31 days in a month. \nNote that the value-space is not defined, so a generic OWL2 processor cannot compute ordering relationships of values of this type."@en , "Día del mes - formulado como una cadena de texto con una restricción patrón para reproducir la misma forma léxica que gDay, excepto que se permiten valores hasta el 99, con el propósito de proporcionar soporte a calendarios con meses con más de 31 días.\n Nótese que el espacio de valores no está definido, por tanto, un procesador genérico de OWL2 no puede computar relaciones de orden de valores de este tipo."@es . + +spdx:isDeprecatedLicenseId + rdf:type owl:DatatypeProperty ; + rdfs:domain spdx:ListedLicense ; + rdfs:range xsd:boolean . + +spdx:checksumAlgorithm_md4 + rdf:type owl:NamedIndividual , spdx:ChecksumAlgorithm ; + rdfs:comment "Indicates the algorithm used was MD4" ; + vs:term_status "stable" . + +spdx:relationshipType_other + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "to be used for a relationship which has not been defined in the formal SPDX specification. A description of the relationship should be included in the Relationship comments field."@en ; + vs:term_status "stable"@en . + +vcard:Me rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Me"@en ; + rdfs:subClassOf vcard:RelatedType . + +spdx:licenseExceptionId + rdf:type owl:DatatypeProperty ; + rdfs:comment "Short form license exception identifier in Appendix I.2 of the SPDX specification."@en ; + rdfs:domain spdx:LicenseException ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . 
+ +dcterms:bibliographicCitation + rdf:type rdf:Property ; + rdfs:comment "A bibliographic reference for the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Bibliographic Citation"@en ; + rdfs:range rdfs:Literal ; + rdfs:subPropertyOf dc:identifier , dcterms:identifier ; + dcterms:description "Recommended practice is to include sufficient bibliographic detail to identify the resource as unambiguously as possible."@en ; + dcterms:issued "2003-02-15"^^xsd:date . + +prov:generated rdf:type owl:ObjectProperty ; + rdfs:domain prov:Activity ; + rdfs:isDefinedBy ; + rdfs:label "generated" ; + rdfs:range prov:Entity ; + rdfs:subPropertyOf prov:influenced ; + owl:inverseOf prov:wasGeneratedBy ; + prov:category "expanded" ; + prov:component "entities-activities" ; + prov:editorialNote "prov:generated is one of few inverse property defined, to allow Activity-oriented assertions in addition to Entity-oriented assertions."@en ; + prov:inverse "wasGeneratedBy" ; + prov:sharesDefinitionWith prov:Generation . + +vcard:bday rdf:type owl:DatatypeProperty ; + rdfs:comment "To specify the birth date of the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "birth date"@en ; + rdfs:range [ rdf:type rdfs:Datatype ; + owl:unionOf ( xsd:dateTime xsd:dateTimeStamp xsd:gYear ) + ] . + +dcterms:language rdf:type rdf:Property ; + rdfs:comment "A language of the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Language"@en ; + rdfs:subPropertyOf dc:language ; + dcam:rangeIncludes dcterms:LinguisticSystem ; + dcterms:description "Recommended practice is to use either a non-literal value representing a language from a controlled vocabulary such as ISO 639-2 or ISO 639-3, or a literal value consisting of an IETF Best Current Practice 47 [[IETF-BCP47](https://tools.ietf.org/html/bcp47)] language tag."@en ; + dcterms:issued "2008-01-14"^^xsd:date . 
+ +prov:hadGeneration rdf:type owl:ObjectProperty ; + rdfs:comment "The _optional_ Generation involved in an Entity's Derivation."@en ; + rdfs:domain prov:Derivation ; + rdfs:isDefinedBy ; + rdfs:label "hadGeneration" ; + rdfs:range prov:Generation ; + prov:category "qualified" ; + prov:component "derivations" ; + prov:inverse "generatedAsDerivation" ; + prov:sharesDefinitionWith prov:Generation . + +spdx:relationshipType_copyOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "A Relationship of relationshipType_copyOf expresses that the SPDXElement is an exact copy of the relatedSDPXElement. For example, a downstream distribution of a binary library which was copied from the upstream package."@en ; + vs:term_status "stable"@en . + +time:intervalIn rdf:type owl:ObjectProperty ; + rdfs:comment "Si un intervalo propio T1 es un intervalo interior a otro intervalo propio T2, entonces el principio de T1 está después del principio de T2 o coincide con el principio de T2, y el final de T1 está antes que el final de T2, o coincide con el final de T2, excepto que el final de T1 puede no coincidir con el final de T2 si el principio de T1 coincide con el principio de T2."@es , "If a proper interval T1 is intervalIn another proper interval T2, then the beginning of T1 is after the beginning of T2 or is coincident with the beginning of T2, and the end of T1 is before the end of T2, or is coincident with the end of T2, except that end of T1 may not be coincident with the end of T2 if the beginning of T1 is coincident with the beginning of T2."@en ; + rdfs:domain time:ProperInterval ; + rdfs:label "interval in"@en , "intervalo interior"@es ; + rdfs:range time:ProperInterval ; + owl:propertyDisjointWith time:intervalEquals ; + skos:definition "If a proper interval T1 is intervalIn another proper interval T2, then the beginning of T1 is after the beginning of T2 or is coincident with the beginning of T2, and the end of T1 is before the end of T2, or is 
coincident with the end of T2, except that end of T1 may not be coincident with the end of T2 if the beginning of T1 is coincident with the beginning of T2."@en , "Si un intervalo propio T1 es un intervalo interior a otro intervalo propio T2, entonces el principio de T1 está después del principio de T2 o coincide con el principio de T2, y el final de T1 está antes que el final de T2, o coincide con el final de T2, excepto que el final de T1 puede no coincidir con el final de T2 si el principio de T1 coincide con el principio de T2."@es ; + skos:note "This interval relation is not included in the 13 basic relationships defined in Allen (1984), but is referred to as 'an important relationship' in Allen and Ferguson (1997). It is the disjoint union of :intervalStarts v :intervalDuring v :intervalFinishes . However, that is outside OWL2 expressivity, so is implemented as an explicit property, with :intervalStarts , :intervalDuring , :intervalFinishes as sub-properties"@en , "Esta relación entre intervalos no estaba incluida en las 13 relaciones básicas definidas por Allen (1984), pero se hace referencia a ella como \"una relación importante\" en Allen y Ferguson (1997). Es la unión disjunta de 'intervalo empieza', 'intervalo durante' y con 'intervalo termina'. Sin embargo, esto está fuera de la expresividad de OWL2, por tanto, se implementa como una propiedad explícita, con 'intervalo empieza', 'intervalo durante' e 'intervalo termina' como sub-propiedades."@es . + +adms:Asset rdf:type owl:Class ; + rdfs:comment "An abstract entity that reflects the intellectual content of the asset and represents those characteristics of the asset that are independent of its physical embodiment. This abstract entity combines the FRBR entities work (a distinct intellectual or artistic creation) and expression (the intellectual or artistic realization of a work)"@en ; + rdfs:isDefinedBy ; + rdfs:label "Asset"@en . 
+ +vcard:Crush rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Crush"@en ; + rdfs:subClassOf vcard:RelatedType . + +spdx:checksumAlgorithm_adler32 + rdf:type owl:NamedIndividual , spdx:ChecksumAlgorithm ; + rdfs:comment "Indicates the algorithm used was ADLER32."@en ; + vs:term_status "stable"@en . + +vcard:hasSound rdf:type owl:ObjectProperty ; + rdfs:comment "To specify a digital sound content information that annotates some aspect of the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "has sound"@en ; + owl:equivalentProperty vcard:sound . + +spdx:primaryPackagePurpose + rdf:type owl:ObjectProperty ; + rdfs:comment "This field provides information about the primary purpose of the identified package. Package Purpose is intrinsic to how the package is being used rather than the content of the package."@en ; + rdfs:domain spdx:Package ; + rdfs:range spdx:Purpose ; + vs:term_status "stable"@en . + +owl:Thing rdf:type owl:Class . + + + rdf:type sh:NodeShape ; + sh:name "Category"@en ; + sh:property [ sh:minCount 1 ; + sh:nodeKind sh:Literal ; + sh:path skos:prefLabel ; + sh:severity sh:Violation + ] ; + sh:targetClass skos:Concept . + +spdx:relationshipType_descendantOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "A Relationship of relationshipType_descendantOf expresses that an SPDXElement is a descendant of (same lineage but post-dates) the relatedSPDXElement. For example, an downstream File that was modified is a descendant of an upstream File"@en ; + vs:term_status "stable"@en . + +dcterms:license rdf:type rdf:Property ; + rdfs:comment "A legal document giving official permission to do something with the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "License"@en ; + rdfs:subPropertyOf dc:rights , dcterms:rights ; + dcam:rangeIncludes dcterms:LicenseDocument ; + dcterms:description "Recommended practice is to identify the license document with a URI. 
If this is not possible or feasible, a literal value that identifies the license may be provided."@en ; + dcterms:issued "2004-06-14"^^xsd:date . + +spdx:File rdf:type owl:Class ; + rdfs:comment "A File represents a named sequence of information that is contained in a software package."@en ; + rdfs:subClassOf spdx:SpdxItem ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onClass spdx:Checksum ; + owl:onProperty spdx:checksum + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onClass doap:Project ; + owl:onProperty spdx:artifactOf + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:fileContributor + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:noticeText + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:fileName ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onClass spdx:File ; + owl:onProperty spdx:fileDependency + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onClass spdx:AnyLicenseInfo ; + owl:onProperty spdx:licenseInfoInFile + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onClass spdx:FileType ; + owl:onProperty spdx:fileType + ] ; + owl:disjointWith spdx:Snippet ; + vs:term_status "stable"@en . 
+ +dcatap:shacl_shapes cc:attributionURL ; + dcatap:availability dcatap:stable ; + dcterms:conformsTo ; + dcterms:creator [ rdfs:seeAlso ; + org:memberOf ; + foaf:homepage ; + foaf:name "Eugeniu Costetchi" + ] ; + dcterms:creator [ rdfs:seeAlso ; + org:memberOf ; + foaf:homepage ; + foaf:name "Natasa Sofou" + ] ; + dcterms:creator [ rdfs:seeAlso ; + org:memberOf ; + foaf:homepage ; + foaf:name "Makx Dekkers" + ] ; + dcterms:creator [ rdfs:seeAlso ; + org:memberOf ; + foaf:homepage ; + foaf:name "Vassilios Peristeras" + ] ; + dcterms:creator [ rdfs:seeAlso ; + org:memberOf ; + foaf:homepage ; + foaf:name "Nikolaos Loutas" + ] ; + dcterms:creator [ rdfs:seeAlso ; + org:memberOf ; + foaf:homepage ; + foaf:name "Bert Van Nuffelen" + ] ; + dcterms:description "This document specifies the constraints on properties and classes expressed by DCAT-AP in SHACL."@en ; + dcterms:format ; + dcterms:license ; + dcterms:modified "2021-12-01"^^xsd:date ; + dcterms:publisher ; + dcterms:relation ; + dcterms:title "The constraints of DCAT Application Profile for Data Portals in Europe"@en ; + owl:versionInfo "2.1.1" ; + dcat:accessURL ; + dcat:downloadURL ; + foaf:homepage ; + foaf:maker [ foaf:mbox ; + foaf:name "DCAT-AP Working Group" ; + foaf:page , + ] . + +prov:sharesDefinitionWith + rdf:type owl:AnnotationProperty ; + rdfs:isDefinedBy ; + rdfs:subPropertyOf rdfs:seeAlso . + + + rdf:type owl:Ontology ; + rdfs:label "OWL-Time"@en , "Tiempo en OWL"@es ; + rdfs:seeAlso , , ; + dcterms:contributor , ; + dcterms:created "2006-09-27"^^xsd:date ; + dcterms:creator , , ; + dcterms:isVersionOf ; + dcterms:license ; + dcterms:modified "2017-04-06"^^xsd:date ; + dcterms:rights "Copyright © 2006-2017 W3C, OGC. W3C and OGC liability, trademark and document use rules apply."@en ; + owl:priorVersion time:2006 ; + owl:versionIRI time:2016 ; + skos:changeNote "2016-06-15 - initial update of OWL-Time - modified to support arbitrary temporal reference systems. 
" , "2016-12-20 - adjust range of time:timeZone to time:TimeZone, moved up from the tzont ontology. " , "2017-02 - intervalIn, intervalDisjoint, monthOfYear added; TemporalUnit subclass of TemporalDuration" , "2016-12-20 - restore time:Year and time:January which were present in the 2006 version of the ontology, but now marked \"deprecated\". " , "2017-04-06 - hasTime, hasXSDDuration added; Number removed; all duration elements changed to xsd:decimal" ; + skos:historyNote "Update of OWL-Time ontology, extended to support general temporal reference systems. \n\nOntology engineering by Simon J D Cox"@en . + +spdx:relationshipType_describedBy + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A is described by SPDXRef-Document."@en ; + vs:term_status "stable"@en . + +spdx:date rdf:type owl:DatatypeProperty ; + rdfs:comment "A date-time stamp."@en ; + rdfs:domain [ rdf:type owl:Class ; + owl:unionOf ( spdx:Annotation spdx:CreationInfo ) + ] ; + rdfs:range xsd:dateTime ; + vs:term_status "stable"@en . + +time:unitMinute rdf:type time:TemporalUnit ; + rdfs:label "Minute (unit of temporal duration)"@en ; + skos:prefLabel "دقيقة واحدة"@ar , "minuut"@nl , "одна минута"@ru , "Minute"@de , "minuto"@es , "minuto"@it , "minuto"@pt , "분"@kr , "等一下"@zh , "一分"@jp , "minute"@en , "minute"@fr , "minuta"@pl ; + time:days "0"^^xsd:decimal ; + time:hours "0"^^xsd:decimal ; + time:minutes "1"^^xsd:decimal ; + time:months "0"^^xsd:decimal ; + time:seconds "0"^^xsd:decimal ; + time:weeks "0"^^xsd:decimal ; + time:years "0"^^xsd:decimal . 
+ +dcat:service rdf:type owl:ObjectProperty ; + rdfs:comment "Umístění či přístupový bod registrovaný v katalogu."@cs , "A site or endpoint that is listed in the catalog."@en , "Un sito o endpoint elencato nel catalogo."@it , "Et websted eller et endpoint som er opført i kataloget."@da , "Un sitio o 'endpoint' que está listado en el catálogo."@es ; + rdfs:domain dcat:Catalog ; + rdfs:label "service"@en , "datatjeneste"@da , "servicio"@es , "servizio"@it , "služba"@cs ; + rdfs:range dcat:DataService ; + rdfs:subPropertyOf dcterms:hasPart , rdfs:member ; + skos:altLabel "har datatjeneste"@da ; + skos:changeNote "Nueva propiedad añadida en DCAT 2.0."@es , "Nuova proprietà aggiunta in DCAT 2.0."@it , "New property added in DCAT 2.0."@en , "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs ; + skos:definition "Un sitio o 'endpoint' que está listado en el catálogo."@es , "Umístění či přístupový bod registrovaný v katalogu."@cs , "Et websted eller et endpoint som er opført i kataloget."@da , "A site or endpoint that is listed in the catalog."@en , "Un sito o endpoint elencato nel catalogo."@it . + +spdx:fileType_spdx rdf:type owl:NamedIndividual , spdx:FileType ; + rdfs:comment "The file is an SPDX document."@en ; + vs:term_status "stable"@en . + +vcard:Kind rdf:type owl:Class ; + rdfs:comment "The parent class for all objects"@en ; + rdfs:isDefinedBy ; + rdfs:label "Kind"@en ; + owl:equivalentClass vcard:VCard ; + owl:equivalentClass [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty vcard:fn + ] . + +prov:used rdf:type owl:ObjectProperty ; + rdfs:comment "A prov:Entity that was used by this prov:Activity. 
For example, :baking prov:used :spoon, :egg, :oven ."@en ; + rdfs:domain prov:Activity ; + rdfs:isDefinedBy ; + rdfs:label "used" ; + rdfs:range prov:Entity ; + rdfs:subPropertyOf prov:wasInfluencedBy ; + owl:propertyChainAxiom ( prov:qualifiedUsage prov:entity ) ; + owl:propertyChainAxiom ( prov:qualifiedUsage prov:entity ) ; + prov:category "starting-point" ; + prov:component "entities-activities" ; + prov:inverse "wasUsedBy" ; + prov:qualifiedForm prov:Usage , prov:qualifiedUsage . + +spdx:hasFile rdf:type owl:ObjectProperty ; + rdfs:comment "Indicates that a particular file belongs to a package."@en ; + rdfs:domain spdx:Package ; + rdfs:range spdx:File ; + vs:term_status "stable"@en . + +vcard:Modem rdf:type owl:Class ; + rdfs:comment "This class is deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "Modem"@en ; + rdfs:subClassOf vcard:TelephoneType ; + owl:deprecated true . + +dcat:accessService rdf:type owl:ObjectProperty ; + rdfs:comment "A site or end-point that gives access to the distribution of the dataset."@en , "Un sito o end-point che dà accesso alla distribuzione del set di dati."@it , "Un sitio o end-point que da acceso a la distribución de un conjunto de datos."@es , "Umístění či přístupový bod zpřístupňující distribuci datové sady."@cs , "Et websted eller endpoint der giver adgang til en repræsentation af datasættet."@da ; + rdfs:label "služba pro přístup k datům"@cs , "data access service"@en , "servicio de acceso de datos"@es , "dataadgangstjeneste"@da , "servizio di accesso ai dati"@it ; + rdfs:range dcat:DataService ; + skos:changeNote "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs , "New property added in DCAT 2.0."@en , "Ny egenskab tilføjet i DCAT 2.0."@da , "Nueva propiedad agregada en DCAT 2.0."@es , "Nuova proprietà aggiunta in DCAT 2.0."@it ; + skos:definition "Umístění či přístupový bod zpřístupňující distribuci datové sady."@cs , "Un sito o end-point che dà accesso alla distribuzione del set di dati."@it , "Un sitio o end-point que da 
acceso a la distribución de un conjunto de datos."@es , "Et websted eller endpoint der giver adgang til en repræsentation af datasættet."@da , "A site or end-point that gives access to the distribution of the dataset."@en . + +time:Wednesday rdf:type time:DayOfWeek ; + rdfs:label "Wednesday"@en ; + skos:prefLabel "Mercoledì"@it , "Среда"@ru , "Woensdag"@nl , "Mercredi"@fr , "水曜日"@ja , "Quarta-feira"@pt , "Środa"@pl , "星期三"@zh , "الأربعاء"@ar , "Mittwoch"@de , "Miércoles"@es , "Wednesday"@en . + +time:intervalFinishes + rdf:type owl:ObjectProperty ; + rdfs:comment "Si un intervalo propio T1 termina otro intervalo propio T2, entonces del principio de T1 está después del principio de T2, y el final de T1 coincide con el final de T2."@es , "If a proper interval T1 is intervalFinishes another proper interval T2, then the beginning of T1 is after the beginning of T2, and the end of T1 is coincident with the end of T2."@en ; + rdfs:domain time:ProperInterval ; + rdfs:label "intervalo termina"@es , "interval finishes"@en ; + rdfs:range time:ProperInterval ; + rdfs:subPropertyOf time:intervalIn ; + owl:inverseOf time:intervalFinishedBy ; + skos:definition "If a proper interval T1 is intervalFinishes another proper interval T2, then the beginning of T1 is after the beginning of T2, and the end of T1 is coincident with the end of T2."@en , "Si un intervalo propio T1 termina otro intervalo propio T2, entonces del principio de T1 está después del principio de T2, y el final de T1 coincide con el final de T2."@es . + +skos:Concept rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Concept"@en ; + skos:definition "An idea or notion; a unit of thought."@en . 
+ +time:unitDay rdf:type time:TemporalUnit ; + rdfs:label "Day (unit of temporal duration)"@en ; + skos:prefLabel "Tag"@de , "day"@en , "dag"@nl , "dia"@pt , "día"@es , "doba"@pl , "ある日"@jp , "يوماً ما"@ar , "giorno"@it , "언젠가"@kr , "jour"@fr , "一天"@zh ; + time:days "1"^^xsd:decimal ; + time:hours "0"^^xsd:decimal ; + time:minutes "0"^^xsd:decimal ; + time:months "0"^^xsd:decimal ; + time:seconds "0"^^xsd:decimal ; + time:weeks "0"^^xsd:decimal ; + time:years "0"^^xsd:decimal . + +spdx:relationshipType_optionalComponentOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "To be used when SPDXRef-A is an optional component of SPDXRef-B."@en ; + vs:term_status "stable"@en . + +spdx:relationshipType_expandedFromArchive + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "A Relationship of relationshipType_expandedFromArchive expresses that the SPDXElement is a file which was expanded from a relatedSPDXElement file. For example, if there is an archive file xyz.tar.gz containing a file foo.c the archive file was expanded in a directory arch/xyz, the file arch/xyz/foo.c would have a relationshipType_expandedFromArchive with the file xyz.tar.gz."@en ; + vs:term_status "stable"@en . + + + dcterms:title "Process and Methodology for Developing Core Vocabularies" . + +dcterms:RFC4646 rdf:type rdfs:Datatype ; + rdfs:comment "The set of tags constructed according to RFC 4646 for the identification of languages."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "RFC 4646"@en ; + rdfs:seeAlso ; + dcterms:description "RFC 4646 obsoletes RFC 3066."@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +adms:AssetDistribution + rdf:type owl:Class ; + rdfs:comment "A particular physical embodiment of an Asset, which is an example of the FRBR entity manifestation (the physical embodiment of an expression of a work)."@en ; + rdfs:isDefinedBy ; + rdfs:label "Asset Distribution"@en . 
+ +dcterms:isReferencedBy + rdf:type rdf:Property ; + rdfs:comment "A related resource that references, cites, or otherwise points to the described resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Is Referenced By"@en ; + rdfs:subPropertyOf dc:relation , dcterms:relation ; + dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of References."@en ; + dcterms:issued "2000-07-11"^^xsd:date . + +spdx:fileType_video rdf:type owl:NamedIndividual , spdx:FileType ; + rdfs:comment "The file is associated with a video file type (MIME type of video/*)."@en ; + vs:term_status "stable"@en . + +dcterms:extent rdf:type rdf:Property ; + rdfs:comment "The size or duration of the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Extent"@en ; + rdfs:subPropertyOf dc:format , dcterms:format ; + dcam:rangeIncludes dcterms:SizeOrDuration ; + dcterms:description "Recommended practice is to specify the file size in megabytes and duration in ISO 8601 format."@en ; + dcterms:issued "2000-07-11"^^xsd:date . + +locn:locatorDesignator + rdf:type rdf:Property ; + rdfs:comment "A number or a sequence of characters that uniquely identifies the locator within the relevant scope(s). The full identification of the locator could include one or more locator designators.\n "@en ; + rdfs:isDefinedBy ; + rdfs:label "locator designator"@en ; + dcterms:identifier "locn:locatorDesignator" ; + vs:term_status "testing"@en . + +vcard:sound rdf:type owl:ObjectProperty ; + rdfs:comment "This object property has been mapped"@en ; + rdfs:isDefinedBy ; + rdfs:label "sound"@en ; + owl:equivalentProperty vcard:hasSound . 
+ +time:nominalPosition rdf:type owl:DatatypeProperty ; + rdfs:comment "The (nominal) value indicating temporal position in an ordinal reference system "@en , "El valor (nominal) que indica posición temporal en un sistema de referencia ordinal."@es ; + rdfs:domain time:TimePosition ; + rdfs:label "Name of temporal position"@en , "nombre de posición temporal"@es ; + rdfs:range xsd:string ; + skos:definition "The (nominal) value indicating temporal position in an ordinal reference system "@en , "El valor (nominal) que indica posición temporal en un sistema de referencia ordinal."@es . + +spdx:PackageVerificationCode + rdf:type owl:Class ; + rdfs:comment "A manifest based verification code (the algorithm is defined in section 4.7 of the full specification) of the SPDX Item. This allows consumers of this data and/or database to determine if an SPDX item they have in hand is identical to the SPDX item from which the data was produced. This algorithm works even if the SPDX document is included in the SPDX item."@en ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:packageVerificationCodeExcludedFile + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:hexBinary ; + owl:onProperty spdx:packageVerificationCodeValue ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + vs:term_status "stable"@en . + +dcterms:hasFormat rdf:type rdf:Property ; + rdfs:comment "A related resource that is substantially the same as the pre-existing described resource, but in another format."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Has Format"@en ; + rdfs:subPropertyOf dc:relation , dcterms:relation ; + dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Is Format Of."@en ; + dcterms:issued "2000-07-11"^^xsd:date . 
+ +time:DayOfWeek rdf:type owl:Class ; + rdfs:comment "The day of week"@en , "El día de la semana"@es ; + rdfs:label "día de la semana"@es , "Day of week"@en ; + rdfs:subClassOf owl:Thing ; + skos:changeNote "Remove enumeration from definition, in order to allow other days to be used when required in other calendars. \nNOTE: existing days are still present as members of the class, but the class membership is now open. \n\nIn the original OWL-Time the following constraint appeared: \n owl:oneOf (\n time:Monday\n time:Tuesday\n time:Wednesday\n time:Thursday\n time:Friday\n time:Saturday\n time:Sunday\n ) ;"@en ; + skos:definition "The day of week"@en , "El día de la semana"@es ; + skos:note "Membership of the class :DayOfWeek is open, to allow for alternative week lengths and different day names."@en , "La pertenencia a la clase 'día de la semana' está abierta, para permitir longitudes de semana alternativas y diferentes nombres de días."@es . + +vcard:hasMember rdf:type owl:ObjectProperty ; + rdfs:comment "To include a member in the group this object represents. (This property can only be used by Group individuals)"@en ; + rdfs:domain vcard:Group ; + rdfs:isDefinedBy ; + rdfs:label "has member"@en ; + rdfs:range vcard:Kind . + +time:generalMonth rdf:type rdfs:Datatype ; + rdfs:comment "Month of year - formulated as a text string with a pattern constraint to reproduce the same lexical form as gMonth, except that values up to 20 are permitted, in order to support calendars with more than 12 months in the year. 
\nNote that the value-space is not defined, so a generic OWL2 processor cannot compute ordering relationships of values of this type."@en , "Mes del año - formulado como una cadena de texto con una restricción patrón para reproducir la misma forma léxica que gMonth, excepto que se permiten valores hasta el 20, con el propósito de proporcionar soporte a calendarios con años con más de 12 meses.\n Nótese que el espacio de valores no está definido, por tanto, un procesador genérico de OWL2 no puede computar relaciones de orden de valores de este tipo."@es ; + rdfs:label "Generalized month"@en , "Mes generalizado"@es ; + owl:onDatatype xsd:string ; + owl:withRestrictions ( [ xsd:pattern "--(0[1-9]|1[0-9]|20)(Z|(\\+|-)((0[0-9]|1[0-3]):[0-5][0-9]|14:00))?" ] + ) ; + skos:definition "Month of year - formulated as a text string with a pattern constraint to reproduce the same lexical form as gMonth, except that values up to 20 are permitted, in order to support calendars with more than 12 months in the year. \nNote that the value-space is not defined, so a generic OWL2 processor cannot compute ordering relationships of values of this type."@en , "Mes del año - formulado como una cadena de texto con una restricción patrón para reproducir la misma forma léxica que gMonth, excepto que se permiten valores hasta el 20, con el propósito de proporcionar soporte a calendarios con años con más de 12 meses.\n Nótese que el espacio de valores no está definido, por tanto, un procesador genérico de OWL2 no puede computar relaciones de orden de valores de este tipo."@es . + +spdx:checksumAlgorithm_blake2b256 + rdf:type owl:NamedIndividual , spdx:ChecksumAlgorithm ; + rdfs:comment "Indicates the algorithm used was BLAKE2b-256."@en ; + vs:term_status "stable"@en . 
+ +prov:value rdf:type owl:DatatypeProperty ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "value" ; + prov:category "expanded" ; + prov:component "entities-activities" ; + prov:definition "Provides a value that is a direct representation of an entity."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-attribute-value"^^xsd:anyURI ; + prov:editorialNote "This property serves the same purpose as rdf:value, but has been reintroduced to avoid some of the definitional ambiguity in the RDF specification (specifically, 'may be used in describing structured values')."@en , "The editor's definition comes from http://www.w3.org/TR/rdf-primer/#rdfvalue" . + +spdx:fileType_documentation + rdf:type owl:NamedIndividual , spdx:FileType ; + rdfs:comment "The file serves as documentation."@en ; + vs:term_status "stable"@en . + +spdx:relationshipType_patchFor + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "A Relationship of relationshipType_patchFor expresses that the SPDXElement is a 'patchfile' that is designed to patch (apply modifications to) the relatedSPDXElement. For example, relationship from a .diff File to a Package it is designed to patch. "@en ; + vs:term_status "stable"@en . + +spdx:noticeText rdf:type owl:DatatypeProperty ; + rdfs:comment "This field provides a place for the SPDX file creator to record potential legal notices found in the file. This may or may not include copyright statements."@en ; + rdfs:domain spdx:File ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +time:ProperInterval rdf:type owl:Class ; + rdfs:comment "A temporal entity with non-zero extent or duration, i.e. 
for which the value of the beginning and end are different"@en , "Una entidad temporal con extensión o duración distinta de cero, es decir, para la cual los valores de principio y fin del intervalo son diferentes."@es ; + rdfs:label "Proper interval"@en , "intervalo propio"@es ; + rdfs:subClassOf time:Interval ; + owl:disjointWith time:Instant ; + skos:definition "A temporal entity with non-zero extent or duration, i.e. for which the value of the beginning and end are different"@en , "Una entidad temporal con extensión o duración distinta de cero, es decir, para la cual los valores de principio y fin del intervalo son diferentes."@es . + +rdfs:Container rdf:type owl:Class . + +spdx:creationInfo rdf:type owl:ObjectProperty ; + rdfs:comment "The creationInfo property relates an SpdxDocument to a set of information about the creation of the SpdxDocument."@en ; + rdfs:domain spdx:SpdxDocument ; + rdfs:range spdx:CreationInfo ; + vs:term_status "stable" . + +time:intervalEquals rdf:type owl:ObjectProperty ; + rdfs:comment "If a proper interval T1 is intervalEquals another proper interval T2, then the beginning of T1 is coincident with the beginning of T2, and the end of T1 is coincident with the end of T2."@en , "Si un intervalo propio T1 es igual a otro intervalo propio T2, entonces el principio de T1 coincide con el principio de T2, y el final de T1 coincide con el final de T2."@es ; + rdfs:domain time:ProperInterval ; + rdfs:label "intervalo igual"@es , "interval equals"@en ; + rdfs:range time:ProperInterval ; + owl:propertyDisjointWith time:intervalIn ; + skos:definition "If a proper interval T1 is intervalEquals another proper interval T2, then the beginning of T1 is coincident with the beginning of T2, and the end of T1 is coincident with the end of T2."@en , "Si un intervalo propio T1 es igual a otro intervalo propio T2, entonces el principio de T1 coincide con el principio de T2, y el final de T1 coincide con el final de T2."@es . 
+ +vcard:adr rdf:type owl:ObjectProperty ; + rdfs:comment "This object property has been mapped"@en ; + rdfs:isDefinedBy ; + rdfs:label "address"@en ; + owl:equivalentProperty vcard:hasAddress . + +dcterms:Agent rdf:type dcterms:AgentClass , rdfs:Class ; + rdfs:comment "A resource that acts or has the power to act."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Agent"@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +dcterms:RightsStatement + rdf:type rdfs:Class ; + rdfs:comment "A statement about the intellectual property rights (IPR) held in or over a resource, a legal document giving official permission to do something with a resource, or a statement about access rights."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Rights Statement"@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +spdx:reviewDate rdf:type owl:DatatypeProperty ; + rdfs:comment "Deprecated in favor of Annotation with an annotationType_review."@en , "The date and time at which the SpdxDocument was reviewed. This value must be in UTC and have 'Z' as its timezone indicator."@en ; + rdfs:domain spdx:Review ; + rdfs:range xsd:dateTime ; + owl:deprecated true ; + vs:term_status "deprecated"@en . + +[ rdf:type owl:Axiom ; + owl:annotatedProperty rdfs:domain ; + owl:annotatedSource prov:wasInfluencedBy ; + owl:annotatedTarget [ rdf:type owl:Class ; + owl:unionOf ( prov:Activity prov:Agent prov:Entity ) + ] ; + prov:definition "influencee: an identifier (o2) for an entity, activity, or agent; " ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-influence" +] . 
+ +prov:wasRevisionOf rdf:type owl:ObjectProperty , owl:AnnotationProperty ; + rdfs:comment "A revision is a derivation that revises an entity into a revised version."@en ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "wasRevisionOf" ; + rdfs:range prov:Entity ; + rdfs:subPropertyOf prov:wasDerivedFrom ; + owl:propertyChainAxiom ( prov:qualifiedRevision prov:entity ) ; + owl:propertyChainAxiom ( prov:qualifiedRevision prov:entity ) ; + prov:category "expanded" ; + prov:component "derivations" ; + prov:inverse "hadRevision" ; + prov:qualifiedForm prov:Revision , prov:qualifiedRevision . + +time:January rdf:type owl:Class , owl:DeprecatedClass ; + rdfs:label "January" ; + rdfs:subClassOf time:DateTimeDescription ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:hasValue time:unitMonth ; + owl:onProperty time:unitType + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:hasValue "--01" ; + owl:onProperty time:month + ] ; + owl:deprecated true ; + skos:historyNote "This class was present in the 2006 version of OWL-Time. It was presented as an example of how DateTimeDescription could be specialized, but does not belong in the revised ontology. " . + +skos:note rdf:type owl:AnnotationProperty , rdf:Property ; + rdfs:isDefinedBy ; + rdfs:label "note"@en ; + skos:definition "A general note, for any purpose."@en ; + skos:scopeNote "This property may be used directly, or as a super-property for more specific note types."@en . + +vcard:Fax rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Fax"@en ; + rdfs:subClassOf vcard:TelephoneType . + +prov:Person rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Person" ; + rdfs:subClassOf prov:Agent ; + prov:category "expanded" ; + prov:component "agents-responsibility" ; + prov:definition "Person agents are people."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-agent"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-types"^^xsd:anyURI . 
+ +vcard:hasInstantMessage + rdf:type owl:ObjectProperty ; + rdfs:comment "To specify the instant messaging and presence protocol communications with the object. (Was called IMPP in RFC6350)"@en ; + rdfs:isDefinedBy ; + rdfs:label "has messaging"@en . + +vcard:hasNote rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the note data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has note"@en . + +dcat:spatialResolutionInMeters + rdf:type owl:DatatypeProperty ; + rdfs:comment "minimum spatial separation resolvable in a dataset, measured in metres."@en-GB , "minimum spatial separation resolvable in a dataset, measured in meters."@en-US , "mínima separacíon espacial disponible en un conjunto de datos, medida en metros."@es , "separazione spaziale minima risolvibile in un set di dati, misurata in metri."@it , "mindste geografiske afstand som kan erkendes i et datasæt, målt i meter."@da , "minimální prostorový rozestup rozeznatelný v datové sadě, měřeno v metrech."@cs ; + rdfs:label "spatial resolution (metres)"@en-GB , "resolución espacial (metros)"@es , "spatial resolution (meters)"@en-US , "prostorové rozlišení (metry)"@cs , "risoluzione spaziale (metros)"@it , "geografisk opløsning (meter)"@da ; + rdfs:range xsd:decimal ; + skos:changeNote "Nueva propiedad añadida en DCAT 2.0."@es , "Nuova proprietà aggiunta in DCAT 2.0."@it , "New property added in DCAT 2.0."@en , "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs , "Ny genskab tilføjet i DCAT 2.0."@da ; + skos:definition "minimum spatial separation resolvable in a dataset, measured in meters."@en-US , "separazione spaziale minima risolvibile in un set di dati, misurata in metri."@it , "minimální prostorový rozestup rozeznatelný v datové sadě, měřeno v metrech."@cs , "minimum spatial separation resolvable in a dataset, measured in metres."@en-GB , "mínima separacíon espacial disponible en un conjunto de datos, medida en metros."@es , "mindste geografiske afstand som kan resolveres i 
et datasæt, målt i meter."@da ; + skos:editorialNote "Může se vyskytnout v popisu Datové sady nebo Distribuce, takže nebyl specifikován definiční obor."@cs , "Might appear in the description of a Dataset or a Distribution, so no domain is specified."@en , "Kan optræde i forbindelse med beskrivelse af datasættet eller datasætditributionen, så der er ikke angivet et domæne for egenskaben."@da ; + skos:scopeNote "Pokud je datová sada obraz či mřížka, měla by tato vlastnost odpovídat rozestupu položek. Pro ostatní druhy prostorových datových sad bude tato vlastnost obvykle indikovat nejmenší vzdálenost mezi položkami této datové sady."@cs , "Hvis datasættet udgøres af et billede eller et grid, så bør dette svare til afstanden mellem elementerne. For andre typer af spatiale datasæt, vil denne egenskab typisk indikere den mindste afstand mellem elementerne i datasættet."@da , "Různá prostorová rozlišení mohou být poskytována jako různé distribuce datové sady."@cs , "Distintas distribuciones de un conjunto de datos pueden tener resoluciones espaciales diferentes."@es , "Se il set di dati è un'immagine o una griglia, questo dovrebbe corrispondere alla spaziatura degli elementi. Per altri tipi di set di dati spaziali, questa proprietà di solito indica la distanza minima tra gli elementi nel set di dati."@it , "Alternative geografiske opløsninger kan leveres som forskellige datasætdistributioner."@da , "Alternative spatial resolutions might be provided as different dataset distributions."@en , "Risoluzioni spaziali alternative possono essere fornite come diverse distribuzioni di set di dati."@it , "Si el conjunto de datos es una imágen o grilla, esta propiedad corresponde al espaciado de los elementos. Para otro tipo de conjunto de datos espaciales, esta propieda usualmente indica la menor distancia entre los elementos de dichos datos."@es , "If the dataset is an image or grid this should correspond to the spacing of items. 
For other kinds of spatial dataset, this property will usually indicate the smallest distance between items in the dataset."@en . + +time:GeneralDateTimeDescription + rdf:type owl:Class ; + rdfs:comment "Descripción de fecha y hora estructurada con valores separados para los distintos elementos de un sistema calendario-reloj."@es , "Description of date and time structured with separate values for the various elements of a calendar-clock system"@en ; + rdfs:label "descripción de fecha-hora generalizada"@es , "Generalized date-time description"@en ; + rdfs:subClassOf time:TemporalPosition ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:month + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:dayOfWeek + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:minute + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:timeZone + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:monthOfYear + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:year + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:dayOfYear + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:unitType + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:hour + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:day + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality 
"1"^^xsd:nonNegativeInteger ; + owl:onProperty time:second + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxCardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:week + ] ; + skos:definition "Descripción de fecha y hora estructurada con valores separados para los distintos elementos de un sistema calendario-reloj." , "Description of date and time structured with separate values for the various elements of a calendar-clock system"@en ; + skos:note "Algunas combinaciones de propiedades son redundantes - por ejemplo, dentro de un 'año' especificado si se proporciona 'día del año' entonces 'día' y 'mes' se pueden computar, y viceversa. Los valores individuales deberían ser consistentes entre ellos y con el calendario, indicado a través del valor de la propiedad 'tiene TRS'."@es , "Some combinations of properties are redundant - for example, within a specified :year if :dayOfYear is provided then :day and :month can be computed, and vice versa. Individual values should be consistent with each other and the calendar, indicated through the value of the :hasTRS property." . + +time:numericPosition rdf:type owl:DatatypeProperty ; + rdfs:comment "The (numeric) value indicating position within a temporal coordinate system "@en , "El valor (numérico) que indica posición temporal en un sistema de referencia ordinal."@es ; + rdfs:domain time:TimePosition ; + rdfs:label "Numeric value of temporal position"@en , "valor numérico de posición temporal"@es ; + rdfs:range xsd:decimal ; + skos:definition "The (numeric) value indicating position within a temporal coordinate system "@en , "El valor (numérico) que indica posición temporal en un sistema de referencia ordinal."@es . 
+ +dcterms:alternative rdf:type rdf:Property ; + rdfs:comment "An alternative name for the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Alternative Title"@en ; + rdfs:range rdfs:Literal ; + rdfs:subPropertyOf dc:title , dcterms:title ; + dcterms:description "The distinction between titles and alternative titles is application-specific."@en ; + dcterms:issued "2000-07-11"^^xsd:date . + +prov:component rdf:type owl:AnnotationProperty ; + rdfs:comment "Classify prov-o terms into six components according to prov-dm, including 'agents-responsibility', 'alternate', 'annotations', 'collections', 'derivations', and 'entities-activities'. This classification is used so that readers of prov-o specification can find its correspondence with the prov-dm specification."@en ; + rdfs:isDefinedBy . + +dcterms: dcterms:modified "2012-06-14"^^xsd:date ; + dcterms:publisher ; + dcterms:title "DCMI Metadata Terms - other"@en . + +time:hasDateTimeDescription + rdf:type owl:ObjectProperty ; + rdfs:comment "Value of DateTimeInterval expressed as a structured value. The beginning and end of the interval coincide with the limits of the shortest element in the description."@en , "Valor de intervalo de fecha-hora expresado como un valor estructurado. El principio y el final del intervalo coincide con los límites del elemento más corto en la descripción."@es ; + rdfs:domain time:DateTimeInterval ; + rdfs:label "has Date-Time description"@en , "tiene descripción fecha-hora"@es ; + rdfs:range time:GeneralDateTimeDescription ; + skos:definition "Value of DateTimeInterval expressed as a structured value. The beginning and end of the interval coincide with the limits of the shortest element in the description."@en , "Valor de intervalo de fecha-hora expresado como un valor estructurado. El principio y el final del intervalo coincide con los límites del elemento más corto en la descripción."@es . 
+ +dcat:Catalog rdf:type rdfs:Class , owl:Class ; + rdfs:comment "A curated collection of metadata about resources (e.g., datasets and data services in the context of a data catalog)."@en , "Una raccolta curata di metadati sulle risorse (ad es. sui dataset e relativi servizi nel contesto di cataloghi di dati)."@it , "Řízená kolekce metadat o datových sadách a datových službách"@cs , "Una colección curada de metadatos sobre recursos (por ejemplo, conjuntos de datos y servicios de datos en el contexto de un catálogo de datos)."@es , "Une collection élaborée de métadonnées sur les jeux de données"@fr , "Μια επιμελημένη συλλογή μεταδεδομένων περί συνόλων δεδομένων"@el , "مجموعة من توصيفات قوائم البيانات"@ar , "En udvalgt og arrangeret samling af metadata om ressourcer (fx datasæt og datatjenester i kontekst af et datakatalog). "@da , "データ・カタログは、データセットに関するキュレートされたメタデータの集合です。"@ja ; + rdfs:isDefinedBy ; + rdfs:label "Κατάλογος"@el , "Katalog"@cs , "Katalog"@da , "Catalogo"@it , "فهرس قوائم البيانات"@ar , "Catálogo"@es , "カタログ"@ja , "Catalog"@en , "Catalogue"@fr ; + rdfs:subClassOf dcat:Dataset ; + skos:definition "データ・カタログは、データセットに関するキュレートされたメタデータの集合です。"@ja , "Řízená kolekce metadat o datových sadách a datových službách."@cs , "Una raccolta curata di metadati sulle risorse (ad es. 
sui dataset e relativi servizi nel contesto di cataloghi di dati)."@it , "مجموعة من توصيفات قوائم البيانات"@ar , "Una colección curada de metadatos sobre recursos (por ejemplo, conjuntos de datos y servicios de datos en el contexto de un catálogo de datos)."@es , "Μια επιμελημένη συλλογή μεταδεδομένων περί συνόλων δεδομένων."@el , "Une collection élaborée de métadonnées sur les jeux de données."@fr , "En samling af metadata om ressourcer (fx datasæt og datatjenester i kontekst af et datakatalog)."@da , "A curated collection of metadata about resources (e.g., datasets and data services in the context of a data catalog)."@en ; + skos:editorialNote "English, Italian, Spanish definitions updated in this revision. Multilingual text not yet updated."@en ; + skos:scopeNote "Normalmente, un catálogo de datos disponible en la web se representa como una única instancia de esta clase."@es , "Et webbaseret datakatalog repræsenteres typisk ved en enkelt instans af denne klasse."@da , "Συνήθως, ένας κατάλογος δεδομένων στον Παγκόσμιο Ιστό αναπαρίσταται ως ένα στιγμιότυπο αυτής της κλάσης."@el , "Webový datový katalog je typicky reprezentován jako jedna instance této třídy."@cs , "A web-based data catalog is typically represented as a single instance of this class."@en , "Normalmente, un catalogo di dati nel web viene rappresentato come una singola istanza di questa classe."@it , "通常、ウェブ・ベースのデータ・カタログは、このクラスの1つのインスタンスとして表わされます。"@ja . + +prov:editorialNote rdf:type owl:AnnotationProperty ; + rdfs:comment "A note by the OWL development team about how this term expresses the PROV-DM concept, or how it should be used in context of semantic web or linked data."@en ; + rdfs:isDefinedBy . + +vcard:None rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "None"@en ; + rdfs:subClassOf vcard:Gender . + +spdx:ExtractedLicensingInfo + rdf:type owl:Class ; + rdfs:comment "An ExtractedLicensingInfo represents a license or licensing notice that was found in a package, file or snippet. 
Any license text that is recognized as a license may be represented as a License rather than an ExtractedLicensingInfo."@en ; + rdfs:subClassOf spdx:SimpleLicensingInfo ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:extractedText ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + vs:term_status "stable" . + +[ rdf:type owl:Axiom ; + rdfs:comment "Derivation is a particular case of trace (see http://www.w3.org/TR/prov-dm/#term-trace), since it links an entity to another entity that contributed to its existence." ; + owl:annotatedProperty rdfs:subPropertyOf ; + owl:annotatedSource prov:wasDerivedFrom ; + owl:annotatedTarget prov:wasInfluencedBy +] . + +spdx:fileType_application + rdf:type owl:NamedIndividual , spdx:FileType ; + rdfs:comment " The file is associated with a specific application type (MIME type of application/* )"@en ; + vs:term_status "stable"@en . + +locn:addressArea rdf:type rdf:Property ; + rdfs:comment "The name or names of a geographic area or locality that groups a number of addressable objects for addressing purposes, without being an administrative unit. This would typically be part of a city, a neighbourhood or village. The domain of locn:addressArea is locn:Address."@en ; + rdfs:domain locn:Address ; + rdfs:isDefinedBy ; + rdfs:label "address area"@en ; + rdfs:range rdfs:Literal ; + dcterms:identifier "locn:addressArea" ; + vs:term_status "testing"@en . + +spdx:checksumAlgorithm_sha256 + rdf:type owl:NamedIndividual , spdx:ChecksumAlgorithm ; + rdfs:comment "Indicates the algorithm used was SHA256"@en ; + vs:term_status "stable"@en . + +spdx:relationshipType_testDependencyOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A is a test dependency of SPDXRef-B."@en ; + vs:term_status "stable"@en . 
+ +prov:qualifiedRevision + rdf:type owl:ObjectProperty ; + rdfs:comment "If this Entity prov:wasRevisionOf Entity :e, then it can qualify how it was revised using prov:qualifiedRevision [ a prov:Revision; prov:entity :e; :foo :bar ]."@en ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "qualifiedRevision" ; + rdfs:range prov:Revision ; + rdfs:subPropertyOf prov:qualifiedInfluence ; + prov:category "qualified" ; + prov:component "derivations" ; + prov:inverse "revisedEntity" ; + prov:sharesDefinitionWith prov:Revision ; + prov:unqualifiedForm prov:wasRevisionOf . + +dcterms:Box rdf:type rdfs:Datatype ; + rdfs:comment "The set of regions in space defined by their geographic coordinates according to the DCMI Box Encoding Scheme."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "DCMI Box"@en ; + rdfs:seeAlso ; + dcterms:issued "2000-07-11"^^xsd:date . + +vcard:url rdf:type owl:ObjectProperty ; + rdfs:comment "This object property has been mapped"@en ; + rdfs:isDefinedBy ; + rdfs:label "url"@en ; + owl:equivalentProperty vcard:hasURL . + +spdx:Annotation rdf:type owl:Class ; + rdfs:comment "An Annotation is a comment on an SpdxItem by an agent." ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onClass spdx:AnnotationType ; + owl:onProperty spdx:annotationType ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:dateTime ; + owl:onProperty spdx:annotationDate ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:annotator ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:string ; + owl:onProperty rdfs:comment ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + vs:term_status "stable"@en . 
+ +vcard:hasTelephone rdf:type owl:ObjectProperty ; + rdfs:comment "To specify the telephone number for telephony communication with the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "has telephone"@en ; + owl:equivalentProperty vcard:tel . + +spdx:relationshipType_dependencyOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A is dependency of SPDXRef-B."@en ; + vs:term_status "stable"@en . + +vcard:longitude rdf:type owl:DatatypeProperty ; + rdfs:comment "This data property has been deprecated. See hasGeo"@en ; + rdfs:isDefinedBy ; + rdfs:label "longitude"@en ; + owl:deprecated true . + +adms:schemaAgency rdf:type owl:DatatypeProperty ; + rdfs:comment "The name of the agency that issued the identifier."@en ; + rdfs:domain adms:Identifier ; + rdfs:isDefinedBy ; + rdfs:label "schema agency"@en ; + rdfs:range rdfs:Literal . + +spdx:description rdf:type owl:DatatypeProperty ; + rdfs:comment "Provides a detailed description of the package."@en ; + rdfs:domain spdx:Package ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . 
+ +time:intervalStartedBy + rdf:type owl:ObjectProperty ; + rdfs:comment "If a proper interval T1 is intervalStarted another proper interval T2, then the beginning of T1 is coincident with the beginning of T2, and the end of T1 is after the end of T2."@en , "Si un intervalo propio T1 es empezado por otro intervalo propio T2, entonces el principio de T1 coincide con el principio de T2, y el final de T1 es posterior al final de T2."@es ; + rdfs:domain time:ProperInterval ; + rdfs:label "interval started by"@en ; + rdfs:range time:ProperInterval ; + owl:inverseOf time:intervalStarts ; + skos:definition "If a proper interval T1 is intervalStarted another proper interval T2, then the beginning of T1 is coincident with the beginning of T2, and the end of T1 is after the end of T2."@en , "Si un intervalo propio T1 es empezado por otro intervalo propio T2, entonces el principio de T1 coincide con el principio de T2, y el final de T1 es posterior al final de T2."@es . + +dcterms:Location rdf:type rdfs:Class ; + rdfs:comment "A spatial region or named place."@en , "dcterms:Location class fully represents the ISA Programme Location Core Vocabulary class of Location."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Location"@en ; + rdfs:subClassOf dcterms:LocationPeriodOrJurisdiction ; + dcterms:identifier "dcterms:Location" ; + dcterms:issued "2008-01-14"^^xsd:date ; + vann:usageNote "This is the key class for the ISA Programme Location Core Vocabulary and represents any location, irrespective of size or other restriction."@en ; + vs:term_status "testing"@en . 
+ +skos:notation rdf:type owl:DatatypeProperty , rdf:Property ; + rdfs:isDefinedBy ; + rdfs:label "notation"@en ; + skos:definition "A notation, also known as classification code, is a string of characters such as \"T58.5\" or \"303.4833\" used to uniquely identify a concept within the scope of a given concept scheme."@en ; + skos:scopeNote "By convention, skos:notation is used with a typed literal in the object position of the triple."@en . + +vcard:hasLogo rdf:type owl:ObjectProperty ; + rdfs:comment "To specify a graphic image of a logo associated with the object "@en ; + rdfs:isDefinedBy ; + rdfs:label "has logo"@en ; + owl:equivalentProperty vcard:logo . + +prov:qualifiedDelegation + rdf:type owl:ObjectProperty ; + rdfs:comment "If this Agent prov:actedOnBehalfOf Agent :ag, then it can qualify how with prov:qualifiedResponsibility [ a prov:Responsibility; prov:agent :ag; :foo :bar ]."@en ; + rdfs:domain prov:Agent ; + rdfs:isDefinedBy ; + rdfs:label "qualifiedDelegation" ; + rdfs:range prov:Delegation ; + rdfs:subPropertyOf prov:qualifiedInfluence ; + prov:category "qualified" ; + prov:component "agents-responsibility" ; + prov:inverse "qualifiedDelegationOf" ; + prov:sharesDefinitionWith prov:Delegation ; + prov:unqualifiedForm prov:actedOnBehalfOf . + +dcterms:isVersionOf rdf:type rdf:Property ; + rdfs:comment "A related resource of which the described resource is a version, edition, or adaptation."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Is Version Of"@en ; + rdfs:subPropertyOf dc:relation , dcterms:relation ; + dcterms:description "Changes in version imply substantive changes in content rather than differences in format. This property is intended to be used with non-literal values. This property is an inverse property of Has Version."@en ; + dcterms:issued "2000-07-11"^^xsd:date . 
+ +time:inTemporalPosition + rdf:type owl:ObjectProperty ; + rdfs:comment "Position of a time instant"@en , "Posición de un instante de tiempo."@es ; + rdfs:domain time:Instant ; + rdfs:label "Temporal position"@en , "posición temporal"@es ; + rdfs:range time:TemporalPosition ; + skos:definition "Position of a time instant"@en , "Posición de un instante de tiempo."@es . + +time:second rdf:type owl:DatatypeProperty ; + rdfs:comment "Second position in a calendar-clock system."@en , "Posición de segundo en un sistema calendario-reloj."@es ; + rdfs:domain time:GeneralDateTimeDescription ; + rdfs:label "second"@en , "segundo"@es ; + rdfs:range xsd:decimal . + +dcat:Distribution rdf:type owl:Class , rdfs:Class ; + rdfs:comment "En specifik repræsentation af et datasæt. Et datasæt kan være tilgængelig i mange serialiseringer der kan variere på forskellige vis, herunder sprog, medietype eller format, systemorganisering, tidslig- og geografisk opløsning, detaljeringsniveau eller profiler (der kan specificere en eller flere af ovenstående)."@da , "Représente une forme spécifique d'un jeu de données. Caque jeu de données peut être disponible sous différentes formes, celles-ci pouvant représenter différents formats du jeu de données ou différents endpoint. Des exemples de distribution sont des fichirs CSV, des API ou des flux RSS."@fr , "Αναπαριστά μία συγκεκριμένη διαθέσιμη μορφή ενός συνόλου δεδομένων. Κάθε σύνολο δεδομενων μπορεί να είναι διαθέσιμο σε διαφορετικές μορφές, οι μορφές αυτές μπορεί να αναπαριστούν διαφορετικές μορφές αρχείων ή διαφορετικά σημεία διάθεσης. Παραδείγματα διανομών συμπεριλαμβάνουν ένα μεταφορτώσιμο αρχείο μορφής CSV, ένα API ή ένα RSS feed."@el , "شكل محدد لقائمة البيانات يمكن الوصول إليه. قائمة بيانات ما يمكن أن تكون متاحه باشكال و أنواع متعددة. ملف يمكن تحميله أو واجهة برمجية يمكن من خلالها الوصول إلى البيانات هي أمثلة على ذلك."@ar , "Konkrétní reprezentace datové sady. 
Datová sada může být dostupná v různých serializacích, které se mohou navzájem lišit různými způsoby, mimo jiné přirozeným jazykem, media-typem či formátem, schematickou organizací, časovým a prostorovým rozlišením, úrovní detailu či profily (které mohou specifikovat některé či všechny tyto rozdíly)."@cs , "データセットの特定の利用可能な形式を表わします。各データセットは、異なる形式で利用できることがあり、これらの形式は、データセットの異なる形式や、異なるエンドポイントを表わす可能性があります。配信の例には、ダウンロード可能なCSVファイル、API、RSSフィードが含まれます。"@ja , "Rappresenta una forma disponibile e specifica del dataset. Ciascun dataset può essere disponibile in forme differenti, che possono rappresentare formati diversi o diversi punti di accesso per un dataset. Esempi di distribuzioni sono un file CSV scaricabile, una API o un RSS feed."@it , "A specific representation of a dataset. A dataset might be available in multiple serializations that may differ in various ways, including natural language, media-type or format, schematic organization, temporal and spatial resolution, level of detail or profiles (which might specify any or all of the above)."@en , "Una representación específica de los datos. Cada conjunto de datos puede estar disponible en formas diferentes, las cuáles pueden variar en distintas formas, incluyendo el idioma, 'media-type' o formato, organización esquemática, resolución temporal y espacial, nivel de detalle o perfiles (que pueden especificar cualquiera o todas las diferencias anteriores)."@es ; + rdfs:isDefinedBy ; + rdfs:label "التوزيع"@ar , "Διανομή"@el , "Distribuce"@cs , "Distribuzione"@it , "配信"@ja , "Distribution"@en , "Distribution"@fr , "Distribution"@da , "Distribución"@es ; + skos:altLabel "Datamanifestation"@da , "Datarepræsentation"@da , "Dataudstilling"@da , "Datadistribution"@da ; + skos:definition "Konkrétní reprezentace datové sady. 
Datová sada může být dostupná v různých serializacích, které se mohou navzájem lišit různými způsoby, mimo jiné přirozeným jazykem, media-typem či formátem, schematickou organizací, časovým a prostorovým rozlišením, úrovní detailu či profily (které mohou specifikovat některé či všechny tyto rozdíly)."@cs , "Rappresenta una forma disponibile e specifica del dataset. Ciascun dataset può essere disponibile in forme differenti, che possono rappresentare formati diversi o diversi punti di accesso per un dataset. Esempi di distribuzioni sono un file CSV scaricabile, una API o un RSS feed."@it , "Una representación específica de los datos. Cada conjunto de datos puede estar disponible en formas diferentes, las cuáles pueden variar en distintas formas, incluyendo el idioma, 'media-type' o formato, organización esquemática, resolución temporal y espacial, nivel de detalle o perfiles (que pueden especificar cualquiera o todas las diferencias anteriores)."@es , "Représente une forme spécifique d'un jeu de données. Caque jeu de données peut être disponible sous différentes formes, celles-ci pouvant représenter différents formats du jeu de données ou différents endpoint. Des exemples de distribution sont des fichirs CSV, des API ou des flux RSS."@fr , "A specific representation of a dataset. A dataset might be available in multiple serializations that may differ in various ways, including natural language, media-type or format, schematic organization, temporal and spatial resolution, level of detail or profiles (which might specify any or all of the above)."@en , "Αναπαριστά μία συγκεκριμένη διαθέσιμη μορφή ενός συνόλου δεδομένων. Κάθε σύνολο δεδομενων μπορεί να είναι διαθέσιμο σε διαφορετικές μορφές, οι μορφές αυτές μπορεί να αναπαριστούν διαφορετικές μορφές αρχείων ή διαφορετικά σημεία διάθεσης. 
Παραδείγματα διανομών συμπεριλαμβάνουν ένα μεταφορτώσιμο αρχείο μορφής CSV, ένα API ή ένα RSS feed."@el , "データセットの特定の利用可能な形式を表わします。各データセットは、異なる形式で利用できることがあり、これらの形式は、データセットの異なる形式や、異なるエンドポイントを表わす可能性があります。配信の例には、ダウンロード可能なCSVファイル、API、RSSフィードが含まれます。"@ja , "شكل محدد لقائمة البيانات يمكن الوصول إليه. قائمة بيانات ما يمكن أن تكون متاحه باشكال و أنواع متعددة. ملف يمكن تحميله أو واجهة برمجية يمكن من خلالها الوصول إلى البيانات هي أمثلة على ذلك."@ar , "En specifik repræsentation af et datasæt. Et datasæt kan være tilgængelig i mange serialiseringer der kan variere på forskellige vis, herunder sprog, medietype eller format, systemorganisering, tidslig- og geografisk opløsning, detaljeringsniveau eller profiler (der kan specificere en eller flere af ovenstående)."@da ; + skos:scopeNote "Esta clase representa una disponibilidad general de un conjunto de datos, e implica que no existe información acerca del método de acceso real a los datos, i.e., si es un enlace de descarga directa o a través de una página Web."@es , "これは、データセットの一般的な利用可能性を表わし、データの実際のアクセス方式に関する情報(つまり、直接ダウンロードなのか、APIなのか、ウェブページを介したものなのか)を意味しません。dcat:downloadURLプロパティーの使用は、直接ダウンロード可能な配信を意味します。"@ja , "Ceci représente une disponibilité générale du jeu de données, et implique qu'il n'existe pas d'information sur la méthode d'accès réelle des données, par exple, si c'est un lien de téléchargement direct ou à travers une page Web."@fr , "Denne klasse repræsenterer datasættets overordnede tilgængelighed og giver ikke oplysninger om hvilken metode der kan anvendes til at få adgang til data, dvs. om adgang til datasættet realiseres ved direkte download, API eller via et websted. Anvendelsen af egenskaben dcat:downloadURL indikerer at distributionen kan downloades direkte."@da , "Αυτό αναπαριστά μία γενική διαθεσιμότητα ενός συνόλου δεδομένων και δεν υπονοεί τίποτα περί του πραγματικού τρόπου πρόσβασης στα δεδομένα, αν είναι άμεσα μεταφορτώσιμα, μέσω API ή μέσω μίας ιστοσελίδας. 
Η χρήση της ιδιότητας dcat:downloadURL δείχνει μόνο άμεσα μεταφορτώσιμες διανομές."@el , "This represents a general availability of a dataset it implies no information about the actual access method of the data, i.e. whether by direct download, API, or through a Web page. The use of dcat:downloadURL property indicates directly downloadable distributions."@en , "Toto popisuje obecnou dostupnost datové sady. Neimplikuje žádnou informaci o skutečné metodě přístupu k datům, tj. zda jsou přímo ke stažení, skrze API či přes webovou stránku. Použití vlastnosti dcat:downloadURL indikuje přímo stažitelné distribuce."@cs , "Questa classe rappresenta una disponibilità generale di un dataset e non implica alcuna informazione sul metodo di accesso effettivo ai dati, ad esempio se si tratta di un accesso a download diretto, API, o attraverso una pagina Web. L'utilizzo della proprietà dcat:downloadURL indica distribuzioni direttamente scaricabili."@it . + +vcard:hasAdditionalName + rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the additional name data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has additional name"@en . + +[ rdf:type owl:Axiom ; + rdfs:comment "Revision is a derivation (see http://www.w3.org/TR/prov-dm/#term-Revision). Moreover, according to \nhttp://www.w3.org/TR/2013/REC-prov-constraints-20130430/#term-Revision 23 April 2012 'wasRevisionOf is a strict sub-relation of wasDerivedFrom since two entities e2 and e1 may satisfy wasDerivedFrom(e2,e1) without being a variant of each other.'" ; + owl:annotatedProperty rdfs:subPropertyOf ; + owl:annotatedSource prov:wasRevisionOf ; + owl:annotatedTarget prov:wasDerivedFrom +] . + +locn:thoroughfare rdf:type rdf:Property ; + rdfs:comment "An address component that represents the name of a passage or way through from one location to another. A thoroughfare is not necessarily a road, it might be a waterway or some other feature. 
The domain of locn:thoroughfare is locn:Address."@en ; + rdfs:domain locn:Address ; + rdfs:isDefinedBy ; + rdfs:label "thoroughfare"@en ; + rdfs:range rdfs:Literal ; + dcterms:identifier "locn:thoroughfare" ; + vs:term_status "testing"@en . + +locn:Address rdf:type rdfs:Class ; + rdfs:comment "An \"address representation\" as conceptually defined by the INSPIRE Address Representation data type. The locn:addressId property may be used to link this locn:Address to other representations."@en ; + rdfs:isDefinedBy ; + rdfs:label "Address"@en ; + dcterms:identifier "locn:Address" ; + vs:term_status "testing"@en ; + wdsr:describedby . + +dcat:mediaType rdf:type owl:ObjectProperty , rdf:Property ; + rdfs:comment "Il tipo di media della distribuzione come definito da IANA"@it , "このプロパティーは、配信のメディア・タイプがIANAで定義されているときに使用すべきで(SHOULD)、そうでない場合には、dct:formatを様々な値と共に使用できます(MAY)。"@ja , "Η ιδιότητα αυτή ΘΑ ΠΡΕΠΕΙ να χρησιμοποιείται όταν ο τύπος μέσου μίας διανομής είναι ορισμένος στο IANA, αλλιώς η ιδιότητα dct:format ΔΥΝΑΤΑΙ να χρησιμοποιηθεί με διαφορετικές τιμές."@el , "Cette propriété doit être utilisée quand c'est définit le type de média de la distribution en IANA, sinon dct:format DOIT être utilisé avec différentes valeurs."@fr , "The media type of the distribution as defined by IANA"@en , "يجب استخدام هذه الخاصية إذا كان نوع الملف معرف ضمن IANA"@ar , "Medietypen for distributionen som den er defineret af IANA."@da , "Typ média distribuce definovaný v IANA."@cs , "Esta propiedad debe ser usada cuando está definido el tipo de media de la distribución en IANA, de otra manera dct:format puede ser utilizado con diferentes valores"@es ; + rdfs:domain dcat:Distribution ; + rdfs:isDefinedBy ; + rdfs:label "tipo de media"@es , "media type"@en , "نوع الميديا"@ar , "typ média"@cs , "メディア・タイプ"@ja , "type de média"@fr , "τύπος μέσου"@el , "medietype"@da , "tipo di media"@it ; + rdfs:range dcterms:MediaType ; + rdfs:subPropertyOf dcterms:format ; + skos:changeNote "Obor hodnot 
dcat:mediaType byl zúžen v této revizi DCAT."@cs , "Il range di dcat:mediaType è stato ristretto come parte della revisione di DCAT."@it , "The range of dcat:mediaType has been tightened as part of the revision of DCAT."@en ; + skos:definition "يجب استخدام هذه الخاصية إذا كان نوع الملف معرف ضمن IANA"@ar , "Η ιδιότητα αυτή ΘΑ ΠΡΕΠΕΙ να χρησιμοποιείται όταν ο τύπος μέσου μίας διανομής είναι ορισμένος στο IANA, αλλιώς η ιδιότητα dct:format ΔΥΝΑΤΑΙ να χρησιμοποιηθεί με διαφορετικές τιμές."@el , "Cette propriété doit être utilisée quand c'est définit le type de média de la distribution en IANA, sinon dct:format DOIT être utilisé avec différentes valeurs."@fr , "Esta propiedad debe ser usada cuando está definido el tipo de media de la distribución en IANA, de otra manera dct:format puede ser utilizado con diferentes valores."@es , "Il tipo di media della distribuzione come definito da IANA."@it , "The media type of the distribution as defined by IANA."@en , "このプロパティーは、配信のメディア・タイプがIANAで定義されているときに使用すべきで(SHOULD)、そうでない場合には、dct:formatを様々な値と共に使用できます(MAY)。"@ja , "Typ média distribuce definovaný v IANA."@cs , "Medietypen for distributionen som den er defineret af IANA."@da ; + skos:editorialNote "Status: English Definition text modified by DCAT revision team, Italian and Czech translation provided, other translations pending. Note some inconsistency on def vs. usage."@en ; + skos:scopeNote "Questa proprietà DEVE essere usata quando il tipo di media della distribuzione è definito nel registro dei tipi di media IANA https://www.iana.org/assignments/media-types/, altrimenti dct:format PUO 'essere usato con differenti valori."@it , "Tato vlastnost BY MĚLA být použita, je-li typ média distribuce definován v registru IANA https://www.iana.org/assignments/media-types/. 
V ostatních případech MŮŽE být použita vlastnost dct:format s jinými hodnotami."@cs , "Esta propiedad DEBERÍA usarse cuando el 'media type' de la distribución está definido en el registro IANA de 'media types' https://www.iana.org/assignments/media-types/, de lo contrario, dct:format PUEDE usarse con distintos valores."@es , "This property SHOULD be used when the media type of the distribution is defined in the IANA media types registry https://www.iana.org/assignments/media-types/, otherwise dct:format MAY be used with different values."@en , "Denne egenskab BØR anvendes hvis distributionens medietype optræder i 'IANA media types registry' https://www.iana.org/assignments/media-types/, ellers KAN egenskaben dct:format anvendes med et andet udfaldsrum."@da . + +prov:qualifiedUsage rdf:type owl:ObjectProperty ; + rdfs:comment "If this Activity prov:used Entity :e, then it can qualify how it used it using prov:qualifiedUsage [ a prov:Usage; prov:entity :e; :foo :bar ]."@en ; + rdfs:domain prov:Activity ; + rdfs:isDefinedBy ; + rdfs:label "qualifiedUsage" ; + rdfs:range prov:Usage ; + rdfs:subPropertyOf prov:qualifiedInfluence ; + prov:category "qualified" ; + prov:component "entities-activities" ; + prov:inverse "qualifiedUsingActivity" ; + prov:sharesDefinitionWith prov:Usage ; + prov:unqualifiedForm prov:used . + +dcterms:AgentClass rdf:type rdfs:Class ; + rdfs:comment "A group of agents."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Agent Class"@en ; + rdfs:subClassOf rdfs:Class ; + dcterms:issued "2008-01-14"^^xsd:date . + +prov:Collection rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Collection" ; + rdfs:subClassOf prov:Entity ; + prov:category "expanded" ; + prov:component "collections" ; + prov:definition "A collection is an entity that provides a structure to some constituents, which are themselves entities. 
These constituents are said to be member of the collections."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-collection"^^xsd:anyURI . + +dcterms:W3CDTF rdf:type rdfs:Datatype ; + rdfs:comment "The set of dates and times constructed according to the W3C Date and Time Formats Specification."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "W3C-DTF"@en ; + rdfs:seeAlso ; + dcterms:issued "2000-07-11"^^xsd:date . + +spdx:checksumAlgorithm_sha3_384 + rdf:type owl:NamedIndividual , spdx:ChecksumAlgorithm ; + rdfs:comment "Indicates the algorithm used was SHA3-384."@en ; + vs:term_status "stable"@en . + +spdx:relationshipType_dynamicLink + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A dynamically links to SPDXRef-B."@en ; + vs:term_status "stable"@en . + +spdx:License rdf:type owl:Class ; + rdfs:comment "A License represents a copyright license. The SPDX license list website is annotated with these properties (using RDFa) to allow license data published there to be easily processed. The license list is populated in accordance with the License List fields guidelines. These guidelines are not normative and may change over time. 
SPDX tooling should not rely on values in the license list conforming to the current guidelines."@en ; + rdfs:subClassOf spdx:SimpleLicensingInfo ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:standardLicenseHeaderTemplate + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:standardLicenseHeader + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:boolean ; + owl:onProperty spdx:isDeprecatedLicenseId + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:standardLicenseTemplate + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:boolean ; + owl:onProperty spdx:isOsiApproved ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:boolean ; + owl:onProperty spdx:isFsfLibre + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:licenseText ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + vs:term_status "stable"@en . + +vcard:post-office-box + rdf:type owl:DatatypeProperty ; + rdfs:comment "This data property has been deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "post office box"@en ; + owl:deprecated true . 
+ +time:hasTemporalDuration + rdf:type owl:ObjectProperty ; + rdfs:comment "Duration of a temporal entity."@en , "Duración de una entidad temporal."@es ; + rdfs:domain time:TemporalEntity ; + rdfs:label "has temporal duration"@en , "tiene duración temporal"@es ; + rdfs:range time:TemporalDuration ; + skos:definition "Duration of a temporal entity."@en , "Duración de una entidad temporal."@es . + + + rdfs:label "RDF/XML version of the ISA Programme Location Core Vocabulary"@en ; + dcterms:format ; + dcat:mediaType "application/rdf+xml"^^dcterms:IMT . + +prov:Start rdf:type owl:Class ; + rdfs:comment "An instance of prov:Start provides additional descriptions about the binary prov:wasStartedBy relation from some started prov:Activity to an prov:Entity that started it. For example, :foot_race prov:wasStartedBy :bang; prov:qualifiedStart [ a prov:Start; prov:entity :bang; :foo :bar; prov:atTime '2012-03-09T08:05:08-05:00'^^xsd:dateTime ] ."@en ; + rdfs:isDefinedBy ; + rdfs:label "Start" ; + rdfs:subClassOf prov:EntityInfluence , prov:InstantaneousEvent ; + prov:category "qualified" ; + prov:component "entities-activities" ; + prov:constraints "http://www.w3.org/TR/2013/REC-prov-constraints-20130430/#prov-dm-constraints-fig"^^xsd:anyURI ; + prov:definition "Start is when an activity is deemed to have been started by an entity, known as trigger. The activity did not exist before its start. Any usage, generation, or invalidation involving an activity follows the activity's start. A start may refer to a trigger entity that set off the activity, or to an activity, known as starter, that generated the trigger."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-Start"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-Start"^^xsd:anyURI ; + prov:unqualifiedForm prov:wasStartedBy . 
+
+spdx:extractedText rdf:type owl:DatatypeProperty ;
+	rdfs:comment "Provide a copy of the actual text of the license reference extracted from the package, file or snippet that is associated with the License Identifier to aid in future analysis."@en ;
+	rdfs:domain spdx:ExtractedLicensingInfo ;
+	rdfs:range xsd:string ;
+	vs:term_status "stable"@en .
+
+dcterms:tableOfContents
+	rdf:type rdf:Property ;
+	rdfs:comment "A list of subunits of the resource."@en ;
+	rdfs:isDefinedBy dcterms: ;
+	rdfs:label "Table Of Contents"@en ;
+	rdfs:subPropertyOf dc:description , dcterms:description ;
+	dcterms:issued "2000-07-11"^^xsd:date .
+
+spdx:range rdf:type owl:ObjectProperty ;
+	rdfs:comment "This field defines the byte range in the original host file (in X.2) that the snippet information applies to"@en ;
+	rdfs:domain spdx:Snippet ;
+	rdfs:range ;
+	vs:term_status "stable"@en .
+
+time:unitHour rdf:type time:TemporalUnit ;
+	rdfs:label "Hour (unit of temporal duration)"@en ;
+	skos:prefLabel "один час"@ru , "一時間"@jp , "godzina"@pl , "Stunde"@de , "一小時"@zh , "한 시간"@kr , "hora"@es , "hora"@pt , "ora"@it , "hour"@en , "ساعة واحدة"@ar , "uur"@nl , "heure"@fr ;
+	time:days "0"^^xsd:decimal ;
+	time:hours "1"^^xsd:decimal ;
+	time:minutes "0"^^xsd:decimal ;
+	time:months "0"^^xsd:decimal ;
+	time:seconds "0"^^xsd:decimal ;
+	time:weeks "0"^^xsd:decimal ;
+	time:years "0"^^xsd:decimal . 
+ +prov:specializationOf + rdf:type owl:ObjectProperty , owl:AnnotationProperty ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "specializationOf" ; + rdfs:range prov:Entity ; + rdfs:seeAlso prov:alternateOf ; + rdfs:subPropertyOf prov:alternateOf ; + prov:category "expanded" ; + prov:component "alternate" ; + prov:constraints "http://www.w3.org/TR/2013/REC-prov-constraints-20130430/#prov-dm-constraints-fig"^^xsd:anyURI ; + prov:definition "An entity that is a specialization of another shares all aspects of the latter, and additionally presents more specific aspects of the same thing as the latter. In particular, the lifetime of the entity being specialized contains that of any specialization. Examples of aspects include a time period, an abstraction, and a context associated with the entity."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-specialization"^^xsd:anyURI ; + prov:inverse "generalizationOf" ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-specialization"^^xsd:anyURI . + +prov:qualifiedInfluence + rdf:type owl:ObjectProperty ; + rdfs:comment "Because prov:qualifiedInfluence is a broad relation, the more specific relations (qualifiedCommunication, qualifiedDelegation, qualifiedEnd, etc.) should be used when applicable."@en ; + rdfs:domain [ rdf:type owl:Class ; + owl:unionOf ( prov:Activity prov:Agent prov:Entity ) + ] ; + rdfs:domain [ rdf:type owl:Class ; + owl:unionOf ( prov:Activity prov:Agent prov:Entity ) + ] ; + rdfs:isDefinedBy ; + rdfs:label "qualifiedInfluence" ; + rdfs:range prov:Influence ; + prov:category "qualified" ; + prov:component "derivations" ; + prov:inverse "qualifiedInfluenceOf" ; + prov:sharesDefinitionWith prov:Influence ; + prov:unqualifiedForm prov:wasInfluencedBy . 
+ +vcard:country-name rdf:type owl:DatatypeProperty ; + rdfs:comment "The country name associated with the address of the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "country name"@en ; + rdfs:range xsd:string . + +vcard:Other rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Other"@en ; + rdfs:subClassOf vcard:Gender . + + + rdf:type sh:NodeShape ; + sh:name "Identifier"@en ; + sh:property [ sh:maxCount 1 ; + sh:path skos:notation ; + sh:severity sh:Violation + ] ; + sh:targetClass adms:Identifier . + +spdx:Purpose rdf:type owl:Class ; + rdfs:comment "Package Purpose is intrinsic to how the package is being used rather than the content of the package." ; + vs:term_status "stable" . + +spdx:isWayBackLink rdf:type owl:DatatypeProperty ; + rdfs:comment "True if the License SeeAlso URL points to a Wayback archive"@en ; + rdfs:domain spdx:CrossRef ; + rdfs:range xsd:boolean . + +time:month rdf:type owl:DatatypeProperty ; + rdfs:comment "Month position in a calendar-clock system.\n\nThe range of this property is not specified, so can be replaced by any specific representation of a calendar month from any calendar. "@en , "Posición de mes en un sistema calendario-reloj.\n El rango de esta propiedad no está especificado, por tanto, se puede reemplazar por cualquier representación específica de un mes de calendario de un calendario cualquiera."@es ; + rdfs:domain time:GeneralDateTimeDescription ; + rdfs:label "month"@en , "mes"@es ; + skos:definition "Month position in a calendar-clock system.\n\nThe range of this property is not specified, so can be replaced by any specific representation of a calendar month from any calendar. "@en , "Posición de mes en un sistema calendario-reloj.\n El rango de esta propiedad no está especificado, por tanto, se puede reemplazar por cualquier representación específica de un mes de calendario de un calendario cualquiera."@es . 
+ +time:seconds rdf:type owl:DatatypeProperty ; + rdfs:comment "length of, or element of the length of, a temporal extent expressed in seconds"@en , "Longitud de, o elemento de la longitud de, una extensión temporal expresada en segundos."@es ; + rdfs:domain time:GeneralDurationDescription ; + rdfs:label "seconds duration"@en , "duración en segundos"@es ; + rdfs:range xsd:decimal ; + rdfs:seeAlso . + +vcard:Agent rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Agent"@en ; + rdfs:subClassOf vcard:RelatedType . + +vcard:hasCalendarRequest + rdf:type owl:ObjectProperty ; + rdfs:comment "To specify the calendar user address to which a scheduling request be sent for the object. (Was called CALADRURI in RFC6350)"@en ; + rdfs:isDefinedBy ; + rdfs:label "has calendar request"@en . + +dcterms:UDC rdf:type dcam:VocabularyEncodingScheme ; + rdfs:comment "The set of conceptual resources specified by the Universal Decimal Classification."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "UDC"@en ; + rdfs:seeAlso ; + dcterms:issued "2000-07-11"^^xsd:date . + +locn:location rdf:type rdf:Property ; + rdfs:comment "The location property links any resource to the Location Class. Asserting the location relationship implies only that the domain has some connection to a Location in time or space. It does not imply that the resource is necessarily at that location at the time when the assertion is made."@en ; + rdfs:isDefinedBy ; + rdfs:label "location"@en ; + rdfs:range dcterms:Location ; + dcterms:identifier "locn:location" ; + vs:term_status "testing"@en . + +spdx:relationshipType_prerequisiteFor + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A is a prerequisite for SPDXRef-B"@en ; + vs:term_status "stable"@en . + +spdx:checksumAlgorithm_blake2b512 + rdf:type owl:NamedIndividual , spdx:ChecksumAlgorithm ; + rdfs:comment "Indicates the algorithm used was BLAKE2b-512."@en ; + vs:term_status "stable"@en . 
+ +spdx:describesPackage + rdf:type owl:ObjectProperty ; + rdfs:comment "The describesPackage property relates an SpdxDocument to the package which it describes."@en ; + rdfs:domain spdx:SpdxDocument ; + rdfs:range spdx:Package ; + vs:term_status "stable"@en . + +vcard:hasName rdf:type owl:ObjectProperty ; + rdfs:comment "To specify the components of the name of the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "has name"@en ; + rdfs:range vcard:Name ; + owl:equivalentProperty vcard:n . + +spdx:purpose_file rdf:type owl:NamedIndividual , spdx:Purpose ; + rdfs:comment "The package is a single file which can be independently distributed (configuration file, statically linked binary, Kubernetes deployment, etc)."@en ; + vs:term_status "stable"@en . + +time:day rdf:type owl:DatatypeProperty ; + rdfs:comment "Day position in a calendar-clock system.\n\nThe range of this property is not specified, so can be replaced by any specific representation of a calendar day from any calendar. "@en , "Posición de día en un sistema calendario-reloj."@es ; + rdfs:domain time:GeneralDateTimeDescription ; + rdfs:label "day"@en , "día"@es ; + skos:definition "Day position in a calendar-clock system.\n\nThe range of this property is not specified, so can be replaced by any specific representation of a calendar day from any calendar. "@en , "Posición de día en un sistema calendario-reloj.\n\nEl rango de esta propiedad no está especificado, por tanto, se puede reemplazar por una representación específica de un día de calendario de cualquier calendario."@es . + +dcterms:medium rdf:type rdf:Property ; + rdfs:comment "The material or physical carrier of the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Medium"@en ; + rdfs:subPropertyOf dc:format , dcterms:format ; + dcam:domainIncludes dcterms:PhysicalResource ; + dcam:rangeIncludes dcterms:PhysicalMedium ; + dcterms:issued "2000-07-11"^^xsd:date . 
+ + + rdf:type voaf:Vocabulary , owl:Ontology ; + rdfs:comment "This is a new version of the final draft published by the European Commission in May 2012, revised according to the results of the ISA Core Location Pilot (see Section Change History for the list of changes). It is currently under the control of the Locations and Addresses Community Group, but is not under active development or review. Comments and queries should be sent to that group via public-locadd@w3.org. Terms defined here may be deprecated by that or future groups but will not disappear or their definition change."@en ; + rdfs:label "ISA Programme Location Core Vocabulary"@en ; + cc:attributionName "European Commission"@en ; + cc:attributionURL ; + dcterms:abstract "The ISA Programme Location Core Vocabulary provides a minimum set of classes and properties for describing any place in terms of its name, address or geometry. The vocabulary is specifically designed to aid the publication of data that is interoperable with EU INSPIRE Directive. 
It is closely integrated with the Business and Person Core Vocabularies of the EU ISA Programme, now available in W3C space as, respectively, the Registered Organization vocabulary and ISA Person Core Vocabulary."@en ; + dcterms:conformsTo ; + dcterms:hasFormat , , ; + dcterms:issued "2013-11-25"^^xsd:date ; + dcterms:license ; + dcterms:mediator [ foaf:homepage ; + foaf:mbox ; + foaf:name "Locations and Addresses Community Group" + ] ; + dcterms:modified "2015-03-23"^^xsd:date ; + dcterms:rights "Copyright © European Union, 2012-2015."@en ; + dcterms:title "ISA Programme Location Core Vocabulary"@en ; + vann:changes "\n2015-03-23: Updates in the namespace document and in the RDF/XML and Turtle schemas:\n- Fixed copyright notice.\n- Added class and property diagram.\n- Updated GeoSPARQL (a) namespace URIs and (b) datatype names in the examples of property locn:geometry, based on version 1.0 of the GeoSPARQL specification, and added a note in the examples.\n - prefix ogc (http://www.opengis.net/rdf#) replaced with gsp (http://www.opengis.net/ont/geosparql#) and sf (http://www.opengis.net/ont/sf#)\n - ogc:WKTLiteral → gsp:wktLiteral\n - ogc:GMLLiteral → gsp:gmlLiteral\n- Added namespace declarations for all namespace prefixes used in LOCN namespace document, even though they are not used in class/property definitions.\n- Corrected the endonym of the Greek capital written in the Greek script in the definition of class locn:geographicName (Aθnνa → Αθήνα).\n- Fixed links and typos, minor revisions made to the textual descriptions.\n2013-12-21: (PhilA) Update in RDF/XML and Turtle schemas:\n- Updated voaf namespace.\n- Corrected links to different distributions of the schema.\n- Removed xml:base and used http://www/w3/org/ns/locn as the schema URI cf. 
original which used the namespace URI (with the final # character).\n2013-11-25: Changes since final draft version released by the EU ISA Programme Core Vocabularies Working Group (Location Task Force)\n- Revised usage note of class locn:Geometry. The text describing its recommended usage has been moved to usage note of property locn:geometry.\n- Dropped domain/range restriction for locn:geographicName.\n- Dropped domain/range restriction for locn:locatorDesignator. Free text definition updated accordingly.\n- Dropped domain/range restriction for locn:locatorName. Free text definition updated accordingly.\n- Corrected free text definition of property locn:geometry (its domain is \"any resource\", and not a \"location\").\n- Revised usage note of property locn:geometry to include text about recommended usage, formerly included in the usage note of class locn:Geometry.\n- Revised usage note and examples of property locn:geometry to include support to geocoded URIs (e.g., geo URIs, GeoHash URIs).\n- Added term status. 
All terms have been set to \"testing\", with the exception of class locn:Geometry and properties rdfs:seeAlso (geographic identifier) and locn:addressId.\n- Renamed subject in Turtle examples (ex:a → :Resource).\n- Fixed links and typos, minor revisions made to the textual descriptions.\n "@en ; + vann:preferredNamespacePrefix "locn" ; + vann:preferredNamespaceUri "http://www.w3.org/ns/locn#"^^xsd:anyURI ; + voaf:classNumber "3"^^xsd:nonNegativeInteger ; + voaf:propertyNumber "16"^^xsd:nonNegativeInteger ; + voaf:reliesOn dcterms: , rdfs: ; + rec:editor [ rdfs:seeAlso ; + sdo:affiliation [ foaf:homepage ; + foaf:name "European Commission - Joint Research Centre (JRC)"@en + ] ; + foaf:homepage ; + foaf:name "Andrea Perego" + ] ; + rec:editor [ rdfs:seeAlso ; + sdo:affiliation [ foaf:homepage ; + foaf:name "W3C/ERCIM" + ] ; + foaf:homepage ; + foaf:name "Phil Archer" + ] ; + rec:editor [ sdo:affiliation [ foaf:homepage ; + foaf:name "European Commission - Joint Research Centre (JRC)"@en + ] ; + foaf:name "Michael Lutz" + ] ; + owl:versionInfo "First version in w3.org/ns space"@en ; + wdsr:describedby ; + foaf:depiction ; + foaf:maker [ foaf:homepage ; + foaf:name "EU ISA Programme Core Vocabularies Working Group (Location Task Force)" + ] . + +vcard:Met rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Met"@en ; + rdfs:subClassOf vcard:RelatedType . + +skos:exactMatch rdf:type owl:ObjectProperty , owl:SymmetricProperty , owl:TransitiveProperty , rdf:Property ; + rdfs:comment "skos:exactMatch is disjoint with each of the properties skos:broadMatch and skos:relatedMatch."@en ; + rdfs:isDefinedBy ; + rdfs:label "has exact match"@en ; + rdfs:subPropertyOf skos:closeMatch ; + skos:definition "skos:exactMatch is used to link two concepts, indicating a high degree of confidence that the concepts can be used interchangeably across a wide range of information retrieval applications. 
skos:exactMatch is a transitive property, and is a sub-property of skos:closeMatch."@en . + +vcard:hasRelated rdf:type owl:ObjectProperty ; + rdfs:comment "To specify a relationship between another entity and the entity represented by this object"@en ; + rdfs:isDefinedBy ; + rdfs:label "has related"@en . + +vcard:Text rdf:type owl:Class ; + rdfs:comment "Also called sms telephone"@en ; + rdfs:isDefinedBy ; + rdfs:label "Text"@en ; + rdfs:subClassOf vcard:TelephoneType . + +time:intervalOverlappedBy + rdf:type owl:ObjectProperty ; + rdfs:comment "Si un intervalo propio T1 es 'intervalo solapado por' otro intervalo propio T2, entonces el principio de T1 es posterior al principio de T2, y el principio de T1 es anterior al final de T2, y el final de T1 es posterior al final de T2."@es , "If a proper interval T1 is intervalOverlappedBy another proper interval T2, then the beginning of T1 is after the beginning of T2, the beginning of T1 is before the end of T2, and the end of T1 is after the end of T2."@en ; + rdfs:domain time:ProperInterval ; + rdfs:label "intervalo solapado por"@es , "interval overlapped by"@en ; + rdfs:range time:ProperInterval ; + owl:inverseOf time:intervalOverlaps ; + skos:definition "If a proper interval T1 is intervalOverlappedBy another proper interval T2, then the beginning of T1 is after the beginning of T2, the beginning of T1 is before the end of T2, and the end of T1 is after the end of T2."@en , "Si un intervalo propio T1 es 'intervalo solapado por' otro intervalo propio T2, entonces el principio de T1 es posterior al principio de T2, y el principio de T1 es anterior al final de T2, y el final de T1 es posterior al final de T2."@es . + +spdx:relationshipType_dependencyManifestOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A is a manifest file that lists a set of dependencies for SPDXRef-B."@en ; + vs:term_status "stable"@en . 
+ +spdx:fileType_binary rdf:type owl:NamedIndividual , spdx:FileType ; + rdfs:comment "Indicates the file is not a text file. spdx:filetype_archive is preferred for archive files even though they are binary."@en ; + vs:term_status "stable"@en . + +vcard:class rdf:type owl:DatatypeProperty ; + rdfs:comment "This data property has been deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "class"@en ; + owl:deprecated true . + +dcterms:IMT rdf:type dcam:VocabularyEncodingScheme ; + rdfs:comment "The set of media types specified by the Internet Assigned Numbers Authority."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "IMT"@en ; + rdfs:seeAlso ; + dcterms:issued "2000-07-11"^^xsd:date . + +prov:qualifiedEnd rdf:type owl:ObjectProperty ; + rdfs:comment "If this Activity prov:wasEndedBy Entity :e1, then it can qualify how it was ended using prov:qualifiedEnd [ a prov:End; prov:entity :e1; :foo :bar ]."@en ; + rdfs:domain prov:Activity ; + rdfs:isDefinedBy ; + rdfs:label "qualifiedEnd" ; + rdfs:range prov:End ; + rdfs:subPropertyOf prov:qualifiedInfluence ; + prov:category "qualified" ; + prov:component "entities-activities" ; + prov:inverse "qualifiedEndOf" ; + prov:sharesDefinitionWith prov:End ; + prov:unqualifiedForm prov:wasEndedBy . + +prov:alternateOf rdf:type owl:ObjectProperty ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "alternateOf" ; + rdfs:range prov:Entity ; + rdfs:seeAlso prov:specializationOf ; + prov:category "expanded" ; + prov:component "alternate" ; + prov:constraints "http://www.w3.org/TR/2013/REC-prov-constraints-20130430/#prov-dm-constraints-fig"^^xsd:anyURI ; + prov:definition "Two alternate entities present aspects of the same thing. 
These aspects may be the same or different, and the alternate entities may or may not overlap in time."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-alternate"^^xsd:anyURI ; + prov:inverse "alternateOf" ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-alternate"^^xsd:anyURI . + +vcard:hasPostalCode rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the postal code data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has postal code"@en . + +dcterms:isReplacedBy rdf:type rdf:Property ; + rdfs:comment "A related resource that supplants, displaces, or supersedes the described resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Is Replaced By"@en ; + rdfs:subPropertyOf dc:relation , dcterms:relation ; + dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Replaces."@en ; + dcterms:issued "2000-07-11"^^xsd:date . + +vcard:Parent rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Parent"@en ; + rdfs:subClassOf vcard:RelatedType . + +spdx:deprecatedVersion + rdf:type owl:DatatypeProperty ; + rdfs:comment "License list version where this license was decprecated"@en ; + rdfs:domain [ rdf:type owl:Class ; + owl:unionOf ( spdx:ListedLicense spdx:ListedLicenseException ) + ] ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +vcard:hasCalendarLink + rdf:type owl:ObjectProperty ; + rdfs:comment "To specify the calendar associated with the object. (Was called CALURI in RFC6350)"@en ; + rdfs:isDefinedBy ; + rdfs:label "has calendar link"@en . + +time:Friday rdf:type time:DayOfWeek ; + rdfs:label "Friday"@en ; + skos:prefLabel "Venerdì"@it , "Vendredi"@fr , "Viernes"@es , "Friday"@en , "Piątek"@pl , "Vrijdag"@nl , "Freitag"@de , "Пятница"@ru , "金曜日"@ja , "الجمعة"@ar , "星期五"@zh , "Sexta-feira"@pt . 
+ +spdx:fileType rdf:type owl:ObjectProperty ; + rdfs:comment "The type of the file."@en ; + rdfs:domain spdx:File ; + vs:term_status "stable" . + +dcterms:type rdf:type rdf:Property ; + rdfs:comment "The nature or genre of the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Type"@en ; + rdfs:subPropertyOf dc:type ; + dcterms:description "Recommended practice is to use a controlled vocabulary such as the DCMI Type Vocabulary [[DCMI-TYPE](http://dublincore.org/documents/dcmi-type-vocabulary/)]. To describe the file format, physical medium, or dimensions of the resource, use the property Format."@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +prov:todo rdf:type owl:AnnotationProperty . + +prov:qualifiedQuotation + rdf:type owl:ObjectProperty ; + rdfs:comment "If this Entity prov:wasQuotedFrom Entity :e, then it can qualify how using prov:qualifiedQuotation [ a prov:Quotation; prov:entity :e; :foo :bar ]."@en ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "qualifiedQuotation" ; + rdfs:range prov:Quotation ; + rdfs:subPropertyOf prov:qualifiedInfluence ; + prov:category "qualified" ; + prov:component "derivations" ; + prov:inverse "qualifiedQuotationOf" ; + prov:sharesDefinitionWith prov:Quotation ; + prov:unqualifiedForm prov:wasQuotedFrom . + +prov:n rdf:type owl:AnnotationProperty ; + rdfs:comment "A reference to the principal section of the PROV-DM document that describes this concept."@en ; + rdfs:isDefinedBy ; + rdfs:subPropertyOf rdfs:seeAlso . + +prov:wasEndedBy rdf:type owl:ObjectProperty ; + rdfs:comment "End is when an activity is deemed to have ended. 
An end may refer to an entity, known as trigger, that terminated the activity."@en ; + rdfs:domain prov:Activity ; + rdfs:isDefinedBy ; + rdfs:label "wasEndedBy" ; + rdfs:range prov:Entity ; + rdfs:subPropertyOf prov:wasInfluencedBy ; + owl:propertyChainAxiom ( prov:qualifiedEnd prov:entity ) ; + owl:propertyChainAxiom ( prov:qualifiedEnd prov:entity ) ; + prov:category "expanded" ; + prov:component "entities-activities" ; + prov:inverse "ended" ; + prov:qualifiedForm prov:qualifiedEnd , prov:End . + +spdx:relationshipType_metafileOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "To be used when SPDXRef-A is a metafile of SPDXRef-B."@en ; + vs:term_status "stable"@en . + +dcterms:provenance rdf:type rdf:Property ; + rdfs:comment "A statement of any changes in ownership and custody of the resource since its creation that are significant for its authenticity, integrity, and interpretation."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Provenance"@en ; + dcam:rangeIncludes dcterms:ProvenanceStatement ; + dcterms:description "The statement may include a description of any changes successive custodians made to the resource."@en ; + dcterms:issued "2004-09-20"^^xsd:date . + +spdx:dataLicense rdf:type owl:ObjectProperty , owl:FunctionalProperty ; + rdfs:comment "Compliance with the SPDX specification includes populating the SPDX fields therein with data related to such fields (\"SPDX-Metadata\"). The SPDX specification contains numerous fields where an SPDX document creator may provide relevant explanatory text in SPDX-Metadata. Without opining on the lawfulness of \"database rights\" (in jurisdictions where applicable), such explanatory text is copyrightable subject matter in most Berne Convention countries. 
By using the SPDX specification, or any portion hereof, you hereby agree that any copyright rights (as determined by your jurisdiction) in any SPDX-Metadata, including without limitation explanatory text, shall be subject to the terms of the Creative Commons CC0 1.0 Universal license. For SPDX-Metadata not containing any copyright rights, you hereby agree and acknowledge that the SPDX-Metadata is provided to you \"as-is\" and without any representations or warranties of any kind concerning the SPDX-Metadata, express, implied, statutory or otherwise, including without limitation warranties of title, merchantability, fitness for a particular purpose, non-infringement, or the absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not discoverable, all to the greatest extent permissible under applicable law."@en ; + rdfs:domain spdx:SpdxDocument ; + rdfs:range [ rdf:type owl:Restriction ; + owl:hasValue ; + owl:onProperty spdx:dataLicense + ] ; + vs:term_status "stable" . + +vcard:agent rdf:type owl:ObjectProperty ; + rdfs:comment "This object property has been deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "agent"@en ; + owl:deprecated true . + +spdx:relationship rdf:type owl:ObjectProperty ; + rdfs:comment "Defines a relationship between two SPDX elements. The SPDX element may be a Package, File, or SpdxDocument."@en ; + rdfs:domain spdx:SpdxElement ; + rdfs:range spdx:Relationship ; + vs:term_status "stable"@en . + +spdx:checksumAlgorithm_sha512 + rdf:type owl:NamedIndividual , spdx:ChecksumAlgorithm ; + rdfs:comment "Indicates the algorithm used was SHA512"@en ; + vs:term_status "stable"@en . 
+ +prov:qualifiedCommunication + rdf:type owl:ObjectProperty ; + rdfs:comment "If this Activity prov:wasInformedBy Activity :a, then it can qualify how it was influenced using prov:qualifiedCommunication [ a prov:Communication; prov:activity :a; :foo :bar ]."@en ; + rdfs:domain prov:Activity ; + rdfs:isDefinedBy ; + rdfs:label "qualifiedCommunication" ; + rdfs:range prov:Communication ; + rdfs:subPropertyOf prov:qualifiedInfluence ; + prov:category "qualified" ; + prov:component "entities-activities" ; + prov:inverse "qualifiedCommunicationOf" ; + prov:qualifiedForm prov:Communication ; + prov:sharesDefinitionWith prov:Communication . + +vcard:rev rdf:type owl:DatatypeProperty ; + rdfs:comment "To specify revision information about the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "revision"@en ; + rdfs:range xsd:dateTime . + +spdx:documentation rdf:type owl:DatatypeProperty ; + rdfs:comment "Website containing the documentation related to the repository identifier"@en ; + rdfs:domain spdx:ReferenceType ; + rdfs:range xsd:anyURI ; + vs:term_status "stable"@en . + +time:before rdf:type owl:TransitiveProperty , owl:ObjectProperty ; + rdfs:comment "Asume una dirección en el tiempo. Si una entidad temporal T1 está antes que otra entidad temporal T2, entonces el final de T1 está antes que el principio de T2. Así, \"antes\" se puede considerar básica para instantes y derivada para intervalos."@es , "Gives directionality to time. If a temporal entity T1 is before another temporal entity T2, then the end of T1 is before the beginning of T2. Thus, \"before\" can be considered to be basic to instants and derived for intervals."@en ; + rdfs:domain time:TemporalEntity ; + rdfs:label "antes"@es , "before"@en ; + rdfs:range time:TemporalEntity ; + owl:inverseOf time:after ; + skos:definition "Gives directionality to time. If a temporal entity T1 is before another temporal entity T2, then the end of T1 is before the beginning of T2. 
Thus, \"before\" can be considered to be basic to instants and derived for intervals."@en , "Asume una dirección en el tiempo. Si una entidad temporal T1 está antes que otra entidad temporal T2, entonces el final de T1 está antes que el principio de T2. Así, \"antes\" se puede considerar básica para instantes y derivada para intervalos."@es . + +prov:Entity rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Entity" ; + owl:disjointWith prov:InstantaneousEvent ; + prov:category "starting-point" ; + prov:component "entities-activities" ; + prov:constraints "http://www.w3.org/TR/2013/REC-prov-constraints-20130430/#prov-dm-constraints-fig"^^xsd:anyURI ; + prov:definition "An entity is a physical, digital, conceptual, or other kind of thing with some fixed aspects; entities may be real or imaginary. "@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-entity"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-Entity"^^xsd:anyURI . + +prov:AgentInfluence rdf:type owl:Class ; + rdfs:comment "AgentInfluence provides additional descriptions of an Agent's binary influence upon any other kind of resource. Instances of AgentInfluence use the prov:agent property to cite the influencing Agent."@en , "It is not recommended that the type AgentInfluence be asserted without also asserting one of its more specific subclasses."@en ; + rdfs:isDefinedBy ; + rdfs:label "AgentInfluence" ; + rdfs:seeAlso prov:agent ; + rdfs:subClassOf prov:Influence ; + prov:category "qualified" ; + prov:editorsDefinition "AgentInfluence is the capacity of an agent to have an effect on the character, development, or behavior of another by means of attribution, association, delegation, or other."@en . 
+ +dcterms:created rdf:type rdf:Property ; + rdfs:comment "Date of creation of the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Date Created"@en ; + rdfs:range rdfs:Literal ; + rdfs:subPropertyOf dc:date , dcterms:date ; + dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en ; + dcterms:issued "2000-07-11"^^xsd:date . + +vcard:Cell rdf:type owl:Class ; + rdfs:comment "Also called mobile telephone"@en ; + rdfs:isDefinedBy ; + rdfs:label "Cell"@en ; + rdfs:subClassOf vcard:TelephoneType . + +prov:Location rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Location" ; + rdfs:seeAlso prov:atLocation ; + prov:category "expanded" ; + prov:definition "A location can be an identifiable geographic place (ISO 19112), but it can also be a non-geographic place such as a directory, row, or column. As such, there are numerous ways in which location can be expressed, such as by a coordinate, address, landmark, and so forth."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-attribute-location"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-attribute"^^xsd:anyURI . + +dcterms:temporal rdf:type rdf:Property ; + rdfs:comment "Temporal characteristics of the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Temporal Coverage"@en ; + rdfs:subPropertyOf dc:coverage , dcterms:coverage ; + dcam:rangeIncludes dcterms:PeriodOfTime ; + dcterms:issued "2000-07-11"^^xsd:date . + + + rdf:type owl:ObjectProperty ; + rdfs:domain ; + rdfs:range ; + vs:term_status "stable"@en . 
+ +dcat:endpointDescription + rdf:type owl:ObjectProperty ; + rdfs:comment "En beskrivelse af det pågældende tjenesteendpoint, inklusiv dets operationer, parametre etc."@da , "Una descripción del end-point del servicio, incluyendo sus operaciones, parámetros, etc."@es , "A description of the service end-point, including its operations, parameters etc."@en , "Una descrizione dell'endpoint del servizio, incluse le sue operazioni, parametri, ecc."@it , "Popis přístupového bodu služby včetně operací, parametrů apod."@cs ; + rdfs:domain dcat:DataService ; + rdfs:label "endpointbeskrivelse"@da , "descripción del end-point del servicio"@es , "description of service end-point"@en , "popis přístupového bodu služby"@cs , "descrizione dell'endpoint del servizio"@it ; + skos:changeNote "Nuova proprietà in DCAT 2.0."@it , "Nueva propiedad agregada en DCAT 2.0."@en , "Ny egenskab i DCAT 2.0."@da , "New property in DCAT 2.0."@en , "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs ; + skos:definition "A description of the service end-point, including its operations, parameters etc."@en , "Una descrizione dell'endpoint del servizio, incluse le sue operazioni, parametri, ecc."@it , "En beskrivelse af det pågældende tjenesteendpoint, inklusiv dets operationer, parametre etc."@da , "Una descripción del end-point del servicio, incluyendo sus operaciones, parámetros, etc.."@es , "Popis přístupového bodu služby včetně operací, parametrů apod."@cs ; + skos:scopeNote "Una descrizione dell'endpoint può essere espressa in un formato leggibile dalla macchina, come una descrizione OpenAPI (Swagger), una risposta GetCapabilities OGC, una descrizione del servizio SPARQL, un documento OpenSearch o WSDL, una descrizione API Hydra, o con del testo o qualche altra modalità informale se una rappresentazione formale non è possibile."@it , "La descripción del endpoint brinda detalles específicos de la instancia del endpoint, mientras que dct:conformsTo se usa para indicar el estándar general o 
especificación que implementa el endpoint."@es , "Popis přístupového bodu dává specifické detaily jeho konkrétní instance, zatímco dct:conformsTo indikuje obecný standard či specifikaci kterou přístupový bod implementuje."@cs , "The endpoint description gives specific details of the actual endpoint instance, while dct:conformsTo is used to indicate the general standard or specification that the endpoint implements."@en , "Una descripción del endpoint del servicio puede expresarse en un formato que la máquina puede interpretar, tal como una descripción basada en OpenAPI (Swagger), una respuesta OGC GetCapabilities, una descripción de un servicio SPARQL, un documento OpenSearch o WSDL, una descripción con la Hydra API, o en texto u otro modo informal si la representación formal no es posible."@es , "La descrizione dell'endpoint fornisce dettagli specifici dell'istanza dell'endpoint reale, mentre dct:conformsTo viene utilizzato per indicare lo standard o le specifiche implementate dall'endpoint."@it , "En beskrivelse af et endpoint kan udtrykkes i et maskinlæsbart format, såsom OpenAPI (Swagger)-beskrivelser, et OGC GetCapabilities svar, en SPARQL tjenestebeskrivelse, en OpenSearch- eller et WSDL-dokument, en Hydra-API-beskrivelse, eller i tekstformat eller i et andet uformelt format, hvis en formel repræsentation ikke er mulig."@da , "Popis přístupového bodu může být vyjádřen ve strojově čitelné formě, například jako popis OpenAPI (Swagger), odpověď služby OGC getCapabilities, pomocí slovníku SPARQL Service Description, jako OpenSearch či WSDL document, jako popis API dle slovníku Hydra, a nebo textově nebo jiným neformálním způsobem, pokud není možno použít formální reprezentaci."@cs , "An endpoint description may be expressed in a machine-readable form, such as an OpenAPI (Swagger) description, an OGC GetCapabilities response, a SPARQL Service Description, an OpenSearch or WSDL document, a Hydra API description, else in text or some other informal mode if a formal 
representation is not possible."@en , "Endpointbeskrivelsen giver specifikke oplysninger om den konkrete endpointinstans, mens dct:conformsTo anvendes til at indikere den overordnede standard eller specifikation som endpointet er i overensstemmelse med."@da . + +vcard:Sibling rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Sibling"@en ; + rdfs:subClassOf vcard:RelatedType . + +skos:editorialNote rdf:type owl:AnnotationProperty , rdf:Property ; + rdfs:isDefinedBy ; + rdfs:label "editorial note"@en ; + rdfs:subPropertyOf skos:note ; + skos:definition "A note for an editor, translator or maintainer of the vocabulary."@en . + +[ rdf:type owl:AllDifferent ; + owl:distinctMembers ( spdx:noassertion spdx:none ) +] . + +spdx:licenseException + rdf:type owl:ObjectProperty ; + rdfs:comment "An exception to a license."@en ; + rdfs:domain spdx:WithExceptionOperator ; + rdfs:range spdx:LicenseException ; + vs:term_status "stable"@en . + +[ rdf:type owl:Axiom ; + rdfs:comment "hadPrimarySource property is a particular case of wasDerivedFrom (see http://www.w3.org/TR/prov-dm/#term-original-source) that aims to give credit to the source that originated some information." ; + owl:annotatedProperty rdfs:subPropertyOf ; + owl:annotatedSource prov:hadPrimarySource ; + owl:annotatedTarget prov:wasDerivedFrom +] . + +spdx:creator rdf:type owl:DatatypeProperty ; + rdfs:comment "Identify who (or what, in the case of a tool) created the SPDX document. If the SPDX document was created by an individual, indicate the person's name. If the SPDX document was created on behalf of a company or organization, indicate the entity name. If the SPDX document was created using a software tool, indicate the name and version for that tool. If multiple participants or tools were involved, use multiple instances of this field. 
Person name or organization name may be designated as “anonymous” if appropriate."@en ; + rdfs:domain spdx:CreationInfo ; + rdfs:range xsd:string ; + vs:term_status "stable" . + +dcterms:rightsHolder rdf:type rdf:Property ; + rdfs:comment "A person or organization owning or managing rights over the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Rights Holder"@en ; + dcam:rangeIncludes dcterms:Agent ; + dcterms:description "Recommended practice is to refer to the rights holder with a URI. If this is not possible or feasible, a literal value that identifies the rights holder may be provided."@en ; + dcterms:issued "2004-06-14"^^xsd:date . + +prov:activity rdf:type owl:ObjectProperty ; + rdfs:domain prov:ActivityInfluence ; + rdfs:isDefinedBy ; + rdfs:label "activity" ; + rdfs:range prov:Activity ; + rdfs:subPropertyOf prov:influencer ; + prov:category "qualified" ; + prov:editorialNote "This property behaves in spirit like rdf:object; it references the object of a prov:wasInfluencedBy triple."@en ; + prov:editorsDefinition "The prov:activity property references an prov:Activity which influenced a resource. This property applies to an prov:ActivityInfluence, which is given by a subproperty of prov:qualifiedInfluence from the influenced prov:Entity, prov:Activity or prov:Agent." ; + prov:inverse "activityOfInfluence" . + +spdx:Checksum rdf:type owl:Class ; + rdfs:comment "A Checksum is value that allows the contents of a file to be authenticated. Even small changes to the content of the file will change its checksum. 
This class allows the results of a variety of checksum and cryptographic message digest algorithms to be represented."@en ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onClass spdx:ChecksumAlgorithm ; + owl:onProperty spdx:algorithm ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:hexBinary ; + owl:onProperty spdx:checksumValue ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + vs:term_status "stable"@en . + +time:hasXSDDuration rdf:type owl:DatatypeProperty ; + rdfs:comment "Extensión de una entidad temporal, expresada utilizando xsd:duration."@es , "Extent of a temporal entity, expressed using xsd:duration"@en ; + rdfs:domain time:TemporalEntity ; + rdfs:label "has XSD duration"@en , "tiene duración XSD"@es ; + rdfs:range xsd:duration ; + skos:definition "Extensión de una entidad temporal, expresada utilizando xsd:duration."@es , "Extent of a temporal entity, expressed using xsd:duration"@en ; + skos:editorialNote "Característica arriesgada - añadida en la revisión de 2017, y todavía no ampliamente utilizada."@es , "Feature at risk - added in 2017 revision, and not yet widely used. "@en . + +prov:hadMember rdf:type owl:ObjectProperty ; + rdfs:domain prov:Collection ; + rdfs:isDefinedBy ; + rdfs:label "hadMember" ; + rdfs:range prov:Entity ; + rdfs:subPropertyOf prov:wasInfluencedBy ; + prov:category "expanded" ; + prov:component "expanded" ; + prov:inverse "wasMemberOf" ; + prov:sharesDefinitionWith prov:Collection . + +spdx:relationshipType_patchApplied + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "A Relationship of relationshipType_patchApplied expresses that the SPDXElement is a 'patchfile' that was applied and produced the relatedSPDXElement. For example, a .diff File relates to a specific file where the diff was applied."@en ; + vs:term_status "stable"@en . 
+ +spdx:referenceCategory_security + rdf:type owl:NamedIndividual , spdx:ReferenceCategory ; + vs:term_status "stable"@en . + +dcterms:LCC rdf:type dcam:VocabularyEncodingScheme ; + rdfs:comment "The set of conceptual resources specified by the Library of Congress Classification."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "LCC"@en ; + rdfs:seeAlso ; + dcterms:issued "2000-07-11"^^xsd:date . + +dcterms:LicenseDocument + rdf:type rdfs:Class ; + rdfs:comment "A legal document giving official permission to do something with a resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "License Document"@en ; + rdfs:subClassOf dcterms:RightsStatement ; + dcterms:issued "2008-01-14"^^xsd:date . + +prov:Revision rdf:type owl:Class ; + rdfs:comment "An instance of prov:Revision provides additional descriptions about the binary prov:wasRevisionOf relation from some newer prov:Entity to an earlier prov:Entity. For example, :draft_2 prov:wasRevisionOf :draft_1; prov:qualifiedRevision [ a prov:Revision; prov:entity :draft_1; :foo :bar ]."@en ; + rdfs:isDefinedBy ; + rdfs:label "Revision" ; + rdfs:subClassOf prov:Derivation ; + prov:category "qualified" ; + prov:component "derivations" ; + prov:definition "A revision is a derivation for which the resulting entity is a revised version of some original. The implication here is that the resulting entity contains substantial content from the original. Revision is a particular case of derivation."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-revision"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-Revision"^^xsd:anyURI ; + prov:unqualifiedForm prov:wasRevisionOf . + +spdx:AnnotationType rdf:type owl:Class ; + rdfs:comment "This type describes the type of annotation. Annotations are usually created when someone reviews the file, and if this is the case the annotation type should be REVIEW."@en ; + vs:term_status "stable"@en . 
+
+spdx:checksumValue rdf:type owl:DatatypeProperty , owl:FunctionalProperty ;
+        rdfs:comment "The checksumValue property provides a lower case hexadecimal encoded digest value produced using a specific algorithm."@en ;
+        rdfs:domain spdx:Checksum ;
+        rdfs:range xsd:hexBinary ;
+        vs:term_status "stable" .
+
+[ rdf:type owl:Axiom ;
+  rdfs:comment "Derivation is a particular case of trace (see http://www.w3.org/TR/prov-dm/#term-trace), since it links an entity to another entity that contributed to its existence." ;
+  owl:annotatedProperty rdfs:subPropertyOf ;
+  owl:annotatedSource prov:wasDerivedFrom ;
+  owl:annotatedTarget prov:wasInfluencedBy
+] .
+
+time:intervalFinishedBy
+        rdf:type owl:ObjectProperty ;
+        rdfs:comment "If a proper interval T1 is intervalFinishedBy another proper interval T2, then the beginning of T1 is before the beginning of T2, and the end of T1 is coincident with the end of T2."@en , "Si un intervalo propio T1 está terminado por otro intervalo propio T2, entonces el principio de T1 está antes que el principio de T2, y el final de T1 coincide con el final de T2."@es ;
+        rdfs:domain time:ProperInterval ;
+        rdfs:label "intervalo terminado por"@es , "interval finished by"@en ;
+        rdfs:range time:ProperInterval ;
+        owl:inverseOf time:intervalFinishes ;
+        skos:definition "Si un intervalo propio T1 está terminado por otro intervalo propio T2, entonces el principio de T1 está antes que el principio de T2, y el final de T1 coincide con el final de T2."@es , "If a proper interval T1 is intervalFinishedBy another proper interval T2, then the beginning of T1 is before the beginning of T2, and the end of T1 is coincident with the end of T2."@en .
+
+spdx:relationshipType_testToolOf
+        rdf:type owl:NamedIndividual , spdx:RelationshipType ;
+        rdfs:comment "Is to be used when SPDXRef-A is used as a test tool for SPDXRef-B."@en ;
+        vs:term_status "stable"@en .
+ +prov:qualifiedStart rdf:type owl:ObjectProperty ; + rdfs:comment "If this Activity prov:wasStartedBy Entity :e1, then it can qualify how it was started using prov:qualifiedStart [ a prov:Start; prov:entity :e1; :foo :bar ]."@en ; + rdfs:domain prov:Activity ; + rdfs:isDefinedBy ; + rdfs:label "qualifiedStart" ; + rdfs:range prov:Start ; + rdfs:subPropertyOf prov:qualifiedInfluence ; + prov:category "qualified" ; + prov:component "entities-activities" ; + prov:inverse "qualifiedStartOf" ; + prov:sharesDefinitionWith prov:Start ; + prov:unqualifiedForm prov:wasStartedBy . + + + rdf:type sh:NodeShape ; + sh:name "Checksum"@en ; + sh:property [ sh:datatype xsd:hexBinary ; + sh:maxCount 1 ; + sh:minCount 1 ; + sh:path spdx:checksumValue ; + sh:severity sh:Violation + ] ; + sh:property [ sh:hasValue spdx:checksumAlgorithm_sha1 ; + sh:maxCount 1 ; + sh:minCount 1 ; + sh:path spdx:algorithm ; + sh:severity sh:Violation + ] ; + sh:targetClass spdx:Checksum . + +prov:Influence rdf:type owl:Class ; + rdfs:comment "An instance of prov:Influence provides additional descriptions about the binary prov:wasInfluencedBy relation from some influenced Activity, Entity, or Agent to the influencing Activity, Entity, or Agent. For example, :stomach_ache prov:wasInfluencedBy :spoon; prov:qualifiedInfluence [ a prov:Influence; prov:entity :spoon; :foo :bar ] . Because prov:Influence is a broad relation, the more specific relations (Communication, Delegation, End, etc.) should be used when applicable."@en , "Because prov:Influence is a broad relation, its most specific subclasses (e.g. prov:Communication, prov:Delegation, prov:End, prov:Revision, etc.) 
should be used when applicable."@en ; + rdfs:isDefinedBy ; + rdfs:label "Influence" ; + prov:category "qualified" ; + prov:component "derivations" ; + prov:definition "Influence is the capacity of an entity, activity, or agent to have an effect on the character, development, or behavior of another by means of usage, start, end, generation, invalidation, communication, derivation, attribution, association, or delegation."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-influence"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-influence"^^xsd:anyURI ; + prov:unqualifiedForm prov:wasInfluencedBy . + +dcterms:conformsTo rdf:type rdf:Property ; + rdfs:comment "An established standard to which the described resource conforms."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Conforms To"@en ; + rdfs:subPropertyOf dc:relation , dcterms:relation ; + dcam:rangeIncludes dcterms:Standard ; + dcterms:issued "2001-05-21"^^xsd:date . + +spdx:licenseDeclared rdf:type owl:ObjectProperty ; + rdfs:comment "The licensing that the creators of the software in the package, or the packager, have declared. Declarations by the original software creator should be preferred, if they exist."@en ; + rdfs:domain spdx:SpdxItem ; + rdfs:range spdx:AnyLicenseInfo ; + vs:term_status "stable"@en . + +spdx:relationshipType_amendment + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "To be used when SPDXRef-A amends the SPDX information in SPDXRef-B."@en ; + vs:term_status "stable"@en . + +spdx:example rdf:type owl:DatatypeProperty ; + rdfs:comment "Text for examples in describing an SPDX element."@en ; + rdfs:domain spdx:LicenseException ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +spdx:AnyLicenseInfo rdf:type owl:Class ; + rdfs:comment "The AnyLicenseInfo class includes all resources that represent licensing information." 
; + rdfs:isDefinedBy "http://spdx.org/rdf/terms#AnyLicenseInfo" ; + vs:term_status "stable" . + +vcard:mailer rdf:type owl:DatatypeProperty ; + rdfs:comment "This data property has been deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "mailer"@en ; + owl:deprecated true . + +time:generalYear rdf:type rdfs:Datatype ; + rdfs:comment "Year number - formulated as a text string with a pattern constraint to reproduce the same lexical form as gYear, but not restricted to values from the Gregorian calendar. \nNote that the value-space is not defined, so a generic OWL2 processor cannot compute ordering relationships of values of this type."@en , "Número de año - formulado como una cadena de texto con una restricción patrón para reproducir la misma forma léxica que gYear, aunque no está restringido a valores del calendario gregoriano.\n Nótese que el espacio de valores no está definido, por tanto, un procesador genérico de OWL2 no puede computar relaciones de orden de valores de este tipo."@es ; + rdfs:label "Generalized year"@en , "Año generalizado"@es ; + owl:onDatatype xsd:string ; + owl:withRestrictions ( [ xsd:pattern "-?([1-9][0-9]{3,}|0[0-9]{3})(Z|(\\+|-)((0[0-9]|1[0-3]):[0-5][0-9]|14:00))?" ] + ) ; + skos:definition "Year number - formulated as a text string with a pattern constraint to reproduce the same lexical form as gYear, but not restricted to values from the Gregorian calendar. \nNote that the value-space is not defined, so a generic OWL2 processor cannot compute ordering relationships of values of this type."@en , "Número de año - formulado como una cadena de texto con una restricción patrón para reproducir la misma forma léxica que gYear, aunque no está restringido a valores del calendario gregoriano.\n Nótese que el espacio de valores no está definido, por tanto, un procesador genérico de OWL2 no puede computar relaciones de orden de valores de este tipo."@es . 
+ +skos:narrowerTransitive + rdf:type owl:ObjectProperty , owl:TransitiveProperty , rdf:Property ; + rdfs:isDefinedBy ; + rdfs:label "has narrower transitive"@en ; + rdfs:subPropertyOf skos:semanticRelation ; + owl:inverseOf skos:broaderTransitive ; + skos:definition "skos:narrowerTransitive is a transitive superproperty of skos:narrower." ; + skos:scopeNote "By convention, skos:narrowerTransitive is not used to make assertions. Rather, the properties can be used to draw inferences about the transitive closure of the hierarchical relation, which is useful e.g. when implementing a simple query expansion algorithm in a search application."@en . + +locn:postName rdf:type rdf:Property ; + rdfs:comment "The key postal division of the address, usually the city. (INSPIRE's definition is \"One or more names created and maintained for postal purposes to identify a subdivision of addresses and postal delivery points.\"). The domain of locn:postName is locn:Address."@en ; + rdfs:domain locn:Address ; + rdfs:isDefinedBy ; + rdfs:label "post name"@en ; + rdfs:range rdfs:Literal ; + dcterms:identifier "locn:postName" ; + vs:term_status "testing"@en . + +spdx:isOsiApproved rdf:type owl:DatatypeProperty ; + rdfs:comment "Indicates if the OSI has approved the license."@en ; + rdfs:domain spdx:License ; + rdfs:range xsd:boolean ; + vs:term_status "stable"@en . + +adms:interoperabilityLevel + rdf:type owl:ObjectProperty ; + rdfs:comment "The interoperability level for which the Asset is relevant."@en ; + rdfs:domain adms:Asset ; + rdfs:isDefinedBy ; + rdfs:label "interoperability level"@en ; + rdfs:range skos:Concept . + +time:Sunday rdf:type time:DayOfWeek ; + rdfs:label "Sunday"@en ; + skos:prefLabel "Zondag"@nl , "Sunday"@en , "Воскресенье"@ru , "Sonntag"@de , "Domingo"@es , "Domingo"@pt , "الأحد (يوم)"@ar , "Niedziela"@pl , "Dimanche"@fr , "Domenica"@it , "日曜日"@ja , "星期日"@zh . + +owl:qualifiedCardinality + rdf:type owl:AnnotationProperty . 
+ +vcard:prodid rdf:type owl:DatatypeProperty ; + rdfs:comment "To specify the identifier for the product that created the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "product id"@en ; + rdfs:range xsd:string . + +spdx:CrossRef rdf:type owl:Class ; + rdfs:comment "Cross reference details for the a URL reference"@en ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:match + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:anyURI ; + owl:onProperty spdx:isWayBackLink + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:boolean ; + owl:onProperty spdx:isValid + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:boolean ; + owl:onProperty spdx:isLive + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:anyURI ; + owl:onProperty spdx:url ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:nonNegativeInteger ; + owl:onProperty spdx:order + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:dateTime ; + owl:onProperty spdx:timestamp + ] ; + vs:term_status "stable"@en . 
+ +dcat:record rdf:type owl:ObjectProperty , rdf:Property ; + rdfs:comment "Συνδέει έναν κατάλογο με τις καταγραφές του."@el , "Un record che descrive la registrazione di un singolo set di dati o di un servizio dati che fa parte del catalogo."@it , "Relie un catalogue à ses registres."@fr , "Propojuje katalog a jeho záznamy."@cs , "تربط الفهرس بسجل ضمنه"@ar , "Záznam popisující registraci jedné datové sady či datové služby jakožto součásti katalogu."@cs , "Describe la registración de un conjunto de datos o un servicio de datos en el catálogo."@es , "En post der beskriver registreringen af et enkelt datasæt eller en datatjeneste som er opført i kataloget."@da , "A record describing the registration of a single dataset or data service that is part of the catalog."@en , "カタログの一部であるカタログ・レコード。"@ja ; + rdfs:domain dcat:Catalog ; + rdfs:isDefinedBy ; + rdfs:label "カタログ・レコード"@ja , "سجل"@ar , "registre"@fr , "záznam"@cs , "record"@en , "record"@it , "registro"@es , "καταγραφή"@el , "post"@da ; + rdfs:range dcat:CatalogRecord ; + skos:altLabel "har post"@da ; + skos:definition "Propojuje katalog a jeho záznamy."@cs , "Relie un catalogue à ses registres."@fr , "Záznam popisující registraci jedné datové sady či datové služby jakožto součásti katalogu."@cs , "En post der beskriver registreringen af et enkelt datasæt eller en datatjeneste som er opført i kataloget."@da , "Un record che descrive la registrazione di un singolo set di dati o di un servizio dati che fa parte del catalogo."@it , "Συνδέει έναν κατάλογο με τις καταγραφές του."@el , "カタログの一部であるカタログ・レコード。"@ja , "Describe la registración de un conjunto de datos o un servicio de datos en el catálogo."@es , "تربط الفهرس بسجل ضمنه"@ar , "A record describing the registration of a single dataset or data service that is part of the catalog."@en ; + skos:editorialNote "Status: English, Italian, Spanish and Czech Definitions modified by DCAT revision team, other translations pending."@en . 
+ +time:unitType rdf:type owl:ObjectProperty ; + rdfs:comment "The temporal unit which provides the precision of a date-time value or scale of a temporal extent"@en , "La unidad de tiempo que proporciona la precisión de un valor fecha-hora o la escala de una extensión temporal."@es ; + rdfs:domain [ rdf:type owl:Class ; + owl:unionOf ( time:GeneralDateTimeDescription time:Duration ) + ] ; + rdfs:label "temporal unit type"@en , "tipo de unidad temporal"@es ; + rdfs:range time:TemporalUnit . + +prov:generatedAtTime rdf:type owl:DatatypeProperty ; + rdfs:comment "The time at which an entity was completely created and is available for use."@en ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "generatedAtTime" ; + rdfs:range xsd:dateTime ; + prov:category "expanded" ; + prov:component "entities-activities" ; + prov:editorialNote "It is the intent that the property chain holds: (prov:qualifiedGeneration o prov:atTime) rdfs:subPropertyOf prov:generatedAtTime."@en ; + prov:qualifiedForm prov:atTime , prov:Generation . + +spdx:supplier rdf:type owl:DatatypeProperty ; + rdfs:comment "The name and, optionally, contact information of the person or organization who was the immediate supplier of this package to the recipient. The supplier may be different than originator when the software has been repackaged. Values of this property must conform to the agent and tool syntax."@en ; + rdfs:domain spdx:Package ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +spdx:licenseTextHtml rdf:type owl:DatatypeProperty ; + rdfs:comment "License text in HTML format"@en ; + rdfs:domain spdx:ListedLicense ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +dcterms:NLM rdf:type dcam:VocabularyEncodingScheme ; + rdfs:comment "The set of conceptual resources specified by the National Library of Medicine Classification."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "NLM"@en ; + rdfs:seeAlso ; + dcterms:issued "2005-06-13"^^xsd:date . 
+ +dcat:contactPoint rdf:type rdf:Property , owl:ObjectProperty ; + rdfs:comment "Relevante kontaktoplysninger for den katalogiserede ressource. Anvendelse af vCard anbefales."@da , "Relevant contact information for the catalogued resource. Use of vCard is recommended."@en , "Información relevante de contacto para el recurso catalogado. Se recomienda el uso de vCard."@es , "Relie un jeu de données à une information de contact utile en utilisant VCard."@fr , "Informazioni di contatto rilevanti per la risorsa catalogata. Si raccomanda l'uso di vCard."@it , "データセットを、VCardを用いて提供されている適切な連絡先情報にリンクします。"@ja , "تربط قائمة البيانات بعنوان اتصال موصف باستخدام VCard"@ar , "Συνδέει ένα σύνολο δεδομένων με ένα σχετικό σημείο επικοινωνίας, μέσω VCard."@el , "Relevantní kontaktní informace pro katalogizovaný zdroj. Doporučuje se použít slovník VCard."@cs ; + rdfs:isDefinedBy ; + rdfs:label "point de contact"@fr , "عنوان اتصال"@ar , "窓口"@ja , "contact point"@en , "σημείο επικοινωνίας"@el , "kontaktní bod"@cs , "punto di contatto"@it , "Punto de contacto"@es , "kontaktpunkt"@da ; + rdfs:range vcard:Kind ; + skos:definition "Relie un jeu de données à une information de contact utile en utilisant VCard."@fr , "Informazioni di contatto rilevanti per la risorsa catalogata. Si raccomanda l'uso di vCard."@it , "Συνδέει ένα σύνολο δεδομένων με ένα σχετικό σημείο επικοινωνίας, μέσω VCard."@el , "Relevant contact information for the catalogued resource. Use of vCard is recommended."@en , "Información relevante de contacto para el recurso catalogado. Se recomienda el uso de vCard."@es , "データセットを、VCardを用いて提供されている適切な連絡先情報にリンクします。"@ja , "Relevante kontaktoplysninger for den katalogiserede ressource. Anvendelse af vCard anbefales."@da , "تربط قائمة البيانات بعنوان اتصال موصف باستخدام VCard"@ar , "Relevantní kontaktní informace pro katalogizovaný zdroj. 
Doporučuje se použít slovník VCard."@cs ; + skos:editorialNote "Status: English Definition text modified by DCAT revision team, Italian, Spanish and Czech translations provided, other translations pending."@en . + +spdx:relationshipType_buildDependencyOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A is a build dependency of SPDXRef-B."@en ; + vs:term_status "stable"@en . + + + rdf:type owl:Ontology ; + rdfs:comment "DCAT er et RDF-vokabular som har til formål at understøtte interoperabilitet mellem datakataloger udgivet på nettet. Ved at anvende DCAT til at beskrive datasæt i datakataloger, kan udgivere øge findbarhed og gøre det gøre det lettere for applikationer at anvende metadata fra forskellige kataloger. Derudover understøttes decentraliseret udstilling af kataloger og fødererede datasætsøgninger på tværs af websider. Aggregerede DCAT-metadata kan fungere som fortegnelsesfiler der kan understøtte digital bevaring. DCAT er defineret på http://www.w3.org/TR/vocab-dcat/. Enhver forskel mellem det normative dokument og dette schema er en fejl i dette schema."@da , "DCAT est un vocabulaire développé pour faciliter l'interopérabilité entre les jeux de données publiées sur le Web. En utilisant DCAT pour décrire les jeux de données dans les catalogues de données, les fournisseurs de données augmentent leur découverte et permettent que les applications facilement les métadonnées de plusieurs catalogues. Il permet en plus la publication décentralisée des catalogues et facilitent la recherche fédérée des données entre plusieurs sites. Les métadonnées DCAT aggrégées peuvent servir comme un manifeste pour faciliter la préservation digitale des ressources. DCAT est définie à l'adresse http://www.w3.org/TR/vocab-dcat/. 
Une quelconque version de ce document normatif et ce vocabulaire est une erreur dans ce vocabulaire."@fr , "DCAT je RDF slovník navržený pro zprostředkování interoperability mezi datovými katalogy publikovanými na Webu. Poskytovatelé dat používáním slovníku DCAT pro popis datových sad v datových katalozích zvyšují jejich dohledatelnost a umožňují aplikacím konzumovat metadata z více katalogů. Dále je umožňena decentralizovaná publikace katalogů a federované dotazování na datové sady napříč katalogy. Agregovaná DCAT metadata mohou také sloužit jako průvodka umožňující digitální uchování informace. DCAT je definován na http://www.w3.org/TR/vocab-dcat/. Jakýkoliv nesoulad mezi odkazovaným dokumentem a tímto schématem je chybou v tomto schématu."@cs , "هي أنطولوجية تسهل تبادل البيانات بين مختلف الفهارس على الوب. استخدام هذه الأنطولوجية يساعد على اكتشاف قوائم البيانات المنشورة على الوب و يمكن التطبيقات المختلفة من الاستفادة أتوماتيكيا من البيانات المتاحة من مختلف الفهارس."@ar , "DCATは、ウェブ上で公開されたデータ・カタログ間の相互運用性の促進を目的とするRDFの語彙です。このドキュメントでは、その利用のために、スキーマを定義し、例を提供します。データ・カタログ内のデータセットを記述するためにDCATを用いると、公開者が、発見可能性を増加させ、アプリケーションが複数のカタログのメタデータを容易に利用できるようになります。さらに、カタログの分散公開を可能にし、複数のサイトにまたがるデータセットの統合検索を促進します。集約されたDCATメタデータは、ディジタル保存を促進するためのマニフェスト・ファイルとして使用できます。"@ja , "DCAT is an RDF vocabulary designed to facilitate interoperability between data catalogs published on the Web. By using DCAT to describe datasets in data catalogs, publishers increase discoverability and enable applications easily to consume metadata from multiple catalogs. It further enables decentralized publishing of catalogs and facilitates federated dataset search across sites. Aggregated DCAT metadata can serve as a manifest file to facilitate digital preservation. DCAT is defined at http://www.w3.org/TR/vocab-dcat/. 
Any variance between that normative document and this schema is an error in this schema."@en , "DCAT es un vocabulario RDF diseñado para facilitar la interoperabilidad entre catálogos de datos publicados en la Web. Utilizando DCAT para describir datos disponibles en catálogos se aumenta la posibilidad de que sean descubiertos y se permite que las aplicaciones consuman fácilmente los metadatos de varios catálogos."@es , "Το DCAT είναι ένα RDF λεξιλόγιο που σχεδιάσθηκε για να κάνει εφικτή τη διαλειτουργικότητα μεταξύ καταλόγων δεδομένων στον Παγκόσμιο Ιστό. Χρησιμοποιώντας το DCAT για την περιγραφή συνόλων δεδομένων, οι εκδότες αυτών αυξάνουν την ανακαλυψιμότητα και επιτρέπουν στις εφαρμογές την εύκολη κατανάλωση μεταδεδομένων από πολλαπλούς καταλόγους. Επιπλέον, δίνει τη δυνατότητα για αποκεντρωμένη έκδοση και διάθεση καταλόγων και επιτρέπει δυνατότητες ενοποιημένης αναζήτησης μεταξύ διαφορετικών πηγών. Συγκεντρωτικά μεταδεδομένα που έχουν περιγραφεί με το DCAT μπορούν να χρησιμοποιηθούν σαν ένα δηλωτικό αρχείο (manifest file) ώστε να διευκολύνουν την ψηφιακή συντήρηση."@el , "DCAT è un vocabolario RDF progettato per facilitare l'interoperabilità tra i cataloghi di dati pubblicati nel Web. Utilizzando DCAT per descrivere i dataset nei cataloghi di dati, i fornitori migliorano la capacità di individuazione dei dati e abilitano le applicazioni al consumo di dati provenienti da cataloghi differenti. DCAT permette di decentralizzare la pubblicazione di cataloghi e facilita la ricerca federata dei dataset. L'aggregazione dei metadati federati può fungere da file manifesto per facilitare la conservazione digitale. DCAT è definito all'indirizzo http://www.w3.org/TR/vocab-dcat/. 
Qualsiasi scostamento tra tale definizione normativa e questo schema è da considerarsi un errore di questo schema."@it ; + rdfs:label "Slovník pro datové katalogy"@cs , "Il vocabolario del catalogo dei dati"@it , "Le vocabulaire des jeux de données"@fr , "El vocabulario de catálogo de datos"@es , "Datakatalogvokabular"@da , "أنطولوجية فهارس قوائم البيانات"@ar , "Το λεξιλόγιο των καταλόγων δεδομένων"@el , "データ・カタログ語彙(DCAT)"@ja , "The data catalog vocabulary"@en ; + dcterms:contributor [ rdfs:seeAlso ; + sdo:affiliation [ foaf:homepage ; + foaf:name "Science and Technology Facilities Council, UK" + ] ; + foaf:homepage ; + foaf:name "Alejandra Gonzalez-Beltran" + ] ; + dcterms:contributor [ rdfs:seeAlso ; + foaf:homepage ; + foaf:name "Jakub Klímek" + ] ; + dcterms:contributor [ foaf:name "Martin Alvarez-Espinar" ] ; + dcterms:contributor [ foaf:name "Richard Cyganiak" ] ; + dcterms:contributor [ rdfs:seeAlso ; + sdo:affiliation ; + foaf:homepage ; + foaf:name "Phil Archer" + ] ; + dcterms:contributor [ rdfs:seeAlso ; + foaf:homepage ; + foaf:name "Makx Dekkers" + ] ; + dcterms:contributor [ sdo:affiliation [ foaf:homepage ; + foaf:name "Refinitiv" + ] ; + foaf:name "David Browning" + ] ; + dcterms:contributor [ sdo:affiliation [ foaf:homepage ; + foaf:name "Open Knowledge Foundation" + ] ; + foaf:name "Rufus Pollock" + ] ; + dcterms:contributor [ rdfs:seeAlso ; + foaf:homepage , ; + foaf:name "Riccardo Albertoni" + ] ; + dcterms:contributor [ foaf:homepage ; + foaf:name "Shuji Kamitsuna" + ] ; + dcterms:contributor [ rdfs:seeAlso ; + foaf:name "Ghislain Auguste Atemezing" + ] ; + dcterms:contributor [ foaf:name "Boris Villazón-Terrazas" ] ; + dcterms:contributor [ rdf:type foaf:Person ; + rdfs:seeAlso ; + sdo:affiliation [ foaf:homepage ; + foaf:name "Commonwealth Scientific and Industrial Research Organisation" + ] ; + foaf:name "Simon J D Cox" ; + foaf:workInfoHomepage + ] ; + dcterms:contributor [ foaf:name "Marios Meimaris" ] ; + dcterms:contributor [ 
rdfs:seeAlso ; + foaf:homepage ; + foaf:name "Andrea Perego" + ] ; + dcterms:contributor [ sdo:affiliation [ foaf:homepage ; + foaf:name "European Commission, DG DIGIT" + ] ; + foaf:name "Vassilios Peristeras" + ] ; + dcterms:creator [ foaf:name "John Erickson" ] ; + dcterms:creator [ rdfs:seeAlso ; + foaf:name "Fadi Maali" + ] ; + dcterms:license ; + dcterms:modified "2013-09-20"^^xsd:date , "2020-11-30"^^xsd:date , "2019" , "2012-04-24"^^xsd:date , "2021-09-14"^^xsd:date , "2013-11-28"^^xsd:date , "2017-12-19"^^xsd:date ; + owl:imports , , dcterms: ; + owl:versionInfo "Questa è una copia aggiornata del vocabolario DCAT v2.0 disponibile in https://www.w3.org/ns/dcat.ttl"@it , "This is an updated copy of v2.0 of the DCAT vocabulary, taken from https://www.w3.org/ns/dcat.ttl"@en , "Esta es una copia del vocabulario DCAT v2.0 disponible en https://www.w3.org/ns/dcat.ttl"@es , "Dette er en opdateret kopi af DCAT v. 2.0 som er tilgænglig på https://www.w3.org/ns/dcat.ttl"@da , "Toto je aktualizovaná kopie slovníku DCAT verze 2.0, převzatá z https://www.w3.org/ns/dcat.ttl"@cs ; + skos:editorialNote "English language definitions updated in this revision in line with ED. Multilingual text unevenly updated."@en ; + foaf:maker [ foaf:homepage ; + foaf:name "Government Linked Data WG" + ] . 
+ +dcat:qualifiedRelation + rdf:type owl:ObjectProperty ; + rdfs:comment "Odkaz na popis vztahu s jiným zdrojem."@cs , "Link a una descrizione di una relazione con un'altra risorsa."@it , "Link to a description of a relationship with another resource."@en , "Enlace a una descripción de la relación con otro recurso."@es , "Reference til en beskrivelse af en relation til en anden ressource."@da ; + rdfs:domain dcat:Resource ; + rdfs:label "qualified relation"@en , "relazione qualificata"@it , "Kvalificeret relation"@da , "relación calificada"@es , "kvalifikovaný vztah"@cs ; + rdfs:range dcat:Relationship ; + skos:changeNote "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs , "New property added in DCAT 2.0."@en , "Propiedad nueva añadida en DCAT 2.0."@es , "Ny egenskab tilføjet i DCAT 2.0."@da , "Nuova proprietà aggiunta in DCAT 2.0."@it ; + skos:definition "Odkaz na popis vztahu s jiným zdrojem."@cs , "Reference til en beskrivelse af en relation til en anden ressource."@da , "Link a una descrizione di una relazione con un'altra risorsa."@it , "Enlace a una descripción de la relación con otro recurso."@es , "Link to a description of a relationship with another resource."@en ; + skos:editorialNote "Introduced into DCAT to complement the other PROV qualified relations. "@en , "Přidáno do DCAT k doplnění jiných kvalifikovaných vztahů ze slovníku PROV."@cs , "Se incluyó en DCAT para complementar las relaciones calificadas disponibles en PROV."@es , "Introdotta in DCAT per integrare le altre relazioni qualificate di PROV."@it , "Introduceret i DCAT med henblik på at supplere de øvrige kvalificerede relationer fra PROV. 
"@da ; + skos:scopeNote "Used to link to another resource where the nature of the relationship is known but does not match one of the standard Dublin Core properties (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat, dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:requires, dct:isRequiredBy) or PROV-O properties (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom, prov:wasRevisionOf, prov:hadPrimarySource, prov:alternateOf, prov:specializationOf)."@en , "Anvendes til at referere til en anden ressource hvor relationens betydning er kendt men ikke matcher en af de standardiserede egenskaber fra Dublin Core (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat, dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:requires, dct:isRequiredBy) eller PROV-O-egenskaber (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom, prov:wasRevisionOf, prov:hadPrimarySource, prov:alternateOf, prov:specializationOf)."@da , "Viene utilizzato per associarsi a un'altra risorsa nei casi per i quali la natura della relazione è nota ma non è alcuna delle proprietà fornite dallo standard Dublin Core (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat , dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:require, dct:isRequiredBy) o dalle proprietà fornite da PROV-O (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom , prov:wasRevisionOf, prov:hadPrimarySource, prov:alternateOf, prov:specializationOf)."@it , "Použito pro odkazování na jiný zdroj, kde druh vztahu je znám, ale neodpovídá standardním vlastnostem ze slovníku Dublin Core (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat, dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:requires, dct:isRequiredBy) 
či slovníku PROV-O (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom, prov:wasRevisionOf, prov:hadPrimarySource, prov:alternateOf, prov:specializationOf)."@cs , "Se usa para asociar con otro recurso para el cuál la naturaleza de la relación es conocida pero no es ninguna de las propiedades que provee el estándar Dublin Core (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat, dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:requires, dct:isRequiredBy) or PROV-O properties (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom, prov:wasRevisionOf, prov:hadPrimarySource, prov:alternateOf, prov:specializationOf)."@es . + +time:hasTime rdf:type owl:ObjectProperty ; + rdfs:comment "Proporciona soporte a la asociación de una entidad temporal (instante o intervalo) a cualquier cosa."@es , "Supports the association of a temporal entity (instant or interval) to any thing"@en ; + rdfs:label "tiene tiempo"@es , "has time"@en ; + rdfs:range time:TemporalEntity ; + skos:definition "Proporciona soporte a la asociación de una entidad temporal (instante o intervalo) a cualquier cosa."@es , "Supports the association of a temporal entity (instant or interval) to any thing"@en ; + skos:editorialNote "Feature at risk - added in 2017 revision, and not yet widely used. "@en , "Característica arriesgada -añadida en la revisión del 2017 que no ha sido todavía utilizada de forma amplia."@es . + +spdx:licenseInfoInSnippet + rdf:type owl:ObjectProperty ; + rdfs:comment "Licensing information that was discovered directly in the subject snippet. 
This is also considered a declared license for the snippet.\n\nIf the licenseInfoInSnippet field is not present for a snippet, it implies an equivalent meaning to NOASSERTION."@en ; + rdfs:domain spdx:Snippet ; + rdfs:range [ rdf:type owl:Class ; + owl:unionOf ( spdx:AnyLicenseInfo + [ rdf:type owl:Restriction ; + owl:hasValue spdx:noassertion ; + owl:onProperty spdx:licenseInfoInFile + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:none ; + owl:onProperty spdx:licenseInfoInFile + ] + ) + ] ; + rdfs:subPropertyOf spdx:licenseInfoFromFiles ; + vs:term_status "stable"@en . + + + rdf:type sh:NodeShape ; + rdfs:comment "the union of Catalog, Dataset and DataService" ; + rdfs:label "dcat:Resource" ; + sh:message "The node is either a Catalog, Dataset or a DataService" ; + sh:or ( [ sh:class dcat:Catalog ] + [ sh:class dcat:Dataset ] + [ sh:class dcat:DataService ] + ) . + +spdx:annotationType_other + rdf:type owl:NamedIndividual , spdx:AnnotationType ; + rdfs:comment "Type of annotation which does not fit in any of the pre-defined annotation types."@en ; + vs:term_status "stable"@en . + +prov: rdf:type owl:Ontology . + +vcard:note rdf:type owl:DatatypeProperty ; + rdfs:comment "A note associated with the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "note"@en ; + rdfs:range xsd:string . + +skos:semanticRelation + rdf:type owl:ObjectProperty , rdf:Property ; + rdfs:domain skos:Concept ; + rdfs:isDefinedBy ; + rdfs:label "is in semantic relation with"@en ; + rdfs:range skos:Concept ; + skos:definition "Links a concept to a concept related by meaning."@en ; + skos:scopeNote "This property should not be used directly, but as a super-property for all properties denoting a relationship of meaning between concepts."@en . + + + rdf:type owl:DatatypeProperty ; + rdfs:domain ; + rdfs:range xsd:positiveInteger ; + vs:term_status "stable"@en . 
+ +prov:Quotation rdf:type owl:Class ; + rdfs:comment "An instance of prov:Quotation provides additional descriptions about the binary prov:wasQuotedFrom relation from some taken prov:Entity from an earlier, larger prov:Entity. For example, :here_is_looking_at_you_kid prov:wasQuotedFrom :casablanca_script; prov:qualifiedQuotation [ a prov:Quotation; prov:entity :casablanca_script; :foo :bar ]."@en ; + rdfs:isDefinedBy ; + rdfs:label "Quotation" ; + rdfs:subClassOf prov:Derivation ; + prov:category "qualified" ; + prov:component "derivations" ; + prov:definition "A quotation is the repeat of (some or all of) an entity, such as text or image, by someone who may or may not be its original author. Quotation is a particular case of derivation."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-quotation"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-quotation"^^xsd:anyURI ; + prov:unqualifiedForm prov:wasQuotedFrom . + +prov:wasGeneratedBy rdf:type owl:ObjectProperty ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "wasGeneratedBy" ; + rdfs:range prov:Activity ; + rdfs:subPropertyOf prov:wasInfluencedBy ; + owl:propertyChainAxiom ( prov:qualifiedGeneration prov:activity ) ; + owl:propertyChainAxiom ( prov:qualifiedGeneration prov:activity ) ; + prov:category "starting-point" ; + prov:component "entities-activities" ; + prov:inverse "generated" ; + prov:qualifiedForm prov:Generation , prov:qualifiedGeneration . + +dcterms:title rdf:type rdf:Property ; + rdfs:comment "A name given to the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Title"@en ; + rdfs:range rdfs:Literal ; + rdfs:subPropertyOf dc:title ; + dcterms:issued "2008-01-14"^^xsd:date . + +foaf:homepage rdf:type owl:ObjectProperty ; + rdfs:comment "This axiom needed so that Protege loads DCAT2 without errors." . 
+ +spdx:fileDependency rdf:type owl:ObjectProperty ; + rdfs:comment "This field is deprecated since SPDX 2.0 in favor of using Section 7 which provides more granularity about relationships."@en ; + rdfs:domain spdx:File ; + rdfs:range spdx:File ; + owl:deprecated true ; + vs:term_status "deprecated"@en . + +vcard:hasCalendarBusy + rdf:type owl:ObjectProperty ; + rdfs:comment "To specify the busy time associated with the object. (Was called FBURL in RFC6350)"@en ; + rdfs:isDefinedBy ; + rdfs:label "has calendar busy"@en . + +spdx:purpose_source rdf:type owl:NamedIndividual , spdx:Purpose ; + rdfs:comment "The package is a collection of source files."@en ; + vs:term_status "stable"@en . + +spdx:hasExtractedLicensingInfo + rdf:type owl:ObjectProperty ; + rdfs:comment "Indicates that a particular ExtractedLicensingInfo was defined in the subject SpdxDocument."@en ; + rdfs:domain spdx:SpdxDocument ; + rdfs:range spdx:ExtractedLicensingInfo ; + vs:term_status "stable"@en . + +spdx:purpose_application + rdf:type owl:NamedIndividual , spdx:Purpose ; + rdfs:comment "The package is a software application."@en ; + vs:term_status "stable"@en . + +dcterms:coverage rdf:type rdf:Property ; + rdfs:comment "The spatial or temporal topic of the resource, spatial applicability of the resource, or jurisdiction under which the resource is relevant."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Coverage"@en ; + rdfs:subPropertyOf dc:coverage ; + dcam:rangeIncludes dcterms:Location , dcterms:Period , dcterms:Jurisdiction ; + dcterms:description "Spatial topic and spatial applicability may be a named place or a location specified by its geographic coordinates. Temporal topic may be a named period, date, or date range. A jurisdiction may be a named administrative entity or a geographic place to which the resource applies. 
Recommended practice is to use a controlled vocabulary such as the Getty Thesaurus of Geographic Names [[TGN](https://www.getty.edu/research/tools/vocabulary/tgn/index.html)]. Where appropriate, named places or time periods may be used in preference to numeric identifiers such as sets of coordinates or date ranges. Because coverage is so broadly defined, it is preferable to use the more specific subproperties Temporal Coverage and Spatial Coverage."@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +dcterms:date rdf:type rdf:Property ; + rdfs:comment "A point or period of time associated with an event in the lifecycle of the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Date"@en ; + rdfs:range rdfs:Literal ; + rdfs:subPropertyOf dc:date ; + dcterms:description "Date may be used to express temporal information at any level of granularity. Recommended practice is to express the date, date/time, or period of time according to ISO 8601-1 [[ISO 8601-1](https://www.iso.org/iso-8601-date-and-time-format.html)] or a published profile of the ISO standard, such as the W3C Note on Date and Time Formats [[W3CDTF](https://www.w3.org/TR/NOTE-datetime)] or the Extended Date/Time Format Specification [[EDTF](http://www.loc.gov/standards/datetime/)]. If the full date is unknown, month and year (YYYY-MM) or just year (YYYY) may be used. Date ranges may be specified using ISO 8601 period of time specification in which start and end dates are separated by a '/' (slash) character. Either the start or end date may be missing."@en ; + dcterms:issued "2008-01-14"^^xsd:date . 
+ +time:unitYear rdf:type time:TemporalUnit ; + rdfs:label "Year (unit of temporal duration)"@en ; + skos:prefLabel "anno"@it , "سنة واحدة"@ar , "jaar"@nl , "一年"@zh , "Jahr"@de , "один год"@ru , "un año"@es , "year"@en , "an"@fr , "1 년"@kr , "1年"@jp , "ano"@pt , "rok"@pl ; + time:days "0"^^xsd:decimal ; + time:hours "0"^^xsd:decimal ; + time:minutes "0"^^xsd:decimal ; + time:months "0"^^xsd:decimal ; + time:seconds "0"^^xsd:decimal ; + time:weeks "0"^^xsd:decimal ; + time:years "1"^^xsd:decimal . + +spdx:fileType_image rdf:type owl:NamedIndividual , spdx:FileType ; + rdfs:comment "The file is assoicated with an picture image file (MIME type of image/*, ie. .jpg, .gif )."@en ; + vs:term_status "stable"@en . + +prov:wasAttributedTo rdf:type owl:ObjectProperty ; + rdfs:comment "Attribution is the ascribing of an entity to an agent."@en ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "wasAttributedTo" ; + rdfs:range prov:Agent ; + rdfs:subPropertyOf prov:wasInfluencedBy ; + owl:propertyChainAxiom ( prov:qualifiedAttribution prov:agent ) ; + owl:propertyChainAxiom ( prov:qualifiedAttribution prov:agent ) ; + prov:category "starting-point" ; + prov:component "agents-responsibility" ; + prov:definition "Attribution is the ascribing of an entity to an agent."@en ; + prov:inverse "contributed" ; + prov:qualifiedForm prov:Attribution , prov:qualifiedAttribution . 
+ +dcat:hadRole rdf:type owl:ObjectProperty ; + rdfs:comment "La función de una entidad o agente con respecto a otra entidad o recurso."@es , "La funzione di un'entità o un agente rispetto ad un'altra entità o risorsa."@it , "Den funktion en entitet eller aktør har i forhold til en anden ressource."@da , "Funkce entity či agenta ve vztahu k jiné entitě či zdroji."@cs , "The function of an entity or agent with respect to another entity or resource."@en ; + rdfs:domain [ rdf:type owl:Class ; + owl:unionOf ( prov:Attribution dcat:Relationship ) + ] ; + rdfs:label "tiene rol"@es , "sehraná role"@cs , "hadRole"@en , "havde rolle"@da , "haRuolo"@it ; + rdfs:range dcat:Role ; + skos:changeNote "New property added in DCAT 2.0."@en , "Nueva propiedad agregada en DCAT 2.0."@es , "Nuova proprietà aggiunta in DCAT 2.0."@it , "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs ; + skos:definition "Den funktion en entitet eller aktør har i forhold til en anden ressource."@da , "La funzione di un'entità o un agente rispetto ad un'altra entità o risorsa."@it , "The function of an entity or agent with respect to another entity or resource."@en , "La función de una entidad o agente con respecto a otra entidad o recurso."@es , "Funkce entity či agenta ve vztahu k jiné entitě či zdroji."@cs ; + skos:editorialNote "Přidáno do DCAT pro doplnění vlastnosti prov:hadRole (jejíž užití je omezeno na role v kontextu aktivity, s definičním oborem prov:Association)."@cs , "Introduceret i DCAT for at supplere prov:hadRole (hvis anvendelse er begrænset til roller i forbindelse med en aktivitet med domænet prov:Association)."@da , "Introduced into DCAT to complement prov:hadRole (whose use is limited to roles in the context of an activity, with the domain of prov:Association."@en , "Introdotta in DCAT per completare prov:hadRole (il cui uso è limitato ai ruoli nel contesto di un'attività, con il dominio di prov:Association."@it , "Agregada en DCAT para complementar prov:hadRole (cuyo uso está limitado 
a roles en el contexto de una actividad, con dominio prov:Association."@es ; + skos:scopeNote "Può essere utilizzata in una relazione qualificata per specificare il ruolo di un'entità rispetto a un'altra entità. Si raccomanda che il valore sia preso da un vocabolario controllato di ruoli di entità come ISO 19115 DS_AssociationTypeCode http://registry.it.csiro.au/def/isotc211/DS_AssociationTypeCode, IANA Registry of Link Relations https://www.iana.org/assignments/link-relation, DataCite metadata schema, o MARC relators https://id.loc.gov/vocabulary/relators."@it , "Může být použito v kvalifikovaném vztahu pro specifikaci role Entity ve vztahu k jiné Entitě. Je doporučeno použít hodnotu z řízeného slovníku rolí entit, jako například ISO 19115 DS_AssociationTypeCode http://registry.it.csiro.au/def/isotc211/DS_AssociationTypeCode, IANA Registry of Link Relations https://www.iana.org/assignments/link-relation, DataCite metadata schema, MARC relators https://id.loc.gov/vocabulary/relators."@cs , "May be used in a qualified-relation to specify the role of an Entity with respect to another Entity. It is recommended that the value be taken from a controlled vocabulary of entity roles such as: ISO 19115 DS_AssociationTypeCode http://registry.it.csiro.au/def/isotc211/DS_AssociationTypeCode; IANA Registry of Link Relations https://www.iana.org/assignments/link-relation; DataCite metadata schema; MARC relators https://id.loc.gov/vocabulary/relators."@en , "May be used in a qualified-attribution to specify the role of an Agent with respect to an Entity. It is recommended that the value be taken from a controlled vocabulary of agent roles, such as http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@en , "Může být použito v kvalifikovaném přiřazení pro specifikaci role Agenta ve vztahu k Entitě. 
Je doporučeno hodnotu vybrat z řízeného slovníku rolí agentů, jako například http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@cs , "Può essere utilizzato in un'attribuzione qualificata per specificare il ruolo di un agente rispetto a un'entità. Si raccomanda che il valore sia preso da un vocabolario controllato di ruoli di agente, come ad esempio http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@it , "Puede usarse en una atribución cualificada para especificar el rol de un Agente con respecto a una Entidad. Se recomienda que el valor sea de un vocabulario controlado de roles de agentes, como por ejemplo http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@es , "Kan vendes ved kvalificerede krediteringer til at angive en aktørs rolle i forhold en entitet. Det anbefales at værdierne styres som et kontrolleret udfaldsrum med aktørroller, såsom http://registry.it.csiro.au/def/isotc211/CI_RoleCode."@da , "Puede usarse en una atribución cualificada para especificar el rol de una Entidad con respecto a otra Entidad. Se recomienda que su valor se tome de un vocabulario controlado de roles de entidades como por ejemplo: ISO 19115 DS_AssociationTypeCode http://registry.it.csiro.au/def/isotc211/DS_AssociationTypeCode; IANA Registry of Link Relations https://www.iana.org/assignments/link-relation; esquema de metadatos de DataCite; MARC relators https://id.loc.gov/vocabulary/relators."@es . + +xsd:dateTimeStamp rdfs:label "sello de tiempo"@es . + +spdx:noassertion rdf:type owl:NamedIndividual ; + rdfs:comment "Individual to indicate the creator of the SPDX document does not assert any value for the object." . + +spdx:purpose_container + rdf:type owl:NamedIndividual , spdx:Purpose ; + rdfs:comment "The package refers to a container image which can be used by a container runtime application."@en ; + vs:term_status "stable"@en . 
+ +prov:PrimarySource rdf:type owl:Class ; + rdfs:comment "An instance of prov:PrimarySource provides additional descriptions about the binary prov:hadPrimarySource relation from some secondary prov:Entity to an earlier, primary prov:Entity. For example, :blog prov:hadPrimarySource :newsArticle; prov:qualifiedPrimarySource [ a prov:PrimarySource; prov:entity :newsArticle; :foo :bar ] ."@en ; + rdfs:isDefinedBy ; + rdfs:label "PrimarySource" ; + rdfs:subClassOf prov:Derivation ; + prov:category "qualified" ; + prov:component "derivations" ; + prov:definition "A primary source for a topic refers to something produced by some agent with direct experience and knowledge about the topic, at the time of the topic's study, without benefit from hindsight.\n\nBecause of the directness of primary sources, they 'speak for themselves' in ways that cannot be captured through the filter of secondary sources. As such, it is important for secondary sources to reference those primary sources from which they were derived, so that their reliability can be investigated.\n\nA primary source relation is a particular case of derivation of secondary materials from their primary sources. It is recognized that the determination of primary sources can be up to interpretation, and should be done according to conventions accepted within the application's domain."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-primary-source"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-original-source"^^xsd:anyURI ; + prov:unqualifiedForm prov:hadPrimarySource . 
+ +skos:OrderedCollection + rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Ordered Collection"@en ; + rdfs:subClassOf skos:Collection ; + skos:definition "An ordered collection of concepts, where both the grouping and the ordering are meaningful."@en ; + skos:scopeNote "Ordered collections can be used where you would like a set of concepts to be displayed in a specific order, and optionally under a 'node label'."@en . + +prov:EntityInfluence rdf:type owl:Class ; + rdfs:comment "EntityInfluence provides additional descriptions of an Entity's binary influence upon any other kind of resource. Instances of EntityInfluence use the prov:entity property to cite the influencing Entity."@en , "It is not recommended that the type EntityInfluence be asserted without also asserting one of its more specific subclasses."@en ; + rdfs:isDefinedBy ; + rdfs:label "EntityInfluence" ; + rdfs:seeAlso prov:entity ; + rdfs:subClassOf prov:Influence ; + prov:category "qualified" ; + prov:editorsDefinition "EntityInfluence is the capacity of an entity to have an effect on the character, development, or behavior of another by means of usage, start, end, derivation, or other. "@en . + +spdx:relationshipType_variantOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "A Relationship of relationshipType_variantOf expresses that an SPDXElement is a variant of the relatedSPDXElement, but it is not clear which came first. For example, if the content of two Files differs by some edit, but there is no way to tell which came first (no reliable date information), then one File is a variant of the other File."@en ; + vs:term_status "stable"@en . + +dcterms:publisher rdf:type rdf:Property ; + rdfs:comment "An entity responsible for making the resource available."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Publisher"@en ; + rdfs:subPropertyOf dc:publisher ; + dcam:rangeIncludes dcterms:Agent ; + dcterms:issued "2008-01-14"^^xsd:date . 
+ +time:inXSDDate rdf:type owl:DatatypeProperty ; + rdfs:comment "Position of an instant, expressed using xsd:date"@en , "Posición de un instante, expresado utilizando xsd:date."@es ; + rdfs:domain time:Instant ; + rdfs:label "in XSD date"@en , "en fecha XSD"@es ; + rdfs:range xsd:date ; + skos:definition "Position of an instant, expressed using xsd:date"@en , "Posición de un instante, expresado utilizando xsd:date."@es . + +dcterms:dateAccepted rdf:type rdf:Property ; + rdfs:comment "Date of acceptance of the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Date Accepted"@en ; + rdfs:range rdfs:Literal ; + rdfs:subPropertyOf dc:date , dcterms:date ; + dcterms:description "Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty. Examples of resources to which a date of acceptance may be relevant are a thesis (accepted by a university department) or an article (accepted by a journal)."@en ; + dcterms:issued "2002-07-13"^^xsd:date . + +prov:dm rdf:type owl:AnnotationProperty ; + rdfs:comment "A reference to the principal section of the PROV-DM document that describes this concept."@en ; + rdfs:isDefinedBy ; + rdfs:subPropertyOf rdfs:seeAlso . + +adms:sample rdf:type owl:ObjectProperty ; + rdfs:comment "Links to a sample of an Asset (which is itself an Asset)."@en ; + rdfs:domain rdfs:Resource ; + rdfs:isDefinedBy ; + rdfs:label "sample"@en ; + rdfs:range rdfs:Resource . + +[ rdf:type owl:Axiom ; + rdfs:comment "A collection is an entity that provides a structure to some constituents, which are themselves entities. These constituents are said to be member of the collections."@en ; + owl:annotatedProperty rdfs:range ; + owl:annotatedSource prov:hadMember ; + owl:annotatedTarget prov:Entity ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-collection" +] . 
+ +prov:endedAtTime rdf:type owl:DatatypeProperty ; + rdfs:comment "The time at which an activity ended. See also prov:startedAtTime."@en ; + rdfs:domain prov:Activity ; + rdfs:isDefinedBy ; + rdfs:label "endedAtTime" ; + rdfs:range xsd:dateTime ; + prov:category "starting-point" ; + prov:component "entities-activities" ; + prov:editorialNote "It is the intent that the property chain holds: (prov:qualifiedEnd o prov:atTime) rdfs:subPropertyOf prov:endedAtTime."@en ; + prov:qualifiedForm prov:End , prov:atTime . + +time:Instant rdf:type owl:Class ; + rdfs:comment "A temporal entity with zero extent or duration"@en , "Una entidad temporal con una extensión o duración cero."@es ; + rdfs:label "Time instant"@en , "instante de tiempo."@es ; + rdfs:subClassOf time:TemporalEntity ; + skos:definition "A temporal entity with zero extent or duration"@en , "Una entidad temporal con una extensión o duración cero."@es . + +adms:identifier rdf:type owl:ObjectProperty ; + rdfs:comment "Links a resource to an adms:Identifier class."@en ; + rdfs:domain rdfs:Resource ; + rdfs:isDefinedBy ; + rdfs:label "identifier"@en ; + rdfs:range adms:Identifier . + +prov:qualifiedDerivation + rdf:type owl:ObjectProperty ; + rdfs:comment "If this Entity prov:wasDerivedFrom Entity :e, then it can qualify how it was derived using prov:qualifiedDerivation [ a prov:Derivation; prov:entity :e; :foo :bar ]."@en ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "qualifiedDerivation" ; + rdfs:range prov:Derivation ; + rdfs:subPropertyOf prov:qualifiedInfluence ; + prov:category "qualified" ; + prov:component "derivations" ; + prov:inverse "qualifiedDerivationOf" ; + prov:sharesDefinitionWith prov:Derivation ; + prov:unqualifiedForm prov:wasDerivedFrom . 
+ +dcat:endpointURL rdf:type owl:ObjectProperty ; + rdfs:comment "La posición raíz o end-point principal del servicio (una IRI web)."@es , "La locazione principale o l'endpoint primario del servizio (un IRI risolvibile via web)."@it , "The root location or primary endpoint of the service (a web-resolvable IRI)."@en , "Kořenové umístění nebo hlavní přístupový bod služby (IRI přístupné přes Web)."@cs , "Rodplaceringen eller det primære endpoint for en tjeneste (en web-resolverbar IRI)."@da ; + rdfs:domain dcat:DataService ; + rdfs:label "end-point del servizio"@it , "service end-point"@en , "end-point del servicio"@es , "přístupový bod služby"@cs , "tjenesteendpoint"@da ; + rdfs:range rdfs:Resource ; + skos:changeNote "Nueva propiedad agregada en DCAT 2.0."@es , "Nuova proprietà in DCAT 2.0."@it , "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs , "New property in DCAT 2.0."@en ; + skos:definition "Rodplaceringen eller det primære endpoint for en tjeneste (en web-resolverbar IRI)."@da , "The root location or primary endpoint of the service (a web-resolvable IRI)."@en , "La locazione principale o l'endpoint primario del servizio (un IRI risolvibile via web)."@it , "Kořenové umístění nebo hlavní přístupový bod služby (IRI přístupné přes Web)."@cs , "La posición raíz o end-point principal del servicio (una IRI web)."@es . + +skos:inScheme rdf:type owl:ObjectProperty , rdf:Property ; + rdfs:isDefinedBy ; + rdfs:label "is in scheme"@en ; + rdfs:range skos:ConceptScheme ; + skos:definition "Relates a resource (for example a concept) to a concept scheme in which it is included."@en ; + skos:scopeNote "A concept may be a member of more than one concept scheme."@en . 
+ +time:minutes rdf:type owl:DatatypeProperty ; + rdfs:comment "length, or element of, a temporal extent expressed in minutes"@en , "Longitud de, o elemento de la longitud de, una extensión temporal expresada en minutos."@es ; + rdfs:domain time:GeneralDurationDescription ; + rdfs:label "minutes"@en , "minutos"@es ; + rdfs:range xsd:decimal ; + skos:definition "length, or element of, a temporal extent expressed in minutes"@en , "Longitud de, o elemento de la longitud de, una extensión temporal expresada en minutos."@es . + +prov:Association rdf:type owl:Class ; + rdfs:comment "An instance of prov:Association provides additional descriptions about the binary prov:wasAssociatedWith relation from an prov:Activity to some prov:Agent that had some responsiblity for it. For example, :baking prov:wasAssociatedWith :baker; prov:qualifiedAssociation [ a prov:Association; prov:agent :baker; :foo :bar ]."@en ; + rdfs:isDefinedBy ; + rdfs:label "Association" ; + rdfs:subClassOf prov:AgentInfluence ; + prov:category "qualified" ; + prov:component "agents-responsibility" ; + prov:definition "An activity association is an assignment of responsibility to an agent for an activity, indicating that the agent had a role in the activity. It further allows for a plan to be specified, which is the plan intended by the agent to achieve some goals in the context of this activity."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-Association"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-Association"^^xsd:anyURI ; + prov:unqualifiedForm prov:wasAssociatedWith . 
+ +prov:SoftwareAgent rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "SoftwareAgent" ; + rdfs:subClassOf prov:Agent ; + prov:category "expanded" ; + prov:component "agents-responsibility" ; + prov:definition "A software agent is running software."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-agent"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-types"^^xsd:anyURI . + + + rdfs:label "Turtle version of the ISA Programme Location Core Vocabulary"@en ; + dcterms:format ; + dcat:mediaType "text/turtle"^^dcterms:IMT . + +vcard:hasTitle rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the title data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has title"@en . + +skos:broadMatch rdf:type owl:ObjectProperty , rdf:Property ; + rdfs:isDefinedBy ; + rdfs:label "has broader match"@en ; + rdfs:subPropertyOf skos:mappingRelation , skos:broader ; + owl:inverseOf skos:narrowMatch ; + skos:definition "skos:broadMatch is used to state a hierarchical mapping link between two conceptual resources in different concept schemes."@en . 
+ +dcat:distribution rdf:type rdf:Property , owl:ObjectProperty ; + rdfs:comment "Connecte un jeu de données à des distributions disponibles."@fr , "Una distribución disponible del conjunto de datos."@es , "An available distribution of the dataset."@en , "En tilgængelig repræsentation af datasættet."@da , "Una distribuzione disponibile per il set di dati."@it , "تربط قائمة البيانات بطريقة أو بشكل يسمح الوصول الى البيانات"@ar , "データセットを、その利用可能な配信に接続します。"@ja , "Συνδέει ένα σύνολο δεδομένων με μία από τις διαθέσιμες διανομές του."@el , "Dostupná distribuce datové sady."@cs ; + rdfs:domain dcat:Dataset ; + rdfs:isDefinedBy ; + rdfs:label "データセット配信"@ja , "distribution"@en , "distribution"@fr , "distribution"@da , "distribuzione"@it , "διανομή"@el , "توزيع"@ar , "distribución"@es , "distribuce"@cs ; + rdfs:range dcat:Distribution ; + rdfs:subPropertyOf dcterms:relation ; + skos:altLabel "har distribution"@da ; + skos:definition "Συνδέει ένα σύνολο δεδομένων με μία από τις διαθέσιμες διανομές του."@el , "Connecte un jeu de données à des distributions disponibles."@fr , "Una distribución disponible del conjunto de datos."@es , "データセットを、その利用可能な配信に接続します。"@ja , "تربط قائمة البيانات بطريقة أو بشكل يسمح الوصول الى البيانات"@ar , "En tilgængelig repræsentation af datasættet."@da , "An available distribution of the dataset."@en , "Una distribuzione disponibile per il set di dati."@it , "Dostupná distribuce datové sady."@cs ; + skos:editorialNote "Status: English Definition text modified by DCAT revision team, translations pending (except for Italian, Spanish and Czech)."@en . + +rdfs:label rdf:type owl:AnnotationProperty ; + rdfs:comment ""@en ; + rdfs:isDefinedBy . + +dcat:packageFormat rdf:type owl:ObjectProperty , rdf:Property ; + rdfs:comment "The package format of the distribution in which one or more data files are grouped together, e.g. 
to enable a set of related files to be downloaded together."@en , "Balíčkový formát souboru, ve kterém je jeden či více souborů seskupeno dohromady, např. aby bylo možné stáhnout sadu souvisejících souborů naráz."@cs , "Il formato di impacchettamento della distribuzione in cui uno o più file di dati sono raggruppati insieme, ad es. per abilitare un insieme di file correlati da scaricare insieme."@it , "El formato del archivo en que se agrupan uno o más archivos de datos, e.g. para permitir que un conjunto de archivos relacionados se bajen juntos."@es , "Format til pakning af data med henblik på distribution af en eller flere relaterede datafiler der samles til en enhed med henblik på samlet distribution. "@da ; + rdfs:domain dcat:Distribution ; + rdfs:isDefinedBy ; + rdfs:label "formát balíčku"@cs , "packaging format"@en , "formato di impacchettamento"@it , "pakkeformat"@da , "formato de empaquetado"@es ; + rdfs:range dcterms:MediaType ; + rdfs:subPropertyOf dcterms:format ; + skos:changeNote "Ny egenskab tilføjet i DCAT 2.0."@da , "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs , "New property added in DCAT 2.0."@en , "Nueva propiedad agregada en DCAT 2.0."@es , "Nuova proprietà aggiunta in DCAT 2.0."@it ; + skos:definition "Il formato di impacchettamento della distribuzione in cui uno o più file di dati sono raggruppati insieme, ad es. per abilitare un insieme di file correlati da scaricare insieme."@it , "The package format of the distribution in which one or more data files are grouped together, e.g. to enable a set of related files to be downloaded together."@en , "El formato del archivo en que se agrupan uno o más archivos de datos, e.g. para permitir que un conjunto de archivos relacionados se bajen juntos."@es , "Balíčkový formát souboru, ve kterém je jeden či více souborů seskupeno dohromady, např. 
aby bylo možné stáhnout sadu souvisejících souborů naráz."@cs ; + skos:scopeNote "Questa proprietà deve essere utilizzata quando i file nella distribuzione sono impacchettati, ad esempio in un file TAR, Frictionless Data Package o Bagit. Il formato DOVREBBE essere espresso utilizzando un tipo di supporto come definito dal registro dei tipi di media IANA https://www.iana.org/assignments/media-types/, se disponibili."@it , "Esta propiedad se debe usar cuando los archivos de la distribución están empaquetados, por ejemplo en un archivo TAR, Frictionless Data Package o Bagit. El formato DEBERÍA expresarse usando un 'media type', tales como los definidos en el registro IANA de 'media types' https://www.iana.org/assignments/media-types/, si está disponibles."@es , "Denne egenskab kan anvendes hvis filerne i en distribution er pakket, fx i en TAR-fil, en Frictionless Data Package eller en Bagit-fil. Formatet BØR udtrykkes ved en medietype som defineret i 'IANA media types registry', hvis der optræder en relevant medietype dér: https://www.iana.org/assignments/media-types/."@da , "This property to be used when the files in the distribution are packaged, e.g. in a TAR file, a Frictionless Data Package or a Bagit file. The format SHOULD be expressed using a media type as defined by IANA media types registry https://www.iana.org/assignments/media-types/, if available."@en , "Tato vlastnost se použije, když jsou soubory v distribuci zabaleny, např. v souboru TAR, v balíčku Frictionless Data Package nebo v souboru Bagit. Formát BY MĚL být vyjádřen pomocí typu média definovaného v registru IANA https://www.iana.org/assignments/media-types/, pokud existuje."@cs . 
+ +time:inTimePosition rdf:type owl:ObjectProperty ; + rdfs:comment "Posición de un instante, expresada como una coordenada temporal o un valor nominal."@es , "Position of an instant, expressed as a temporal coordinate or nominal value"@en ; + rdfs:domain time:Instant ; + rdfs:label "posición de tiempo"@es , "Time position"@en ; + rdfs:range time:TimePosition ; + rdfs:subPropertyOf time:inTemporalPosition ; + skos:definition "Position of a time instant expressed as a TimePosition"@en , "Posición de un instante, expresada como una coordenada temporal o un valor nominal."@es . + +spdx:fileType_other rdf:type owl:NamedIndividual , spdx:FileType ; + rdfs:comment "Indicates the file is not a source, archive or binary file."@en ; + vs:term_status "stable"@en . + +time:dayOfYear rdf:type owl:DatatypeProperty ; + rdfs:comment "The number of the day within the year"@en , "El número de día en el año."@es ; + rdfs:domain time:GeneralDateTimeDescription ; + rdfs:label "day of year"@en , "día del año"@es ; + rdfs:range xsd:nonNegativeInteger ; + skos:definition "The number of the day within the year"@en , "El número de día en el año."@es . + +dcterms:audience rdf:type rdf:Property ; + rdfs:comment "A class of agents for whom the resource is intended or useful."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Audience"@en ; + dcam:rangeIncludes dcterms:AgentClass ; + dcterms:description "Recommended practice is to use this property with non-literal values from a vocabulary of audience types."@en ; + dcterms:issued "2001-05-21"^^xsd:date . + + + rdf:type owl:NamedIndividual . + +spdx:purpose_firmware + rdf:type owl:NamedIndividual , spdx:Purpose ; + rdfs:comment "The package provides low level control over a device's hardware."@en ; + vs:term_status "stable"@en . 
+ +time:hasBeginning rdf:type owl:ObjectProperty ; + rdfs:comment "Beginning of a temporal entity"@en , "Comienzo de una entidad temporal."@es ; + rdfs:domain time:TemporalEntity ; + rdfs:label "has beginning"@en , "tiene principio"@es ; + rdfs:range time:Instant ; + rdfs:subPropertyOf time:hasTime ; + skos:definition "Beginning of a temporal entity."@en , "Comienzo de una entidad temporal."@es . + +spdx:licenseText rdf:type owl:DatatypeProperty ; + rdfs:comment "Full text of the license."@en ; + rdfs:domain spdx:License ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +time:Saturday rdf:type time:DayOfWeek ; + rdfs:label "Saturday"@en ; + skos:prefLabel "السبت"@ar , "Sabato"@it , "土曜日"@ja , "Sábado"@es , "Sábado"@pt , "Zaterdag"@nl , "Суббота"@ru , "Sobota"@pl , "星期六"@zh , "Samedi"@fr , "Saturday"@en , "Samstag"@de . + +skos:topConceptOf rdf:type owl:ObjectProperty , rdf:Property ; + rdfs:domain skos:Concept ; + rdfs:isDefinedBy ; + rdfs:label "is top concept in scheme"@en ; + rdfs:range skos:ConceptScheme ; + rdfs:subPropertyOf skos:inScheme ; + owl:inverseOf skos:hasTopConcept ; + skos:definition "Relates a concept to the concept scheme that it is a top level concept of."@en . + +vcard:Male rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Male"@en ; + rdfs:subClassOf vcard:Gender . + +vcard:Colleague rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Colleague"@en ; + rdfs:subClassOf vcard:RelatedType . 
+ +spdx:ExternalRef rdf:type owl:Class ; + rdfs:comment "An External Reference allows a Package to reference an external source of additional information, metadata, enumerations, asset identifiers, or downloadable content believed to be relevant to the Package."@en ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onClass spdx:ReferenceCategory ; + owl:onProperty spdx:referenceCategory ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onClass spdx:ReferenceType ; + owl:onProperty spdx:referenceType ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:anyURI ; + owl:onProperty spdx:referenceLocator ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty rdfs:comment + ] ; + vs:term_status "stable"@en . + +vcard:hasNickname rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the nickname data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has nickname"@en ; + rdfs:seeAlso vcard:nickname . + +spdx:url rdf:type owl:DatatypeProperty ; + rdfs:comment "URL Reference"@en ; + rdfs:domain spdx:CrossRef ; + rdfs:range xsd:anyURI . + +prov:order rdf:type owl:AnnotationProperty ; + rdfs:comment "The position that this OWL term should be listed within documentation. The scope of the documentation (e.g., among all terms, among terms within a prov:category, among properties applying to a particular class, etc.) is unspecified."@en ; + rdfs:isDefinedBy . + +spdx:relationshipType_staticLink + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A statically links to SPDXRef-B."@en ; + vs:term_status "stable"@en . 
+ +spdx:purpose_operatingSystem + rdf:type owl:NamedIndividual , spdx:Purpose ; + rdfs:comment "The package refers to an operating system."@en ; + vs:term_status "stable"@en . + +time:Year rdf:type owl:DeprecatedClass , owl:Class ; + rdfs:comment "Year duration" ; + rdfs:label "Year"@en ; + rdfs:subClassOf time:DurationDescription ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality 1 ; + owl:onProperty time:years + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality 0 ; + owl:onProperty time:hours + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality 0 ; + owl:onProperty time:weeks + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality 0 ; + owl:onProperty time:months + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality 0 ; + owl:onProperty time:minutes + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality 0 ; + owl:onProperty time:seconds + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality 0 ; + owl:onProperty time:days + ] ; + owl:deprecated true ; + skos:definition "Year duration" ; + skos:historyNote "Year was proposed in the 2006 version of OWL-Time as an example of how DurationDescription could be specialized to allow for a duration to be restricted to a number of years. \n\nIt is deprecated in this edition of OWL-Time. " ; + skos:prefLabel "Anno"@it , "سنة"@ar , "Rok"@pl , "Jaar"@nl , "Año"@es , "Jahr"@de , "Année (calendrier)"@fr , "Year"@en , "Год"@ru , "年"@ja , "年"@zh , "Ano"@pt . + + + rdf:type owl:Class ; + rdfs:subClassOf ; + vs:term_status "stable" . + +spdx:fileType_text rdf:type owl:NamedIndividual , spdx:FileType ; + rdfs:comment "The file is human readable text file (MIME type of text/*)."@en ; + vs:term_status "stable"@en . + +spdx:match rdf:type owl:DatatypeProperty ; + rdfs:comment "Status of a License List SeeAlso URL reference if it refers to a website that matches the license text." 
; + rdfs:domain spdx:CrossRef ; + rdfs:range xsd:string . + + + rdf:type sh:NodeShape ; + rdfs:comment "Date time date disjunction shape checks that a datatype property receives a temporal value: date, dateTime, gYear or gYearMonth literal" ; + rdfs:label "Date time date disjunction" ; + sh:message "The values must be data typed as either xsd:date, xsd:dateTime, xsd:gYear or xsd:gYearMonth" ; + sh:or ( [ sh:datatype xsd:date ] + [ sh:datatype xsd:dateTime ] + [ sh:datatype xsd:gYear ] + [ sh:datatype xsd:gYearMonth ] + ) . + +dcterms:accrualMethod + rdf:type rdf:Property ; + rdfs:comment "The method by which items are added to a collection."@en ; + rdfs:domain dctype:Collection ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Accrual Method"@en ; + dcam:rangeIncludes dcterms:MethodOfAccrual ; + dcterms:description "Recommended practice is to use a value from the Collection Description Accrual Method Vocabulary [[DCMI-ACCRUALMETHOD](https://dublincore.org/groups/collections/accrual-method/)]."@en ; + dcterms:issued "2005-06-13"^^xsd:date . + +vcard:Kin rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Kin"@en ; + rdfs:subClassOf vcard:RelatedType . 
+ +dcat:byteSize rdf:type owl:DatatypeProperty , rdf:Property ; + rdfs:comment "الحجم بالبايتات "@ar , "Το μέγεθος μιας διανομής σε bytes."@el , "The size of a distribution in bytes."@en , "Velikost distribuce v bajtech."@cs , "バイトによる配信のサイズ。"@ja , "La taille de la distribution en octects"@fr , "Størrelsen af en distributionen angivet i bytes."@da , "La dimensione di una distribuzione in byte."@it , "El tamaño de una distribución en bytes."@es ; + rdfs:domain dcat:Distribution ; + rdfs:isDefinedBy ; + rdfs:label "μέγεθος σε bytes"@el , "velikost v bajtech"@cs , "byte size"@en , "tamaño en bytes"@es , "dimensione in byte"@it , "الحجم بالبايت"@ar , "バイト・サイズ"@ja , "taille en octects"@fr , "bytestørrelse"@da ; + rdfs:range rdfs:Literal ; + skos:definition "Το μέγεθος μιας διανομής σε bytes."@el , "Størrelsen af en distribution angivet i bytes."@da , "الحجم بالبايتات "@ar , "El tamaño de una distribución en bytes."@es , "Velikost distribuce v bajtech."@cs , "La dimensione di una distribuzione in byte."@it , "The size of a distribution in bytes."@en , "La taille de la distribution en octects."@fr , "バイトによる配信のサイズ。"@ja ; + skos:scopeNote "Το μέγεθος σε bytes μπορεί να προσεγγιστεί όταν η ακριβής τιμή δεν είναι γνωστή. Η τιμή της dcat:byteSize θα πρέπει να δίνεται με τύπο δεδομένων xsd:decimal."@el , "الحجم يمكن أن يكون تقريبي إذا كان الحجم الدقيق غير معروف"@ar , "Bytestørrelsen kan approximeres hvis den præcise størrelse ikke er kendt. Værdien af dcat:byteSize bør angives som xsd:decimal."@da , "Velikost v bajtech může být přibližná, pokud její přesná hodnota není známa. Literál s hodnotou dcat:byteSize by měl mít datový typ xsd:decimal."@cs , "El tamaño en bytes puede ser aproximado cuando se desconoce el tamaño exacto. El valor literal de dcat:byteSize debe tener tipo 'xsd:decimal'."@es , "正確なサイズが不明である場合、サイズは、バイトによる近似値を示すことができます。"@ja , "La taille en octects peut être approximative lorsque l'on ignore la taille réelle. 
La valeur littérale de dcat:byteSize doit être de type xsd:decimal."@fr , "The size in bytes can be approximated when the precise size is not known. The literal value of dcat:byteSize should by typed as xsd:decimal."@en , "La dimensione in byte può essere approssimata quando non si conosce la dimensione precisa. Il valore di dcat:byteSize dovrebbe essere espresso come un xsd:decimal."@it . + +locn:fullAddress rdf:type rdf:Property ; + rdfs:comment "The complete address written as a string, with or without formatting. The domain of locn:fullAddress is locn:Address."@en ; + rdfs:domain locn:Address ; + rdfs:isDefinedBy ; + rdfs:label "full address"@en ; + rdfs:range rdfs:Literal ; + dcterms:identifier "locn:fullAddress" ; + vs:term_status "testing"@en . + +time:xsdDateTime rdf:type owl:DeprecatedProperty , owl:DatatypeProperty ; + rdfs:comment "Valor de 'intervalo de fecha-hora' expresado como un valor compacto."@es , "Value of DateTimeInterval expressed as a compact value."@en ; + rdfs:domain time:DateTimeInterval ; + rdfs:label "has XSD date-time"@en , "tiene fecha-hora XSD"@es ; + rdfs:range xsd:dateTime ; + owl:deprecated true ; + skos:note "Utilizando xsd:dateTime en este lugar significa que la duración del intervalo está implícita: se corresponde con la longitud del elemento más pequeño distinto de cero del literal fecha-hora. Sin embargo, esta regla no se puede utilizar para intervalos cuya duración es mayor que un rango más pequeño que el tiempo de comienzo - p.ej. el primer minuto o segundo del día, la primera hora del mes, o el primer día del año. En estos casos el intervalo deseado no se puede distinguir del intervalo correspondiente al próximo rango más alto. Debido a esta ambigüedad esencial, no se recomienda el uso de esta propiedad y está desaprobada." , "Using xsd:dateTime in this place means that the duration of the interval is implicit: it corresponds to the length of the smallest non-zero element of the date-time literal. 
However, this rule cannot be used for intervals whose duration is more than one rank smaller than the starting time - e.g. the first minute or second of a day, the first hour of a month, or the first day of a year. In these cases the desired interval cannot be distinguished from the interval corresponding to the next rank up. Because of this essential ambiguity, use of this property is not recommended and it is deprecated."@en . + +vcard:hasGivenName rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the given name data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has given name"@en . + +spdx:relationshipType_dependsOn + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A depends on SPDXRef-B."@en ; + vs:term_status "stable"@en . + +dcat:theme rdf:type rdf:Property , owl:ObjectProperty ; + rdfs:comment "La categoria principale della risorsa. Una risorsa può avere più temi."@it , "Hlavní téma zdroje. Zdroj může mít více témat."@cs , "La categoría principal del recurso. Un recurso puede tener varios temas."@es , "A main category of the resource. A resource can have multiple themes."@en , "التصنيف الرئيسي لقائمة البيانات. قائمة البيانات يمكن أن تملك أكثر من تصنيف رئيسي واحد."@ar , "Et centralt emne for ressourcen. En ressource kan have flere centrale emner."@da , "La catégorie principale de la ressource. Une ressource peut avoir plusieurs thèmes."@fr , "データセットの主要カテゴリー。データセットは複数のテーマを持つことができます。"@ja , "Η κύρια κατηγορία του συνόλου δεδομένων. Ένα σύνολο δεδομένων δύναται να έχει πολλαπλά θέματα."@el ; + rdfs:isDefinedBy ; + rdfs:label "emne"@da , "Θέμα"@el , "theme"@en , "tema"@es , "tema"@it , "テーマ/カテゴリー"@ja , "téma"@cs , "التصنيف"@ar , "thème"@fr ; + rdfs:range skos:Concept ; + rdfs:subPropertyOf dcterms:subject ; + skos:altLabel "tema"@da ; + skos:definition "Hlavní téma zdroje. Zdroj může mít více témat."@cs , "La catégorie principale de la ressource. 
Une ressource peut avoir plusieurs thèmes."@fr , "La categoría principal del recurso. Un recurso puede tener varios temas."@es , "Η κύρια κατηγορία του συνόλου δεδομένων. Ένα σύνολο δεδομένων δύναται να έχει πολλαπλά θέματα."@el , "La categoria principale della risorsa. Una risorsa può avere più temi."@it , "データセットの主要カテゴリー。データセットは複数のテーマを持つことができます。"@ja , "A main category of the resource. A resource can have multiple themes."@en , "Et centralt emne for ressourcen. En ressource kan have flere centrale emner."@da , "التصنيف الرئيسي لقائمة البيانات. قائمة البيانات يمكن أن تملك أكثر من تصنيف رئيسي واحد."@ar ; + skos:editorialNote "Status: English Definition text modified by DCAT revision team, all except for Italian and Czech translations are pending."@en ; + skos:scopeNote "El conjunto de skos:Concepts utilizados para categorizar los recursos están organizados en un skos:ConceptScheme que describe todas las categorías y sus relaciones en el catálogo."@es , "データセットを分類するために用いられるskos:Conceptの集合は、カタログのすべてのカテゴリーとそれらの関係を記述しているskos:ConceptSchemeで組織化されます。"@ja , "Sada instancí třídy skos:Concept použitá pro kategorizaci zdrojů je organizována do schématu konceptů skos:ConceptScheme, které popisuje všechny kategorie v katalogu a jejich vztahy."@cs , "Il set di concetti skos usati per categorizzare le risorse sono organizzati in skos:ConceptScheme che descrive tutte le categorie e le loro relazioni nel catalogo."@it , "Samlingen af begreber (skos:Concept) der anvendes til at emneinddele ressourcer organiseres i et begrebssystem (skos:ConceptScheme) som beskriver alle emnerne og deres relationer i kataloget."@da , "Un ensemble de skos:Concepts utilisés pour catégoriser les ressources sont organisés en un skos:ConceptScheme décrivant toutes les catégories et ses relations dans le catalogue."@fr , "The set of skos:Concepts used to categorize the resources are organized in a skos:ConceptScheme describing all the categories and their relations in the catalog."@en , "Το σετ των 
skos:Concepts που χρησιμοποιείται για να κατηγοριοποιήσει τα σύνολα δεδομένων είναι οργανωμένο εντός ενός skos:ConceptScheme που περιγράφει όλες τις κατηγορίες και τις σχέσεις αυτών στον κατάλογο."@el . + +time:TemporalUnit rdf:type owl:Class ; + rdfs:comment "A standard duration, which provides a scale factor for a time extent, or the granularity or precision for a time position."@en , "Una duración estándar, que proporciona un factor de escala para una extensión de tiempo, o la granularidad o precisión para una posición de tiempo."@es ; + rdfs:label "unidad de tiempo"@es , "Temporal unit"@en ; + rdfs:subClassOf time:TemporalDuration ; + skos:changeNote "Remove enumeration from definition, in order to allow other units to be used when required in other coordinate systems. \nNOTE: existing units are still present as members of the class, but the class membership is now open. \n\nIn the original OWL-Time the following constraint appeared: \n owl:oneOf (\n time:unitSecond\n time:unitMinute\n time:unitHour\n time:unitDay\n time:unitWeek\n time:unitMonth\n time:unitYear\n ) ;"@en ; + skos:definition "Una duración estándar, que proporciona un factor de escala para una extensión de tiempo, o la granularidad o precisión para una posición de tiempo."@es , "A standard duration, which provides a scale factor for a time extent, or the granularity or precision for a time position."@en ; + skos:note "La pertenencia de la clase 'unidad de tiempo' está abierta, para permitir otras unidades de tiempo utilizadas en algunas aplicaciones técnicas (por ejemplo, millones de años o el mes Baha'i)."@es , "Membership of the class TemporalUnit is open, to allow for other temporal units used in some technical applications (e.g. millions of years, Baha'i month)."@en . 
+ +skos:relatedMatch rdf:type owl:ObjectProperty , owl:SymmetricProperty , rdf:Property ; + rdfs:isDefinedBy ; + rdfs:label "has related match"@en ; + rdfs:subPropertyOf skos:mappingRelation , skos:related ; + skos:definition "skos:relatedMatch is used to state an associative mapping link between two conceptual resources in different concept schemes."@en . + +dcterms:abstract rdf:type rdf:Property ; + rdfs:comment "A summary of the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Abstract"@en ; + rdfs:subPropertyOf dc:description , dcterms:description ; + dcterms:issued "2000-07-11"^^xsd:date . + +dcat:dataset rdf:type rdf:Property , owl:ObjectProperty ; + rdfs:comment "Kolekce dat, která je katalogizována v katalogu."@cs , "Una raccolta di dati che è elencata nel catalogo."@it , "تربط الفهرس بقائمة بيانات ضمنه"@ar , "Un conjunto de datos que se lista en el catálogo."@es , "En samling af data som er opført i kataloget."@da , "A collection of data that is listed in the catalog."@en , "カタログの一部であるデータセット。"@ja , "Relie un catalogue à un jeu de données faisant partie de ce catalogue."@fr , "Συνδέει έναν κατάλογο με ένα σύνολο δεδομένων το οποίο ανήκει στον εν λόγω κατάλογο."@el ; + rdfs:domain dcat:Catalog ; + rdfs:isDefinedBy ; + rdfs:label "σύνολο δεδομένων"@el , "قائمة بيانات"@ar , "conjunto de datos"@es , "datová sada"@cs , "dataset"@en , "dataset"@it , "jeu de données"@fr , "データセット"@ja , "datasæt"@da ; + rdfs:range dcat:Dataset ; + rdfs:subPropertyOf rdfs:member , dcterms:hasPart ; + skos:altLabel "har datasæt"@da , "datasamling"@da ; + skos:definition "Συνδέει έναν κατάλογο με ένα σύνολο δεδομένων το οποίο ανήκει στον εν λόγω κατάλογο."@el , "カタログの一部であるデータセット。"@ja , "Kolekce dat, která je katalogizována v katalogu."@cs , "Relie un catalogue à un jeu de données faisant partie de ce catalogue."@fr , "Una raccolta di dati che è elencata nel catalogo."@it , "A collection of data that is listed in the catalog."@en , "تربط الفهرس بقائمة بيانات ضمنه"@ar , "En 
samling af data som er opført i kataloget."@da , "Un conjunto de datos que se lista en el catálogo."@es ; + skos:editorialNote "Status: English Definition text modified by DCAT revision team, Italian, Spanish and Czech translation provided, other translations pending."@en . + +spdx:CreationInfo rdf:type owl:Class ; + rdfs:comment "One instance is required for each SPDX file produced. It provides the necessary information for forward and backward compatibility for processing tools."@en ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:creator + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:dateTimeStamp ; + owl:onProperty spdx:created ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:licenseListVersion + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty rdfs:comment + ] ; + vs:term_status "stable"@en . + +spdx:isFsfLibre rdf:type owl:DatatypeProperty ; + rdfs:domain spdx:License ; + rdfs:range xsd:boolean . + +spdx:referenceCategory_other + rdf:type owl:NamedIndividual , spdx:ReferenceCategory ; + vs:term_status "stable"@en . + +spdx:relationshipType_describes + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-DOCUMENT describes SPDXRef-A."@en ; + vs:term_status "stable" . + +vcard:Video rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Video"@en ; + rdfs:subClassOf vcard:TelephoneType . 
+ +dcterms:format rdf:type rdf:Property ; + rdfs:comment "The file format, physical medium, or dimensions of the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Format"@en ; + rdfs:subPropertyOf dc:format ; + dcam:rangeIncludes dcterms:Extent , dcterms:MediaType ; + dcterms:description "Recommended practice is to use a controlled vocabulary where available. For example, for file formats one could use the list of Internet Media Types [[MIME](https://www.iana.org/assignments/media-types/media-types.xhtml)]. Examples of dimensions include size and duration."@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +vcard:Type rdf:type owl:Class ; + rdfs:comment "Used for type codes. The URI of the type code must be used as the value for Type."@en ; + rdfs:isDefinedBy ; + rdfs:label "Type"@en . + +locn:addressId rdf:type rdf:Property ; + rdfs:comment "The concept of adding a globally unique identifier for each instance of an address is a crucial part of the INSPIRE data spec. The domain of locn:addressId is locn:Address."@en ; + rdfs:domain locn:Address ; + rdfs:isDefinedBy ; + rdfs:label "address ID"@en ; + rdfs:range rdfs:Literal ; + dcterms:identifier "locn:addressId" ; + vs:term_status "unstable"@en . + +spdx:name rdf:type owl:DatatypeProperty ; + rdfs:comment "Identify name of this SpdxElement."@en ; + rdfs:domain spdx:SpdxElement ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +spdx:purpose_library rdf:type owl:NamedIndividual , spdx:Purpose ; + rdfs:comment "The package is a software library."@en ; + vs:term_status "stable"@en . 
+ +prov:wasInvalidatedBy + rdf:type owl:ObjectProperty ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "wasInvalidatedBy" ; + rdfs:range prov:Activity ; + rdfs:subPropertyOf prov:wasInfluencedBy ; + owl:propertyChainAxiom ( prov:qualifiedInvalidation prov:activity ) ; + owl:propertyChainAxiom ( prov:qualifiedInvalidation prov:activity ) ; + prov:category "expanded" ; + prov:component "entities-activities" ; + prov:inverse "invalidated" ; + prov:qualifiedForm prov:qualifiedInvalidation , prov:Invalidation . + +time:months rdf:type owl:DatatypeProperty ; + rdfs:comment "length of, or element of the length of, a temporal extent expressed in months"@en , "Longitud de, o elemento de la longitud de, una extensión temporal expresada en meses."@es ; + rdfs:domain time:GeneralDurationDescription ; + rdfs:label "months duration"@en , "duración en meses"@es ; + rdfs:range xsd:decimal ; + skos:definition "length of, or element of the length of, a temporal extent expressed in months"@en , "Longitud de, o elemento de la longitud de, una extensión temporal expresada en meses."@es . + +dcterms:subject rdf:type rdf:Property ; + rdfs:comment "A topic of the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Subject"@en ; + rdfs:subPropertyOf dc:subject ; + dcterms:description "Recommended practice is to refer to the subject with a URI. If this is not possible or feasible, a literal value that identifies the subject may be provided. Both should preferably refer to a subject in a controlled vocabulary."@en ; + dcterms:issued "2008-01-14"^^xsd:date . 
+ +dcterms:SizeOrDuration + rdf:type rdfs:Class ; + rdfs:comment "A dimension or extent, or a time taken to play or execute."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Size or Duration"@en ; + rdfs:subClassOf dcterms:MediaTypeOrExtent ; + dcterms:description "Examples include a number of pages, a specification of length, width, and breadth, or a period in hours, minutes, and seconds."@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +dcterms:LocationPeriodOrJurisdiction + rdf:type rdfs:Class ; + rdfs:comment "A location, period of time, or jurisdiction."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Location, Period, or Jurisdiction"@en ; + dcterms:issued "2008-01-14"^^xsd:date . + +spdx:releaseDate rdf:type owl:DatatypeProperty ; + rdfs:comment "This field provides a place for recording the date the package was released."@en ; + rdfs:domain spdx:Package ; + rdfs:range xsd:dateTime ; + rdfs:subPropertyOf spdx:date ; + vs:term_status "stable"@en . + +time:Monday rdf:type time:DayOfWeek ; + rdfs:label "Monday"@en ; + skos:prefLabel "Monday"@en , "Lunes"@es , "Lundi"@fr , "Montag"@de , "الاثنين"@ar , "月曜日"@ja , "Lunedì"@it , "Понедельник"@ru , "Maandag"@nl , "Poniedziałek"@pl , "星期一"@zh , "Segunda-feira"@pt . + +vcard:postal-code rdf:type owl:DatatypeProperty ; + rdfs:comment "The postal code associated with the address of the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "postal code"@en ; + rdfs:range xsd:string . 
+ +spdx:licenseConcluded + rdf:type owl:ObjectProperty , owl:FunctionalProperty ; + rdfs:comment "The licensing that the preparer of this SPDX document has concluded, based on the evidence, actually applies to the SPDX Item.\n\nIf the licenseConcluded field is not present for an SPDX Item, it implies an equivalent meaning to NOASSERTION."@en ; + rdfs:domain spdx:SpdxItem ; + rdfs:range [ rdf:type owl:Class ; + owl:unionOf ( spdx:AnyLicenseInfo + [ rdf:type owl:Restriction ; + owl:hasValue spdx:noassertion ; + owl:onProperty spdx:licenseConcluded + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:none ; + owl:onProperty spdx:licenseConcluded + ] + ) + ] ; + vs:term_status "stable"@en . + +dcat:Relationship rdf:type owl:Class ; + rdfs:comment "Una clase de asociación para adjuntar información adicional a una relación entre recursos DCAT."@es , "Asociační třída pro připojení dodatečných informací ke vztahu mezi zdroji DCAT."@cs , "An association class for attaching additional information to a relationship between DCAT Resources."@en , "En associationsklasse til brug for tilknytning af yderligere information til en relation mellem DCAT-ressourcer."@da , "Una classe di associazione per il collegamento di informazioni aggiuntive a una relazione tra le risorse DCAT."@it ; + rdfs:label "Relazione"@it , "Relation"@da , "Vztah"@cs , "Relación"@es , "Relationship"@en ; + skos:changeNote "Nueva clase añadida en DCAT 2.0."@es , "Nuova classe aggiunta in DCAT 2.0."@it , "Nová třída přidaná ve verzi DCAT 2.0."@cs , "New class added in DCAT 2.0."@en , "Ny klasse i DCAT 2.0."@da ; + skos:definition "An association class for attaching additional information to a relationship between DCAT Resources."@en , "Una classe di associazione per il collegamento di informazioni aggiuntive a una relazione tra le risorse DCAT."@it , "En associationsklasse til brug for tilknytning af yderligere information til en relation mellem DCAT-ressourcer."@da , "Asociační třída pro připojení 
dodatečných informací ke vztahu mezi zdroji DCAT."@cs , "Una clase de asociación para adjuntar información adicional a una relación entre recursos DCAT."@es ; + skos:scopeNote "Se usa para caracterizar la relación entre conjuntos de datos, y potencialmente otros recursos, donde la naturaleza de la relación se conoce pero no está caracterizada adecuadamente con propiedades del estándar 'Dublin Core' (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat, dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:requires, dct:isRequiredBy) or PROV-O properties (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom, prov:wasRevisionOf, prov:hadPrimarySource, prov:alternateOf, prov:specializationOf)."@es , "Use to characterize a relationship between datasets, and potentially other resources, where the nature of the relationship is known but is not adequately characterized by the standard Dublin Core properties (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat, dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:requires, dct:isRequiredBy) or PROV-O properties (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom, prov:wasRevisionOf, prov:hadPrimarySource, prov:alternateOf, prov:specializationOf)."@en , "Anvendes til at karakterisere en relation mellem datasæt, og potentielt andre ressourcer, hvor relationen er kendt men ikke tilstrækkeligt beskrevet af de standardiserede egenskaber i Dublin Core (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat, dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:requires, dct:isRequiredBy) eller PROV-O-egenskaber (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom, prov:wasRevisionOf, prov:hadPrimarySource, prov:alternateOf, prov:specializationOf)."@da , "Používá se pro charakterizaci vztahu 
mezi datovými sadami a případně i jinými zdroji, kde druh vztahu je sice znám, ale není přiměřeně charakterizován standardními vlastnostmi slovníku Dublin Core (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat, dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:requires, dct:isRequiredBy) či vlastnostmi slovníku PROV-O (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom, prov:wasRevisionOf, prov:hadPrimarySource, prov:alternateOf, prov:specializationOf)."@cs , "Viene utilizzato per caratterizzare la relazione tra insiemi di dati, e potenzialmente altri tipi di risorse, nei casi in cui la natura della relazione è nota ma non adeguatamente caratterizzata dalle proprietà dello standard 'Dublin Core' (dct:hasPart, dct:isPartOf, dct:conformsTo, dct:isFormatOf, dct:hasFormat, dct:isVersionOf, dct:hasVersion, dct:replaces, dct:isReplacedBy, dct:references, dct:isReferencedBy, dct:require, dct:isRequiredBy) o dalle propietà fornite da PROV-O (prov:wasDerivedFrom, prov:wasInfluencedBy, prov:wasQuotedFrom, prov:wasRevisionOf, prov: hadPrimarySource, prov:alternateOf, prov:specializationOf)."@it . + +prov:qualifiedAssociation + rdf:type owl:ObjectProperty ; + rdfs:comment "If this Activity prov:wasAssociatedWith Agent :ag, then it can qualify the Association using prov:qualifiedAssociation [ a prov:Association; prov:agent :ag; :foo :bar ]."@en ; + rdfs:domain prov:Activity ; + rdfs:isDefinedBy ; + rdfs:label "qualifiedAssociation" ; + rdfs:range prov:Association ; + rdfs:subPropertyOf prov:qualifiedInfluence ; + prov:category "qualified" ; + prov:component "agents-responsibility" ; + prov:inverse "qualifiedAssociationOf" ; + prov:sharesDefinitionWith prov:Association ; + prov:unqualifiedForm prov:wasAssociatedWith . 
+ +dcterms:accrualPolicy + rdf:type rdf:Property ; + rdfs:comment "The policy governing the addition of items to a collection."@en ; + rdfs:domain dctype:Collection ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Accrual Policy"@en ; + dcam:rangeIncludes dcterms:Policy ; + dcterms:description "Recommended practice is to use a value from the Collection Description Accrual Policy Vocabulary [[DCMI-ACCRUALPOLICY](https://dublincore.org/groups/collections/accrual-policy/)]."@en ; + dcterms:issued "2005-06-13"^^xsd:date . + +spdx:checksumAlgorithm_md2 + rdf:type owl:NamedIndividual , spdx:ChecksumAlgorithm ; + rdfs:comment "Indicates the algorithm used was MD2" ; + vs:term_status "stable" . + +spdx:externalDocumentId + rdf:type owl:DatatypeProperty ; + rdfs:comment "externalDocumentId is a string containing letters, numbers, ., - and/or + which uniquely identifies an external document within this document."@en ; + rdfs:domain spdx:ExternalDocumentRef ; + rdfs:range xsd:anyURI ; + vs:term_status "stable"@en . + +vcard:BBS rdf:type owl:Class ; + rdfs:comment "This class is deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "BBS"@en ; + rdfs:subClassOf vcard:TelephoneType ; + owl:deprecated true . + +spdx:sourceInfo rdf:type owl:DatatypeProperty ; + rdfs:comment "Allows the producer(s) of the SPDX document to describe how the package was acquired and/or changed from the original source."@en ; + rdfs:domain spdx:Package ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +vcard:Home rdf:type owl:Class ; + rdfs:comment "This implies that the property is related to an individual's personal life"@en ; + rdfs:isDefinedBy ; + rdfs:label "Home"@en ; + rdfs:subClassOf vcard:Type . + +adms:schemeAgency rdf:type owl:DatatypeProperty ; + rdfs:comment "The name of the agency that issued the identifier."@en , "This property is deprecated because in in HTML specification another URI was used." 
; + rdfs:domain adms:Identifier ; + rdfs:isDefinedBy ; + rdfs:label "schema agency"@en ; + rdfs:range rdfs:Literal ; + dcterms:isReplacedBy adms:schemaAgency ; + owl:deprecated "true" ; + owl:equivalentProperty adms:schemaAgency . + +spdx:relationshipType_hasPrerequisite + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A has as a prerequisite SPDXRef-B."@en ; + vs:term_status "stable"@en . + + + rdf:type sh:NodeShape ; + sh:name "Category Scheme"@en ; + sh:property [ sh:minCount 1 ; + sh:nodeKind sh:Literal ; + sh:path dcterms:title ; + sh:severity sh:Violation + ] ; + sh:targetClass skos:ConceptScheme . + +dcterms:Period rdf:type rdfs:Datatype ; + rdfs:comment "The set of time intervals defined by their limits according to the DCMI Period Encoding Scheme."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "DCMI Period"@en ; + rdfs:seeAlso ; + dcterms:issued "2000-07-11"^^xsd:date . + +vcard:hasHonorificPrefix + rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the honorific prefix data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has honorific prefix"@en . + +dcterms:MESH rdf:type dcam:VocabularyEncodingScheme ; + rdfs:comment "The set of labeled concepts specified by the Medical Subject Headings."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "MeSH"@en ; + rdfs:seeAlso ; + dcterms:issued "2000-07-11"^^xsd:date . + +prov:Attribution rdf:type owl:Class ; + rdfs:comment "An instance of prov:Attribution provides additional descriptions about the binary prov:wasAttributedTo relation from an prov:Entity to some prov:Agent that had some responsible for it. 
For example, :cake prov:wasAttributedTo :baker; prov:qualifiedAttribution [ a prov:Attribution; prov:entity :baker; :foo :bar ]."@en ; + rdfs:isDefinedBy ; + rdfs:label "Attribution" ; + rdfs:subClassOf prov:AgentInfluence ; + prov:category "qualified" ; + prov:component "agents-responsibility" ; + prov:constraints "http://www.w3.org/TR/2013/REC-prov-constraints-20130430/#prov-dm-constraints-fig"^^xsd:anyURI ; + prov:definition "Attribution is the ascribing of an entity to an agent.\n\nWhen an entity e is attributed to agent ag, entity e was generated by some unspecified activity that in turn was associated to agent ag. Thus, this relation is useful when the activity is not known, or irrelevant."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-attribution"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-attribution"^^xsd:anyURI ; + prov:unqualifiedForm prov:wasAttributedTo . + +vcard:hasStreetAddress + rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the street address data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has street address"@en . + +time:numericDuration rdf:type owl:DatatypeProperty ; + rdfs:comment "Value of a temporal extent expressed as a decimal number scaled by a temporal unit"@en , "Valor de una extensión temporal expresada como un número decimal escalado por una unidad de tiempo."@es ; + rdfs:domain time:Duration ; + rdfs:label "Numeric value of temporal duration"@en , "valor numérico de duración temporal"@es ; + rdfs:range xsd:decimal ; + skos:definition "Value of a temporal extent expressed as a decimal number scaled by a temporal unit"@en , "Valor de una extensión temporal expresada como un número decimal escalado por una unidad de tiempo."@es . 
+ +prov:hadUsage rdf:type owl:ObjectProperty ; + rdfs:comment "The _optional_ Usage involved in an Entity's Derivation."@en ; + rdfs:domain prov:Derivation ; + rdfs:isDefinedBy ; + rdfs:label "hadUsage" ; + rdfs:range prov:Usage ; + prov:category "qualified" ; + prov:component "derivations" ; + prov:inverse "wasUsedInDerivation" ; + prov:sharesDefinitionWith prov:Usage . + +vcard:hasPhoto rdf:type owl:ObjectProperty ; + rdfs:comment "To specify an image or photograph information that annotates some aspect of the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "has photo"@en ; + owl:equivalentProperty vcard:photo . + +adms:prev rdf:type owl:ObjectProperty ; + rdfs:comment "A link to the previous version of the Asset."@en ; + rdfs:domain rdfs:Resource ; + rdfs:isDefinedBy ; + rdfs:label "prev"@en ; + rdfs:range rdfs:Resource ; + rdfs:subPropertyOf . + +spdx:fileContributor rdf:type owl:DatatypeProperty ; + rdfs:comment "This field provides a place for the SPDX file creator to record file contributors. Contributors could include names of copyright holders and/or authors who may not be copyright holders yet contributed to the file content."@en ; + rdfs:domain spdx:File ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +spdx:fileType_source rdf:type owl:NamedIndividual , spdx:FileType ; + rdfs:comment "Indicates the file is a source code file."@en ; + vs:term_status "stable"@en . + +time:TimePosition rdf:type owl:Class ; + rdfs:comment "A temporal position described using either a (nominal) value from an ordinal reference system, or a (numeric) value in a temporal coordinate system. 
"@en , "Una posición temporal descrita utilizando bien un valor (nominal) de un sistema de referencia ordinal, o un valor (numérico) en un sistema de coordenadas temporales."@es ; + rdfs:label "Time position"@en , "posición de tiempo"@es ; + rdfs:subClassOf time:TemporalPosition ; + rdfs:subClassOf [ rdf:type owl:Class ; + owl:unionOf ( [ rdf:type owl:Restriction ; + owl:cardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:numericPosition + ] + [ rdf:type owl:Restriction ; + owl:cardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:nominalPosition + ] + ) + ] ; + skos:definition "A temporal position described using either a (nominal) value from an ordinal reference system, or a (numeric) value in a temporal coordinate system. "@en , "Una posición temporal descrita utilizando bien un valor (nominal) de un sistema de referencia ordinal, o un valor (numérico) en un sistema de coordenadas temporales."@es . + +time:intervalMetBy rdf:type owl:ObjectProperty ; + rdfs:comment "Si un intervalo propio T1 es 'intervalo encontrado por' otro intervalo propio T2, entonces el principio de T1 coincide con el final de T2."@es , "If a proper interval T1 is intervalMetBy another proper interval T2, then the beginning of T1 is coincident with the end of T2."@en ; + rdfs:domain time:ProperInterval ; + rdfs:label "intervalo encontrado por"@es , "interval met by"@en ; + rdfs:range time:ProperInterval ; + owl:inverseOf time:intervalMeets ; + skos:definition "If a proper interval T1 is intervalMetBy another proper interval T2, then the beginning of T1 is coincident with the end of T2."@en , "Si un intervalo propio T1 es 'intervalo encontrado por' otro intervalo propio T2, entonces el principio de T1 coincide con el final de T2."@es . + +vcard:hasCategory rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the category data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has category"@en . 
+ +time:inXSDgYear rdf:type owl:DatatypeProperty ; + rdfs:comment "Position of an instant, expressed using xsd:gYear"@en , "Posición de un instante, expresado utilizando xsd:gYear."@es ; + rdfs:domain time:Instant ; + rdfs:label "in XSD g-Year"@en , "en año gregoriano XSD"@es ; + rdfs:range xsd:gYear ; + skos:definition "Position of an instant, expressed using xsd:gYear"@en , "Posición de un instante, expresado utilizando xsd:gYear."@es . + + + rdf:type sh:NodeShape ; + sh:name "Distribution"@en ; + sh:property [ sh:maxCount 1 ; + sh:path adms:status ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path dcterms:license ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path dcatap:availability ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path odrl:hasPolicy ; + sh:severity sh:Violation + ] ; + sh:property [ sh:datatype xsd:decimal ; + sh:maxCount 1 ; + sh:path dcat:spatialResolutionInMeters ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path dcterms:rights ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:node ; + sh:path dcterms:modified ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcterms:language ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path foaf:page ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path dcterms:format ; + sh:severity sh:Violation + ] ; + sh:property [ sh:minCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:accessURL ; + sh:severity sh:Violation + ] ; + sh:property [ sh:nodeKind sh:Literal ; + sh:path dcterms:title ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:node ; + sh:path dcterms:issued ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path dcat:packageFormat ; + sh:severity sh:Violation + ] ; + sh:property [ sh:datatype xsd:decimal ; + sh:maxCount 1 ; + sh:path dcat:byteSize ; + sh:severity sh:Violation + ] ; + 
sh:property [ sh:maxCount 1 ; + sh:path dcat:mediaType ; + sh:severity sh:Violation + ] ; + sh:property [ sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:downloadURL ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path spdx:checksum ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path dcat:compressFormat ; + sh:severity sh:Violation + ] ; + sh:property [ sh:nodeKind sh:Literal ; + sh:path dcterms:description ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcterms:conformsTo ; + sh:severity sh:Violation + ] ; + sh:property [ sh:datatype xsd:duration ; + sh:maxCount 1 ; + sh:path dcat:temporalResolution ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcat:accessService ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:Distribution . + + + rdf:type sh:NodeShape ; + sh:name "Data Service"@en ; + sh:property [ sh:maxCount 1 ; + sh:path dcterms:accessRights ; + sh:severity sh:Violation + ] ; + sh:property [ sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:endpointDescription ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcat:servesDataset ; + sh:severity sh:Violation + ] ; + sh:property [ sh:nodeKind sh:Literal ; + sh:path dcterms:description ; + sh:severity sh:Violation + ] ; + sh:property [ sh:minCount 1 ; + sh:nodeKind sh:Literal ; + sh:path dcterms:title ; + sh:severity sh:Violation + ] ; + sh:property [ sh:minCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:endpointURL ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path dcterms:license ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:DataService . + +vcard:VCard rdf:type owl:Class ; + rdfs:comment "The vCard class is equivalent to the new Kind class, which is the parent for the four explicit types of vCards (Individual, Organization, Location, Group)"@en ; + rdfs:isDefinedBy ; + rdfs:label "VCard"@en ; + owl:equivalentClass vcard:Kind . 
+ +prov:InstantaneousEvent + rdf:type owl:Class ; + rdfs:comment "An instantaneous event, or event for short, happens in the world and marks a change in the world, in its activities and in its entities. The term 'event' is commonly used in process algebra with a similar meaning. Events represent communications or interactions; they are assumed to be atomic and instantaneous."@en ; + rdfs:isDefinedBy ; + rdfs:label "InstantaneousEvent" ; + prov:category "qualified" ; + prov:component "entities-activities" ; + prov:constraints "http://www.w3.org/TR/2013/REC-prov-constraints-20130430/#dfn-event"^^xsd:anyURI ; + prov:definition "The PROV data model is implicitly based on a notion of instantaneous events (or just events), that mark transitions in the world. Events include generation, usage, or invalidation of entities, as well as starting or ending of activities. This notion of event is not first-class in the data model, but it is useful for explaining its other concepts and its semantics."@en . + +prov:startedAtTime rdf:type owl:DatatypeProperty ; + rdfs:comment "The time at which an activity started. See also prov:endedAtTime."@en ; + rdfs:domain prov:Activity ; + rdfs:isDefinedBy ; + rdfs:label "startedAtTime" ; + rdfs:range xsd:dateTime ; + prov:category "starting-point" ; + prov:component "entities-activities" ; + prov:editorialNote "It is the intent that the property chain holds: (prov:qualifiedStart o prov:atTime) rdfs:subPropertyOf prov:startedAtTime."@en ; + prov:qualifiedForm prov:Start , prov:atTime . + +dcterms:isFormatOf rdf:type rdf:Property ; + rdfs:comment "A pre-existing related resource that is substantially the same as the described resource, but in another format."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Is Format Of"@en ; + rdfs:subPropertyOf dc:relation , dcterms:relation ; + dcterms:description "This property is intended to be used with non-literal values. 
This property is an inverse property of Has Format."@en ; + dcterms:issued "2000-07-11"^^xsd:date . + +spdx:referenceLocator + rdf:type owl:DatatypeProperty ; + rdfs:comment "The unique string with no spaces necessary to access the package-specific information, metadata, or content within the target location. The format of the locator is subject to constraints defined by the ."@en ; + rdfs:domain spdx:ExternalRef ; + rdfs:range xsd:anyURI ; + vs:term_status "stable"@en . + +prov:Organization rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Organization" ; + rdfs:subClassOf prov:Agent ; + prov:category "expanded" ; + prov:component "agents-responsibility" ; + prov:definition "An organization is a social or legal institution such as a company, society, etc." ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-agent"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#expression-types"^^xsd:anyURI . + +vcard:hasCountryName rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the country name data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has country name"@en . + +spdx:externalDocumentRef + rdf:type owl:ObjectProperty ; + rdfs:comment "Identify any external SPDX documents referenced within this SPDX document."@en ; + rdfs:domain spdx:SpdxDocument ; + rdfs:range spdx:ExternalDocumentRef ; + vs:term_status "stable"@en . + +dcterms:Jurisdiction rdf:type rdfs:Class ; + rdfs:comment "The extent or range of judicial, law enforcement, or other authority."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Jurisdiction"@en ; + rdfs:subClassOf dcterms:LocationPeriodOrJurisdiction ; + dcterms:issued "2008-01-14"^^xsd:date . + +vcard:ISDN rdf:type owl:Class ; + rdfs:comment "This class is deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "ISDN"@en ; + rdfs:subClassOf vcard:Type ; + owl:deprecated true . 
+ + + rdf:type owl:Ontology ; + rdfs:label "adms"@en , "adms"@nl ; + dcterms:issued "2023-04-05" ; + dcterms:license ; + dcterms:mediator [ foaf:homepage ; + foaf:name "Semantic Interoperability Community (SEMIC)" + ] ; + rec:editor [ rdf:type foaf:Person ; + foaf:firstName "Bert" ; + foaf:lastName "Van Nuffelen" ; + foaf:mbox ; + j.0:affiliation [ foaf:name "TenForce" ] + ] ; + rec:editor [ rdf:type foaf:Person ; + foaf:firstName "Pavlina" ; + foaf:lastName "Fragkou" ; + j.0:affiliation [ foaf:name "SEMIC EU" ] + ] ; + rec:editor [ rdf:type foaf:Person ; + foaf:firstName "Natasa" ; + foaf:lastName "Sofou" + ] ; + rec:editor [ rdf:type foaf:Person ; + foaf:firstName "Makx" ; + foaf:lastName "Dekkers" + ] ; + foaf:maker [ rdf:type foaf:Person ; + foaf:firstName "Pavlina" ; + foaf:lastName "Fragkou" ; + j.0:affiliation [ foaf:name "SEMIC EU" ] + ] . + +time:TemporalDuration + rdf:type owl:Class ; + rdfs:comment "Time extent; duration of a time interval separate from its particular start position"@en , "Extensión de tiempo; duración de un intervalo de tiempo independiente de su posición de inicio particular."@es ; + rdfs:label "Temporal duration"@en , "duración temporal"@es ; + skos:definition "Time extent; duration of a time interval separate from its particular start position"@en , "Extensión de tiempo; duración de un intervalo de tiempo independiente de su posición de inicio particular."@es . + +vcard:Gender rdf:type owl:Class ; + rdfs:comment "Used for gender codes. The URI of the gender code must be used as the value for Gender."@en ; + rdfs:isDefinedBy ; + rdfs:label "Gender"@en . + +adms:representationTechnique + rdf:type owl:ObjectProperty ; + rdfs:comment "More information about the format in which an Asset Distribution is released. 
This is different from the file format as, for example, a ZIP file (file format) could contain an XML schema (representation technique)."@en ; + rdfs:domain rdfs:Resource ; + rdfs:isDefinedBy ; + rdfs:label "representation technique"@en ; + rdfs:range skos:Concept . + +vcard:Coworker rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Coworker"@en ; + rdfs:subClassOf vcard:RelatedType . + +locn:geometry rdf:type rdf:Property ; + rdfs:comment "Associates any resource with the corresponding geometry."@en ; + rdfs:isDefinedBy ; + rdfs:label "geometry"@en ; + rdfs:range locn:Geometry ; + dcterms:identifier "locn:geometry" ; + vann:example "\nThe following are examples of equivalent statements using different geometry encodings. In the examples, prefix gsp is used for namespace URI http://www.opengis.net/ont/geosparql#, whereas prefix sf is used for namespace URI http://www.opengis.net/ont/sf#.\n- WKT (GeoSPARQL)\n:Resource locn:geometry\n \" Point(-0.001475 51.477811)\"^^gsp:wktLiteral .\n- GML\n:Resource locn:geometry\n \"\n -0.001475, 51.477811\"^^gsp:gmlLiteral .\n- RDF+WKT (GeoSPARQL)\n:Resource locn:geometry\n [ a sf:Point; gsp:asWKT \" Point(-0.001475 51.477811)\"^^gsp:wktLiteral ] .\n- RDF+GML (GeoSPARQL)\n:Resource locn:geometry\n [ a sf:Point; gsp:asGML\n \"\n -0.001475, 51.477811\"^^gsp:gmlLiteral ] .\n- RDF (WGS84 lat/long)\n:Resource locn:geometry [ a geo:Point; geo:lat \"51.477811\"; geo:long \"-0.001475\" ] .\n- RDF (schema.org)\n:Resource locn:geometry [ a schema:GeoCoordinates; schema:latitude \"51.477811\"; schema:longitude \"-0.001475\" ] .\n- geo URI\n:Resource locn:geometry .\n- GeoHash URI\n:Resource locn:geometry .\n "@en ; + vann:usageNote "\nDepending on how a geometry is encoded, the range of this property may be one of the following:\n- a literal (e.g., WKT - string literal -, GML, KML - XML literal)\n- a geometry class, as those defined in the OGC's GeoSPARQL specification, in the W3C's Basic Geo (WGS84 lat/long) vocabulary, and at 
schema.org;\n- geocoded URIs, as geo or GeoHash URIs, treated as URI references.\nFor interoperability reasons, it is recommended using one of the following:\n- Any geometry:\n - WKT, GML, and RDF+WKT/GML, as per the GeoSPARQL specification.\n - KML (Keyhole Markup Language) - note that KML supports the following geometries only: point, line string, linear ring, and polygon.\n - RDF as per the schema.org vocabulary (see classes schema:GeoCoordinates and schema:GeoShape).\n- Points: one of the above, or:\n - RDF as per the W3C Basic Geo (WGS84 lat/long) vocabulary.\n - GeoHash URIs.\n - geo URIs.\n "@en ;
+ vs:term_status "testing"@en ;
+ wdsr:describedby .
+
+time:intervalStarts rdf:type owl:ObjectProperty ;
+ rdfs:comment "Si un intervalo propio T1 empieza otro intervalo propio T2, entonces el principio de T1 coincide con el principio de T2, y el final de T1 es anterior al final de T2."@es , "If a proper interval T1 is intervalStarts another proper interval T2, then the beginning of T1 is coincident with the beginning of T2, and the end of T1 is before the end of T2."@en ;
+ rdfs:domain time:ProperInterval ;
+ rdfs:label "interval starts"@en , "intervalo empieza"@es ;
+ rdfs:range time:ProperInterval ;
+ rdfs:subPropertyOf time:intervalIn ;
+ owl:inverseOf time:intervalStartedBy ;
+ skos:definition "If a proper interval T1 is intervalStarts another proper interval T2, then the beginning of T1 is coincident with the beginning of T2, and the end of T1 is before the end of T2."@en , "Si un intervalo propio T1 empieza otro intervalo propio T2, entonces el principio de T1 coincide con el principio de T2, y el final de T1 es anterior al final de T2."@es .
+
+vcard:Pager rdf:type owl:Class ;
+ rdfs:isDefinedBy ;
+ rdfs:label "Pager"@en ;
+ rdfs:subClassOf vcard:TelephoneType . 
+ + + rdf:type sh:NodeShape ; + sh:name "PeriodOfTime"@en ; + sh:property [ sh:maxCount 1 ; + sh:node ; + sh:path dcat:startDate ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path time:hasEnd ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path time:hasBeginning ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:node ; + sh:path dcat:endDate ; + sh:severity sh:Violation + ] ; + sh:targetClass dcterms:PeriodOfTime . + +spdx:relationshipType_devToolOf + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A is a development dependency of SPDXRef-B."@en ; + vs:term_status "stable"@en . + +vcard:hasLocality rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the locality data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has locality"@en . + + + rdf:type owl:Ontology ; + rdfs:seeAlso ; + dcterms:contributor "Dave Beckett" , "Nikki Rogers" , "Participants in W3C's Semantic Web Deployment Working Group." ; + dcterms:creator "Alistair Miles" , "Sean Bechhofer" ; + dcterms:description "An RDF vocabulary for describing the basic structure and content of concept schemes such as thesauri, classification schemes, subject heading lists, taxonomies, 'folksonomies', other types of controlled vocabulary, and also concept schemes embedded in glossaries and terminologies."@en ; + dcterms:title "SKOS Vocabulary"@en . + +dcterms:Point rdf:type rdfs:Datatype ; + rdfs:comment "The set of points in space defined by their geographic coordinates according to the DCMI Point Encoding Scheme."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "DCMI Point"@en ; + rdfs:seeAlso ; + dcterms:issued "2000-07-11"^^xsd:date . + +vcard:photo rdf:type owl:ObjectProperty ; + rdfs:comment "This object property has been mapped"@en ; + rdfs:isDefinedBy ; + rdfs:label "photo"@en ; + owl:equivalentProperty vcard:hasPhoto . 
+ + + rdfs:label "HTML version of the ISA Programme Location Core Vocabulary"@en ; + dcat:mediaType "text/html"^^dcterms:IMT . + +vcard:additional-name + rdf:type owl:DatatypeProperty ; + rdfs:comment "The additional name associated with the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "additional name"@en ; + rdfs:range xsd:string . + +spdx:licenseInfoInFile + rdf:type owl:ObjectProperty ; + rdfs:comment "Licensing information that was discovered directly in the subject file. This is also considered a declared license for the file.\n\nIf the licenseInfoInFile field is not present for a file, it implies an equivalent meaning to NOASSERTION."@en ; + rdfs:domain spdx:File ; + rdfs:range [ rdf:type owl:Class ; + owl:unionOf ( spdx:AnyLicenseInfo + [ rdf:type owl:Restriction ; + owl:hasValue spdx:noassertion ; + owl:onProperty spdx:licenseInfoInFile + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:none ; + owl:onProperty spdx:licenseInfoInFile + ] + ) + ] ; + rdfs:subPropertyOf spdx:licenseInfoFromFiles ; + vs:term_status "stable" . + +vcard:Friend rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Friend"@en ; + rdfs:subClassOf vcard:RelatedType . + +prov:hadPlan rdf:type owl:ObjectProperty ; + rdfs:comment "The _optional_ Plan adopted by an Agent in Association with some Activity. Plan specifications are out of the scope of this specification."@en ; + rdfs:domain prov:Association ; + rdfs:isDefinedBy ; + rdfs:label "hadPlan" ; + rdfs:range prov:Plan ; + prov:category "qualified" ; + prov:component "agents-responsibility" ; + prov:inverse "wasPlanOf" ; + prov:sharesDefinitionWith prov:Plan . 
+ +spdx:LicenseException + rdf:type owl:Class ; + rdfs:comment "An exception to a license."@en ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:anyURI ; + owl:onProperty rdfs:seeAlso + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:string ; + owl:onProperty rdfs:comment ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:name ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:example + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:licenseExceptionId ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:licenseExceptionTemplate + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:licenseExceptionText ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + vs:term_status "stable"@en . + + + cc:attributionName "European Commission"@en ; + cc:attributionURL ; + dcterms:title "ISA Open Metadata Licence v1.1" . + +time:inside rdf:type owl:ObjectProperty ; + rdfs:comment "An instant that falls inside the interval. It is not intended to include beginnings and ends of intervals."@en , "Un instante que cae dentro del intervalo. Se asume que no es ni el principio ni el final de ningún intervalo."@es ; + rdfs:domain time:Interval ; + rdfs:label "has time instant inside"@en , "tiene instante de tiempo dentro"@es ; + rdfs:range time:Instant ; + skos:definition "An instant that falls inside the interval. 
It is not intended to include beginnings and ends of intervals."@en , "Un instante que cae dentro del intervalo. Se asume que no es ni el principio ni el final de ningún intervalo."@es . + +prov:atTime rdf:type owl:DatatypeProperty ; + rdfs:comment "The time at which an InstantaneousEvent occurred, in the form of xsd:dateTime."@en ; + rdfs:domain prov:InstantaneousEvent ; + rdfs:isDefinedBy ; + rdfs:label "atTime" ; + rdfs:range xsd:dateTime ; + prov:category "qualified" ; + prov:component "entities-activities" ; + prov:sharesDefinitionWith prov:InstantaneousEvent ; + prov:unqualifiedForm prov:invalidatedAtTime , prov:startedAtTime , prov:generatedAtTime , prov:endedAtTime . + +prov:actedOnBehalfOf rdf:type owl:ObjectProperty ; + rdfs:comment "An object property to express the accountability of an agent towards another agent. The subordinate agent acted on behalf of the responsible agent in an actual activity. "@en ; + rdfs:domain prov:Agent ; + rdfs:isDefinedBy ; + rdfs:label "actedOnBehalfOf" ; + rdfs:range prov:Agent ; + rdfs:subPropertyOf prov:wasInfluencedBy ; + owl:propertyChainAxiom ( prov:qualifiedDelegation prov:agent ) ; + owl:propertyChainAxiom ( prov:qualifiedDelegation prov:agent ) ; + prov:category "starting-point" ; + prov:component "agents-responsibility" ; + prov:inverse "hadDelegate" ; + prov:qualifiedForm prov:qualifiedDelegation , prov:Delegation . + + + rdf:type owl:Class ; + rdfs:subClassOf ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:positiveInteger ; + owl:onProperty ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + vs:term_status "stable" . 
+ +dcat:startDate rdf:type rdf:Property , owl:DatatypeProperty ; + rdfs:domain dcterms:PeriodOfTime ; + rdfs:label "data di inizio"@it , "datum začátku"@cs , "start date"@en , "startdato"@da ; + rdfs:range rdfs:Literal ; + skos:altLabel "starttidspunkt"@da ; + skos:changeNote "Nová vlastnost přidaná ve verzi DCAT 2.0."@cs , "New property added in DCAT 2.0."@en , "Ny egenskab tilføjet i DCAT 2.0."@da , "Nuova proprietà aggiunta in DCAT 2.0."@it , "Nueva propiedad agregada en DCAT 2.0."@es ; + skos:definition "El comienzo del período"@es , "Začátek doby trvání"@cs , "The start of the period"@en , "Start på perioden."@da , "L'inizio del periodo"@it ; + skos:scopeNote "Rækkeviden for denne egenskab er bevidst generisk defineret med det formål at tillade forskellige niveauer af tidslig præcision ifm. angivelse af startdatoen for en periode. Den kan eksempelvis udtrykkes som en dato (xsd:date), en dato og et tidspunkt (xsd:dateTime), eller et årstal (xsd:gYear)."@da , "The range of this property is intentionally generic, with the purpose of allowing different level of temporal precision for specifying the start of a period. E.g., it can be expressed with a date (xsd:date), a date and time (xsd:dateTime), or a year (xsd:gYear)."@en , "Obor hodnot této vlastnosti je úmyslně obecný, aby umožnil různé úrovně časového rozlišení pro specifikaci začátku doby trvání. Ten může být kupříkladu vyjádřen datumem (xsd:date), datumem a časem (xsd:dateTime) či rokem (xsd:gYear)."@cs , "Il range di questa proprietà è volutamente generico, con lo scopo di consentire diversi livelli di precisione temporale per specificare l'inizio di un periodo. Ad esempio, può essere espresso con una data (xsd:date), una data e un'ora (xsd:dateTime), o un anno (xsd:gYear)."@it , "El rango de esta propiedad es intencionalmente genérico con el propósito de permitir distintos niveles de precisión temporal para especificar el comienzo de un período. 
Por ejemplo, puede expresarse como una fecha (xsd:date), una fecha y un tiempo (xsd:dateTime), o un año (xsd:gYear)."@es . + +spdx:checksum rdf:type owl:ObjectProperty ; + rdfs:comment "The checksum property provides a mechanism that can be used to verify that the contents of a File or Package have not changed."@en ; + rdfs:domain [ rdf:type owl:Class ; + owl:unionOf ( spdx:File spdx:Package ) + ] ; + rdfs:range spdx:Checksum ; + vs:term_status "stable"@en . + + + rdf:type owl:Class ; + rdfs:subClassOf ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onClass spdx:File ; + owl:onProperty ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + vs:term_status "stable" . + +dcterms:requires rdf:type rdf:Property ; + rdfs:comment "A related resource that is required by the described resource to support its function, delivery, or coherence."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Requires"@en ; + rdfs:subPropertyOf dc:relation , dcterms:relation ; + dcterms:description "This property is intended to be used with non-literal values. This property is an inverse property of Is Required By."@en ; + dcterms:issued "2000-07-11"^^xsd:date . + +spdx:packageVerificationCodeExcludedFile + rdf:type owl:DatatypeProperty ; + rdfs:comment "A file that was excluded when calculating the package verification code. This is usually a file containing SPDX data regarding the package. If a package contains more than one SPDX file all SPDX files must be excluded from the package verification code. If this is not done it would be impossible to correctly calculate the verification codes in both files."@en ; + rdfs:domain spdx:PackageVerificationCode ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . 
+ +skos:closeMatch rdf:type owl:ObjectProperty , owl:SymmetricProperty , rdf:Property ; + rdfs:isDefinedBy ; + rdfs:label "has close match"@en ; + rdfs:subPropertyOf skos:mappingRelation ; + skos:definition "skos:closeMatch is used to link two concepts that are sufficiently similar that they can be used interchangeably in some information retrieval applications. In order to avoid the possibility of \"compound errors\" when combining mappings across more than two concept schemes, skos:closeMatch is not declared to be a transitive property."@en . + +spdx:fileType_archive + rdf:type owl:NamedIndividual , spdx:FileType ; + rdfs:comment "Indicates the file is an archive file."@en ; + vs:term_status "stable"@en . + +locn:poBox rdf:type rdf:Property ; + rdfs:comment "The Post Office Box number. The domain of locn:poBox is locn:Address."@en ; + rdfs:domain locn:Address ; + rdfs:isDefinedBy ; + rdfs:label "PO box"@en ; + rdfs:range rdfs:Literal ; + dcterms:identifier "locn:poBox" ; + vs:term_status "testing"@en . + +dcterms:Frequency rdf:type rdfs:Class ; + rdfs:comment "A rate at which something recurs."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Frequency"@en ; + dcterms:issued "2008-01-14"^^xsd:date . 
+ + + rdf:type sh:NodeShape ; + sh:name "Catalog"@en ; + sh:property [ sh:minCount 1 ; + sh:nodeKind sh:Literal ; + sh:path dcterms:description ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcterms:language ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcterms:hasPart ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path dcterms:isPartOf ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:node ; + sh:path dcterms:modified ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path dcterms:license ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:minCount 1 ; + sh:path dcterms:publisher ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path foaf:homepage ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:node ; + sh:path dcterms:issued ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path dcterms:rights ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcat:record ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcterms:spatial ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcat:themeTaxonomy ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcat:service ; + sh:severity sh:Violation + ] ; + sh:property [ sh:minCount 1 ; + sh:nodeKind sh:Literal ; + sh:path dcterms:title ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcat:catalog ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcat:dataset ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcterms:creator ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:Catalog . + +adms:next rdf:type owl:ObjectProperty ; + rdfs:comment "A link to the next version of the Asset."@en ; + rdfs:domain rdfs:Resource ; + rdfs:isDefinedBy ; + rdfs:label "next"@en ; + rdfs:range rdfs:Resource ; + rdfs:subPropertyOf . 
+ +spdx:referencesFile rdf:type owl:ObjectProperty ; + rdfs:comment "Indicates that a particular file belongs as part of the set of analyzed files in the SpdxDocument."@en , "This property has been replaced by a relationship between the SPDX document and file with a \"contains\" relationship type."@en ; + rdfs:domain spdx:SpdxDocument ; + rdfs:range spdx:File ; + owl:deprecated true ; + vs:term_status "deprecated"@en . + +owl:versionInfo rdf:type owl:AnnotationProperty . + +[ rdf:type owl:Axiom ; + rdfs:comment "Quotation is a particular case of derivation (see http://www.w3.org/TR/prov-dm/#term-quotation) in which an entity is derived from an original entity by copying, or \"quoting\", some or all of it. " ; + owl:annotatedProperty rdfs:subPropertyOf ; + owl:annotatedSource prov:wasQuotedFrom ; + owl:annotatedTarget prov:wasDerivedFrom +] . + +vcard:honorific-prefix + rdf:type owl:DatatypeProperty ; + rdfs:comment "The honorific prefix of the name associated with the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "honorific prefix"@en ; + rdfs:range xsd:string . 
+ +time:MonthOfYear rdf:type owl:Class ; + rdfs:comment "El mes del año."@es , "The month of the year"@en ; + rdfs:label "Month of year"@en , "mes del año"@es ; + rdfs:subClassOf time:DateTimeDescription ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality "0"^^xsd:nonNegativeInteger ; + owl:onProperty time:second + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality "0"^^xsd:nonNegativeInteger ; + owl:onProperty time:hour + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality "1"^^xsd:nonNegativeInteger ; + owl:onProperty time:month + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:hasValue time:unitMonth ; + owl:onProperty time:unitType + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality "0"^^xsd:nonNegativeInteger ; + owl:onProperty time:week + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality "0"^^xsd:nonNegativeInteger ; + owl:onProperty time:year + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality "0"^^xsd:nonNegativeInteger ; + owl:onProperty time:day + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:cardinality "0"^^xsd:nonNegativeInteger ; + owl:onProperty time:minute + ] ; + skos:definition "The month of the year"@en , "El mes del año."@es ; + skos:editorialNote "Característica en riesgo - añadida en la revisión de 2017, y no utilizada todavía de forma amplia."@es , "Feature at risk - added in 2017 revision, and not yet widely used. "@en ; + skos:note "Membership of the class :MonthOfYear is open, to allow for alternative annual calendars and different month names."@en , "La pertenencia a la clase 'mes del año' está abierta, a permitir calendarios anuales alternativos y diferentes nombres de meses."@es . + +vcard:Emergency rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Emergency"@en ; + rdfs:subClassOf vcard:RelatedType . 
+ +vcard:hasHonorificSuffix + rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the honorific suffix data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has honorific suffix"@en . + +dcat:Resource rdf:type owl:Class ; + rdfs:comment "Ressource udgivet eller udvalgt og arrangeret af en enkelt aktør."@da , "Recurso publicado o curado por un agente único."@es , "Risorsa pubblicata o curata da un singolo agente."@it , "Resource published or curated by a single agent."@en , "Zdroj publikovaný či řízený jediným činitelem."@cs ; + rdfs:label "Katalogizovaný zdroj"@cs , "Recurso catalogado"@es , "Risorsa catalogata"@it , "Katalogiseret ressource"@da , "Catalogued resource"@en ; + skos:changeNote "New class added in DCAT 2.0."@en , "Nuova classe aggiunta in DCAT 2.0."@it , "Ny klasse i DCAT 2.0."@da , "Nová třída přidaná ve verzi DCAT 2.0."@cs , "Nueva clase agregada en DCAT 2.0."@es ; + skos:definition "Resource published or curated by a single agent."@en , "Risorsa pubblicata o curata da un singolo agente."@it , "Ressource udgivet eller udvalgt og arrangeret af en enkelt aktør."@da , "Recurso publicado o curado por un agente único."@es , "Zdroj publikovaný či řízený jediným činitelem."@cs ; + skos:scopeNote "dcat:Resource es un punto de extensión que permite la definición de cualquier tipo de catálogo. Se pueden definir subclases adicionales en perfil de DCAT o una aplicación para catálogos de otro tipo de recursos."@es , "dcat:Resource je bod pro rozšíření umožňující definici různých druhů katalogů. Další podtřídy lze definovat v profilech DCAT či aplikacích pro katalogy zdrojů jiných druhů."@cs , "Třída všech katalogizovaných zdrojů, nadtřída dcat:Dataset, dcat:DataService, dcat:Catalog a všech ostatních členů dcat:Catalog. Tato třída nese vlastnosti společné všem katalogizovaným zdrojům včetně datových sad a datových služeb. Je silně doporučeno používat specifičtější podtřídy, pokud je to možné. 
Při popisu zdroje, který není ani dcat:Dataset, ani dcat:DataService se doporučuje vytvořit odpovídající podtřídu dcat:Resrouce a nebo použít dcat:Resource s vlastností dct:type pro určení konkrétního typu."@cs , "Klassen for alle katalogiserede ressourcer, den overordnede klasse for dcat:Dataset, dcat:DataService, dcat:Catalog og enhvert medlem af et dcat:Catalog. Denne klasse bærer egenskaber der gælder alle katalogiserede ressourcer, herunder dataset og datatjenester. Det anbefales kraftigt at mere specifikke subklasser oprettes. Når der beskrives ressourcer der ikke er dcat:Dataset eller dcat:DataService, anbefales det at oprette passende subklasser af dcat:Resource eller at dcat:Resource anvendes sammen med egenskaben dct:type til opmærkning med en specifik typeangivelse."@da , "dcat:Resource è un punto di estensione che consente la definizione di qualsiasi tipo di catalogo. Sottoclassi aggiuntive possono essere definite in un profilo DCAT o in un'applicazione per cataloghi di altri tipi di risorse."@it , "La classe di tutte le risorse catalogate, la Superclasse di dcat:Dataset, dcat:DataService, dcat:Catalog e qualsiasi altro membro di dcat:Catalog. Questa classe porta proprietà comuni a tutte le risorse catalogate, inclusi set di dati e servizi dati. Si raccomanda vivamente di utilizzare una sottoclasse più specifica. Quando si descrive una risorsa che non è un dcat:Dataset o dcat:DataService, si raccomanda di creare una sottoclasse di dcat:Resource appropriata, o utilizzare dcat:Resource con la proprietà dct:type per indicare il tipo specifico."@it , "The class of all catalogued resources, the Superclass of dcat:Dataset, dcat:DataService, dcat:Catalog and any other member of a dcat:Catalog. This class carries properties common to all catalogued resources, including datasets and data services. It is strongly recommended to use a more specific sub-class. 
When describing a resource which is not a dcat:Dataset or dcat:DataService, it is recommended to create a suitable sub-class of dcat:Resource, or use dcat:Resource with the dct:type property to indicate the specific type."@en , "La clase de todos los recursos catalogados, la superclase de dcat:Dataset, dcat:DataService, dcat:Catalog y cualquier otro miembro de un dcat:Catalog. Esta clase tiene propiedades comunes a todos los recursos catalogados, incluyendo conjuntos de datos y servicios de datos. Se recomienda fuertemente que se use una clase más específica. Cuando se describe un recurso que no es un dcat:Dataset o dcat:DataService, se recomienda crear una sub-clase apropiada de dcat:Resource, o usar dcat:Resource con la propiedad dct:type to indicar el tipo específico."@es , "dcat:Resource is an extension point that enables the definition of any kind of catalog. Additional subclasses may be defined in a DCAT profile or application for catalogs of other kinds of resources."@en , "dcat:Resource er et udvidelsespunkt der tillader oprettelsen af enhver type af kataloger. Yderligere subklasser kan defineres i en DCAT-profil eller i en applikation til kataloger med andre typer af ressourcer."@da . + +prov:EmptyCollection rdf:type owl:Class , owl:NamedIndividual ; + rdfs:isDefinedBy ; + rdfs:label "EmptyCollection"@en ; + rdfs:subClassOf prov:Collection ; + prov:category "expanded" ; + prov:component "collections" ; + prov:definition "An empty collection is a collection without members."@en . + +spdx:summary rdf:type owl:DatatypeProperty ; + rdfs:comment "Provides a short description of the package."@en ; + rdfs:domain spdx:Package ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +spdx:downloadLocation + rdf:type owl:DatatypeProperty ; + rdfs:comment "The URI at which this package is available for download. Private (i.e., not publicly reachable) URIs are acceptable as values of this property. 
The values http://spdx.org/rdf/terms#none and http://spdx.org/rdf/terms#noassertion may be used to specify that the package is not downloadable or that no attempt was made to determine its download location, respectively."@en ; + rdfs:domain spdx:Package ; + rdfs:range xsd:anyURI ; + vs:term_status "stable"@en . + +spdx:purpose_archive rdf:type owl:NamedIndividual , spdx:Purpose ; + rdfs:comment "The package refers to an archived collection of files (.tar, .zip, etc)."@en ; + vs:term_status "stable"@en . + +prov:wasStartedBy rdf:type owl:ObjectProperty ; + rdfs:comment "Start is when an activity is deemed to have started. A start may refer to an entity, known as trigger, that initiated the activity."@en ; + rdfs:domain prov:Activity ; + rdfs:isDefinedBy ; + rdfs:label "wasStartedBy" ; + rdfs:range prov:Entity ; + rdfs:subPropertyOf prov:wasInfluencedBy ; + owl:propertyChainAxiom ( prov:qualifiedStart prov:entity ) ; + owl:propertyChainAxiom ( prov:qualifiedStart prov:entity ) ; + prov:category "expanded" ; + prov:component "entities-activities" ; + prov:inverse "started" ; + prov:qualifiedForm prov:qualifiedStart , prov:Start . 
+ +prov:wasDerivedFrom rdf:type owl:ObjectProperty ; + rdfs:comment "The more specific subproperties of prov:wasDerivedFrom (i.e., prov:wasQuotedFrom, prov:wasRevisionOf, prov:hadPrimarySource) should be used when applicable."@en ; + rdfs:domain prov:Entity ; + rdfs:isDefinedBy ; + rdfs:label "wasDerivedFrom" ; + rdfs:range prov:Entity ; + rdfs:subPropertyOf prov:wasInfluencedBy ; + owl:propertyChainAxiom ( prov:qualifiedDerivation prov:entity ) ; + owl:propertyChainAxiom ( prov:qualifiedDerivation prov:entity ) ; + prov:category "starting-point" ; + prov:component "derivations" ; + prov:definition "A derivation is a transformation of an entity into another, an update of an entity resulting in a new one, or the construction of a new entity based on a pre-existing entity."@en ; + prov:inverse "hadDerivation" ; + prov:qualifiedForm prov:Derivation , prov:qualifiedDerivation . + +vcard:region rdf:type owl:DatatypeProperty ; + rdfs:comment "The region (e.g. state or province) associated with the address of the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "region"@en ; + rdfs:range xsd:string . + +spdx:ReferenceCategory + rdf:type owl:Class ; + rdfs:comment "Category used for ExternalRef"@en ; + vs:term_status "stable"@en . + +vcard:TelephoneType rdf:type owl:Class ; + rdfs:comment "Used for telephone type codes. The URI of the telephone type code must be used as the value for the Telephone Type."@en ; + rdfs:isDefinedBy ; + rdfs:label "Phone"@en . + +dcterms:TGN rdf:type dcam:VocabularyEncodingScheme ; + rdfs:comment "The set of places specified by the Getty Thesaurus of Geographic Names."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "TGN"@en ; + rdfs:seeAlso ; + dcterms:issued "2000-07-11"^^xsd:date . + +spdx:relationshipType_fileAdded + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "A Relationship of relationshipType_fileAdded expresses that the SPDXElement is a file which has been added to the relatedSPDXElement package. 
For example, a package (the relatedSPDXElement) has been patched to remove a file (the SPDXElement). This relationship is typically used to express the result of a patched package when the actual patchfile is not present."@en ; + vs:term_status "stable"@en . + +time:inDateTime rdf:type owl:ObjectProperty ; + rdfs:comment "Position of an instant, expressed using a structured description"@en , "Posición de un instante, expresada utilizando una descripción estructurada."@es ; + rdfs:domain time:Instant ; + rdfs:label "in date-time description"@en , "en descripción de fecha-hora"@es ; + rdfs:range time:GeneralDateTimeDescription ; + rdfs:subPropertyOf time:inTemporalPosition ; + skos:definition "Posición de un instante, expresada utilizando una descripción estructurada."@es , "Position of an instant, expressed using a structured description"@en . + +spdx:relationshipType_generatedFrom + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "A Relationship of relationshipType_generatedFrom expresses that an SPDXElement was generated from the relatedSPDXElement. For example, a binary File might have been generated from a source File."@en ; + vs:term_status "stable"@en . + +xsd:date rdf:type rdfs:Datatype . + +vcard:hasKey rdf:type owl:ObjectProperty ; + rdfs:comment "To specify a public key or authentication certificate associated with the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "has key"@en ; + owl:equivalentProperty vcard:key . 
+ +spdx:Package rdf:type owl:Class ; + rdfs:comment "A Package represents a collection of software files that are delivered as a single functional component."@en ; + rdfs:subClassOf spdx:SpdxItem ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onClass spdx:File ; + owl:onProperty spdx:hasFile + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:boolean ; + owl:onProperty spdx:filesAnalyzed + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:originator + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onClass spdx:Checksum ; + owl:onProperty spdx:checksum + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:anyURI ; + owl:onProperty doap:homepage + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:date ; + owl:onProperty spdx:validUntilDate + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:summary + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:sourceInfo + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onClass spdx:PackageVerificationCode ; + owl:onProperty spdx:packageVerificationCode + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:packageFileName + ] ; + 
rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:description + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onDataRange xsd:anyURI ; + owl:onProperty spdx:downloadLocation ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:supplier + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:minQualifiedCardinality "0"^^xsd:nonNegativeInteger ; + owl:onClass spdx:ExternalRef ; + owl:onProperty spdx:externalRef + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onClass spdx:Purpose ; + owl:onProperty spdx:primaryPackagePurpose + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty spdx:versionInfo + ] ; + rdfs:subClassOf [ rdf:type owl:Class ; + owl:unionOf ( [ rdf:type owl:Restriction ; + owl:hasValue spdx:noassertion ; + owl:onProperty spdx:licenseDeclared + ] + [ rdf:type owl:Restriction ; + owl:hasValue spdx:none ; + owl:onProperty spdx:licenseDeclared + ] + [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onClass spdx:AnyLicenseInfo ; + owl:onProperty spdx:licenseDeclared + ] + ) + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:date ; + owl:onProperty spdx:builtDate + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:date ; + owl:onProperty spdx:releaseDate + ] ; + vs:term_status "stable"@en . + + + rdf:type owl:Ontology ; + owl:imports , , , , , , , , , . 
+ +prov:unqualifiedForm rdf:type owl:AnnotationProperty ; + rdfs:comment "Classes and properties used to qualify relationships are annotated with prov:unqualifiedForm to indicate the property used to assert an unqualified provenance relation."@en ; + rdfs:isDefinedBy ; + rdfs:subPropertyOf rdfs:seeAlso . + +spdx:relationshipType_specificationFor + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "Is to be used when SPDXRef-A describes, illustrates, or defines a design specification for SPDXRef-B."@en ; + vs:term_status "stable"@en . + +spdx:crossRef rdf:type owl:ObjectProperty ; + rdfs:comment "Cross Reference Detail for a license SeeAlso URL"@en ; + rdfs:range spdx:SimpleLicensingInfo . + +vcard:Internet rdf:type owl:Class ; + rdfs:comment "This class is deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "Internet"@en ; + rdfs:subClassOf vcard:Type ; + owl:deprecated true . + + + rdf:type sh:NodeShape ; + sh:name "Dataset"@en ; + sh:property [ sh:path dcterms:isVersionOf ; + sh:severity sh:Violation + ] ; + sh:property [ sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcterms:relation ; + sh:severity sh:Violation + ] ; + sh:property [ sh:datatype xsd:decimal ; + sh:maxCount 1 ; + sh:path dcat:spatialResolutionInMeters ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcterms:creator ; + sh:severity sh:Violation + ] ; + sh:property [ sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dc:isReferencedBy ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path dcterms:accessRights ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcterms:provenance ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path dcterms:publisher ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcterms:spatial ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcterms:hasVersion ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcat:distribution ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path 
adms:identifier ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:node ; + sh:path dcterms:modified ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcterms:conformsTo ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:path dcterms:accrualPeriodicity ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcterms:type ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path adms:sample ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcterms:source ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcat:theme ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcterms:temporal ; + sh:severity sh:Violation + ] ; + sh:property [ sh:nodeKind sh:Literal ; + sh:path dcat:keyword ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path foaf:page ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcterms:language ; + sh:severity sh:Violation + ] ; + sh:property [ sh:nodeKind sh:Literal ; + sh:path adms:versionNotes ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcat:contactPoint ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:node ; + sh:path dcterms:issued ; + sh:severity sh:Violation + ] ; + sh:property [ sh:datatype xsd:duration ; + sh:maxCount 1 ; + sh:path dcat:temporalResolution ; + sh:severity sh:Violation + ] ; + sh:property [ sh:nodeKind sh:Literal ; + sh:path dcterms:identifier ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path prov:qualifiedAttribution ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path prov:wasGeneratedBy ; + sh:severity sh:Violation + ] ; + sh:property [ sh:maxCount 1 ; + sh:nodeKind sh:Literal ; + sh:path owl:versionInfo ; + sh:severity sh:Violation + ] ; + sh:property [ sh:minCount 1 ; + sh:nodeKind sh:Literal ; + sh:path dcterms:title ; + sh:severity sh:Violation + ] ; + sh:property [ sh:minCount 1 ; + sh:nodeKind sh:Literal ; + sh:path dcterms:description ; + sh:severity sh:Violation + ] ; + 
sh:property [ sh:path dcat:landingPage ; + sh:severity sh:Violation + ] ; + sh:property [ sh:path dcat:qualifiedRelation ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:Dataset . + +vcard:TextPhone rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Text phone"@en ; + rdfs:subClassOf vcard:TelephoneType . + +vcard:Date rdf:type owl:Class ; + rdfs:isDefinedBy ; + rdfs:label "Date"@en ; + rdfs:subClassOf vcard:RelatedType . + +vcard:hasURL rdf:type owl:ObjectProperty ; + rdfs:comment "To specify a uniform resource locator associated with the object"@en ; + rdfs:isDefinedBy ; + rdfs:label "has url"@en ; + owl:equivalentProperty vcard:url . + +spdx:licenseExceptionTemplate + rdf:type owl:DatatypeProperty ; + rdfs:comment "Template for matching license exception text"@en ; + rdfs:domain spdx:LicenseException ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +spdx:relationshipType_documentation + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "To be used when SPDXRef-A provides documentation of SPDXRef-B."@en ; + vs:term_status "stable"@en . + +spdx:relationshipType_containedBy + rdf:type owl:NamedIndividual , spdx:RelationshipType ; + rdfs:comment "A Relationship of relationshipType_containedBy expresses that an SPDXElement is contained by the relatedSPDXElement. For example, a File contained by a Package. "@en ; + vs:term_status "stable"@en . + +vcard:extended-address + rdf:type owl:DatatypeProperty ; + rdfs:comment "This data property has been deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "extended address"@en ; + owl:deprecated true . + +vcard:Dom rdf:type owl:Class ; + rdfs:comment "This class is deprecated"@en ; + rdfs:isDefinedBy ; + rdfs:label "Dom"@en ; + rdfs:subClassOf vcard:Type ; + owl:deprecated true . 
+ +spdx:attributionText rdf:type owl:DatatypeProperty ; + rdfs:comment "This field provides a place for the SPDX data creator to record acknowledgements that may be required to be communicated in some contexts. This is not meant to include the actual complete license text (see licenseConculded and licenseDeclared), and may or may not include copyright notices (see also copyrightText). The SPDX data creator may use this field to record other acknowledgements, such as particular clauses from license texts, which may be necessary or desirable to reproduce."@en ; + rdfs:domain spdx:SpdxItem ; + rdfs:range xsd:string ; + vs:term_status "stable"@en . + +time:inXSDgYearMonth rdf:type owl:DatatypeProperty ; + rdfs:comment "Position of an instant, expressed using xsd:gYearMonth"@en , "Posición de un instante, expresado utilizando xsd:gYearMonth."@es ; + rdfs:domain time:Instant ; + rdfs:label "in XSD g-YearMonth"@en , "en año-mes gregoriano XSD"@es ; + rdfs:range xsd:gYearMonth ; + skos:definition "Position of an instant, expressed using xsd:gYearMonth"@en , "Posición de un instante, expresado utilizando xsd:gYearMonth."@es . + +prov:Derivation rdf:type owl:Class ; + rdfs:comment "An instance of prov:Derivation provides additional descriptions about the binary prov:wasDerivedFrom relation from some derived prov:Entity to another prov:Entity from which it was derived. 
For example, :chewed_bubble_gum prov:wasDerivedFrom :unwrapped_bubble_gum; prov:qualifiedDerivation [ a prov:Derivation; prov:entity :unwrapped_bubble_gum; :foo :bar ]."@en , "The more specific forms of prov:Derivation (i.e., prov:Revision, prov:Quotation, prov:PrimarySource) should be asserted if they apply."@en ; + rdfs:isDefinedBy ; + rdfs:label "Derivation" ; + rdfs:subClassOf prov:EntityInfluence ; + prov:category "qualified" ; + prov:component "derivations" ; + prov:constraints "http://www.w3.org/TR/2013/REC-prov-constraints-20130430/#prov-dm-constraints-fig"^^xsd:anyURI ; + prov:definition "A derivation is a transformation of an entity into another, an update of an entity resulting in a new one, or the construction of a new entity based on a pre-existing entity."@en ; + prov:dm "http://www.w3.org/TR/2013/REC-prov-dm-20130430/#term-Derivation"^^xsd:anyURI ; + prov:n "http://www.w3.org/TR/2013/REC-prov-n-20130430/#Derivation-Relation"^^xsd:anyURI ; + prov:unqualifiedForm prov:wasDerivedFrom . + +spdx:Relationship rdf:type owl:Class ; + rdfs:comment "A Relationship represents a relationship between two SpdxElements."@en ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onClass spdx:SpdxElement ; + owl:onProperty spdx:relatedSpdxElement ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:onClass spdx:RelationshipType ; + owl:onProperty spdx:relationshipType ; + owl:qualifiedCardinality "1"^^xsd:nonNegativeInteger + ] ; + rdfs:subClassOf [ rdf:type owl:Restriction ; + owl:maxQualifiedCardinality "1"^^xsd:nonNegativeInteger ; + owl:onDataRange xsd:string ; + owl:onProperty rdfs:comment + ] ; + vs:term_status "stable"@en . 
+ +dcterms:dateCopyrighted + rdf:type rdf:Property ; + rdfs:comment "Date of copyright of the resource."@en ; + rdfs:isDefinedBy dcterms: ; + rdfs:label "Date Copyrighted"@en ; + rdfs:range rdfs:Literal ; + rdfs:subPropertyOf dc:date , dcterms:date ; + dcterms:description "Typically a year. Recommended practice is to describe the date, date/time, or period of time as recommended for the property Date, of which this is a subproperty."@en ; + dcterms:issued "2002-07-13"^^xsd:date . + +time:years rdf:type owl:DatatypeProperty ; + rdfs:comment "length of, or element of the length of, a temporal extent expressed in years"@en , "Longitud de, o elemento de la longitud de, una extensión temporal expresada en años."@es ; + rdfs:domain time:GeneralDurationDescription ; + rdfs:label "years duration"@en , "duración en años"@es ; + rdfs:range xsd:decimal . + +vcard:hasOrganizationName + rdf:type owl:ObjectProperty ; + rdfs:comment "Used to support property parameters for the organization name data property"@en ; + rdfs:isDefinedBy ; + rdfs:label "has organization name"@en . + +prov:constraints rdf:type owl:AnnotationProperty ; + rdfs:comment "A reference to the principal section of the PROV-CONSTRAINTS document that describes this concept."@en ; + rdfs:isDefinedBy ; + rdfs:subPropertyOf rdfs:seeAlso . + + + rdf:type owl:Ontology ; + rdfs:comment "This specification describes the SPDX® language, defined as a dictionary of named properties and classes using W3C's RDF Technology.\n\nSPDX® is an open standard for communicating software bill of material information, including components, licenses, copyrights, and security references. 
SPDX reduces redundant work by providing a common format for companies and communities to share important data, thereby streamlining and improving compliance.\n.\nKnown issues:\n- rdfs:comment and rdfs:seeAlso are used within the SPDX classes and causes a redefinition of these standard terms"@en ; + rdfs:label "SPDX 2.3" ; + owl:versionIRI ; + owl:versionInfo 2.3 . diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/dcat-ap-hvd-2.2.0-SHACL.ttl b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/dcat-ap-hvd-2.2.0-SHACL.ttl new file mode 100644 index 000000000000..36fccb764e5d --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/dcat-ap-hvd-2.2.0-SHACL.ttl @@ -0,0 +1,712 @@ +@prefix dc: . +@prefix dcat: . +@prefix foaf: . +@prefix owl: . +@prefix rdf: . +@prefix rdfs: . +@prefix shacl: . +@prefix skos: . +@prefix vcard: . +@prefix xsd: . + + rdfs:member , + , + , + , + , + , + , + , + , + , + , + , + , + , + . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + ; + shacl:targetClass dcat:CatalogRecord . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#CatalogueRecord.primarytopic"; + shacl:description "A link to the Dataset, Data service or Catalog described in the record."@en; + shacl:name "primary topic"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path foaf:primaryTopic; + "The expected value for primary topic is a rdfs:Resource (URI or blank node)"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#CatalogueRecord.primarytopic"; + shacl:description "A link to the Dataset, Data service or Catalog described in the record."@en; + shacl:minCount 1; + shacl:name "primary topic"@en; + shacl:path foaf:primaryTopic; + "Minimally 1 values are expected for primary topic"@en . 
+ + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#CatalogueRecord.primarytopic"; + shacl:class dcat:Resource; + shacl:description "A link to the Dataset, Data service or Catalog described in the record."@en; + shacl:name "primary topic"@en; + shacl:path foaf:primaryTopic; + "The range of primary topic must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#CatalogueRecord.primarytopic"; + shacl:description "A link to the Dataset, Data service or Catalog described in the record."@en; + shacl:maxCount 1; + shacl:name "primary topic"@en; + shacl:path foaf:primaryTopic; + "Maximally 1 values allowed for primary topic"@en . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + ; + shacl:targetClass dcat:Catalog . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Catalogue.dataset"; + shacl:description "A Dataset that is part of the Catalogue."@en; + shacl:name "dataset"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:dataset; + "The expected value for dataset is a rdfs:Resource (URI or blank node)"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Catalogue.record"; + shacl:class dcat:CatalogRecord; + shacl:description "A Catalogue Record that is part of the Catalogue"@en; + shacl:name "record"@en; + shacl:path dcat:record; + "The range of record must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Catalogue.record"; + shacl:description "A Catalogue Record that is part of the Catalogue"@en; + shacl:name "record"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:record; + "The expected value for record is a rdfs:Resource (URI or blank node)"@en . 
+ + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Catalogue.dataset"; + shacl:class dcat:Dataset; + shacl:description "A Dataset that is part of the Catalogue."@en; + shacl:name "dataset"@en; + shacl:path dcat:dataset; + "The range of dataset must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Catalogue.service"; + shacl:class dcat:DataService; + shacl:description "A site or end-point (Data Service) that is listed in the Catalogue."@en; + shacl:name "service"@en; + shacl:path dcat:service; + "The range of service must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Catalogue.service"; + shacl:description "A site or end-point (Data Service) that is listed in the Catalogue."@en; + shacl:name "service"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:service; + "The expected value for service is a rdfs:Resource (URI or blank node)"@en . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass dcat:Resource . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass skos:Concept . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + ; + shacl:targetClass dcat:DataService . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.applicablelegislation"; + shacl:description "The legislation that mandates the creation or management of the Data Service."@en; + shacl:name "applicable legislation"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path ; + "The expected value for applicable legislation is a rdfs:Resource (URI or blank node)"@en . 
+ + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.servesdataset"; + shacl:class dcat:Dataset; + shacl:description "This property refers to a collection of data that this data service can distribute."@en; + shacl:name "serves dataset"@en; + shacl:path dcat:servesDataset; + "The range of serves dataset must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.HVDcategory"; + shacl:class skos:Concept; + shacl:description "The HVD category to which this Data Service belongs."@en; + shacl:name "HVD category"@en; + shacl:path ; + "The range of HVD category must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.endpointURL"; + shacl:description "The root location or primary endpoint of the service (an IRI)."@en; + shacl:name "endpoint URL"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:endpointURL; + "The expected value for endpoint URL is a rdfs:Resource (URI or blank node)"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.HVDcategory"; + shacl:description "The HVD category to which this Data Service belongs."@en; + shacl:name "HVD category"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path ; + "The expected value for HVD category is a rdfs:Resource (URI or blank node)"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.endpointdescription"; + shacl:description "A description of the services available via the end-points, including their operations, parameters etc."@en; + shacl:name "endpoint description"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:endpointDescription; + "The expected value for endpoint description is a rdfs:Resource (URI or blank node)"@en . 
+ + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.documentation"; + shacl:class foaf:Document; + shacl:description "A page that provides additional information about the Data Service."@en; + shacl:name "documentation"@en; + shacl:path foaf:Page; + "The range of documentation must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.contactpoint"; + shacl:description "Contact information that can be used for sending comments about the Data Service."@en; + shacl:minCount 1; + shacl:name "contact point"@en; + shacl:path dcat:contactPoint; + "Minimally 1 values are expected for contact point"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.applicablelegislation"; + shacl:class ; + shacl:description "The legislation that mandates the creation or management of the Data Service."@en; + shacl:name "applicable legislation"@en; + shacl:path ; + "The range of applicable legislation must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.contactpoint"; + shacl:class vcard:Kind; + shacl:description "Contact information that can be used for sending comments about the Data Service."@en; + shacl:name "contact point"@en; + shacl:path dcat:contactPoint; + "The range of contact point must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.endpointURL"; + shacl:description "The root location or primary endpoint of the service (an IRI)."@en; + shacl:minCount 1; + shacl:name "endpoint URL"@en; + shacl:path dcat:endpointURL; + "Minimally 1 values are expected for endpoint URL"@en . 
+ + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.documentation"; + shacl:description "A page that provides additional information about the Data Service."@en; + shacl:minCount 1; + shacl:name "documentation"@en; + shacl:path foaf:Page; + "Minimally 1 values are expected for documentation"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.rights"; + shacl:class dc:RightsStatement; + shacl:description "A statement that specifies rights associated with the Distribution."@en; + shacl:name "rights"@en; + shacl:path dc:rights; + "The range of rights must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.documentation"; + shacl:description "A page that provides additional information about the Data Service."@en; + shacl:name "documentation"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path foaf:Page; + "The expected value for documentation is a rdfs:Resource (URI or blank node)"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.contactpoint"; + shacl:description "Contact information that can be used for sending comments about the Data Service."@en; + shacl:name "contact point"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:contactPoint; + "The expected value for contact point is a rdfs:Resource (URI or blank node)"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.licence"; + shacl:description "A licence under which the Data service is made available."@en; + shacl:maxCount 1; + shacl:name "licence"@en; + shacl:path dc:license; + "Maximally 1 values allowed for licence"@en . 
+ + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.rights"; + shacl:description "A statement that specifies rights associated with the Distribution."@en; + shacl:name "rights"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:rights; + "The expected value for rights is a rdfs:Resource (URI or blank node)"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.servesdataset"; + shacl:description "This property refers to a collection of data that this data service can distribute."@en; + shacl:name "serves dataset"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:servesDataset; + "The expected value for serves dataset is a rdfs:Resource (URI or blank node)"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.applicablelegislation"; + shacl:description "The legislation that mandates the creation or management of the Data Service."@en; + shacl:minCount 1; + shacl:name "applicable legislation"@en; + shacl:path ; + "Minimally 1 values are expected for applicable legislation"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.licence"; + shacl:class dc:LicenseDocument; + shacl:description "A licence under which the Data service is made available."@en; + shacl:name "licence"@en; + shacl:path dc:license; + "The range of licence must be of type ."@en . + + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.servesdataset"; + shacl:description "This property refers to a collection of data that this data service can distribute."@en; + shacl:minCount 1; + shacl:name "serves dataset"@en; + shacl:path dcat:servesDataset; + "Minimally 1 values are expected for serves dataset"@en . 
+ + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.licence"; + shacl:description "A licence under which the Data service is made available."@en; + shacl:name "licence"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:license; + "The expected value for licence is a rdfs:Resource (URI or blank node)"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.HVDcategory"; + shacl:description "The HVD category to which this Data Service belongs."@en; + shacl:minCount 1; + shacl:name "HVD category"@en; + shacl:path ; + "Minimally 1 values are expected for HVD category"@en . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + , + , + , + , + , + , + , + , + , + ; + shacl:targetClass dcat:Dataset . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Dataset.conformsto"; + shacl:description "An implementing rule or other specification."@en; + shacl:name "conforms to"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:conformsTo; + "The expected value for conforms to is a rdfs:Resource (URI or blank node)"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Dataset.applicablelegislation"; + shacl:description "The legislation that mandates the creation or management of the Dataset."@en; + shacl:name "applicable legislation"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path ; + "The expected value for applicable legislation is a rdfs:Resource (URI or blank node)"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Dataset.datasetdistribution"; + shacl:class dcat:Distribution; + shacl:description "An available Distribution for the Dataset."@en; + shacl:name "dataset distribution"@en; + shacl:path dcat:distribution; + "The range of dataset distribution must be of type ."@en . 
+ + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Dataset.datasetdistribution"; + shacl:description "An available Distribution for the Dataset."@en; + shacl:minCount 1; + shacl:name "dataset distribution"@en; + shacl:path dcat:distribution; + "Minimally 1 values are expected for dataset distribution"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Dataset.applicablelegislation"; + shacl:class ; + shacl:description "The legislation that mandates the creation or management of the Dataset."@en; + shacl:name "applicable legislation"@en; + shacl:path ; + "The range of applicable legislation must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Dataset.contactpoint"; + shacl:class vcard:Kind; + shacl:description "Contact information that can be used for sending comments about the Dataset."@en; + shacl:name "contact point"@en; + shacl:path dcat:contactPoint; + "The range of contact point must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Dataset.HVDCategory"; + shacl:class skos:Concept; + shacl:description "The HVD category to which this Dataset belongs."@en; + shacl:name "HVD Category"@en; + shacl:path ; + "The range of HVD Category must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Dataset.HVDCategory"; + shacl:description "The HVD category to which this Dataset belongs."@en; + shacl:name "HVD Category"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path ; + "The expected value for HVD Category is a rdfs:Resource (URI or blank node)"@en . 
+ + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Dataset.contactpoint"; + shacl:description "Contact information that can be used for sending comments about the Dataset."@en; + shacl:name "contact point"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:contactPoint; + "The expected value for contact point is a rdfs:Resource (URI or blank node)"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Dataset.HVDCategory"; + shacl:description "The HVD category to which this Dataset belongs."@en; + shacl:minCount 1; + shacl:name "HVD Category"@en; + shacl:path ; + "Minimally 1 values are expected for HVD Category"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Dataset.conformsto"; + shacl:class dc:Standard; + shacl:description "An implementing rule or other specification."@en; + shacl:name "conforms to"@en; + shacl:path dc:conformsTo; + "The range of conforms to must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Dataset.applicablelegislation"; + shacl:description "The legislation that mandates the creation or management of the Dataset."@en; + shacl:minCount 1; + shacl:name "applicable legislation"@en; + shacl:path ; + "Minimally 1 values are expected for applicable legislation"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Dataset.datasetdistribution"; + shacl:description "An available Distribution for the Dataset."@en; + shacl:name "dataset distribution"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:distribution; + "The expected value for dataset distribution is a rdfs:Resource (URI or blank node)"@en . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + , + , + , + , + , + , + , + , + , + ; + shacl:targetClass dcat:Distribution . 
+ + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Distribution.applicablelegislation"; + shacl:description "The legislation that mandates the creation or management of the Distribution"@en; + shacl:name "applicable legislation"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path ; + "The expected value for applicable legislation is a rdfs:Resource (URI or blank node)"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Distribution.linkedschemas"; + shacl:class dc:Standard; + shacl:description "An established schema to which the described Distribution conforms."@en; + shacl:name "linked schemas"@en; + shacl:path dc:conformsTo; + "The range of linked schemas must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Distribution.accessservice"; + shacl:description "A data service that gives access to the distribution of the dataset"@en; + shacl:name "access service"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:accessService; + "The expected value for access service is a rdfs:Resource (URI or blank node)"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Distribution.linkedschemas"; + shacl:description "An established schema to which the described Distribution conforms."@en; + shacl:name "linked schemas"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:conformsTo; + "The expected value for linked schemas is a rdfs:Resource (URI or blank node)"@en . + + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Distribution.applicablelegislation"; + shacl:class ; + shacl:description "The legislation that mandates the creation or management of the Distribution"@en; + shacl:name "applicable legislation"@en; + shacl:path ; + "The range of applicable legislation must be of type ."@en . 
+ + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Distribution.accessservice"; + shacl:class dcat:DataService; + shacl:description "A data service that gives access to the distribution of the dataset"@en; + shacl:name "access service"@en; + shacl:path dcat:accessService; + "The range of access service must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Distribution.rights"; + shacl:class dc:RightsStatement; + shacl:description "A statement that specifies rights associated with the Distribution."@en; + shacl:name "rights"@en; + shacl:path dc:rights; + "The range of rights must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Distribution.licence"; + shacl:description "A licence under which the Distribution is made available."@en; + shacl:maxCount 1; + shacl:name "licence"@en; + shacl:path dc:license; + "Maximally 1 values allowed for licence"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Distribution.accessURL"; + shacl:description "A URL that gives access to a Distribution of the Dataset."@en; + shacl:name "access URL"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:accessURL; + "The expected value for access URL is a rdfs:Resource (URI or blank node)"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Distribution.rights"; + shacl:description "A statement that specifies rights associated with the Distribution."@en; + shacl:name "rights"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:rights; + "The expected value for rights is a rdfs:Resource (URI or blank node)"@en . 
+ + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Distribution.applicablelegislation"; + shacl:description "The legislation that mandates the creation or management of the Distribution"@en; + shacl:minCount 1; + shacl:name "applicable legislation"@en; + shacl:path ; + "Minimally 1 values are expected for applicable legislation"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Distribution.licence"; + shacl:class dc:LicenseDocument; + shacl:description "A licence under which the Distribution is made available."@en; + shacl:name "licence"@en; + shacl:path dc:license; + "The range of licence must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Distribution.accessURL"; + shacl:description "A URL that gives access to a Distribution of the Dataset."@en; + shacl:minCount 1; + shacl:name "access URL"@en; + shacl:path dcat:accessURL; + "Minimally 1 values are expected for access URL"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Distribution.licence"; + shacl:description "A licence under which the Distribution is made available."@en; + shacl:name "licence"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:license; + "The expected value for licence is a rdfs:Resource (URI or blank node)"@en . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass foaf:Document . + + a shacl:NodeShape; + shacl:closed false; + shacl:property + , + , + , + ; + shacl:targetClass vcard:Kind . + + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Kind.email"; + shacl:description """A email address via which contact can be made."""@en; + shacl:name "email"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path vcard:hasEmail; + "The expected value for email is a rdfs:Resource (URI or blank node)"@en . 
+ + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Kind.contactpage"; + shacl:description "A webpage that either allows to make contact (i.e. a webform) or the information contains how to get into contact. "@en; + shacl:name "contact page"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path vcard:hasURL; + "The expected value for contact page is a rdfs:Resource (URI or blank node)"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Kind.contactpage"; + shacl:description "A webpage that either allows to make contact (i.e. a webform) or the information contains how to get into contact. "@en; + shacl:maxCount 1; + shacl:name "contact page"@en; + shacl:path vcard:hasURL; + "Maximally 1 values allowed for contact page"@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Kind.email"; + shacl:description """A email address via which contact can be made."""@en; + shacl:maxCount 1; + shacl:name "email"@en; + shacl:path vcard:hasEmail; + "Maximally 1 values allowed for email"@en . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass dc:LicenseDocument . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass rdfs:Literal . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass rdfs:Resource . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass dc:RightsStatement . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass dc:Standard . + + + + rdfs:seeAlso "https://semiceu.github.io/uri.semic.eu-generated/DCAT-AP/releases/2.2.0-hvd/#Kind"; + shacl:description """It is recommended to provide at least either an email or a contact form from e.g. a service desk. 
"""@en; + shacl:or ( + [ + shacl:path vcard:hasEmail; + shacl:minCount 1 ; + ] + [ + shacl:path vcard:hasURL; + shacl:minCount 1 ; + ] + ) ; + a shacl:NodeShape ; + shacl:targetClass vcard:Kind ; + shacl:severity shacl:Warning ; + "It is recommended to provide at least either an email or a contact form from e.g. a service desk. "@en . + + + + rdfs:seeAlso "https://semiceu.github.io/uri.semic.eu-generated/DCAT-AP/releases/2.2.0-hvd/#c3"; + shacl:description """It is mandatory to provide legal information."""@en; + shacl:or ( + [ + shacl:path dc:license; + shacl:minCount 1 ; + ] + [ + shacl:path dc:rights; + shacl:minCount 1 ; + ] + ) ; + a shacl:NodeShape ; + shacl:targetClass dcat:Distribution; + "It is mandatory to provide legal information."@en . + + rdfs:seeAlso "https://semiceu.github.io/uri.semic.eu-generated/DCAT-AP/releases/2.2.0-hvd/#c3"; + shacl:description """It is mandatory to provide legal information."""@en; + shacl:or ( + [ + shacl:path dc:license; + shacl:minCount 1 ; + ] + [ + shacl:path dc:rights; + shacl:minCount 1 ; + ] + ) ; + a shacl:NodeShape ; + shacl:targetClass dcat:DataService; + "It is mandatory to provide legal information."@en . + + + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.applicablelegislation"; + shacl:description "The applicable legislation must be set to the HVD IR ELI."@en; + shacl:path ; + shacl:hasValue ; + "The applicable legislation must be set to the HVD IR ELI."@en . + + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Dataset.applicablelegislation"; + shacl:description "The applicable legislation must be set to the HVD IR ELI."@en; + shacl:path ; + shacl:hasValue ; + "The applicable legislation must be set to the HVD IR ELI."@en . 
+ + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Distribution.applicablelegislation"; + shacl:description "The applicable legislation must be set to the HVD IR ELI."@en; + shacl:path ; + shacl:hasValue ; + "The applicable legislation must be set to the HVD IR ELI."@en . + + + rdf:type owl:Ontology ; + owl:imports . + + + a shacl:NodeShape ; + rdfs:comment "HVD Category Restriction" ; + rdfs:label "HVD Category Restriction" ; + shacl:property [ + shacl:hasValue ; + shacl:minCount 1 ; + shacl:nodeKind shacl:IRI ; + shacl:path skos:inScheme + ] . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#Dataset.HVDcategory"; + shacl:description "The HVD category to which this Dataset belongs."@en; + shacl:name "HVD category"@en; + shacl:path ; + shacl:node ; + "The range of HVD category must be of type ."@en . + + rdfs:seeAlso "https://semiceu.github.io//DCAT-AP/releases/2.2.0-hvd#DataService.HVDcategory"; + shacl:description "The HVD category to which this Data Service belongs."@en; + shacl:name "HVD category"@en; + shacl:path ; + shacl:node ; + "The range of HVD category must be of type ."@en . 
diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/README.md b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/README.md new file mode 100644 index 000000000000..c77c21496c7c --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/README.md @@ -0,0 +1 @@ +SHACL from https://github.com/SEMICeu/DCAT-AP/tree/gh-pages/releases/3.0.0-draft/html/shacl \ No newline at end of file diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/deprecateduris.ttl b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/deprecateduris.ttl new file mode 100644 index 000000000000..4f606ff14feb --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/deprecateduris.ttl @@ -0,0 +1,123 @@ +@prefix rdf: . +@prefix : . +@prefix adms: . +@prefix dc: . +@prefix dcat: . +@prefix dct: . +@prefix foaf: . +@prefix lcon: . +@prefix org: . +@prefix owl: . +@prefix odrl: . +@prefix prov: . +@prefix rdfs: . +@prefix schema: . +@prefix sh: . +@prefix skos: . +@prefix spdx: . +@prefix time: . +@prefix vcard: . +@prefix xsd: . +@prefix dcatap: . + +#------------------------------------------------------------------------- +# The shapes in this file cover all URI changes that require attention +# by the catalogue owner. 
+# +# deprecated by the transition from version 1.x to 2.x +# +#------------------------------------------------------------------------- + +:PeriodOfTimeDeprecation_Shape + a sh:NodeShape ; + rdfs:label "PeriodOfTime Deprecation properties"@en ; + sh:property [ + sh:path schema:endDate ; + sh:severity sh:Warning ; + sh:message "replace property schema:endDate with dcat:endDate"@en + ], [ + sh:path schema:startDate ; + sh:severity sh:Warning ; + sh:message "replace property schema:startDate with dcat:startDate"@en + ] ; + sh:targetClass dct:PeriodOfTime . + + +#------------------------------------------------------------------------- +# The shapes in this file cover all URI changes that require attention +# by the catalogue owner. +# +# deprecated by the transition from version 1.x to 2.x +# +#------------------------------------------------------------------------- + + +:DatasetDeprecation_Shape + a sh:NodeShape ; + rdfs:label "Dataset Deprecation properties"@en ; + sh:property [ + sh:path dct:hasVersion ; + sh:severity sh:Warning ; + sh:message "replace property dct:hasVersion with dcat:hasVersion"@en + + ], [ + sh:path dct:isVersionOf ; + sh:severity sh:Warning ; + sh:message "replace dct:isVersionOf with dcat:isVersionOf"@en + ], [ + sh:path owl:versionInfo ; + sh:severity sh:Warning ; + sh:message "replace owl:versionInfo with dcat:version"@en + ]; + sh:targetClass dcat:Dataset . + + +#------------------------------------------------------------------------- +# The shapes in this file cover all URI changes that require attention +# by the catalogue owner. 
+# +# deprecated by the transition from version 2.x to 3.x +# +#------------------------------------------------------------------------- + + +:DatasetInverseProperties_Shape + a sh:NodeShape ; + rdfs:label "Dataset Deprecation properties"@en ; + sh:property [ + sh:path dcat:isVersionOf ; + sh:severity sh:Warning ; + sh:message "dcat:isVersionOf is an inverse property and should only be used if dcat:hasVersion is present."@en + ]; + sh:targetClass dcat:Dataset . + + + + + rdf:type owl:Ontology ; + owl:imports . + + +:StatusRestrictionADMS + a sh:NodeShape ; + rdfs:comment "Status restriction" ; + rdfs:label "Status restriction" ; + sh:property [ + sh:class skos:ConceptScheme ; + sh:hasValue ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path skos:inScheme + ] . + + +:Distribution_ShapeCV + a sh:NodeShape ; + sh:property [ + sh:node :StatusRestrictionADMS ; + sh:nodeKind sh:IRI ; + sh:path adms:status ; + sh:description "The codelist of adms:status has changed from DCAT-AP 2.1 to DCAT-AP 3.0.0" ; + sh:severity sh:Warning + ] ; + sh:targetClass dcat:Distribution. \ No newline at end of file diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/imports.ttl b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/imports.ttl new file mode 100644 index 000000000000..47cc2efc3500 --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/imports.ttl @@ -0,0 +1,28 @@ +@prefix owl: . +@prefix rdf: . +@prefix rdfs: . +@prefix xsd: . + +# +# This file provides the imports that are implicitly the result of reusing them in the DCAT-AP application profile. +# The imports point to the URL of the RDF serializations (mostly the turtle serializations) as not all ontology URIs have content negotation implemented. +# The RDF format is required for the ISA testbed validator. 
+# The following imports have been outcommented: +# owl:imports ; import is excluded because the shacl shape for Category applies to all instances of skos:Concept and the skos:Concepts in the ODRL do not comply to this. + + + + rdf:type owl:Ontology ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports + . + + diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/mdr-vocabularies.shape.ttl b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/mdr-vocabularies.shape.ttl new file mode 100644 index 000000000000..c2cbfb8043c6 --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/mdr-vocabularies.shape.ttl @@ -0,0 +1,440 @@ +@prefix rdf: . +@prefix : . +@prefix adms: . +@prefix cc: . +@prefix dc: . +@prefix dcat: . +@prefix dct: . +@prefix foaf: . +@prefix lcon: . +@prefix org: . +@prefix owl: . +@prefix odrl: . +@prefix prov: . +@prefix rdfs: . +@prefix schema: . +@prefix sh: . +@prefix skos: . +@prefix spdx: . +@prefix time: . +@prefix vcard: . +@prefix xsd: . +@prefix dcatap: . 
+ + + + dcat:accessURL ; + dcat:downloadURL ; + dcatap:availability ; + dct:format ; + dct:conformsTo ; + dct:creator [ + rdfs:seeAlso ; + org:memberOf ; + foaf:homepage ; + foaf:name "Bert Van Nuffelen" + ], [ + rdfs:seeAlso ; + org:memberOf ; + foaf:homepage ; + foaf:name "Natasa Sofou" + ], [ + rdfs:seeAlso ; + org:memberOf ; + foaf:homepage ; + foaf:name "Eugeniu Costetchi" + ], [ + rdfs:seeAlso ; + org:memberOf ; + foaf:homepage ; + foaf:name "Makx Dekkers" + ], [ + rdfs:seeAlso ; + org:memberOf ; + foaf:homepage ; + foaf:name "Nikolaos Loutas" + ], [ + rdfs:seeAlso ; + org:memberOf ; + foaf:homepage ; + foaf:name "Vassilios Peristeras" + ] ; + dct:license ; + cc:attributionURL ; + dct:modified "2021-12-01"^^xsd:date ; + dct:publisher ; + dct:relation ; + dct:description "This document specifies the controlled vocabulary constraints on properties expressed by DCAT-AP in SHACL."@en ; + dct:title "Controlled Vocabulary Constraints of DCAT Application Profile for Data Portals in Europe"@en ; + owl:versionInfo "2.1.1" ; + foaf:homepage ; + foaf:maker [ + foaf:mbox ; + foaf:name "DCAT-AP Working Group" ; + foaf:page , + ] . + +:AvailabilityRestriction + a sh:NodeShape ; + rdfs:comment "Availability restriction" ; + rdfs:label "Availability restriction" ; + sh:property [ + sh:hasValue ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path skos:inScheme + ] . + +:ContinentRestriction + a sh:NodeShape ; + rdfs:comment "Continent restriction" ; + rdfs:label "Continent restriction" ; + sh:property [ + sh:hasValue ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path skos:inScheme + ] . + +:CorporateBodyRestriction + a sh:NodeShape ; + rdfs:comment "Corporate Body Restriction" ; + rdfs:label "Corporate Body Restriction" ; + sh:property [ + sh:hasValue ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path skos:inScheme + ] . 
+ +:CountryRestriction + a sh:NodeShape ; + rdfs:comment "Country restriction" ; + rdfs:label "Country restriction" ; + sh:property [ + sh:hasValue ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path skos:inScheme + ] . + +:DataThemeRestriction + a sh:NodeShape ; + rdfs:comment "Data Theme Restriction" ; + rdfs:label "Data Theme Restriction" ; + sh:property [ + sh:hasValue ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path skos:inScheme + ] . + +:AccessRightRestriction + a sh:NodeShape ; + rdfs:comment "Access Rights Restriction" ; + rdfs:label "Data Theme Restriction" ; + sh:property [ + sh:hasValue ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path skos:inScheme + ] . + +:DatasetTypeRestriction + a sh:NodeShape ; + rdfs:comment "Dataset Type Restriction" ; + rdfs:label "Dataset Type Restriction" ; + sh:property [ + sh:hasValue ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path skos:inScheme + ] . + + +:FileTypeRestriction + a sh:NodeShape ; + rdfs:comment "File Type Restriction" ; + rdfs:label "File Type Restriction" ; + sh:property [ + sh:hasValue ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path skos:inScheme + ] . + +:FrequencyRestriction + a sh:NodeShape ; + rdfs:comment "Frequency Restriction" ; + rdfs:label "Frequency Restriction" ; + sh:property [ + sh:hasValue ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path skos:inScheme + ] . + +:GeoNamesRestrictionRegexURI + rdfs:comment "Geonames restriction - base itself on URI structure" ; + rdfs:label "Geonames restriction" ; + a sh:NodeShape ; + sh:pattern "^https://sws.geonames.org" . + + +:LanguageRestriction + a sh:NodeShape ; + rdfs:comment "Language Restriction" ; + rdfs:label "Language Restriction" ; + sh:property [ + sh:hasValue ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path skos:inScheme + ] . 
+ +:LicenceTypeRestriction + a sh:NodeShape ; + rdfs:comment "Licence type restriction" ; + rdfs:label "Licence type restriction" ; + sh:property [ + sh:hasValue ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path skos:inScheme + ] . + +:PlaceRestriction + a sh:NodeShape ; + rdfs:comment "Place restriction" ; + rdfs:label "Place restriction" ; + sh:property [ + sh:class skos:ConceptScheme ; + sh:hasValue ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path skos:inScheme + ] . + +:PublisherTypeRestriction + a sh:NodeShape ; + rdfs:comment "Publisher type restriction" ; + rdfs:label "Publisher type restriction" ; + sh:property [ + sh:class skos:ConceptScheme ; + sh:hasValue ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path skos:inScheme + ] . + +:IANARestrictionRegexURI + rdfs:comment "IANA restriction - base itself on URL structure" ; + rdfs:label "IANA restriction" ; + a sh:NodeShape ; + sh:pattern "^http.*://www.iana.org/assignments/media-types/" . + +:StatusRestriction + a sh:NodeShape ; + rdfs:comment "Status restriction" ; + rdfs:label "Status restriction" ; + sh:property [ + sh:class skos:ConceptScheme ; + sh:hasValue ; + sh:minCount 1 ; + sh:nodeKind sh:IRI ; + sh:path skos:inScheme + ] . + +:ChecksumAlgorithmRestriction + a sh:NodeShape ; + rdfs:comment "Checksum algorithm restriction" ; + rdfs:label "Checksum algorithm restriction" ; + sh:pattern "^https://spdx.org/rdf/terms" ; + sh:class . + +:Checksum_ShapeCV + a sh:NodeShape ; + sh:property [ + sh:node :ChecksumAlgorithmRestriction ; + sh:nodeKind sh:IRI ; + sh:path spdx:algorithm + ] ; + sh:targetClass spdx:Checksum. + +:LicenseDocument_ShapeCV + a sh:NodeShape ; + sh:property [ + sh:node :LicenceTypeRestriction ; + sh:nodeKind sh:IRI ; + sh:path dct:type + ] ; + sh:targetClass dct:LicenseDocument. 
+ + + +:Catalog_ShapeCV + a sh:NodeShape ; + sh:property [ + sh:node :LanguageRestriction ; + sh:nodeKind sh:IRI ; + sh:path dct:language ; + sh:description "A non EU managed concept is used to indicate a language. If no corresponding can be found inform the maintainer of the EU language NAL" ; + sh:severity sh:Violation + ], [ + sh:node :CorporateBodyRestriction ; + sh:node :Publisher_ShapeCV ; + sh:nodeKind sh:IRI ; + sh:path dct:publisher ; + sh:description "A non EU managed concept is used to indicate the publisher, check if a corresponding exists in the EU corporates bodies NAL" ; + sh:severity sh:Warning + ], [ + sh:node [ + a sh:NodeShape ; + sh:or (:CountryRestriction + :PlaceRestriction + :ContinentRestriction + :GeoNamesRestrictionRegexURI + ) + ] ; + sh:nodeKind sh:IRI ; + sh:path dct:spatial ; + sh:description "A non managed concept is used to indicate a spatial description, check if a corresponding exists" ; + sh:severity sh:Warning + ], [ + sh:hasValue ; + sh:nodeKind sh:IRI ; + sh:path dcat:themeTaxonomy ; + sh:description "Multiple themes can be used but at least should be present" ; + sh:severity sh:Warning + ] ; + sh:targetClass dcat:Catalog. + +:Dataset_ShapeCV + a sh:NodeShape ; + sh:property [ + sh:node :FrequencyRestriction ; + sh:nodeKind sh:IRI ; + sh:path dct:accrualPeriodicity ; + sh:description "A non EU managed concept is used to indicate the accrualPeriodicity frequency. If no corresponding can be found inform the maintainer of the EU frequency NAL" ; + sh:severity sh:Violation + ], [ + sh:node :LanguageRestriction ; + sh:nodeKind sh:IRI ; + sh:path dct:language ; + sh:description "A non EU managed concept is used to indicate a language. 
If no corresponding can be found inform the maintainer of the EU language NAL" ; + sh:severity sh:Violation + ], [ + sh:node :CorporateBodyRestriction ; + sh:node :Publisher_ShapeCV ; + sh:nodeKind sh:IRI ; + sh:path dct:publisher ; + sh:description "A non EU managed concept is used to indicate the publisher, check if a corresponding exists in the EU corporates bodies NAL" ; + sh:severity sh:Warning + ], [ + sh:node [ + a sh:NodeShape ; + sh:or (:CountryRestriction + :PlaceRestriction + :ContinentRestriction + :GeoNamesRestrictionRegexURI + ) + ] ; + sh:nodeKind sh:IRI ; + sh:path dct:spatial ; + sh:description "A non managed concept is used to indicate a spatial description, check if a corresponding exists" ; + sh:severity sh:Warning + ], [ + sh:node :DataThemeRestriction ; + sh:nodeKind sh:IRI ; + sh:path dcat:theme ; + sh:description "Multiple themes can be used but at least one concept of should be present" ; + sh:severity sh:Warning + ], [ + sh:node :DatasetTypeRestriction ; + sh:nodeKind sh:IRI ; + sh:path dct:type ; + sh:description "Multiple types can be used but it is recommended to also provide at least one concept of should be present" ; + sh:severity sh:Warning + ], [ + sh:node :AccessRightRestriction ; + sh:nodeKind sh:IRI ; + sh:path dct:accessRights ; + sh:description "A non EU managed concept is used to indicate the access right. If no corresponding can be found inform the maintainer of the EU language NAL" ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:Dataset. + +# ------------------------------------------------------------------------------------------------------------------ +# The constraints for dcat:mediaType, dcat:compressFormat, dcat:packageFormat which are limited to the IANA codelist +# cannot be expressed in SHACL unless a copy in RDF for the IANA codelist is being created. +# A less formal check is provided based upon the assumption that the IANIA codelist is hosted on a known URL domain. 
+# This check is sensitive to the publication strategy of IANA. +# ------------------------------------------------------------------------------------------------------------------ +:Distribution_ShapeCV + a sh:NodeShape ; + sh:property [ + sh:node :FileTypeRestriction ; + sh:nodeKind sh:IRI ; + sh:path dct:format ; + sh:description "A non EU managed concept is used to indicate the format of the distribution. If no corresponding can be found inform the maintainer of the fileformat NAL." ; + sh:severity sh:Violation + ], [ + sh:node :LanguageRestriction ; + sh:nodeKind sh:IRI ; + sh:path dct:language ; + sh:description "A non EU managed concept is used to indicate a language. If no corresponding can be found inform the maintainer of the EU language NAL" ; + sh:severity sh:Violation + ], [ + sh:node :StatusRestriction ; + sh:nodeKind sh:IRI ; + sh:path adms:status ; + sh:description "A non EU managed concept is used to indicate the status of the distribution. If no corresponding can be found inform the maintainer of the adms:status codelist." ; + sh:severity sh:Violation + ], [ + sh:node :AvailabilityRestriction ; + sh:nodeKind sh:IRI ; + sh:path dcatap:availability ; + sh:description "A non EU managed concept is used to indicate the availability of the distribution. If no corresponding can be found inform the maintainer of the DCAT-AP availability codelist." ; + sh:severity sh:Violation + ], [ + sh:node :IANARestrictionRegexURI; + sh:nodeKind sh:IRI ; + sh:path dcat:mediaType ; + sh:description "A mediaType expects a value from IANA. This check uses the URLs from IANA to perform the check as there is no IANA codelist downloadable." ; + sh:severity sh:Warning + ], [ + sh:node :IANARestrictionRegexURI; + sh:nodeKind sh:IRI ; + sh:path dcat:compressFormat ; + sh:description "A compressFormat expects a value from IANA. This check uses the URLs from IANA to perform the check as there is no IANA codelist downloadable." 
; + sh:severity sh:Warning + ], [ + sh:node :IANARestrictionRegexURI; + sh:nodeKind sh:IRI ; + sh:path dcat:packageFormat ; + sh:description "A packageFormat expects a value from IANA. This check uses the URLs from IANA to perform the check as there is no IANA codelist downloadable." ; + sh:severity sh:Warning + ] ; + sh:targetClass dcat:Distribution. + +:DataService_ShapeCV + a sh:NodeShape ; + sh:property [ + sh:node :AccessRightRestriction ; + sh:nodeKind sh:IRI ; + sh:path dct:accessRights ; + sh:description "A non EU managed concept is used to indicate the access right. If no corresponding can be found inform the maintainer of the EU language NAL" ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:Distribution. + + +:Publisher_ShapeCV + a sh:NodeShape ; + sh:property [ + sh:node :PublisherTypeRestriction ; + sh:nodeKind sh:IRI ; + sh:path dct:type ; + sh:description "A non EU managed concept is used to indicate the type of the publisher. If no corresponding can be found inform the maintainer of the adms:publishertype codelist." ; + sh:severity sh:Violation + ] . diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/mdr_imports.ttl b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/mdr_imports.ttl new file mode 100644 index 000000000000..a5363b31df83 --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/mdr_imports.ttl @@ -0,0 +1,29 @@ +@prefix owl: . +@prefix rdf: . +@prefix rdfs: . +@prefix xsd: . + +# +# This file provides the imports of the codelists recommended by the DCAT-AP application profile. +# http://publications.europa.eu/resource/authority/dataset-type (TODO) + + + + rdf:type owl:Ontology ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports ; + owl:imports +. 
+ +# import of the checksum vocabulary +# owl:imports ; diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/range.ttl b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/range.ttl new file mode 100644 index 000000000000..2ce766d8519b --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/range.ttl @@ -0,0 +1,449 @@ +@prefix rdf: . +@prefix : . +@prefix adms: . +@prefix cc: . +@prefix dc: . +@prefix dcat: . +@prefix dct: . +@prefix foaf: . +@prefix lcon: . +@prefix org: . +@prefix owl: . +@prefix odrl: . +@prefix prov: . +@prefix rdfs: . +@prefix schema: . +@prefix sh: . +@prefix skos: . +@prefix spdx: . +@prefix time: . +@prefix vcard: . +@prefix xsd: . +@prefix dcatap: . + + + dct:description "This file contains the class range constraints for all properties in DCAT-AP"@en. + + + + +#------------------------------------------------------------------------- +# The shapes in this file cover all class range constraints in DCAT-AP +# +# Depending on the exchange agreements these may be necessary to be part +# of the validation process. However they mostly figure in the semantical +# understanding of the ranges. +# +#------------------------------------------------------------------------- + +:Agent_Shape + a sh:NodeShape ; + rdfs:label "Agent"@en ; + sh:property [ + sh:class skos:Concept ; + sh:path dct:type ; + sh:severity sh:Violation + ] ; + sh:targetClass foaf:Agent . 
+ +:CatalogRecord_Shape + a sh:NodeShape ; + rdfs:label "Catalog Record"@en ; + sh:property [ + sh:node :DcatResource_Shape ; + sh:path foaf:primaryTopic ; + sh:severity sh:Violation + ], [ + sh:class dct:Standard ; + sh:path dct:conformsTo ; + sh:severity sh:Violation + ], [ + sh:class skos:Concept ; + sh:path adms:status ; + sh:severity sh:Violation + ], [ + sh:class dct:LinguisticSystem ; + sh:path dct:language ; + sh:severity sh:Violation + ], [ + sh:class dcat:CatalogRecord ; + sh:path dct:source ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:CatalogRecord . + +:Catalog_Shape + a sh:NodeShape ; + rdfs:label "Catalog"@en ; + sh:property [ + sh:class dct:LinguisticSystem ; + sh:path dct:language ; + sh:severity sh:Violation + ], [ + sh:path dcatap:applicableLegislation; + sh:class ; + sh:severity sh:Violation + ], [ + sh:class dct:LicenseDocument ; + sh:path dct:license ; + sh:severity sh:Violation + ], [ + sh:class dct:Location ; + sh:path dct:spatial ; + sh:severity sh:Violation + ], [ + sh:class dcat:Catalog ; + sh:path dct:hasPart ; + sh:severity sh:Violation + ], [ + sh:class dcat:Catalog ; + sh:path dct:isPartOf ; + sh:severity sh:Violation + ], [ + sh:class dct:RightsStatement ; + sh:path dct:rights ; + sh:severity sh:Violation + ], [ + sh:class dcat:CatalogRecord ; + sh:path dcat:record ; + sh:severity sh:Violation + ], [ + sh:class skos:ConceptScheme ; + sh:path dcat:themeTaxonomy ; + sh:severity sh:Violation + ], [ + sh:class dcat:DataService ; + sh:path dcat:service ; + sh:severity sh:Violation + ], [ + sh:class dcat:Catalog ; + sh:path dcat:catalog ; + sh:severity sh:Violation + ], [ + sh:class foaf:Agent ; + sh:path dct:creator ; + sh:severity sh:Violation + ], [ + sh:class dcat:Dataset ; + sh:path dcat:dataset ; + sh:severity sh:Violation + ], [ + sh:class foaf:Agent ; + sh:path dct:publisher ; + sh:severity sh:Violation + ], [ + sh:class foaf:Document ; + sh:path foaf:homepage ; + sh:severity sh:Violation + ] ; + sh:targetClass 
dcat:Catalog . + + +:DataService_Shape + a sh:NodeShape ; + rdfs:label "Data Service"@en ; + sh:property [ + sh:class dct:RightsStatement ; + sh:path dct:accessRights ; + sh:severity sh:Violation + ], [ + sh:path dcatap:applicableLegislation; + sh:class ; + sh:severity sh:Violation + ], [ + sh:class dct:Standard ; + sh:path dct:conformsTo ; + sh:severity sh:Violation + ], [ + sh:class vcard:Kind ; + sh:path dcat:contactPoint ; + sh:severity sh:Violation + ], [ + sh:class dct:MediaTypeOrExtent ; + sh:path dct:format ; + sh:severity sh:Violation + ], [ + sh:class foaf:Document ; + sh:path dcat:landingPage ; + sh:severity sh:Violation + ], [ + sh:class dct:LicenseDocument ; + sh:path dct:license ; + sh:severity sh:Violation + ], [ + sh:path dct:publisher ; + sh:class foaf:Agent; + sh:severity sh:Violation + ], [ + sh:class dcat:Dataset ; + sh:path dcat:servesDataset ; + sh:severity sh:Violation + ], [ + sh:class skos:Concept ; + sh:path dcat:theme ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:DataService . 
+ + +:Dataset_Shape + a sh:NodeShape ; + rdfs:label "Dataset"@en ; + sh:property [ + sh:class dct:RightsStatement ; + sh:path dct:accessRights ; + sh:severity sh:Violation + ], [ + sh:path dcatap:applicableLegislation; + sh:class ; + sh:severity sh:Violation + ], [ + sh:class dct:Standard ; + sh:path dct:conformsTo ; + sh:severity sh:Violation + ], [ + sh:class vcard:Kind ; + sh:path dcat:contactPoint ; + sh:severity sh:Violation + ], [ + sh:class foaf:Agent ; + sh:path dct:creator ; + sh:severity sh:Violation + ], [ + sh:class dcat:Distribution ; + sh:path dcat:distribution ; + sh:severity sh:Violation + ], [ + sh:class foaf:Document ; + sh:path foaf:page ; + sh:severity sh:Violation + ], [ + sh:path dct:accrualPeriodicity; + sh:class dct:Frequency; + sh:severity sh:Violation + ], [ + sh:class dct:Location ; + sh:path dct:spatial ; + sh:severity sh:Violation + ], [ + sh:class dcat:Dataset ; + sh:path dct:hasVersion ; + sh:severity sh:Violation + ], [ + sh:class dcat:DatasetSeries ; + sh:path dcat:inSeries ; + sh:severity sh:Violation + ], [ + sh:class foaf:Document ; + sh:path dcat:landingPage ; + sh:severity sh:Violation + ], [ + sh:class dct:LinguisticSystem ; + sh:path dct:language ; + sh:severity sh:Violation + ], [ + sh:class adms:Identifier ; + sh:path adms:identifier ; + sh:severity sh:Violation + ], [ + sh:class dct:ProvenanceStatement ; + sh:path dct:provenance ; + sh:severity sh:Violation + ], [ + sh:class foaf:Agent ; + sh:path dct:publisher ; + sh:severity sh:Violation + ], [ + sh:class prov:Attribution ; + sh:path prov:qualifiedAttribution ; + sh:severity sh:Violation + ], [ + sh:class dcat:Relationship ; + sh:path dcat:qualifiedRelation ; + sh:severity sh:Violation + ], [ + sh:class dcat:Distribution ; + sh:path adms:sample ; + sh:severity sh:Violation + ], [ + sh:class dcat:Dataset ; + sh:path dct:source ; + sh:severity sh:Violation + ], [ + sh:class dct:PeriodOfTime ; + sh:path dct:temporal ; + sh:severity sh:Violation + ], [ + sh:class 
skos:Concept ; + sh:path dcat:theme ; + sh:severity sh:Violation + ], [ + sh:class skos:Concept ; + sh:path dct:type ; + sh:severity sh:Violation + ], [ + sh:class prov:Activity ; + sh:path prov:wasGeneratedBy ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:Dataset . + + +:DcatResource_Shape + a sh:NodeShape ; + rdfs:comment "the union of Catalog, Dataset, DataService or Dataset Series" ; + rdfs:label "dcat:Resource" ; + sh:message "The node is either a Catalog, Dataset, DataService or a Dataset Series" ; + sh:or ([ + sh:class dcat:Catalog + ] + [ + sh:class dcat:Dataset + ] + [ + sh:class dcat:DataService + ] + [ + sh:class dcat:DatasetSeries + ] + ) . + +:Distribution_Shape + a sh:NodeShape ; + rdfs:label "Distribution"@en ; + sh:property [ + sh:class dcat:DataService ; + sh:path dcat:accessService ; + sh:severity sh:Violation + ], [ + sh:path dcatap:applicableLegislation; + sh:class ; + sh:severity sh:Violation + ], [ + sh:class skos:Concept ; + sh:path dcatap:availability ; + sh:severity sh:Violation + ], [ + sh:class spdx:Checksum ; + sh:path spdx:checksum ; + sh:severity sh:Violation + ], [ + sh:class dct:MediaType ; + sh:path dcat:compressFormat ; + sh:severity sh:Violation + ], [ + sh:class foaf:Document ; + sh:path foaf:page ; + sh:severity sh:Violation + ], [ + sh:class dct:MediaTypeOrExtent ; + sh:path dct:format ; + sh:severity sh:Violation + ], [ + sh:class odrl:Policy ; + sh:path odrl:hasPolicy ; + sh:severity sh:Violation + ], [ + sh:class dct:LinguisticSystem ; + sh:path dct:language ; + sh:severity sh:Violation + ], [ + sh:class dct:LicenseDocument ; + sh:path dct:license ; + sh:severity sh:Violation + ], [ + sh:class dct:Standard ; + sh:path dct:conformsTo ; + sh:severity sh:Violation + ], [ + sh:class dct:MediaType ; + sh:path dcat:mediaType ; + sh:severity sh:Violation + ], [ + sh:class dct:MediaType ; + sh:path dcat:packageFormat ; + sh:severity sh:Violation + ], [ + sh:class dct:RightsStatement ; + sh:path dct:rights ; + sh:severity 
sh:Violation + ], [ + sh:class skos:Concept ; + sh:path adms:status ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:Distribution . + + +:LicenceDocument_Shape + a sh:NodeShape ; + rdfs:label "Licence Document"@en ; + sh:property [ + sh:class skos:Concept ; + sh:path dct:type ; + sh:severity sh:Violation + ] ; + sh:targetClass dct:LicenseDocument . + + +:PeriodOfTime_Shape + a sh:NodeShape ; + rdfs:label "PeriodOfTime"@en ; + sh:property [ + sh:class time:Instant ; + sh:path time:hasBeginning ; + sh:severity sh:Violation + ], [ + sh:class time:Instant ; + sh:path time:hasEnd ; + sh:severity sh:Violation + ] ; + sh:targetClass dct:PeriodOfTime . + +:Relationship_Shape + a sh:NodeShape ; + rdfs:label "Relationship"@en ; + sh:property [ + sh:node :DcatResource_Shape ; + sh:path dct:relation ; + sh:severity sh:Violation + ], [ + sh:class dcat:Role ; + sh:path dcat:hadRole ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:Relationship . + +:DatasetSeries_Shape + a sh:NodeShape ; + rdfs:label "Dataset Series"@en ; + sh:property [ + sh:class dcat:Dataset ; + sh:path [ sh:inversePath dcat:inSeries; ]; + sh:severity sh:Warning + ], [ + sh:path dcatap:applicableLegislation; + sh:class ; + sh:severity sh:Violation + ], [ + sh:path dcat:contactPoint; + sh:class vcard:Kind; + sh:severity sh:Violation + ], [ + sh:path dct:accrualPeriodicity; + sh:class dct:Frequency; + sh:severity sh:Violation + ], [ + sh:path dct:spatial; + sh:class dct:Location; + sh:severity sh:Violation + ], [ + sh:path dct:publisher ; + sh:class foaf:Agent; + sh:severity sh:Violation + ], [ + sh:path dct:temporal; + sh:class dct:PeriodOfTime; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:DatasetSeries . 
\ No newline at end of file diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/shapes.ttl b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/shapes.ttl new file mode 100644 index 000000000000..d179365472fb --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/shapes.ttl @@ -0,0 +1,746 @@ +@prefix rdf: . +@prefix : . +@prefix adms: . +@prefix cc: . +@prefix dcat: . +@prefix dct: . +@prefix foaf: . +@prefix lcon: . +@prefix org: . +@prefix owl: . +@prefix odrl: . +@prefix prov: . +@prefix rdfs: . +@prefix schema: . +@prefix sh: . +@prefix skos: . +@prefix spdx: . +@prefix time: . +@prefix vcard: . +@prefix xsd: . +@prefix dcatap: . + + + dct:format ; + dct:conformsTo ; + dct:description "This files specifies the core constraints on properties and classes expressed by DCAT-AP in SHACL."@en. + + + +#------------------------------------------------------------------------- +# The shapes in this file cover all classes in DCAT-AP 3.0.0 +# It covers all constraints that must be satisfied except those verifying +# the class ranges. This can be found in a separate file. +# +#------------------------------------------------------------------------- + +:Agent_Shape + a sh:NodeShape ; + rdfs:label "Agent"@en ; + sh:property [ + sh:minCount 1 ; + sh:nodeKind sh:Literal ; + sh:path foaf:name ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:path dct:type ; + sh:severity sh:Violation + ] ; + sh:targetClass foaf:Agent . 
+ +:CatalogRecord_Shape + a sh:NodeShape ; + rdfs:label "Catalog Record"@en ; + sh:property [ + sh:maxCount 1 ; + sh:minCount 1 ; + sh:node :DcatResource_Shape ; + sh:path foaf:primaryTopic ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:minCount 1 ; + sh:path dct:modified ; + sh:severity sh:Violation ; + sh:shape :DateOrDateTimeDataType_Shape + ], [ + sh:maxCount 1 ; + sh:path dct:conformsTo ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:node :DateOrDateTimeDataType_Shape ; + sh:path dct:issued ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:path adms:status ; + sh:severity sh:Violation + ], [ + sh:path dct:language ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:path dct:source ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:Literal ; + sh:path dct:title ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:Literal ; + sh:path dct:description ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:CatalogRecord . + +:Catalog_Shape + a sh:NodeShape ; + rdfs:label "Catalog"@en ; + sh:property [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:language ; + sh:severity sh:Violation + ], [ + sh:path dcatap:applicableLegislation; + sh:nodeKind sh:IRI; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:license ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:node :DateOrDateTimeDataType_Shape ; + sh:path dct:issued ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:spatial ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:hasPart ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:node :DateOrDateTimeDataType_Shape ; + sh:path dct:modified ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:rights ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:record ; + sh:severity sh:Violation + ], [ + sh:nodeKind 
sh:BlankNodeOrIRI ; + sh:path dcat:themeTaxonomy ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:service ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:catalog ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:maxCount 1 ; + sh:path dct:creator ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:dataset ; + sh:severity sh:Violation + ], [ + sh:minCount 1 ; + sh:nodeKind sh:Literal ; + sh:path dct:description ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:minCount 1 ; + sh:path dct:publisher ; + sh:severity sh:Violation + ], [ + sh:minCount 1 ; + sh:nodeKind sh:Literal ; + sh:path dct:title ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path foaf:homepage ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:Catalog . + +:CategoryScheme_Shape + a sh:NodeShape ; + rdfs:label "Category Scheme"@en ; + sh:property [ + sh:minCount 1 ; + sh:nodeKind sh:Literal ; + sh:path dct:title ; + sh:severity sh:Violation + ] ; + sh:targetClass skos:ConceptScheme . + +:Category_Shape + a sh:NodeShape ; + rdfs:label "Category"@en ; + sh:property [ + sh:minCount 1 ; + sh:nodeKind sh:Literal ; + sh:path skos:prefLabel ; + sh:severity sh:Violation + ] ; + sh:targetClass skos:Concept . + +:Checksum_Shape + a sh:NodeShape ; + rdfs:label "Checksum"@en ; + sh:property [ + sh:maxCount 1 ; + sh:minCount 1 ; + sh:path spdx:algorithm ; + sh:severity sh:Violation + ], [ + sh:datatype xsd:hexBinary ; + sh:maxCount 1 ; + sh:minCount 1 ; + sh:path spdx:checksumValue ; + sh:severity sh:Violation + ] ; + sh:targetClass spdx:Checksum . 
+ +:DataService_Shape + a sh:NodeShape ; + rdfs:label "Data Service"@en ; + sh:property [ + sh:maxCount 1 ; + sh:path dct:accessRights ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:severity sh:Violation + ], [ + sh:path dcatap:applicableLegislation; + sh:nodeKind sh:IRI; + sh:severity sh:Violation + ], [ + sh:path dct:conformsTo ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:contactPoint ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:Literal ; + sh:path dct:description ; + sh:severity sh:Violation + ], [ + sh:minCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:endpointURL ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:endpointDescription ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:format ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:Literal ; + sh:path dcat:keyword ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:landingPage ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:maxCount 1 ; + sh:path dct:license ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:maxCount 1 ; + sh:path dct:publisher ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:servesDataset ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:IRI ; + sh:path dcat:theme ; + sh:severity sh:Violation + ], [ + sh:minCount 1 ; + sh:nodeKind sh:Literal ; + sh:path dct:title ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:DataService . 
+ +:Dataset_Shape + a sh:NodeShape ; + rdfs:label "Dataset"@en ; + sh:property [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:maxCount 1 ; + sh:path dct:accessRights ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:IRI ; + sh:path dcatap:applicableLegislation; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:conformsTo ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:contactPoint ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:creator ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:distribution ; + sh:severity sh:Violation + ], [ + sh:minCount 1 ; + sh:nodeKind sh:Literal ; + sh:path dct:description ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path foaf:page ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:accrualPeriodicity; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:spatial ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:hasVersion ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:Literal ; + sh:path dct:identifier ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:inSeries ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:isReferencedBy ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:Literal ; + sh:path dcat:keyword ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:landingPage ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:language ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:path dct:modified ; + sh:severity sh:Violation ; + sh:shape :DateOrDateTimeDataType_Shape + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path adms:identifier ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path 
dct:provenance ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:maxCount 1 ; + sh:path dct:publisher ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path prov:qualifiedAttribution ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:qualifiedRelation ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:relation ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:path dct:issued ; + sh:severity sh:Violation ; + sh:shape :DateOrDateTimeDataType_Shape + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path adms:sample ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:source ; + sh:severity sh:Violation + ], [ + sh:datatype xsd:decimal ; + sh:maxCount 1 ; + sh:path dcat:spatialResolutionInMeters ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:temporal ; + sh:severity sh:Violation + ], [ + sh:datatype xsd:duration ; + sh:maxCount 1 ; + sh:path dcat:temporalResolution ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:IRI ; + sh:path dcat:theme ; + sh:severity sh:Violation + ], [ + sh:minCount 1 ; + sh:nodeKind sh:Literal ; + sh:path dct:title ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:type ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:Literal ; + sh:path dcat:version ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:Literal ; + sh:path adms:versionNotes ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path prov:wasGeneratedBy ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:Dataset . 
+ +:DateOrDateTimeDataType_Shape + a sh:NodeShape ; + rdfs:comment "Date time date disjunction shape checks that a datatype property receives a temporal value: date, dateTime, gYear or gYearMonth literal" ; + rdfs:label "Date time date disjunction" ; + sh:message "The values must be data typed as either xsd:date, xsd:dateTime, xsd:gYear or xsd:gYearMonth" ; + sh:or ([ + sh:datatype xsd:date + ] + [ + sh:datatype xsd:dateTime + ] + [ + sh:datatype xsd:gYear + ] + [ + sh:datatype xsd:gYearMonth + ] + ) . + +:DcatResource_Shape + a sh:NodeShape ; + rdfs:comment "the union of Catalog, Dataset and DataService" ; + rdfs:label "dcat:Resource" ; + sh:message "The node is either a Catalog, Dataset or a DataService" ; + sh:or ([ + sh:class dcat:Catalog + ] + [ + sh:class dcat:Dataset + ] + [ + sh:class dcat:DataService + ] + [ + sh:class dcat:DatasetSeries + ] + ) . + +:Distribution_Shape + a sh:NodeShape ; + rdfs:label "Distribution"@en ; + sh:property [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:accessService ; + sh:severity sh:Violation + ], [ + sh:minCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI; + sh:path dcat:accessURL ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:IRI ; + sh:path dcatap:applicableLegislation; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcatap:availability ; + sh:severity sh:Violation + ], [ + sh:datatype xsd:decimal ; + sh:maxCount 1 ; + sh:path dcat:byteSize ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path spdx:checksum ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:compressFormat ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:Literal ; + sh:path dct:description ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path foaf:page ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI; + sh:path dcat:downloadURL ; + sh:severity sh:Violation + ], [ + 
sh:maxCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:format ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path odrl:hasPolicy ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:language ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:license ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:conformsTo ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:mediaType ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:node :DateOrDateTimeDataType_Shape ; + sh:path dct:modified ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dcat:packageFormat ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:node :DateOrDateTimeDataType_Shape ; + sh:path dct:issued ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path dct:rights ; + sh:severity sh:Violation + ], [ + sh:datatype xsd:decimal ; + sh:maxCount 1 ; + sh:path dcat:spatialResolutionInMeters ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI ; + sh:path adms:status ; + sh:severity sh:Violation + ], [ + sh:datatype xsd:duration ; + sh:maxCount 1 ; + sh:path dcat:temporalResolution ; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:Literal ; + sh:path dct:title ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:Distribution . + +:Identifier_Shape + a sh:NodeShape ; + rdfs:label "Identifier"@en ; + sh:property [ + sh:maxCount 1 ; + sh:path skos:notation ; + sh:severity sh:Violation + ] ; + sh:targetClass adms:Identifier . + +:LicenceDocument_Shape + a sh:NodeShape ; + rdfs:label "Licence Document"@en ; + sh:property [ + sh:path dct:type ; + sh:severity sh:Violation + ] ; + sh:targetClass dct:LicenseDocument . 
+ +:Location_Shape + a sh:NodeShape ; + rdfs:label "Location"@en ; + sh:property [ + sh:maxCount 1 ; + sh:nodeKind sh:Literal ; + sh:path dcat:bbox ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:Literal ; + sh:path dcat:centroid ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:Literal ; + sh:path lcon:geometry ; + sh:severity sh:Violation + ] ; + sh:targetClass dct:Location . + +:PeriodOfTime_Shape + a sh:NodeShape ; + rdfs:label "PeriodOfTime"@en ; + sh:property [ + sh:maxCount 1 ; + sh:path dcat:endDate ; + sh:severity sh:Violation ; + sh:shape :DateOrDateTimeDataType_Shape + ], [ + sh:maxCount 1 ; + sh:path time:hasBeginning ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:path time:hasEnd ; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:path dcat:startDate ; + sh:severity sh:Violation ; + sh:shape :DateOrDateTimeDataType_Shape + ] ; + sh:targetClass dct:PeriodOfTime . + +:Relationship_Shape + a sh:NodeShape ; + rdfs:label "Relationship"@en ; + sh:property [ + sh:minCount 1 ; + sh:path dct:relation ; + sh:severity sh:Violation + ], [ + sh:minCount 1 ; + sh:path dcat:hadRole ; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:Relationship . 
+ +:DatasetSeries_Shape + a sh:NodeShape ; + rdfs:label "Dataset Series"@en ; + sh:property [ + sh:minCount 1; + sh:nodeKind sh:BlankNodeOrIRI; + sh:path [ sh:inversePath dcat:inSeries; ]; + sh:severity sh:Warning + ], [ + sh:nodeKind sh:IRI; + sh:path dcatap:applicableLegislation; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI; + sh:path dcat:contactPoint; + sh:severity sh:Violation + ], [ + sh:minCount 1; + sh:nodeKind sh:Literal; + sh:path dct:description; + sh:severity sh:Violation + ], [ + sh:maxCount 1; + sh:nodeKind sh:BlankNodeOrIRI; + sh:path dct:accrualPeriodicity; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI; + sh:path dct:spatial; + sh:severity sh:Violation + ], [ + sh:node :DateOrDateTimeDataType_Shape ; + sh:maxCount 1; + sh:nodeKind sh:Literal; + sh:path dct:modified; + sh:severity sh:Violation + ], [ + sh:maxCount 1 ; + sh:nodeKind sh:BlankNodeOrIRI; + sh:path dct:publisher ; + sh:severity sh:Violation + ], [ + sh:node :DateOrDateTimeDataType_Shape ; + sh:maxCount 1; + sh:nodeKind sh:Literal; + sh:path dct:issued; + sh:severity sh:Violation + ], [ + sh:nodeKind sh:BlankNodeOrIRI; + sh:path dct:temporal; + sh:severity sh:Violation + ], [ + sh:minCount 1; + sh:nodeKind sh:Literal; + sh:path dct:title; + sh:severity sh:Violation + ] ; + sh:targetClass dcat:DatasetSeries . \ No newline at end of file diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/shapes_recommended.ttl b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/shapes_recommended.ttl new file mode 100644 index 000000000000..1764aee15460 --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/eu-dcat-ap-3.0.0/shapes_recommended.ttl @@ -0,0 +1,336 @@ +@prefix rdf: . +@prefix : . +@prefix adms: . +@prefix cc: . +@prefix dc: . +@prefix dcat: . +@prefix dct: . +@prefix foaf: . +@prefix lcon: . +@prefix org: . +@prefix owl: . 
+@prefix odrl: . +@prefix prov: . +@prefix rdfs: . +@prefix schema: . +@prefix sh: . +@prefix skos: . +@prefix spdx: . +@prefix time: . +@prefix vcard: . +@prefix xsd: . +@prefix dcatap: . + + + dcat:accessURL ; + dcat:downloadURL ; + dcatap:availability ; + dct:format ; + dct:conformsTo ; + dct:creator [ + rdfs:seeAlso ; + org:memberOf ; + foaf:homepage ; + foaf:name "Bert Van Nuffelen" + ], [ + rdfs:seeAlso ; + org:memberOf ; + foaf:homepage ; + foaf:name "Natasa Sofou" + ], [ + rdfs:seeAlso ; + org:memberOf ; + foaf:homepage ; + foaf:name "Eugeniu Costetchi" + ], [ + rdfs:seeAlso ; + org:memberOf ; + foaf:homepage ; + foaf:name "Makx Dekkers" + ], [ + rdfs:seeAlso ; + org:memberOf ; + foaf:homepage ; + foaf:name "Nikolaos Loutas" + ], [ + rdfs:seeAlso ; + org:memberOf ; + foaf:homepage ; + foaf:name "Vassilios Peristeras" + ] ; + dct:license ; + cc:attributionURL ; + dct:modified "2021-12-01"^^xsd:date ; + dct:publisher ; + dct:relation ; + dct:description "This document specifies the constraints on properties and classes expressed by DCAT-AP in SHACL."@en ; + dct:title "The constraints of DCAT Application Profile for Data Portals in Europe"@en ; + owl:versionInfo "2.1.1" ; + foaf:homepage ; + foaf:maker [ + foaf:mbox ; + foaf:name "DCAT-AP Working Group" ; + foaf:page , + ] . + + + +#------------------------------------------------------------------------- +# The shapes in this file cover all recommendations in DCAT-AP 2.1.1. +# +# +#------------------------------------------------------------------------- + +:Agent_Shape + a sh:NodeShape ; + rdfs:label "Agent"@en ; + sh:property [ + sh:minCount 1 ; + sh:path dct:type ; + sh:severity sh:Warning + ] ; + sh:targetClass foaf:Agent . 
+ +:CatalogRecord_Shape + a sh:NodeShape ; + rdfs:label "Catalog Record"@en ; + sh:property [ + sh:minCount 1 ; + sh:path dct:conformsTo ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dct:issued ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path adms:status ; + sh:severity sh:Warning + ] ; + sh:targetClass dcat:CatalogRecord . + + +:Catalog_Shape + a sh:NodeShape ; + rdfs:label "Catalog"@en ; + sh:property [ + sh:minCount 1 ; + sh:path dct:language ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dct:issued ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dct:license; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dct:spatial ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dct:modified ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dcat:themeTaxonomy ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path foaf:homepage ; + sh:severity sh:Warning + ] ; + sh:targetClass dcat:Catalog . + +:Catalog_Shape2 + a sh:NodeShape ; + rdfs:label "Catalog"@en ; + sh:or ( + [ + sh:path dcat:dataset ; + sh:minCount 1 ; + ] + [ + sh:path dcat:service ; + sh:minCount 1 ; + ] + ) ; + sh:severity sh:Warning; + sh:targetClass dcat:Catalog . + +:DataService_Shape + a sh:NodeShape ; + rdfs:label "Data Service"@en ; + sh:property [ + sh:minCount 1 ; + sh:path dcat:servesDataset ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dcat:endpointDescription ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dcat:contactPoint ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dct:publisher ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dcat:theme ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dct:conformsTo ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dcat:keyword ; + sh:severity sh:Warning + ] ; + sh:targetClass dcat:DataService . 
+ +:Dataset_Shape + a sh:NodeShape ; + rdfs:label "Dataset"@en ; + sh:property [ + sh:minCount 1 ; + sh:path dcat:contactPoint ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dcat:distribution ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dcat:keyword ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dct:publisher ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dct:spatial ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dct:temporal ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dcat:theme ; + sh:severity sh:Warning + ] ; + sh:targetClass dcat:Dataset . + +:DatasetSeries_Shape + a sh:NodeShape ; + rdfs:label "Dataset"@en ; + sh:property [ + sh:minCount 1 ; + sh:path dcat:contactPoint ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dct:publisher ; + sh:severity sh:Warning + ] ; + sh:targetClass dcat:DatasetSeries . + +:DateOrDateTimeDataType_Shape + a sh:NodeShape ; + rdfs:comment "Date time date disjunction shape checks that a datatype property receives a date or a dateTime literal" ; + rdfs:label "Date time date disjunction" ; + sh:message "The values must be data typed as either xsd:date or xsd:dateTime" ; + sh:or ([ + sh:datatype xsd:date + ] + [ + sh:datatype xsd:dateTime + ] + ) . + +:DcatResource_Shape + a sh:NodeShape ; + rdfs:comment "the union of Catalog, Dataset and DataService" ; + rdfs:label "dcat:Resource" ; + sh:message "The node is either a Catalog, Dataset or a DataService" ; + sh:or ([ + sh:class dcat:Catalog + ] + [ + sh:class dcat:Dataset + ] + [ + sh:class dcat:DataService + ] + [ + sh:class dcat:DatasetSeries + ] + ) . 
+ +:Distribution_Shape + a sh:NodeShape ; + rdfs:label "Distribution"@en ; + sh:property [ + sh:minCount 1 ; + sh:path dct:description ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dcatap:availability ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dct:format ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dct:license ; + sh:severity sh:Warning + ] ; + sh:targetClass dcat:Distribution . + +:LicenceDocument_Shape + a sh:NodeShape ; + rdfs:label "Licence Document"@en ; + sh:property [ + sh:minCount 1 ; + sh:path dct:type ; + sh:severity sh:Warning + ] ; + sh:targetClass dct:LicenseDocument . + +:Location_Shape + a sh:NodeShape ; + rdfs:label "Location"@en ; + sh:property [ + sh:minCount 1 ; + sh:path dcat:bbox ; + sh:severity sh:Warning + ], [ + sh:minCount 1 ; + sh:path dcat:centroid ; + sh:severity sh:Warning + ] ; + sh:targetClass dct:Location . + +:PeriodOfTime_Shape + a sh:NodeShape ; + rdfs:label "PeriodOfTime"@en ; + sh:property [ + sh:minCount 1 ; + sh:path dcat:endDate ; + sh:severity sh:Warning ; + ], [ + sh:minCount 1 ; + sh:path dcat:startDate ; + sh:severity sh:Warning ; + ] ; + sh:targetClass dct:PeriodOfTime . + diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/geodcat-ap-3.0.0-SHACL.ttl b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/geodcat-ap-3.0.0-SHACL.ttl new file mode 100644 index 000000000000..ac5d703711d4 --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/geodcat-ap-3.0.0-SHACL.ttl @@ -0,0 +1,4744 @@ +@prefix cnt: . +@prefix dc: . +@prefix dcat: . +@prefix foaf: . +@prefix org: . +@prefix owl: . +@prefix prov: . +@prefix rdf: . +@prefix rdfs: . +@prefix shacl: . +@prefix skos: . +@prefix vcard: . +@prefix xsd: . 
+ + rdfs:member , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass rdfs:Literal . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + ; + shacl:targetClass . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Identifier.notation"; + shacl:description "A string that is an identifier in the context of the identifier scheme referenced by its datatype."@en; + shacl:name "notation"@en; + shacl:nodeKind shacl:Literal; + shacl:path skos:notation . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Identifier.notation"; + shacl:description "A string that is an identifier in the context of the identifier scheme referenced by its datatype."@en; + shacl:minCount 1; + shacl:name "notation"@en; + shacl:path skos:notation . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Identifier.notation"; + shacl:description "A string that is an identifier in the context of the identifier scheme referenced by its datatype."@en; + shacl:maxCount 1; + shacl:name "notation"@en; + shacl:path skos:notation . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + ; + shacl:targetClass dcat:CatalogRecord . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.modificationdate"; + shacl:description "The most recent date on which the Catalogue entry was changed or modified."@en; + shacl:minCount 1; + shacl:name "modification date"@en; + shacl:path dc:modified . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.principalinvestigator"; + shacl:class foaf:Agent; + shacl:description "Key party responsible for gathering information and conducting research [[ISO-19115]]."@en; + shacl:name "principal investigator"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.changetype"; + shacl:description "The status of the catalogue record in the context of editorial flow of the dataset and data service descriptions."@en; + shacl:maxCount 1; + shacl:name "change type"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.custodian"; + shacl:description "Party that accepts accountability and responsibility for the data and ensures appropriate care and maintenance of the resource [[ISO-19115]]."@en; + shacl:name "custodian"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.processor"; + shacl:class foaf:Agent; + shacl:description "Party who has processed the data in a manner such that the resource has been modified [[ISO-19115]]."@en; + shacl:name "processor"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.language"; + shacl:class dc:LinguisticSystem; + shacl:description "A language used in the textual metadata describing titles, descriptions, etc. of the Catalogued Resource."@en; + shacl:name "language"@en; + shacl:path dc:language . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.resourceprovider"; + shacl:description "Party that supplies the resource [[ISO-19115]]."@en; + shacl:name "resource provider"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.applicationprofile"; + shacl:class dc:Standard; + shacl:description "An Application Profile that the Catalogued Resource's metadata conforms to."@en; + shacl:name "application profile"@en; + shacl:path dc:conformsTo . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.principalinvestigator"; + shacl:description "Key party responsible for gathering information and conducting research [[ISO-19115]]."@en; + shacl:name "principal investigator"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.rightsholder"; + shacl:class foaf:Agent; + shacl:description "An Agent (organisation) holding rights on the Catalogue."@en; + shacl:name "rights holder"@en; + shacl:path dc:rightsHolder . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.primarytopic"; + shacl:description "A link to the Dataset, Data service or Catalog described in the record."@en; + shacl:name "primary topic"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path foaf:primaryTopic . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.sourcemetadata"; + shacl:description "The original metadata that was used in creating metadata for the Dataset."@en; + shacl:maxCount 1; + shacl:name "source metadata"@en; + shacl:path dc:source . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.originator"; + shacl:class foaf:Agent; + shacl:description "Party who created the resource [[ISO-19115]]."@en; + shacl:name "originator"@en; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.changetype"; + shacl:class skos:Concept; + shacl:description "The status of the catalogue record in the context of editorial flow of the dataset and data service descriptions."@en; + shacl:name "change type"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.listingdate"; + shacl:description "The date on which the description of the Resource was included in the Catalogue."@en; + shacl:maxCount 1; + shacl:name "listing date"@en; + shacl:path dc:issued . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.rightsholder"; + shacl:description "An Agent (organisation) holding rights on the Catalogue."@en; + shacl:name "rights holder"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:rightsHolder . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.primarytopic"; + shacl:description "A link to the Dataset, Data service or Catalog described in the record."@en; + shacl:minCount 1; + shacl:name "primary topic"@en; + shacl:path foaf:primaryTopic . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.creationdate"; + shacl:description "The date on which the Catalogue Record has been first created."@en; + shacl:maxCount 1; + shacl:name "creation date"@en; + shacl:path dc:created . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.publisher"; + shacl:description "An entity (organisation) responsible for making the Data Service available."@en; + shacl:maxCount 1; + shacl:name "publisher"@en; + shacl:path dc:publisher . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.modificationdate"; + shacl:description "The most recent date on which the Catalogue entry was changed or modified."@en; + shacl:name "modification date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:modified . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.processor"; + shacl:description "Party who has processed the data in a manner such that the resource has been modified [[ISO-19115]]."@en; + shacl:name "processor"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.description"; + shacl:description "A free-text account of the record. This property can be repeated for parallel language versions of the description."@en; + shacl:name "description"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:description . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.custodian"; + shacl:class foaf:Agent; + shacl:description "Party that accepts accountability and responsibility for the data and ensures appropriate care and maintenance of the resource [[ISO-19115]]."@en; + shacl:name "custodian"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.contactpoint"; + shacl:class vcard:Kind; + shacl:description "Contact information that can be used for sending comments about the Catalogue Record."@en; + shacl:name "contact point"@en; + shacl:path dcat:contactPoint . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.creator"; + shacl:description "The Agent primarily responsible for producing the Catalogue Record."@en; + shacl:name "creator"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:creator . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.qualifiedattribution"; + shacl:class prov:Attribution; + shacl:description "Link to an Agent having some form of responsibility for the Catalogue Record."@en; + shacl:name "qualified attribution"@en; + shacl:path prov:qualifiedAttribution . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.changetype"; + shacl:description "The status of the catalogue record in the context of editorial flow of the dataset and data service descriptions."@en; + shacl:name "change type"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.resourceprovider"; + shacl:class foaf:Agent; + shacl:description "Party that supplies the resource [[ISO-19115]]."@en; + shacl:name "resource provider"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.publisher"; + shacl:description "An entity (organisation) responsible for making the Data Service available."@en; + shacl:name "publisher"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:publisher . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.sourcemetadata"; + shacl:class dcat:CatalogRecord; + shacl:description "The original metadata that was used in creating metadata for the Dataset."@en; + shacl:name "source metadata"@en; + shacl:path dc:source . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.applicationprofile"; + shacl:description "An Application Profile that the Catalogued Resource's metadata conforms to."@en; + shacl:name "application profile"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:conformsTo . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.contactpoint"; + shacl:description "Contact information that can be used for sending comments about the Catalogue Record."@en; + shacl:name "contact point"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:contactPoint . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.modificationdate"; + shacl:description "The most recent date on which the Catalogue entry was changed or modified."@en; + shacl:maxCount 1; + shacl:name "modification date"@en; + shacl:path dc:modified . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.creationdate"; + shacl:description "The date on which the Catalogue Record has been first created."@en; + shacl:name "creation date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:created . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.identifier"; + shacl:description "The main identifier for the Catalogue Record, e.g., the URI or other unique identifier in the context of the Catalogue."@en; + shacl:name "identifier"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:identifier . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.publisher"; + shacl:class foaf:Agent; + shacl:description "An entity (organisation) responsible for making the Data Service available."@en; + shacl:name "publisher"@en; + shacl:path dc:publisher . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.user"; + shacl:description "Party who uses the resource [[ISO-19115]]."@en; + shacl:name "user"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.qualifiedattribution"; + shacl:description "Link to an Agent having some form of responsibility for the Catalogue Record."@en; + shacl:name "qualified attribution"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path prov:qualifiedAttribution . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.creator"; + shacl:class foaf:Agent; + shacl:description "The Agent primarily responsible for producing the Catalogue Record."@en; + shacl:name "creator"@en; + shacl:path dc:creator . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.distributor"; + shacl:class foaf:Agent; + shacl:description "Party who distributes the resource [[ISO-19115]]."@en; + shacl:name "distributor"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.primarytopic"; + shacl:class dcat:Resource; + shacl:description "A link to the Dataset, Data service or Catalog described in the record."@en; + shacl:name "primary topic"@en; + shacl:path foaf:primaryTopic . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.user"; + shacl:class foaf:Agent; + shacl:description "Party who uses the resource [[ISO-19115]]."@en; + shacl:name "user"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.sourcemetadata"; + shacl:description "The original metadata that was used in creating metadata for the Dataset."@en; + shacl:name "source metadata"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:source . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.listingdate"; + shacl:description "The date on which the description of the Resource was included in the Catalogue."@en; + shacl:name "listing date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:issued . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.title"; + shacl:description "A name given to the Catalogue Record."@en; + shacl:name "title"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:title . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.language"; + shacl:description "A language used in the textual metadata describing titles, descriptions, etc. of the Catalogued Resource."@en; + shacl:name "language"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:language . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.distributor"; + shacl:description "Party who distributes the resource [[ISO-19115]]."@en; + shacl:name "distributor"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.primarytopic"; + shacl:description "A link to the Dataset, Data service or Catalog described in the record."@en; + shacl:maxCount 1; + shacl:name "primary topic"@en; + shacl:path foaf:primaryTopic . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#CatalogueRecord.originator"; + shacl:description "Party who created the resource [[ISO-19115]]."@en; + shacl:name "originator"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + ; + shacl:targetClass dcat:Catalog . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.geographicalcoverage"; + shacl:class dc:Location; + shacl:description "A geographical area covered by the Catalogue."@en; + shacl:name "geographical coverage"@en; + shacl:path dc:spatial . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.haspart"; + shacl:description "A related Catalogue that is part of the described Catalogue."@en; + shacl:name "has part"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:hasPart . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.dataset"; + shacl:description "A Dataset that is part of the Catalogue."@en; + shacl:name "dataset"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:dataset . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.themes"; + shacl:description "A knowledge organization system used to classify the Resources that are in the Catalogue."@en; + shacl:name "themes"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:themeTaxonomy . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.conformsto"; + shacl:description "An established standard to which the described Catalogue conforms."@en; + shacl:name "conforms to"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:conformsTo . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.applicablelegislation"; + shacl:description "The legislation that mandates the creation or management of the Catalog."@en; + shacl:name "applicable legislation"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.topiccategory"; + shacl:description "Topic category in accordance with EN ISO 19115."@en; + shacl:name "topic category"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.accessrights"; + shacl:description "Information regarding access or restrictions based on privacy, security, or other policies."@en; + shacl:name "access rights"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:accessRights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.principalinvestigator"; + shacl:class foaf:Agent; + shacl:description "Key party responsible for gathering information and conducting research [[ISO-19115]]."@en; + shacl:name "principal investigator"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.wasusedby"; + shacl:class prov:Activity; + shacl:description "An Activity that used the Catalogue."@en; + shacl:name "was used by"@en; + shacl:path prov:wasUsedBy . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.custodian"; + shacl:description "Party that accepts accountability and responsibility for the data and ensures appropriate care and maintenance of the resource [[ISO-19115]]."@en; + shacl:name "custodian"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.processor"; + shacl:class foaf:Agent; + shacl:description "Party who has processed the data in a manner such that the resource has been modified [[ISO-19115]]."@en; + shacl:name "processor"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.language"; + shacl:class dc:LinguisticSystem; + shacl:description "A language used in the textual metadata describing titles, descriptions, etc. of the Datasets, Data Services and Dataset Series in the Catalogue."@en; + shacl:name "language"@en; + shacl:path dc:language . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.resourceprovider"; + shacl:description "Party that supplies the resource [[ISO-19115]]."@en; + shacl:name "resource provider"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.principalinvestigator"; + shacl:description "Key party responsible for gathering information and conducting research [[ISO-19115]]."@en; + shacl:name "principal investigator"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.record"; + shacl:class dcat:CatalogRecord; + shacl:description "A Catalogue Record that is part of the Catalogue."@en; + shacl:name "record"@en; + shacl:path dcat:record . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.record"; + shacl:description "A Catalogue Record that is part of the Catalogue."@en; + shacl:name "record"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:record . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.rightsholder"; + shacl:class foaf:Agent; + shacl:description "An Agent (organisation) holding rights on the Catalogued Resource."@en; + shacl:name "rights holder"@en; + shacl:path dc:rightsHolder . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.originator"; + shacl:class foaf:Agent; + shacl:description "Party who created the resource [[ISO-19115]]."@en; + shacl:name "originator"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.catalogue"; + shacl:class dcat:Catalog; + shacl:description "A catalogue whose contents are of interest in the context of this catalogue."@en; + shacl:name "catalogue"@en; + shacl:path dcat:catalog . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.releasedate"; + shacl:description "The date of formal issuance (e.g., publication) of the Catalogue."@en; + shacl:name "release date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:issued . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.rightsholder"; + shacl:description "An Agent (organisation) holding rights on the Catalogued Resource."@en; + shacl:name "rights holder"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:rightsHolder . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.keyword"; + shacl:description "A keyword or tag describing the Catalogue."@en; + shacl:name "keyword"@en; + shacl:nodeKind shacl:Literal; + shacl:path dcat:keyword . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.themes"; + shacl:class skos:ConceptScheme; + shacl:description "A knowledge organization system used to classify the Resources that are in the Catalogue."@en; + shacl:name "themes"@en; + shacl:path dcat:themeTaxonomy . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.creationdate"; + shacl:description "The date on which the Catalogue has been first created."@en; + shacl:maxCount 1; + shacl:name "creation date"@en; + shacl:path dc:created . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.publisher"; + shacl:description "An entity (organisation) responsible for making the Catalogue available."@en; + shacl:maxCount 1; + shacl:name "publisher"@en; + shacl:path dc:publisher . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.modificationdate"; + shacl:description "The most recent date on which the Catalogue was modified."@en; + shacl:name "modification date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:modified . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.processor"; + shacl:description "Party who has processed the data in a manner such that the resource has been modified [[ISO-19115]]."@en; + shacl:name "processor"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.description"; + shacl:description "A free-text account of the Catalogue."@en; + shacl:name "description"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:description . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.applicablelegislation"; + shacl:class ; + shacl:description "The legislation that mandates the creation or management of the Catalog."@en; + shacl:name "applicable legislation"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.custodian"; + shacl:class foaf:Agent; + shacl:description "Party that accepts accountability and responsibility for the data and ensures appropriate care and maintenance of the resource [[ISO-19115]]."@en; + shacl:name "custodian"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.accessrights"; + shacl:description "Information regarding access or restrictions based on privacy, security, or other policies."@en; + shacl:maxCount 1; + shacl:name "access rights"@en; + shacl:path dc:accessRights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.contactpoint"; + shacl:class vcard:Kind; + shacl:description "Contact information that can be used for sending comments about the Catalogue."@en; + shacl:name "contact point"@en; + shacl:path dcat:contactPoint . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.dataset"; + shacl:class dcat:Dataset; + shacl:description "A Dataset that is part of the Catalogue."@en; + shacl:name "dataset"@en; + shacl:path dcat:dataset . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.creator"; + shacl:description "An entity responsible for the creation of the Catalogue."@en; + shacl:name "creator"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:creator . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.topiccategory"; + shacl:class skos:Concept; + shacl:description "Topic category in accordance with EN ISO 19115."@en; + shacl:name "topic category"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.temporalcoverage"; + shacl:description "A temporal period that the Catalogue covers."@en; + shacl:name "temporal coverage"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:temporal . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.qualifiedattribution"; + shacl:class prov:Attribution; + shacl:description "A link to an Agent having some form of responsibility for the Catalogue."@en; + shacl:name "qualified attribution"@en; + shacl:path prov:qualifiedAttribution . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.referencesystem"; + shacl:class dc:Standard; + shacl:description "The reference system used in accordance with OGC EPSG Coordinate Reference Systems Register [[OGC-EPSG]]."@en; + shacl:name "reference system"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.creator"; + shacl:description "An entity responsible for the creation of the Catalogue."@en; + shacl:maxCount 1; + shacl:name "creator"@en; + shacl:path dc:creator . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.homepage"; + shacl:description "A web page that acts as the main page for the Catalogue."@en; + shacl:name "homepage"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path foaf:homepage . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.temporalcoverage"; + shacl:class dc:PeriodOfTime; + shacl:description "A temporal period that the Catalogue covers."@en; + shacl:name "temporal coverage"@en; + shacl:path dc:temporal . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.description"; + shacl:description "A free-text account of the Catalogue."@en; + shacl:minCount 1; + shacl:name "description"@en; + shacl:path dc:description . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.referencesystem"; + shacl:description "The reference system used in accordance with OGC EPSG Coordinate Reference Systems Register [[OGC-EPSG]]."@en; + shacl:name "reference system"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.resourceprovider"; + shacl:class foaf:Agent; + shacl:description "Party that supplies the resource [[ISO-19115]]."@en; + shacl:name "resource provider"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.publisher"; + shacl:description "An entity (organisation) responsible for making the Catalogue available."@en; + shacl:name "publisher"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:publisher . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.geographicalcoverage"; + shacl:description "A geographical area covered by the Catalogue."@en; + shacl:name "geographical coverage"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:spatial . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.theme"; + shacl:description "A main category of the Catalogue. A Catalogue can have multiple categories."@en; + shacl:name "theme"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:theme . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.rights"; + shacl:class dc:RightsStatement; + shacl:description "A statement that specifies rights associated with the Catalogue."@en; + shacl:name "rights"@en; + shacl:path dc:rights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.contactpoint"; + shacl:description "Contact information that can be used for sending comments about the Catalogue."@en; + shacl:name "contact point"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:contactPoint . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.modificationdate"; + shacl:description "The most recent date on which the Catalogue was modified."@en; + shacl:maxCount 1; + shacl:name "modification date"@en; + shacl:path dc:modified . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.licence"; + shacl:description "A licence under which the Catalogue can be used or reused."@en; + shacl:maxCount 1; + shacl:name "licence"@en; + shacl:path dc:license . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.creationdate"; + shacl:description "The date on which the Catalogue has been first created."@en; + shacl:name "creation date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:created . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.publisher"; + shacl:description "An entity (organisation) responsible for making the Catalogue available."@en; + shacl:minCount 1; + shacl:name "publisher"@en; + shacl:path dc:publisher . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.identifier"; + shacl:description "The main identifier for the Catalogue, e.g. the URI or other unique identifier."@en; + shacl:name "identifier"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:identifier . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.wasusedby"; + shacl:description "An Activity that used the Catalogue."@en; + shacl:name "was used by"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path prov:wasUsedBy . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.releasedate"; + shacl:description "The date of formal issuance (e.g., publication) of the Catalogue."@en; + shacl:maxCount 1; + shacl:name "release date"@en; + shacl:path dc:issued . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.publisher"; + shacl:class foaf:Agent; + shacl:description "An entity (organisation) responsible for making the Catalogue available."@en; + shacl:name "publisher"@en; + shacl:path dc:publisher . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.rights"; + shacl:description "A statement that specifies rights associated with the Catalogue."@en; + shacl:name "rights"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:rights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.user"; + shacl:description "Party who uses the resource [[ISO-19115]]."@en; + shacl:name "user"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.catalogue"; + shacl:description "A catalogue whose contents are of interest in the context of this catalogue."@en; + shacl:name "catalogue"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:catalog . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.qualifiedattribution"; + shacl:description "A link to an Agent having some form of responsibility for the Catalogue."@en; + shacl:name "qualified attribution"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path prov:qualifiedAttribution . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.creator"; + shacl:class foaf:Agent; + shacl:description "An entity responsible for the creation of the Catalogue."@en; + shacl:name "creator"@en; + shacl:path dc:creator . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.service"; + shacl:class dcat:DataService; + shacl:description "A site or end-point (Data Service) that is listed in the Catalogue."@en; + shacl:name "service"@en; + shacl:path dcat:service . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.distributor"; + shacl:class foaf:Agent; + shacl:description "Party who distributes the resource [[ISO-19115]]."@en; + shacl:name "distributor"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.conformsto"; + shacl:class dc:Standard; + shacl:description "An established standard to which the described Catalogue conforms."@en; + shacl:name "conforms to"@en; + shacl:path dc:conformsTo . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.haspart"; + shacl:class dcat:Catalog; + shacl:description "A related Catalogue that is part of the described Catalogue."@en; + shacl:name "has part"@en; + shacl:path dc:hasPart . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.user"; + shacl:class foaf:Agent; + shacl:description "Party who uses the resource [[ISO-19115]]."@en; + shacl:name "user"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.title"; + shacl:description "A name given to the Catalogue."@en; + shacl:name "title"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:title . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.homepage"; + shacl:description "A web page that acts as the main page for the Catalogue."@en; + shacl:maxCount 1; + shacl:name "homepage"@en; + shacl:path foaf:homepage . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.language"; + shacl:description "A language used in the textual metadata describing titles, descriptions, etc. of the Datasets, Data Services and Dataset Series in the Catalogue."@en; + shacl:name "language"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:language . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.licence"; + shacl:class dc:LicenseDocument; + shacl:description "A licence under which the Catalogue can be used or reused."@en; + shacl:name "licence"@en; + shacl:path dc:license . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.theme"; + shacl:class skos:Concept; + shacl:description "A main category of the Catalogue. A Catalogue can have multiple categories."@en; + shacl:name "theme"@en; + shacl:path dcat:theme . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.distributor"; + shacl:description "Party who distributes the resource [[ISO-19115]]."@en; + shacl:name "distributor"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.service"; + shacl:description "A site or end-point (Data Service) that is listed in the Catalogue."@en; + shacl:name "service"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:service . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.accessrights"; + shacl:class dc:RightsStatement; + shacl:description "Information regarding access or restrictions based on privacy, security, or other policies."@en; + shacl:name "access rights"@en; + shacl:path dc:accessRights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.originator"; + shacl:description "Party who created the resource [[ISO-19115]]."@en; + shacl:name "originator"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.licence"; + shacl:description "A licence under which the Catalogue can be used or reused."@en; + shacl:name "licence"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:license . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.title"; + shacl:description "A name given to the Catalogue."@en; + shacl:minCount 1; + shacl:name "title"@en; + shacl:path dc:title . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Catalogue.homepage"; + shacl:class foaf:Document; + shacl:description "A web page that acts as the main page for the Catalogue."@en; + shacl:name "homepage"@en; + shacl:path foaf:homepage . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + ; + shacl:targetClass dcat:DataService . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.geographicalcoverage"; + shacl:class dc:Location; + shacl:description "A geographic region that is covered by the Data Service."@en; + shacl:name "geographical coverage"@en; + shacl:path dc:spatial . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.orginator"; + shacl:class foaf:Agent; + shacl:description "Party who created the resource [[ISO-19115]]."@en; + shacl:name "orginator"@en; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.serviceprotocol"; + shacl:description "Protocol value in accordance with INSPIRE Protocol Values [[INSPIRE-PV]]."@en; + shacl:name "service protocol"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.conformsto"; + shacl:description "An established (technical) standard to which the Data Service conforms."@en; + shacl:name "conforms to"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:conformsTo . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.applicablelegislation"; + shacl:description "The legislation that mandates the creation or management of the Data Service."@en; + shacl:name "applicable legislation"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.servicetype"; + shacl:description "Service type in accordance with INSPIRE Spatial Data Service Types [[INSPIRE-SDST]]."@en; + shacl:name "service type"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.topiccategory"; + shacl:description "Topic category in accordance with ISO-19115 [[INSPIRE-TC]]."@en; + shacl:name "topic category"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.accessrights"; + shacl:description "Information regarding access or restrictions based on privacy, security, or other policies."@en; + shacl:name "access rights"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:accessRights . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.principalinvestigator"; + shacl:class foaf:Agent; + shacl:description "Key party responsible for gathering information and conducting research [[ISO-19115]]."@en; + shacl:name "principal investigator"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.servesdataset"; + shacl:class dcat:Dataset; + shacl:description "This property refers to a collection of data that this data service can distribute."@en; + shacl:name "serves dataset"@en; + shacl:path dcat:servesDataset . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.wasusedby"; + shacl:class prov:Activity; + shacl:description "To an Activity that used the Data Service."@en; + shacl:name "was used by"@en; + shacl:path prov:wasUsedBy . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.type"; + shacl:description "Resource type in accordance with INSPIRE Resource Types [[INSPIRE-RT]]."@en; + shacl:maxCount 1; + shacl:name "type"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.custodian"; + shacl:description "Party that accepts accountability and responsibility for the data and ensures appropriate care and maintenance of the resource [[ISO-19115]]."@en; + shacl:name "custodian"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.processor"; + shacl:class foaf:Agent; + shacl:description "Party who has processed the data in a manner such that the resource has been modified [[ISO-19115]]."@en; + shacl:name "processor"@en; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.temporalresolution"; + shacl:description "The minimum time period resolvable in the Data Service."@en; + shacl:name "temporal resolution"@en; + shacl:nodeKind shacl:Literal; + shacl:path dcat:temporalResolution . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.language"; + shacl:class dc:LinguisticSystem; + shacl:description "The language of the structure that can be returned by querying the endpointURL."@en; + shacl:name "language"@en; + shacl:path dc:language . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.resourceprovider"; + shacl:description "Party that supplies the resource [[ISO-19115]]."@en; + shacl:name "resource provider"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.landingpage"; + shacl:description "A web page that provides access to the Data Service and/or additional information."@en; + shacl:name "landing page"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:landingPage . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.type"; + shacl:class skos:Concept; + shacl:description "Resource type in accordance with INSPIRE Resource Types [[INSPIRE-RT]]."@en; + shacl:name "type"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.principalinvestigator"; + shacl:description "Key party responsible for gathering information and conducting research [[ISO-19115]]."@en; + shacl:name "principal investigator"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.rightsholder"; + shacl:class foaf:Agent; + shacl:description "An Agent (organisation) holding rights on the Catalogue."@en; + shacl:name "rights holder"@en; + shacl:path dc:rightsHolder . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.endpointURL"; + shacl:description "The root location or primary endpoint of the service (an IRI)."@en; + shacl:name "endpoint URL"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:endpointURL . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.spatialresolution"; + shacl:class ; + shacl:description "Spatial resolution, as defined in [[INSPIRE-MD-REG]], [[ISO-19115]], and [[ISO-19115-1]]."@en; + shacl:name "spatial resolution"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.spatialresolution"; + shacl:description "Spatial resolution, as defined in [[INSPIRE-MD-REG]], [[ISO-19115]], and [[ISO-19115-1]]."@en; + shacl:name "spatial resolution"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.orginator"; + shacl:description "Party who created the resource [[ISO-19115]]."@en; + shacl:name "orginator"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.releasedate"; + shacl:description "The date of formal issuance (e.g., publication) of the Data Service."@en; + shacl:name "release date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:issued . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.type"; + shacl:description "Resource type in accordance with INSPIRE Resource Types [[INSPIRE-RT]]."@en; + shacl:name "type"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.rightsholder"; + shacl:description "An Agent (organisation) holding rights on the Catalogue."@en; + shacl:name "rights holder"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:rightsHolder . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.keyword"; + shacl:description "A keyword or tag describing the Data Service."@en; + shacl:name "keyword"@en; + shacl:nodeKind shacl:Literal; + shacl:path dcat:keyword . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.endpointdescription"; + shacl:description "A description of the services available via the end-points, including their operations, parameters etc."@en; + shacl:name "endpoint description"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:endpointDescription . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.servicetype"; + shacl:class skos:Concept; + shacl:description "Service type in accordance with INSPIRE Spatial Data Service Types [[INSPIRE-SDST]]."@en; + shacl:name "service type"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.creationdate"; + shacl:description "The date on which the Data Service has been first created."@en; + shacl:maxCount 1; + shacl:name "creation date"@en; + shacl:path dc:created . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.publisher"; + shacl:description "An entity (organisation) responsible for making the Data Service available."@en; + shacl:maxCount 1; + shacl:name "publisher"@en; + shacl:path dc:publisher . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.spatialresolutioninmetres"; + shacl:description "The minimum spatial separation resolvable in a Data Service, measured in metres."@en; + shacl:name "spatial resolution in metres"@en; + shacl:nodeKind shacl:Literal; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.modificationdate"; + shacl:description "The most recent date on which the Data Service was changed or modified."@en; + shacl:name "modification date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:modified . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.processor"; + shacl:description "Party who has processed the data in a manner such that the resource has been modified [[ISO-19115]]."@en; + shacl:name "processor"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.description"; + shacl:description "A free-text account of the Data Service."@en; + shacl:name "description"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:description . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.applicablelegislation"; + shacl:class ; + shacl:description "The legislation that mandates the creation or management of the Data Service."@en; + shacl:name "applicable legislation"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.format"; + shacl:class dc:MediaTypeOrExtent; + shacl:description "The structure that can be returned by querying the endpointURL."@en; + shacl:name "format"@en; + shacl:path dc:format . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.custodian"; + shacl:class foaf:Agent; + shacl:description "Party that accepts accountability and responsibility for the data and ensures appropriate care and maintenance of the resource [[ISO-19115]]."@en; + shacl:name "custodian"@en; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.accessrights"; + shacl:description "Information regarding access or restrictions based on privacy, security, or other policies."@en; + shacl:maxCount 1; + shacl:name "access rights"@en; + shacl:path dc:accessRights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.contactpoint"; + shacl:class vcard:Kind; + shacl:description "Contact information that can be used for sending comments about the Data Service."@en; + shacl:name "contact point"@en; + shacl:path dcat:contactPoint . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.creator"; + shacl:description "An Agent primarily responsible for producing the Data Service."@en; + shacl:name "creator"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:creator . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.topiccategory"; + shacl:class skos:Concept; + shacl:description "Topic category in accordance with ISO-19115 [[INSPIRE-TC]]."@en; + shacl:name "topic category"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.temporalcoverage"; + shacl:description "A temporal period that the Data Service covers."@en; + shacl:name "temporal coverage"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:temporal . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.qualifiedattribution"; + shacl:class prov:Attribution; + shacl:description "Link to an Agent having some form of responsibility for the Data Service."@en; + shacl:name "qualified attribution"@en; + shacl:path prov:qualifiedAttribution . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.endpointURL"; + shacl:description "The root location or primary endpoint of the service (an IRI)."@en; + shacl:minCount 1; + shacl:name "endpoint URL"@en; + shacl:path dcat:endpointURL . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.referencesystem"; + shacl:class skos:Concept; + shacl:description "The reference system used in the Data Service."@en; + shacl:name "reference system"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.spatialresolutionastext"; + shacl:description "Textual description of spatial resolution."@en; + shacl:name "spatial resolution as text"@en; + shacl:nodeKind shacl:Literal; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.temporalcoverage"; + shacl:class dc:PeriodOfTime; + shacl:description "A temporal period that the Data Service covers."@en; + shacl:name "temporal coverage"@en; + shacl:path dc:temporal . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.referencesystem"; + shacl:description "The reference system used in the Data Service."@en; + shacl:name "reference system"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.resourceprovider"; + shacl:class foaf:Agent; + shacl:description "Party that supplies the resource [[ISO-19115]]."@en; + shacl:name "resource provider"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.servicetype"; + shacl:description "Service type in accordance with INSPIRE Spatial Data Service Types [[INSPIRE-SDST]]."@en; + shacl:maxCount 1; + shacl:name "service type"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.publisher"; + shacl:description "An entity (organisation) responsible for making the Data Service available."@en; + shacl:name "publisher"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:publisher . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.geographicalcoverage"; + shacl:description "A geographic region that is covered by the Data Service."@en; + shacl:name "geographical coverage"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:spatial . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.theme"; + shacl:description "A category of the Data Service."@en; + shacl:name "theme"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:theme . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.rights"; + shacl:class dc:RightsStatement; + shacl:description "A statement that specifies rights associated with the Data Service."@en; + shacl:name "rights"@en; + shacl:path dc:rights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.contactpoint"; + shacl:description "Contact information that can be used for sending comments about the Data Service."@en; + shacl:name "contact point"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:contactPoint . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.modificationdate"; + shacl:description "The most recent date on which the Data Service was changed or modified."@en; + shacl:maxCount 1; + shacl:name "modification date"@en; + shacl:path dc:modified . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.licence"; + shacl:description "A licence under which the Data service is made available."@en; + shacl:maxCount 1; + shacl:name "licence"@en; + shacl:path dc:license . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.creationdate"; + shacl:description "The date on which the Data Service has been first created."@en; + shacl:name "creation date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:created . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.spatialresolutioninmetres"; + shacl:datatype xsd:decimal; + shacl:description "The minimum spatial separation resolvable in a Data Service, measured in metres."@en; + shacl:name "spatial resolution in metres"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.identifier"; + shacl:description "The main identifier for the Data Service, e.g. the URI or other unique identifier in the context of the Catalogue."@en; + shacl:name "identifier"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:identifier . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.servicecategory"; + shacl:class skos:Concept; + shacl:description "Service category in accordance with INSPIRE Spatial Data Service Categories [[INSPIRE-SDSC]]."@en; + shacl:name "service category"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.wasusedby"; + shacl:description "To an Activity that used the Data Service."@en; + shacl:name "was used by"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path prov:wasUsedBy . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.releasedate"; + shacl:description "The date of formal issuance (e.g., publication) of the Data Service."@en; + shacl:maxCount 1; + shacl:name "release date"@en; + shacl:path dc:issued . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.publisher"; + shacl:class foaf:Agent; + shacl:description "An entity (organisation) responsible for making the Data Service available."@en; + shacl:name "publisher"@en; + shacl:path dc:publisher . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.rights"; + shacl:description "A statement that specifies rights associated with the Data Service."@en; + shacl:name "rights"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:rights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.user"; + shacl:description "Party who uses the resource [[ISO-19115]]."@en; + shacl:name "user"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.qualifiedattribution"; + shacl:description "Link to an Agent having some form of responsibility for the Data Service."@en; + shacl:name "qualified attribution"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path prov:qualifiedAttribution . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.format"; + shacl:description "The structure that can be returned by querying the endpointURL."@en; + shacl:name "format"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:format . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.creator"; + shacl:class foaf:Agent; + shacl:description "An Agent primarily responsible for producing the Data Service."@en; + shacl:name "creator"@en; + shacl:path dc:creator . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.distributor"; + shacl:class foaf:Agent; + shacl:description "Party who distributes the resource [[ISO-19115]]."@en; + shacl:name "distributor"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.conformsto"; + shacl:class dc:Standard; + shacl:description "An established (technical) standard to which the Data Service conforms."@en; + shacl:name "conforms to"@en; + shacl:path dc:conformsTo . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.servesdataset"; + shacl:description "This property refers to a collection of data that this data service can distribute."@en; + shacl:name "serves dataset"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:servesDataset . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.user"; + shacl:class foaf:Agent; + shacl:description "Party who uses the resource [[ISO-19115]]."@en; + shacl:name "user"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.title"; + shacl:description "A name given to the Data Service."@en; + shacl:name "title"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:title . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.language"; + shacl:description "The language of the structure that can be returned by querying the endpointURL."@en; + shacl:name "language"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:language . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.licence"; + shacl:class dc:LicenseDocument; + shacl:description "A licence under which the Data service is made available."@en; + shacl:name "licence"@en; + shacl:path dc:license . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.endpointdescription"; + shacl:class rdfs:Resource; + shacl:description "A description of the services available via the end-points, including their operations, parameters etc."@en; + shacl:name "endpoint description"@en; + shacl:path dcat:endpointDescription . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.servicecategory"; + shacl:description "Service category in accordance with INSPIRE Spatial Data Service Categories [[INSPIRE-SDSC]]."@en; + shacl:maxCount 1; + shacl:name "service category"@en; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.theme"; + shacl:class skos:Concept; + shacl:description "A category of the Data Service."@en; + shacl:name "theme"@en; + shacl:path dcat:theme . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.servicecategory"; + shacl:description "Service category in accordance with INSPIRE Spatial Data Service Categories [[INSPIRE-SDSC]]."@en; + shacl:name "service category"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.serviceprotocol"; + shacl:description "Protocol value in accordance with INSPIRE Protocol Values [[INSPIRE-PV]]."@en; + shacl:maxCount 1; + shacl:name "service protocol"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.temporalresolution"; + shacl:datatype xsd:duration; + shacl:description "The minimum time period resolvable in the Data Service."@en; + shacl:name "temporal resolution"@en; + shacl:path dcat:temporalResolution . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.distributor"; + shacl:description "Party who distributes the resource [[ISO-19115]]."@en; + shacl:name "distributor"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.endpointURL"; + shacl:class rdfs:Resource; + shacl:description "The root location or primary endpoint of the service (an IRI)."@en; + shacl:name "endpoint URL"@en; + shacl:path dcat:endpointURL . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.serviceprotocol"; + shacl:class skos:Concept; + shacl:description "Protocol value in accordance with INSPIRE Protocol Values [[INSPIRE-PV]]."@en; + shacl:name "service protocol"@en; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.accessrights"; + shacl:class dc:RightsStatement; + shacl:description "Information regarding access or restrictions based on privacy, security, or other policies."@en; + shacl:name "access rights"@en; + shacl:path dc:accessRights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.licence"; + shacl:description "A licence under which the Data service is made available."@en; + shacl:name "licence"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:license . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.title"; + shacl:description "A name given to the Data Service."@en; + shacl:minCount 1; + shacl:name "title"@en; + shacl:path dc:title . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DataService.landingpage"; + shacl:class foaf:Document; + shacl:description "A web page that provides access to the Data Service and/or additional information."@en; + shacl:name "landing page"@en; + shacl:path dcat:landingPage . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + ; + shacl:targetClass dcat:DatasetSeries . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.geographicalcoverage"; + shacl:class dc:Location; + shacl:description "A geographic region that is covered by the Dataset Series."@en; + shacl:name "geographical coverage"@en; + shacl:path dc:spatial . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.resourcetype"; + shacl:class skos:Concept; + shacl:description "Resource type in accordance with INSPIRE Resource Types [[INSPIRE-RT]]."@en; + shacl:name "resource type"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.otheridentifier"; + shacl:description "A secondary identifier of the Dataset Series."@en; + shacl:name "other identifier"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.frequency"; + shacl:class dc:Frequency; + shacl:description "The frequency at which the Dataset Series is updated."@en; + shacl:name "frequency"@en; + shacl:path dc:accrualPeriodicity . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.conformsto"; + shacl:description "An implementing rule or other specification."@en; + shacl:name "conforms to"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:conformsTo . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.applicablelegislation"; + shacl:description "The legislation that mandates the creation or management of the Dataset Series."@en; + shacl:name "applicable legislation"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.relatedresource"; + shacl:description "A related resource."@en; + shacl:name "related resource"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:relation . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.topiccategory"; + shacl:description "Topic category in accordance with ISO-19115 [[INSPIRE-TC]]."@en; + shacl:name "topic category"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.accessrights"; + shacl:description "Information that indicates whether the Dataset Series is publicly accessible, has access restrictions or is not public."@en; + shacl:name "access rights"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:accessRights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.principalinvestigator"; + shacl:class foaf:Agent; + shacl:description "Key party responsible for gathering information and conducting research [[ISO-19115]]."@en; + shacl:name "principal investigator"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.wasusedby"; + shacl:class prov:Activity; + shacl:description "An Activity that used the Dataset Series."@en; + shacl:name "was used by"@en; + shacl:path prov:wasUsedBy . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.wasgeneratedby"; + shacl:description "An activity that generated, or provides the business context for, the creation of the Dataset Series."@en; + shacl:name "was generated by"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path prov:wasGeneratedBy . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.custodian"; + shacl:description "Party that accepts accountability and responsibility for the data and ensures appropriate care and maintenance of the resource [[ISO-19115]]."@en; + shacl:name "custodian"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.processor"; + shacl:class foaf:Agent; + shacl:description "Party who has processed the data in a manner such that the resource has been modified [[ISO-19115]]."@en; + shacl:name "processor"@en; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.language"; + shacl:class dc:LinguisticSystem; + shacl:description "A language of the Dataset Series."@en; + shacl:name "language"@en; + shacl:path dc:language . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.source"; + shacl:class dcat:Dataset; + shacl:description "A related Dataset from which the described Dataset Series is derived."@en; + shacl:name "source"@en; + shacl:path dc:source . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.resourceprovider"; + shacl:description "Party that supplies the resource [[ISO-19115]]."@en; + shacl:name "resource provider"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.landingpage"; + shacl:description "A web page that provides access to the Dataset Series, its Distributions and/or additional information."@en; + shacl:name "landing page"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:landingPage . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.principalinvestigator"; + shacl:description "Key party responsible for gathering information and conducting research [[ISO-19115]]."@en; + shacl:name "principal investigator"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.sample"; + shacl:description "A sample distribution of the Dataset Series."@en; + shacl:name "sample"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.rightsholder"; + shacl:class foaf:Agent; + shacl:description "An Agent (organisation) holding rights on the Dataset Series."@en; + shacl:name "rights holder"@en; + shacl:path dc:rightsHolder . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.wasgeneratedby"; + shacl:class prov:Activity; + shacl:description "An activity that generated, or provides the business context for, the creation of the Dataset Series."@en; + shacl:name "was generated by"@en; + shacl:path prov:wasGeneratedBy . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.isreferencedby"; + shacl:description "A related resource, such as a publication, that references, cites, or otherwise points to the Dataset Series."@en; + shacl:name "is referenced by"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:isReferencedBy . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.qualifiedrelation"; + shacl:description "A description of a relationship with another resource."@en; + shacl:name "qualified relation"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:qualifiedRelation . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.spatialresolution"; + shacl:class ; + shacl:description "The performed quality measurements."@en; + shacl:name "spatial resolution"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.spatialresolution"; + shacl:description "The performed quality measurements."@en; + shacl:name "spatial resolution"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.originator"; + shacl:class foaf:Agent; + shacl:description "Party who created the resource [[ISO-19115]]."@en; + shacl:name "originator"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.relatedresource"; + shacl:class dcat:Resource; + shacl:description "A related resource."@en; + shacl:name "related resource"@en; + shacl:path dc:relation . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.hasversion"; + shacl:class dcat:Dataset; + shacl:description "A related Dataset that is a version, edition, or adaptation of the described Dataset Series."@en; + shacl:name "has version"@en; + shacl:path dcat:hasVersion . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.releasedate"; + shacl:description "The date of formal issuance (e.g., publication) of the Dataset Series."@en; + shacl:name "release date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:issued . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.resourcetype"; + shacl:description "Resource type in accordance with INSPIRE Resource Types [[INSPIRE-RT]]."@en; + shacl:maxCount 1; + shacl:name "resource type"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.rightsholder"; + shacl:description "An Agent (organisation) holding rights on the Dataset Series."@en; + shacl:name "rights holder"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:rightsHolder . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.versionnotes"; + shacl:description "A description of the differences between this version and a previous version of the Dataset."@en; + shacl:name "version notes"@en; + shacl:nodeKind shacl:Literal; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.keyword"; + shacl:description "A keyword or tag describing the Dataset."@en; + shacl:name "keyword"@en; + shacl:nodeKind shacl:Literal; + shacl:path dcat:keyword . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.sample"; + shacl:class dcat:Distribution; + shacl:description "A sample distribution of the Dataset Series."@en; + shacl:name "sample"@en; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.inseries"; + shacl:class dcat:DatasetSeries; + shacl:description "A dataset series of which the dataset series is part."@en; + shacl:name "in series"@en; + shacl:path dcat:inSeries . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.frequency"; + shacl:description "The frequency at which the Dataset Series is updated."@en; + shacl:name "frequency"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:accrualPeriodicity . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.creationdate"; + shacl:description "The date on which the Dataset Series has been first created."@en; + shacl:maxCount 1; + shacl:name "creation date"@en; + shacl:path dc:created . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.publisher"; + shacl:description "An entity (organisation) responsible for ensuring the coherency of the Dataset Series "@en; + shacl:maxCount 1; + shacl:name "publisher"@en; + shacl:path dc:publisher . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.documentation"; + shacl:class foaf:Document; + shacl:description "A page or document about this Dataset Series."@en; + shacl:name "documentation"@en; + shacl:path foaf:page . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.spatialresolutioninmetres"; + shacl:description "The minimum spatial separation resolvable in a Dataset, measured in meters."@en; + shacl:name "spatial resolution in metres"@en; + shacl:nodeKind shacl:Literal; + shacl:path dcat:spatialResolutionInMeters . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.modificationdate"; + shacl:description "The most recent date on which the Dataset Series was changed or modified."@en; + shacl:name "modification date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:modified . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.processor"; + shacl:description "Party who has processed the data in a manner such that the resource has been modified [[ISO-19115]]."@en; + shacl:name "processor"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.description"; + shacl:description "A free-text account of the Dataset Series."@en; + shacl:name "description"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:description . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.datasetseriesdistribution"; + shacl:class dcat:Distribution; + shacl:description "An available Distribution for the Dataset Series."@en; + shacl:name "dataset series distribution"@en; + shacl:path dcat:distribution . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.applicablelegislation"; + shacl:class ; + shacl:description "The legislation that mandates the creation or management of the Dataset Series."@en; + shacl:name "applicable legislation"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.custodian"; + shacl:class foaf:Agent; + shacl:description "Party that accepts accountability and responsibility for the data and ensures appropriate care and maintenance of the resource [[ISO-19115]]."@en; + shacl:name "custodian"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.accessrights"; + shacl:description "Information that indicates whether the Dataset Series is publicly accessible, has access restrictions or is not public."@en; + shacl:maxCount 1; + shacl:name "access rights"@en; + shacl:path dc:accessRights . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.contactpoint"; + shacl:class vcard:Kind; + shacl:description "Contact information that can be used for sending comments about the Dataset Series."@en; + shacl:name "contact point"@en; + shacl:path dcat:contactPoint . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.creator"; + shacl:description "An entity responsible for producing the Dataset Series."@en; + shacl:name "creator"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:creator . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.topiccategory"; + shacl:class skos:Concept; + shacl:description "Topic category in accordance with ISO-19115 [[INSPIRE-TC]]."@en; + shacl:name "topic category"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.source"; + shacl:description "A related Dataset from which the described Dataset Series is derived."@en; + shacl:name "source"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:source . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.temporalcoverage"; + shacl:description "A temporal period that the Dataset Series covers."@en; + shacl:name "temporal coverage"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:temporal . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.inseries"; + shacl:description "A dataset series of which the dataset series is part."@en; + shacl:name "in series"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:inSeries . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.qualifiedattribution"; + shacl:class prov:Attribution; + shacl:description "An Agent having some form of responsibility for the resource."@en; + shacl:name "qualified attribution"@en; + shacl:path prov:qualifiedAttribution . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.frequency"; + shacl:description "The frequency at which the Dataset Series is updated."@en; + shacl:maxCount 1; + shacl:name "frequency"@en; + shacl:path dc:accrualPeriodicity . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.referencesystem"; + shacl:class skos:Concept; + shacl:description "The reference system used in accordance with OGC EPSG Coordinate Reference Systems Register [[OGC-EPSG]]."@en; + shacl:name "reference system"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.spatialresolutionastext"; + shacl:description "Textual description of spatial resolution."@en; + shacl:name "spatial resolution as text"@en; + shacl:nodeKind shacl:Literal; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.temporalcoverage"; + shacl:class dc:PeriodOfTime; + shacl:description "A temporal period that the Dataset Series covers."@en; + shacl:name "temporal coverage"@en; + shacl:path dc:temporal . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.description"; + shacl:description "A free-text account of the Dataset Series."@en; + shacl:minCount 1; + shacl:name "description"@en; + shacl:path dc:description . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.referencesystem"; + shacl:description "The reference system used in accordance with OGC EPSG Coordinate Reference Systems Register [[OGC-EPSG]]."@en; + shacl:name "reference system"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.resourceprovider"; + shacl:class foaf:Agent; + shacl:description "Party that supplies the resource [[ISO-19115]]."@en; + shacl:name "resource provider"@en; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.publisher"; + shacl:description "An entity (organisation) responsible for ensuring the coherency of the Dataset Series "@en; + shacl:name "publisher"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:publisher . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.geographicalcoverage"; + shacl:description "A geographic region that is covered by the Dataset Series."@en; + shacl:name "geographical coverage"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:spatial . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.isreferencedby"; + shacl:class rdfs:Resource; + shacl:description "A related resource, such as a publication, that references, cites, or otherwise points to the Dataset Series."@en; + shacl:name "is referenced by"@en; + shacl:path dc:isReferencedBy . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.theme"; + shacl:description "A category of the Dataset Series."@en; + shacl:name "theme"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:theme . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.rights"; + shacl:class dc:RightsStatement; + shacl:description "A statement that specifies rights associated with the Dataset Series."@en; + shacl:name "rights"@en; + shacl:path dc:rights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.documentation"; + shacl:description "A page or document about this Dataset Series."@en; + shacl:name "documentation"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path foaf:page . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.contactpoint"; + shacl:description "Contact information that can be used for sending comments about the Dataset Series."@en; + shacl:name "contact point"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:contactPoint . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.modificationdate"; + shacl:description "The most recent date on which the Dataset Series was changed or modified."@en; + shacl:maxCount 1; + shacl:name "modification date"@en; + shacl:path dc:modified . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.creationdate"; + shacl:description "The date on which the Dataset Series has been first created."@en; + shacl:name "creation date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:created . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.otheridentifier"; + shacl:class ; + shacl:description "A secondary identifier of the Dataset Series."@en; + shacl:name "other identifier"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.spatialresolutioninmetres"; + shacl:datatype xsd:decimal; + shacl:description "The minimum spatial separation resolvable in a Dataset, measured in meters."@en; + shacl:name "spatial resolution in metres"@en; + shacl:path dcat:spatialResolutionInMeters . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.identifier"; + shacl:description "The main identifier for the Dataset Series, e.g. the URI or other unique identifier in the context of the Catalogue."@en; + shacl:name "identifier"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:identifier . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.datasetseriesdistribution"; + shacl:description "An available Distribution for the Dataset Series."@en; + shacl:name "dataset series distribution"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:distribution . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.wasusedby"; + shacl:description "An Activity that used the Dataset Series."@en; + shacl:name "was used by"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path prov:wasUsedBy . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.releasedate"; + shacl:description "The date of formal issuance (e.g., publication) of the Dataset Series."@en; + shacl:maxCount 1; + shacl:name "release date"@en; + shacl:path dc:issued . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.publisher"; + shacl:class foaf:Agent; + shacl:description "An entity (organisation) responsible for ensuring the coherency of the Dataset Series "@en; + shacl:name "publisher"@en; + shacl:path dc:publisher . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.rights"; + shacl:description "A statement that specifies rights associated with the Dataset Series."@en; + shacl:name "rights"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:rights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.user"; + shacl:description "Party who uses the resource [[ISO-19115]]."@en; + shacl:name "user"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.qualifiedattribution"; + shacl:description "An Agent having some form of responsibility for the resource."@en; + shacl:name "qualified attribution"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path prov:qualifiedAttribution . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.version"; + shacl:description "The version indicator (name or identifier) of a resource."@en; + shacl:name "version"@en; + shacl:nodeKind shacl:Literal; + shacl:path dcat:version . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.creator"; + shacl:class foaf:Agent; + shacl:description "An entity responsible for producing the Dataset Series."@en; + shacl:name "creator"@en; + shacl:path dc:creator . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.distributor"; + shacl:class foaf:Agent; + shacl:description "Party who distributes the resource [[ISO-19115]]."@en; + shacl:name "distributor"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.conformsto"; + shacl:class dc:Standard; + shacl:description "An implementing rule or other specification."@en; + shacl:name "conforms to"@en; + shacl:path dc:conformsTo . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.user"; + shacl:class foaf:Agent; + shacl:description "Party who uses the resource [[ISO-19115]]."@en; + shacl:name "user"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.hasversion"; + shacl:description "A related Dataset that is a version, edition, or adaptation of the described Dataset Series."@en; + shacl:name "has version"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:hasVersion . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.title"; + shacl:description "A name given to the Dataset Series."@en; + shacl:name "title"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:title . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.language"; + shacl:description "A language of the Dataset Series."@en; + shacl:name "language"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:language . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.theme"; + shacl:class skos:Concept; + shacl:description "A category of the Dataset Series."@en; + shacl:name "theme"@en; + shacl:path dcat:theme . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.distributor"; + shacl:description "Party who distributes the resource [[ISO-19115]]."@en; + shacl:name "distributor"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.accessrights"; + shacl:class dc:RightsStatement; + shacl:description "Information that indicates whether the Dataset Series is publicly accessible, has access restrictions or is not public."@en; + shacl:name "access rights"@en; + shacl:path dc:accessRights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.originator"; + shacl:description "Party who created the resource [[ISO-19115]]."@en; + shacl:name "originator"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.title"; + shacl:description "A name given to the Dataset Series."@en; + shacl:minCount 1; + shacl:name "title"@en; + shacl:path dc:title . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.qualifiedrelation"; + shacl:class dcat:Relationship; + shacl:description "A description of a relationship with another resource."@en; + shacl:name "qualified relation"@en; + shacl:path dcat:qualifiedRelation . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.resourcetype"; + shacl:description "Resource type in accordance with INSPIRE Resource Types [[INSPIRE-RT]]."@en; + shacl:name "resource type"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#DatasetSeries.landingpage"; + shacl:class foaf:Document; + shacl:description "A web page that provides access to the Dataset Series, its Distributions and/or additional information."@en; + shacl:name "landing page"@en; + shacl:path dcat:landingPage . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + ; + shacl:targetClass dcat:Dataset . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.geographicalcoverage"; + shacl:class dc:Location; + shacl:description "A geographic region that is covered by the Dataset."@en; + shacl:name "geographical coverage"@en; + shacl:path dc:spatial . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.resourcetype"; + shacl:class skos:Concept; + shacl:description "Resource type in accordance with INSPIRE Resource Types [[INSPIRE-RT]]."@en; + shacl:name "resource type"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.otheridentifier"; + shacl:description "A secondary identifier of the Dataset"@en; + shacl:name "other identifier"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.frequency"; + shacl:class dc:Frequency; + shacl:description "The frequency at which the Dataset is updated."@en; + shacl:name "frequency"@en; + shacl:path dc:accrualPeriodicity . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.conformsto"; + shacl:description "An implementing rule or other specification."@en; + shacl:name "conforms to"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:conformsTo . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.applicablelegislation"; + shacl:description "The legislation that mandates the creation or management of the Dataset."@en; + shacl:name "applicable legislation"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.relatedresource"; + shacl:description "A related resource."@en; + shacl:name "related resource"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:relation . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.provenance"; + shacl:class dc:ProvenanceStatement; + shacl:description "A statement about the lineage of a Dataset."@en; + shacl:name "provenance"@en; + shacl:path dc:provenance . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.topiccategory"; + shacl:description "Topic category in accordance with ISO-19115 [[INSPIRE-TC]]."@en; + shacl:name "topic category"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.accessrights"; + shacl:description "Information that indicates whether the Dataset is publicly accessible, has access restrictions or is not public."@en; + shacl:name "access rights"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:accessRights . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.principalinvestigator"; + shacl:class foaf:Agent; + shacl:description "Key party responsible for gathering information and conducting research [[ISO-19115]]."@en; + shacl:name "principal investigator"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.wasusedby"; + shacl:class prov:Activity; + shacl:description "An Activity that used the Dataset."@en; + shacl:name "was used by"@en; + shacl:path prov:wasUsedBy . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.wasgeneratedby"; + shacl:description "An activity that generated, or provides the business context for, the creation of the Dataset."@en; + shacl:name "was generated by"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path prov:wasGeneratedBy . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.custodian"; + shacl:description "Party that accepts accountability and responsibility for the data and ensures appropriate care and maintenance of the resource [[ISO-19115]]."@en; + shacl:name "custodian"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.processor"; + shacl:class foaf:Agent; + shacl:description "Party who has processed the data in a manner such that the resource has been modified [[ISO-19115]]."@en; + shacl:name "processor"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.temporalresolution"; + shacl:description "The minimum time period resolvable in the Dataset."@en; + shacl:name "temporal resolution"@en; + shacl:nodeKind shacl:Literal; + shacl:path dcat:temporalResolution . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.language"; + shacl:class dc:LinguisticSystem; + shacl:description "A language of the Dataset."@en; + shacl:name "language"@en; + shacl:path dc:language . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.datasetdistribution"; + shacl:class dcat:Distribution; + shacl:description "An available Distribution for the Dataset."@en; + shacl:name "dataset distribution"@en; + shacl:path dcat:distribution . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.source"; + shacl:class dcat:Dataset; + shacl:description "A related Dataset from which the described Dataset is derived."@en; + shacl:name "source"@en; + shacl:path dc:source . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.resourceprovider"; + shacl:description "Party that supplies the resource [[ISO-19115]]."@en; + shacl:name "resource provider"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.landingpage"; + shacl:description "A web page that provides access to the Dataset, its Distributions and/or additional information."@en; + shacl:name "landing page"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:landingPage . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.principalinvestigator"; + shacl:description "Key party responsible for gathering information and conducting research [[ISO-19115]]."@en; + shacl:name "principal investigator"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.sample"; + shacl:description "A sample distribution of the dataset."@en; + shacl:name "sample"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.rightsholder"; + shacl:class foaf:Agent; + shacl:description "An Agent (organisation) holding rights on the Dataset."@en; + shacl:name "rights holder"@en; + shacl:path dc:rightsHolder . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.wasgeneratedby"; + shacl:class prov:Activity; + shacl:description "An activity that generated, or provides the business context for, the creation of the Dataset."@en; + shacl:name "was generated by"@en; + shacl:path prov:wasGeneratedBy . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.isreferencedby"; + shacl:description "A related resource, such as a publication, that references, cites, or otherwise points to the Dataset."@en; + shacl:name "is referenced by"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:isReferencedBy . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.qualifiedrelation"; + shacl:description "A description of a relationship with another resource."@en; + shacl:name "qualified relation"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:qualifiedRelation . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.spatialresolution"; + shacl:class ; + shacl:description "The performed quality measurements."@en; + shacl:name "spatial resolution"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.spatialresolution"; + shacl:description "The performed quality measurements."@en; + shacl:name "spatial resolution"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.originator"; + shacl:class foaf:Agent; + shacl:description "Party who created the resource [[ISO-19115]]."@en; + shacl:name "originator"@en; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.relatedresource"; + shacl:class rdfs:Resource; + shacl:description "A related resource."@en; + shacl:name "related resource"@en; + shacl:path dc:relation . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.hasversion"; + shacl:class dcat:Dataset; + shacl:description "A related Dataset that is a version, edition, or adaptation of the described Dataset."@en; + shacl:name "has version"@en; + shacl:path dcat:hasVersion . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.releasedate"; + shacl:description "The date of formal issuance (e.g., publication) of the Dataset."@en; + shacl:name "release date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:issued . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.resourcetype"; + shacl:description "Resource type in accordance with INSPIRE Resource Types [[INSPIRE-RT]]."@en; + shacl:maxCount 1; + shacl:name "resource type"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.rightsholder"; + shacl:description "An Agent (organisation) holding rights on the Dataset."@en; + shacl:name "rights holder"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:rightsHolder . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.versionnotes"; + shacl:description "A description of the differences between this version and a previous version of the Dataset."@en; + shacl:name "version notes"@en; + shacl:nodeKind shacl:Literal; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.keyword"; + shacl:description "A keyword or tag describing the Dataset."@en; + shacl:name "keyword"@en; + shacl:nodeKind shacl:Literal; + shacl:path dcat:keyword . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.provenance"; + shacl:description "A statement about the lineage of a Dataset."@en; + shacl:name "provenance"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:provenance . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.sample"; + shacl:class dcat:Distribution; + shacl:description "A sample distribution of the dataset."@en; + shacl:name "sample"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.inseries"; + shacl:class dcat:DatasetSeries; + shacl:description "A dataset series of which the dataset is part."@en; + shacl:name "in series"@en; + shacl:path dcat:inSeries . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.frequency"; + shacl:description "The frequency at which the Dataset is updated."@en; + shacl:name "frequency"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:accrualPeriodicity . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.creationdate"; + shacl:description "The date on which the Dataset has been first created."@en; + shacl:maxCount 1; + shacl:name "creation date"@en; + shacl:path dc:created . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.publisher"; + shacl:description "An entity (organisation) responsible for making the Dataset available."@en; + shacl:maxCount 1; + shacl:name "publisher"@en; + shacl:path dc:publisher . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.documentation"; + shacl:class foaf:Document; + shacl:description "A page or document about this Dataset."@en; + shacl:name "documentation"@en; + shacl:path foaf:page . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.spatialresolutioninmetres"; + shacl:description "The minimum spatial separation resolvable in a Dataset, measured in meters."@en; + shacl:name "spatial resolution in metres"@en; + shacl:nodeKind shacl:Literal; + shacl:path dcat:spatialResolutionInMeters . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.modificationdate"; + shacl:description "The most recent date on which the Dataset was changed or modified."@en; + shacl:name "modification date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:modified . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.processor"; + shacl:description "Party who has processed the data in a manner such that the resource has been modified [[ISO-19115]]."@en; + shacl:name "processor"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.description"; + shacl:description "A free-text account of the Dataset."@en; + shacl:name "description"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:description . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.applicablelegislation"; + shacl:class ; + shacl:description "The legislation that mandates the creation or management of the Dataset."@en; + shacl:name "applicable legislation"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.custodian"; + shacl:class foaf:Agent; + shacl:description "Party that accepts accountability and responsibility for the data and ensures appropriate care and maintenance of the resource [[ISO-19115]]."@en; + shacl:name "custodian"@en; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.accessrights"; + shacl:description "Information that indicates whether the Dataset is publicly accessible, has access restrictions or is not public."@en; + shacl:maxCount 1; + shacl:name "access rights"@en; + shacl:path dc:accessRights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.contactpoint"; + shacl:class vcard:Kind; + shacl:description "Contact information that can be used for sending comments about the Dataset."@en; + shacl:name "contact point"@en; + shacl:path dcat:contactPoint . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.creator"; + shacl:description "The Agent primarily responsible for producing the Dataset."@en; + shacl:name "creator"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:creator . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.topiccategory"; + shacl:class skos:Concept; + shacl:description "Topic category in accordance with ISO-19115 [[INSPIRE-TC]]."@en; + shacl:name "topic category"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.source"; + shacl:description "A related Dataset from which the described Dataset is derived."@en; + shacl:name "source"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:source . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.temporalcoverage"; + shacl:description "A temporal period that the Dataset covers."@en; + shacl:name "temporal coverage"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:temporal . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.inseries"; + shacl:description "A dataset series of which the dataset is part."@en; + shacl:name "in series"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:inSeries . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.qualifiedattribution"; + shacl:class prov:Attribution; + shacl:description "An Agent having some form of responsibility for the resource."@en; + shacl:name "qualified attribution"@en; + shacl:path prov:qualifiedAttribution . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.frequency"; + shacl:description "The frequency at which the Dataset is updated."@en; + shacl:maxCount 1; + shacl:name "frequency"@en; + shacl:path dc:accrualPeriodicity . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.referencesystem"; + shacl:class skos:Concept; + shacl:description "The reference system used in accordance with OGC EPSG Coordinate Reference Systems Register [[OGC-EPSG]]."@en; + shacl:name "reference system"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.spatialresolutionastext"; + shacl:description "Textual description of spatial resolution."@en; + shacl:name "spatial resolution as text"@en; + shacl:nodeKind shacl:Literal; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.temporalcoverage"; + shacl:class dc:PeriodOfTime; + shacl:description "A temporal period that the Dataset covers."@en; + shacl:name "temporal coverage"@en; + shacl:path dc:temporal . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.description"; + shacl:description "A free-text account of the Dataset."@en; + shacl:minCount 1; + shacl:name "description"@en; + shacl:path dc:description . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.referencesystem"; + shacl:description "The reference system used in accordance with OGC EPSG Coordinate Reference Systems Register [[OGC-EPSG]]."@en; + shacl:name "reference system"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.resourceprovider"; + shacl:class foaf:Agent; + shacl:description "Party that supplies the resource [[ISO-19115]]."@en; + shacl:name "resource provider"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.publisher"; + shacl:description "An entity (organisation) responsible for making the Dataset available."@en; + shacl:name "publisher"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:publisher . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.geographicalcoverage"; + shacl:description "A geographic region that is covered by the Dataset."@en; + shacl:name "geographical coverage"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:spatial . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.isreferencedby"; + shacl:class rdfs:Resource; + shacl:description "A related resource, such as a publication, that references, cites, or otherwise points to the Dataset."@en; + shacl:name "is referenced by"@en; + shacl:path dc:isReferencedBy . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.theme"; + shacl:description "A category of the Dataset."@en; + shacl:name "theme"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:theme . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.rights"; + shacl:class dc:RightsStatement; + shacl:description "A statement that specifies rights associated with the Dataset."@en; + shacl:name "rights"@en; + shacl:path dc:rights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.documentation"; + shacl:description "A page or document about this Dataset."@en; + shacl:name "documentation"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path foaf:page . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.contactpoint"; + shacl:description "Contact information that can be used for sending comments about the Dataset."@en; + shacl:name "contact point"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:contactPoint . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.modificationdate"; + shacl:description "The most recent date on which the Dataset was changed or modified."@en; + shacl:maxCount 1; + shacl:name "modification date"@en; + shacl:path dc:modified . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.creationdate"; + shacl:description "The date on which the Dataset has been first created."@en; + shacl:name "creation date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:created . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.otheridentifier"; + shacl:class ; + shacl:description "A secondary identifier of the Dataset"@en; + shacl:name "other identifier"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.spatialresolutioninmetres"; + shacl:datatype xsd:decimal; + shacl:description "The minimum spatial separation resolvable in a Dataset, measured in meters."@en; + shacl:name "spatial resolution in metres"@en; + shacl:path dcat:spatialResolutionInMeters . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.identifier"; + shacl:description "The main identifier for the Dataset, e.g. the URI or other unique identifier in the context of the Catalogue."@en; + shacl:name "identifier"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:identifier . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.wasusedby"; + shacl:description "An Activity that used the Dataset."@en; + shacl:name "was used by"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path prov:wasUsedBy . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.releasedate"; + shacl:description "The date of formal issuance (e.g., publication) of the Dataset."@en; + shacl:maxCount 1; + shacl:name "release date"@en; + shacl:path dc:issued . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.publisher"; + shacl:class foaf:Agent; + shacl:description "An entity (organisation) responsible for making the Dataset available."@en; + shacl:name "publisher"@en; + shacl:path dc:publisher . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.rights"; + shacl:description "A statement that specifies rights associated with the Dataset."@en; + shacl:name "rights"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:rights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.user"; + shacl:description "Party who uses the resource [[ISO-19115]]."@en; + shacl:name "user"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.qualifiedattribution"; + shacl:description "An Agent having some form of responsibility for the resource."@en; + shacl:name "qualified attribution"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path prov:qualifiedAttribution . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.version"; + shacl:description "The version indicator (name or identifier) of a resource."@en; + shacl:name "version"@en; + shacl:nodeKind shacl:Literal; + shacl:path dcat:version . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.creator"; + shacl:class foaf:Agent; + shacl:description "The Agent primarily responsible for producing the Dataset."@en; + shacl:name "creator"@en; + shacl:path dc:creator . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.distributor"; + shacl:class foaf:Agent; + shacl:description "Party who distributes the resource [[ISO-19115]]."@en; + shacl:name "distributor"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.conformsto"; + shacl:class dc:Standard; + shacl:description "An implementing rule or other specification."@en; + shacl:name "conforms to"@en; + shacl:path dc:conformsTo . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.user"; + shacl:class foaf:Agent; + shacl:description "Party who uses the resource [[ISO-19115]]."@en; + shacl:name "user"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.hasversion"; + shacl:description "A related Dataset that is a version, edition, or adaptation of the described Dataset."@en; + shacl:name "has version"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:hasVersion . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.title"; + shacl:description "A name given to the Dataset."@en; + shacl:name "title"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:title . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.language"; + shacl:description "A language of the Dataset."@en; + shacl:name "language"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:language . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.theme"; + shacl:class skos:Concept; + shacl:description "A category of the Dataset."@en; + shacl:name "theme"@en; + shacl:path dcat:theme . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.temporalresolution"; + shacl:datatype xsd:duration; + shacl:description "The minimum time period resolvable in the Dataset."@en; + shacl:name "temporal resolution"@en; + shacl:path dcat:temporalResolution . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.distributor"; + shacl:description "Party who distributes the resource [[ISO-19115]]."@en; + shacl:name "distributor"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.accessrights"; + shacl:class dc:RightsStatement; + shacl:description "Information that indicates whether the Dataset is publicly accessible, has access restrictions or is not public."@en; + shacl:name "access rights"@en; + shacl:path dc:accessRights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.originator"; + shacl:description "Party who created the resource [[ISO-19115]]."@en; + shacl:name "originator"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.datasetdistribution"; + shacl:description "An available Distribution for the Dataset."@en; + shacl:name "dataset distribution"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:distribution . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.title"; + shacl:description "A name given to the Dataset."@en; + shacl:minCount 1; + shacl:name "title"@en; + shacl:path dc:title . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.qualifiedrelation"; + shacl:class dcat:Relationship; + shacl:description "A description of a relationship with another resource."@en; + shacl:name "qualified relation"@en; + shacl:path dcat:qualifiedRelation . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.resourcetype"; + shacl:description "Resource type in accordance with INSPIRE Resource Types [[INSPIRE-RT]]."@en; + shacl:name "resource type"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Dataset.landingpage"; + shacl:class foaf:Document; + shacl:description "A web page that provides access to the Dataset, its Distributions and/or additional information."@en; + shacl:name "landing page"@en; + shacl:path dcat:landingPage . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + ; + shacl:targetClass dcat:Distribution . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.mediatype"; + shacl:class dc:MediaType; + shacl:description "The media type of the Distribution as defined in the official register of media types managed by IANA."@en; + shacl:name "media type"@en; + shacl:path dcat:mediaType . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.availability"; + shacl:description "An indication how long it is planned to keep the Distribution of the Dataset available."@en; + shacl:name "availability"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.applicablelegislation"; + shacl:description "The legislation that mandates the creation or management of the Distribution."@en; + shacl:name "applicable legislation"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.status"; + shacl:class skos:Concept; + shacl:description "The status of the Distribution in the context of maturity lifecycle."@en; + shacl:name "status"@en; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.accessrights"; + shacl:description "Information regarding access or restrictions based on privacy, security, or other policies."@en; + shacl:name "access rights"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:accessRights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.compressionformat"; + shacl:description "The format of the file in which the data is contained in a compressed form, e.g. to reduce the size of the downloadable file."@en; + shacl:maxCount 1; + shacl:name "compression format"@en; + shacl:path dcat:compressFormat . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.linkedschemas"; + shacl:class dc:Standard; + shacl:description "An established schema to which the described Distribution conforms."@en; + shacl:name "linked schemas"@en; + shacl:path dc:conformsTo . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.accessservice"; + shacl:description "A data service that gives access to the distribution of the dataset."@en; + shacl:name "access service"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:accessService . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.linkedschemas"; + shacl:description "An established schema to which the described Distribution conforms."@en; + shacl:name "linked schemas"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:conformsTo . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.downloadURL"; + shacl:description "A URL that is a direct link to a downloadable file in a given format."@en; + shacl:name "download URL"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:downloadURL . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.temporalresolution"; + shacl:description "The minimum time period resolvable in the dataset distribution."@en; + shacl:name "temporal resolution"@en; + shacl:nodeKind shacl:Literal; + shacl:path dcat:temporalResolution . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.language"; + shacl:class dc:LinguisticSystem; + shacl:description "A language used in the Distribution."@en; + shacl:name "language"@en; + shacl:path dc:language . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.status"; + shacl:description "The status of the Distribution in the context of maturity lifecycle."@en; + shacl:maxCount 1; + shacl:name "status"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.spatialresolutionastext"; + shacl:description "Textual description of spatial resolution of a Distribution."@en; + shacl:maxCount 1; + shacl:name "spatial resolution as text"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.haspolicy"; + shacl:description "The policy expressing the rights associated with the Distribution if using the [[ODRL]] vocabulary."@en; + shacl:maxCount 1; + shacl:name "has policy"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.packagingformat"; + shacl:class dc:MediaType; + shacl:description "The format of the file in which one or more data files are grouped together, e.g. to enable a set of related files to be downloaded together."@en; + shacl:name "packaging format"@en; + shacl:path dcat:packageFormat . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.status"; + shacl:description "The status of the Distribution in the context of maturity lifecycle."@en; + shacl:name "status"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.spatialresolution"; + shacl:class ; + shacl:description "Refers to the performed quality measurements."@en; + shacl:name "spatial resolution"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.mediatype"; + shacl:description "The media type of the Distribution as defined in the official register of media types managed by IANA."@en; + shacl:name "media type"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:mediaType . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.spatialresolution"; + shacl:description "Refers to the performed quality measurements."@en; + shacl:name "spatial resolution"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.releasedate"; + shacl:description "The date of formal issuance (e.g., publication) of the Distribution."@en; + shacl:name "release date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:issued . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.bytesize"; + shacl:description "The size of a Distribution in bytes."@en; + shacl:name "byte size"@en; + shacl:nodeKind shacl:Literal; + shacl:path dcat:byteSize . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.checksum"; + shacl:class ; + shacl:description "A mechanism that can be used to verify that the contents of a distribution have not changed."@en; + shacl:name "checksum"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.accessURL"; + shacl:class rdfs:Resource; + shacl:description "A URL that gives access to a Distribution of the Dataset or Dataset Series."@en; + shacl:name "access URL"@en; + shacl:path dcat:accessURL . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.documentation"; + shacl:class foaf:Document; + shacl:description "A page or document about this Distribution."@en; + shacl:name "documentation"@en; + shacl:path foaf:page . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.spatialresolutioninmetres"; + shacl:description "The minimum spatial separation resolvable in a dataset distribution, measured in meters."@en; + shacl:name "spatial resolution in metres"@en; + shacl:nodeKind shacl:Literal; + shacl:path dcat:spatialResolutionInMeters . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.mediatype"; + shacl:description "The media type of the Distribution as defined in the official register of media types managed by IANA."@en; + shacl:maxCount 1; + shacl:name "media type"@en; + shacl:path dcat:mediaType . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.modificationdate"; + shacl:description "The most recent date on which the Distribution was changed or modified."@en; + shacl:name "modification date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:modified . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.description"; + shacl:description "A free-text account of the Distribution."@en; + shacl:name "description"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:description . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.characterencoding"; + shacl:description "A character encoding used in the downloadable file or output of the data service represented by the Distribution."@en; + shacl:name "character encoding"@en; + shacl:nodeKind shacl:Literal; + shacl:path cnt:characterEncoding . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.applicablelegislation"; + shacl:class ; + shacl:description "The legislation that mandates the creation or management of the Distribution."@en; + shacl:name "applicable legislation"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.checksum"; + shacl:description "A mechanism that can be used to verify that the contents of a distribution have not changed."@en; + shacl:maxCount 1; + shacl:name "checksum"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.format"; + shacl:class dc:MediaTypeOrExtent; + shacl:description "The file format of the Distribution."@en; + shacl:name "format"@en; + shacl:path dc:format . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.accessservice"; + shacl:class dcat:DataService; + shacl:description "A data service that gives access to the distribution of the dataset."@en; + shacl:name "access service"@en; + shacl:path dcat:accessService . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.compressionformat"; + shacl:description "The format of the file in which the data is contained in a compressed form, e.g. to reduce the size of the downloadable file."@en; + shacl:name "compression format"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:compressFormat . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.accessrights"; + shacl:description "Information regarding access or restrictions based on privacy, security, or other policies."@en; + shacl:maxCount 1; + shacl:name "access rights"@en; + shacl:path dc:accessRights . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.referencesystem"; + shacl:class dc:Standard; + shacl:description "The reference system used in the Distribution."@en; + shacl:name "reference system"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.spatialresolutionastext"; + shacl:description "Textual description of spatial resolution of a Distribution."@en; + shacl:name "spatial resolution as text"@en; + shacl:nodeKind shacl:Literal; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.haspolicy"; + shacl:class ; + shacl:description "The policy expressing the rights associated with the Distribution if using the [[ODRL]] vocabulary."@en; + shacl:name "has policy"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.spatialresolutioninmetres"; + shacl:description "The minimum spatial separation resolvable in a dataset distribution, measured in meters."@en; + shacl:maxCount 1; + shacl:name "spatial resolution in metres"@en; + shacl:path dcat:spatialResolutionInMeters . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.referencesystem"; + shacl:description "The reference system used in the Distribution."@en; + shacl:name "reference system"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.format"; + shacl:description "The file format of the Distribution."@en; + shacl:maxCount 1; + shacl:name "format"@en; + shacl:path dc:format . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.representationtechnique"; + shacl:description "Information about the format in which an Distribution is released. 
This is different from the file format as, for example, a ZIP file (file format) could contain an XML schema (representation technique)."@en; + shacl:maxCount 1; + shacl:name "representation technique"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.rights"; + shacl:class dc:RightsStatement; + shacl:description "A statement that specifies rights associated with the Distribution."@en; + shacl:name "rights"@en; + shacl:path dc:rights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.documentation"; + shacl:description "A page or document about this Distribution."@en; + shacl:name "documentation"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path foaf:page . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.availability"; + shacl:class skos:Concept; + shacl:description "An indication how long it is planned to keep the Distribution of the Dataset available."@en; + shacl:name "availability"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.modificationdate"; + shacl:description "The most recent date on which the Distribution was changed or modified."@en; + shacl:maxCount 1; + shacl:name "modification date"@en; + shacl:path dc:modified . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.licence"; + shacl:description "A licence under which the Distribution is made available."@en; + shacl:maxCount 1; + shacl:name "licence"@en; + shacl:path dc:license . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.representationtechnique"; + shacl:class skos:Concept; + shacl:description "Information about the format in which an Distribution is released. 
This is different from the file format as, for example, a ZIP file (file format) could contain an XML schema (representation technique)."@en; + shacl:name "representation technique"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.downloadURL"; + shacl:class rdfs:Resource; + shacl:description "A URL that is a direct link to a downloadable file in a given format."@en; + shacl:name "download URL"@en; + shacl:path dcat:downloadURL . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.representationtechnique"; + shacl:description "Information about the format in which an Distribution is released. This is different from the file format as, for example, a ZIP file (file format) could contain an XML schema (representation technique)."@en; + shacl:name "representation technique"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.spatialresolutioninmetres"; + shacl:datatype xsd:decimal; + shacl:description "The minimum spatial separation resolvable in a dataset distribution, measured in meters."@en; + shacl:name "spatial resolution in metres"@en; + shacl:path dcat:spatialResolutionInMeters . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.accessURL"; + shacl:description "A URL that gives access to a Distribution of the Dataset or Dataset Series."@en; + shacl:name "access URL"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:accessURL . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.availability"; + shacl:description "An indication how long it is planned to keep the Distribution of the Dataset available."@en; + shacl:maxCount 1; + shacl:name "availability"@en; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.releasedate"; + shacl:description "The date of formal issuance (e.g., publication) of the Distribution."@en; + shacl:maxCount 1; + shacl:name "release date"@en; + shacl:path dc:issued . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.rights"; + shacl:description "A statement that specifies rights associated with the Distribution."@en; + shacl:name "rights"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:rights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.packagingformat"; + shacl:description "The format of the file in which one or more data files are grouped together, e.g. to enable a set of related files to be downloaded together."@en; + shacl:maxCount 1; + shacl:name "packaging format"@en; + shacl:path dcat:packageFormat . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.format"; + shacl:description "The file format of the Distribution."@en; + shacl:name "format"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:format . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.packagingformat"; + shacl:description "The format of the file in which one or more data files are grouped together, e.g. to enable a set of related files to be downloaded together."@en; + shacl:name "packaging format"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:packageFormat . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.title"; + shacl:description "A name given to the Distribution."@en; + shacl:name "title"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:title . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.language"; + shacl:description "A language used in the Distribution."@en; + shacl:name "language"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:language . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.licence"; + shacl:class dc:LicenseDocument; + shacl:description "A licence under which the Distribution is made available."@en; + shacl:name "licence"@en; + shacl:path dc:license . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.checksum"; + shacl:description "A mechanism that can be used to verify that the contents of a distribution have not changed."@en; + shacl:name "checksum"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.accessURL"; + shacl:description "A URL that gives access to a Distribution of the Dataset or Dataset Series."@en; + shacl:minCount 1; + shacl:name "access URL"@en; + shacl:path dcat:accessURL . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.compressionformat"; + shacl:class dc:MediaType; + shacl:description "The format of the file in which the data is contained in a compressed form, e.g. to reduce the size of the downloadable file."@en; + shacl:name "compression format"@en; + shacl:path dcat:compressFormat . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.temporalresolution"; + shacl:datatype xsd:duration; + shacl:description "The minimum time period resolvable in the dataset distribution."@en; + shacl:name "temporal resolution"@en; + shacl:path dcat:temporalResolution . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.bytesize"; + shacl:datatype xsd:nonNegativeInteger; + shacl:description "The size of a Distribution in bytes."@en; + shacl:name "byte size"@en; + shacl:path dcat:byteSize . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.accessrights"; + shacl:class dc:RightsStatement; + shacl:description "Information regarding access or restrictions based on privacy, security, or other policies."@en; + shacl:name "access rights"@en; + shacl:path dc:accessRights . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.licence"; + shacl:description "A licence under which the Distribution is made available."@en; + shacl:name "licence"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:license . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.bytesize"; + shacl:description "The size of a Distribution in bytes."@en; + shacl:maxCount 1; + shacl:name "byte size"@en; + shacl:path dcat:byteSize . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.haspolicy"; + shacl:description "The policy expressing the rights associated with the Distribution if using the [[ODRL]] vocabulary."@en; + shacl:name "has policy"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Distribution.spatialresolution"; + shacl:description "Refers to the performed quality measurements."@en; + shacl:maxCount 1; + shacl:name "spatial resolution"@en; + shacl:path . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + ; + shacl:targetClass dcat:Relationship . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Relationship.relation"; + shacl:description "A resource related to the source resource."@en; + shacl:name "relation"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:relation . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Relationship.hadrole"; + shacl:description "A function of an entity or agent with respect to another entity or resource."@en; + shacl:name "had role"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:hadRole . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Relationship.relation"; + shacl:description "A resource related to the source resource."@en; + shacl:minCount 1; + shacl:name "relation"@en; + shacl:path dc:relation . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Relationship.hadrole"; + shacl:class dcat:Role; + shacl:description "A function of an entity or agent with respect to another entity or resource."@en; + shacl:name "had role"@en; + shacl:path dcat:hadRole . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Relationship.relation"; + shacl:class rdfs:Resource; + shacl:description "A resource related to the source resource."@en; + shacl:name "relation"@en; + shacl:path dc:relation . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Relationship.hadrole"; + shacl:description "A function of an entity or agent with respect to another entity or resource."@en; + shacl:minCount 1; + shacl:name "had role"@en; + shacl:path dcat:hadRole . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass dcat:Resource . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass dcat:Role . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass dc:Frequency . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + ; + shacl:targetClass dc:LicenseDocument . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#LicenceDocument.type"; + shacl:class skos:Concept; + shacl:description "A type of licence, e.g. indicating 'public domain' or 'royalties required'."@en; + shacl:name "type"@en; + shacl:path dc:type . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#LicenceDocument.type"; + shacl:description "A type of licence, e.g. indicating 'public domain' or 'royalties required'."@en; + shacl:name "type"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:type . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass dc:LinguisticSystem . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + , + , + , + , + , + , + ; + shacl:targetClass dc:Location . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Location.bbox"; + shacl:description "The geographic bounding box of a resource."@en; + shacl:name "bbox"@en; + shacl:nodeKind shacl:Literal; + shacl:path dcat:bbox . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Location.gazetteer"; + shacl:description "The gazetteer to which the Location belongs."@en; + shacl:maxCount 1; + shacl:name "gazetteer"@en; + shacl:path skos:inScheme . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Location.geograhicidentifier"; + shacl:description "The geographic identifier for the Location, e.g., the URI or other unique identifier in the context of the relevant gazetteer."@en; + shacl:name "geograhic identifier"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:identifier . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Location.gazetteer"; + shacl:description "The gazetteer to which the Location belongs."@en; + shacl:name "gazetteer"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path skos:inScheme . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Location.geometry"; + shacl:class ; + shacl:description "The corresponding geometry for a resource."@en; + shacl:name "geometry"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Location.gazetteer"; + shacl:class skos:ConceptScheme; + shacl:description "The gazetteer to which the Location belongs."@en; + shacl:name "gazetteer"@en; + shacl:path skos:inScheme . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Location.geograhicname"; + shacl:description "A preferred label of the Location."@en; + shacl:name "geograhic name"@en; + shacl:nodeKind shacl:Literal; + shacl:path skos:prefLabel . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Location.centroid"; + shacl:description "The geographic center (centroid) of a resource."@en; + shacl:name "centroid"@en; + shacl:nodeKind shacl:Literal; + shacl:path dcat:centroid . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Location.centroid"; + shacl:description "The geographic center (centroid) of a resource."@en; + shacl:maxCount 1; + shacl:name "centroid"@en; + shacl:path dcat:centroid . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Location.geometry"; + shacl:description "The corresponding geometry for a resource."@en; + shacl:name "geometry"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Location.bbox"; + shacl:description "The geographic bounding box of a resource."@en; + shacl:maxCount 1; + shacl:name "bbox"@en; + shacl:path dcat:bbox . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Location.geometry"; + shacl:description "The corresponding geometry for a resource."@en; + shacl:maxCount 1; + shacl:name "geometry"@en; + shacl:path . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass dc:MediaTypeOrExtent . 
+ + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass dc:MediaType . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + , + , + , + , + ; + shacl:targetClass dc:PeriodOfTime . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Periodoftime.beginning"; + shacl:description "The beginning of a period or interval."@en; + shacl:maxCount 1; + shacl:name "beginning"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Periodoftime.beginning"; + shacl:description "The beginning of a period or interval."@en; + shacl:name "beginning"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Periodoftime.end"; + shacl:description "The end of a period or interval."@en; + shacl:maxCount 1; + shacl:name "end"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Periodoftime.startdate"; + shacl:description "The start of the period."@en; + shacl:name "start date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dcat:startDate . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Periodoftime.enddate"; + shacl:description "The end of the period."@en; + shacl:maxCount 1; + shacl:name "end date"@en; + shacl:path dcat:endDate . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Periodoftime.end"; + shacl:class ; + shacl:description "The end of a period or interval."@en; + shacl:name "end"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Periodoftime.end"; + shacl:description "The end of a period or interval."@en; + shacl:name "end"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Periodoftime.enddate"; + shacl:description "The end of the period."@en; + shacl:name "end date"@en; + shacl:nodeKind shacl:Literal; + shacl:path dcat:endDate . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Periodoftime.beginning"; + shacl:class ; + shacl:description "The beginning of a period or interval."@en; + shacl:name "beginning"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Periodoftime.startdate"; + shacl:description "The start of the period."@en; + shacl:maxCount 1; + shacl:name "start date"@en; + shacl:path dcat:startDate . + + a shacl:NodeShape; + shacl:closed false; + shacl:property ; + shacl:targetClass dc:ProvenanceStatement . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#ProvenanceStatement.provenancestatementtext"; + shacl:description "The text of the Provenance Statement"@en; + shacl:name "provenance statement text"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:description . + + a shacl:NodeShape; + shacl:closed false; + shacl:property ; + shacl:targetClass dc:RightsStatement . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Rightsstatement.rightsstatementtext"; + shacl:description "The text of the Rights Statement."@en; + shacl:name "rights statement text"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:description . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + ; + shacl:targetClass dc:Standard . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Standard.version"; + shacl:description "A version number or other version designation of the Standard."@en; + shacl:maxCount 1; + shacl:name "version"@en; + shacl:path owl:versionInfo . 
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Standard.type";
+ shacl:description "The type of the Standard."@en;
+ shacl:maxCount 1;
+ shacl:name "type"@en;
+ shacl:path dc:type .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Standard.type";
+ shacl:class skos:Concept;
+ shacl:description "The type of the Standard."@en;
+ shacl:name "type"@en;
+ shacl:path dc:type .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Standard.referenceregister";
+ shacl:description "The reference register to which the Standard belongs."@en;
+ shacl:name "reference register"@en;
+ shacl:nodeKind shacl:BlankNodeOrIRI;
+ shacl:path skos:inScheme .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Standard.releasedate";
+ shacl:description "The date of formal issuance (e.g., publication) of the Standard."@en;
+ shacl:name "release date"@en;
+ shacl:nodeKind shacl:Literal;
+ shacl:path dc:issued .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Standard.type";
+ shacl:description "The type of the Standard."@en;
+ shacl:name "type"@en;
+ shacl:nodeKind shacl:BlankNodeOrIRI;
+ shacl:path dc:type .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Standard.creationdate";
+ shacl:description "The date on which the Standard has been first created."@en;
+ shacl:maxCount 1;
+ shacl:name "creation date"@en;
+ shacl:path dc:created .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Standard.description";
+ shacl:description "A free-text account of the Standard."@en;
+ shacl:name "description"@en;
+ shacl:nodeKind shacl:Literal;
+ shacl:path dc:description .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Standard.creationdate";
+ shacl:description "The date on which the Standard has been first created."@en;
+ shacl:name "creation date"@en;
+ shacl:nodeKind shacl:Literal;
+ shacl:path dc:created .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Standard.identifier";
+ shacl:description "The main identifier for the Standard, e.g. the URI or other unique identifier in the context of the Catalogue, or of a reference register (e.g., the ISO, OGC, W3C catalogues of their standards, the OGC \"EPSG coordinate reference systems\" register [[OGC-EPSG]])."@en;
+ shacl:name "identifier"@en;
+ shacl:nodeKind shacl:Literal;
+ shacl:path dc:identifier .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Standard.update%2Fmodificationdate";
+ shacl:description "The most recent date on which the Standard was changed or modified."@en;
+ shacl:name "update / modification date"@en;
+ shacl:nodeKind shacl:Literal;
+ shacl:path dc:modified .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Standard.releasedate";
+ shacl:description "The date of formal issuance (e.g., publication) of the Standard."@en;
+ shacl:maxCount 1;
+ shacl:name "release date"@en;
+ shacl:path dc:issued .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Standard.version";
+ shacl:description "A version number or other version designation of the Standard."@en;
+ shacl:name "version"@en;
+ shacl:nodeKind shacl:Literal;
+ shacl:path owl:versionInfo .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Standard.referenceregister";
+ shacl:description "The reference register to which the Standard belongs."@en;
+ shacl:maxCount 1;
+ shacl:name "reference register"@en;
+ shacl:path skos:inScheme .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Standard.title";
+ shacl:description "Name given to the Standard."@en;
+ shacl:name "title"@en;
+ shacl:nodeKind shacl:Literal;
+ shacl:path dc:title .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Standard.update%2Fmodificationdate";
+ shacl:description "The most recent date on which the Standard was changed or modified."@en;
+ shacl:maxCount 1;
+ shacl:name "update / modification date"@en;
+ shacl:path dc:modified .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Standard.referenceregister";
+ shacl:class skos:ConceptScheme;
+ shacl:description "The reference register to which the Standard belongs."@en;
+ shacl:name "reference register"@en;
+ shacl:path skos:inScheme .
+
+ a shacl:NodeShape;
+ shacl:closed false;
+ shacl:targetClass .
+
+ a shacl:NodeShape;
+ shacl:closed false;
+ shacl:property ,
+ ,
+ ,
+ ,
+ ,
+ ;
+ shacl:targetClass .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Metric.expecteddatatype";
+ shacl:description "Represents the expected data type for the metric's observed value (e.g., xsd:boolean, xsd:double etc...)"@en;
+ shacl:name "expected data type"@en;
+ shacl:nodeKind shacl:BlankNodeOrIRI;
+ shacl:path .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Metric.dimension";
+ shacl:description "The dimensions a quality metric, certificate and annotation allow a measurement of."@en;
+ shacl:maxCount 1;
+ shacl:name "dimension"@en;
+ shacl:path .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Metric.expecteddatatype";
+ shacl:description "Represents the expected data type for the metric's observed value (e.g., xsd:boolean, xsd:double etc...)"@en;
+ shacl:maxCount 1;
+ shacl:name "expected data type"@en;
+ shacl:path .
+
+ rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Metric.dimension";
+ shacl:description "The dimensions a quality metric, certificate and annotation allow a measurement of."@en;
+ shacl:name "dimension"@en;
+ shacl:nodeKind shacl:BlankNodeOrIRI;
+ shacl:path .
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Metric.expecteddatatype"; + shacl:class rdfs:Datatype; + shacl:description "Represents the expected data type for the metric's observed value (e.g., xsd:boolean, xsd:double etc...)"@en; + shacl:name "expected data type"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Metric.dimension"; + shacl:class ; + shacl:description "The dimensions a quality metric, certificate and annotation allow a measurement of."@en; + shacl:name "dimension"@en; + shacl:path . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + , + , + ; + shacl:targetClass . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#QualityMeasurement.unitofmeasure"; + shacl:class skos:Concept; + shacl:description "The unit in which the data values are measured."@en; + shacl:name "unit of measure"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#QualityMeasurement.ismeasurementof"; + shacl:description "The Metric being observed."@en; + shacl:maxCount 1; + shacl:name "is measurement of"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#QualityMeasurement.ismeasurementof"; + shacl:description "The Metric being observed."@en; + shacl:name "is measurement of"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#QualityMeasurement.value"; + shacl:description "Value computed by the relevant Metric."@en; + shacl:name "value"@en; + shacl:nodeKind shacl:Literal; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#QualityMeasurement.unitofmeasure"; + shacl:description "The unit in which the data values are measured."@en; + shacl:maxCount 1; + shacl:name "unit of measure"@en; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#QualityMeasurement.unitofmeasure"; + shacl:description "The unit in which the data values are measured."@en; + shacl:name "unit of measure"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#QualityMeasurement.ismeasurementof"; + shacl:class ; + shacl:description "The Metric being observed."@en; + shacl:name "is measurement of"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#QualityMeasurement.value"; + shacl:description "Value computed by the relevant Metric."@en; + shacl:maxCount 1; + shacl:name "value"@en; + shacl:path . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + ; + shacl:targetClass foaf:Agent . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.address"; + shacl:description "The postal address of the Agent."@en; + shacl:maxCount 1; + shacl:name "address"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.affiliation"; + shacl:description "The affiliation of the Agent."@en; + shacl:maxCount 1; + shacl:name "affiliation"@en; + shacl:path org:memberOf . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.type"; + shacl:description "The nature of the agent."@en; + shacl:maxCount 1; + shacl:name "type"@en; + shacl:path dc:type . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.address"; + shacl:description "The postal address of the Agent."@en; + shacl:name "address"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.phone"; + shacl:description "The phone number of the Agent, specified using fully qualified tel: URI scheme [[RFC3966]]."@en; + shacl:maxCount 1; + shacl:name "phone"@en; + shacl:path foaf:phone . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.type"; + shacl:class skos:Concept; + shacl:description "The nature of the agent."@en; + shacl:name "type"@en; + shacl:path dc:type . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.type"; + shacl:description "The nature of the agent."@en; + shacl:name "type"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dc:type . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.URL"; + shacl:description "The Web site of the Agent."@en; + shacl:name "URL"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path foaf:workplaceHomepage . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.address"; + shacl:class ; + shacl:description "The postal address of the Agent."@en; + shacl:name "address"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.URL"; + shacl:class foaf:Document; + shacl:description "The Web site of the Agent."@en; + shacl:name "URL"@en; + shacl:path foaf:workplaceHomepage . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.email"; + shacl:class rdfs:Resource; + shacl:description "The email address of the Agent, specified using fully qualified mailto: URI scheme [[RFC6068]]."@en; + shacl:name "email"@en; + shacl:path foaf:mbox . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.affiliation"; + shacl:class org:Organization; + shacl:description "The affiliation of the Agent."@en; + shacl:name "affiliation"@en; + shacl:path org:memberOf . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.name"; + shacl:description "A name of the agent."@en; + shacl:minCount 1; + shacl:name "name"@en; + shacl:path foaf:name . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.phone"; + shacl:description "The phone number of the Agent, specified using fully qualified tel: URI scheme [[RFC3966]]."@en; + shacl:name "phone"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path foaf:phone . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.email"; + shacl:description "The email address of the Agent, specified using fully qualified mailto: URI scheme [[RFC6068]]."@en; + shacl:name "email"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path foaf:mbox . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.name"; + shacl:description "A name of the agent."@en; + shacl:name "name"@en; + shacl:nodeKind shacl:Literal; + shacl:path foaf:name . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.URL"; + shacl:description "The Web site of the Agent."@en; + shacl:maxCount 1; + shacl:name "URL"@en; + shacl:path foaf:workplaceHomepage . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.phone"; + shacl:class rdfs:Resource; + shacl:description "The phone number of the Agent, specified using fully qualified tel: URI scheme [[RFC3966]]."@en; + shacl:name "phone"@en; + shacl:path foaf:phone . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.affiliation"; + shacl:description "The affiliation of the Agent."@en; + shacl:name "affiliation"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path org:memberOf . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Agent.email"; + shacl:description "The email address of the Agent, specified using fully qualified mailto: URI scheme [[RFC6068]]."@en; + shacl:maxCount 1; + shacl:name "email"@en; + shacl:path foaf:mbox . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + ; + shacl:targetClass foaf:Document . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Document.description"; + shacl:description "A free-text account of the Document."@en; + shacl:name "description"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:description . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Document.title"; + shacl:description "A name of the document."@en; + shacl:name "title"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:title . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + , + , + , + , + ; + shacl:targetClass . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(CoreLocation).administrativeunitlevel2"; + shacl:description "The name of a secondary level/region of the address, usually a county, state or other such area that typically encompasses several localities."@en; + shacl:name "administrative unit level 2"@en; + shacl:nodeKind shacl:Literal; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(CoreLocation).streetaddress"; + shacl:description "The name of a passage or way through from one location to another."@en; + shacl:name "street address"@en; + shacl:nodeKind shacl:Literal; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(CoreLocation).administrativeunitlevel2"; + shacl:description "The name of a secondary level/region of the address, usually a county, state or other such area that typically encompasses several localities."@en; + shacl:maxCount 1; + shacl:name "administrative unit level 2"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(CoreLocation).postcode"; + shacl:description "The code created and maintained for postal purposes to identify a subdivision of addresses and postal delivery points."@en; + shacl:maxCount 1; + shacl:name "post code"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(CoreLocation).administrativeunitlevel1"; + shacl:description "The name of the uppermost level of the address, almost always a country."@en; + shacl:name "administrative unit level 1"@en; + shacl:nodeKind shacl:Literal; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(CoreLocation).streetaddress"; + shacl:description "The name of a passage or way through from one location to another."@en; + shacl:maxCount 1; + shacl:name "street address"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(CoreLocation).postcode"; + shacl:description "The code created and maintained for postal purposes to identify a subdivision of addresses and postal delivery points."@en; + shacl:name "post code"@en; + shacl:nodeKind shacl:Literal; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(CoreLocation).administrativeunitlevel1"; + shacl:description "The name of the uppermost level of the address, almost always a country."@en; + shacl:maxCount 1; + shacl:name "administrative unit level 1"@en; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(CoreLocation).postname"; + shacl:description "A name created and maintained for postal purposes to identify a subdivision of addresses and postal delivery points."@en; + shacl:name "post name"@en; + shacl:nodeKind shacl:Literal; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(CoreLocation).postname"; + shacl:description "A name created and maintained for postal purposes to identify a subdivision of addresses and postal delivery points."@en; + shacl:maxCount 1; + shacl:name "post name"@en; + shacl:path . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass org:Organization . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + ; + shacl:targetClass prov:Activity . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Activity.generated"; + shacl:description "The Entity generated by the Activity."@en; + shacl:name "generated"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path prov:generated . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Activity.generated"; + shacl:class prov:Entity; + shacl:description "The Entity generated by the Activity."@en; + shacl:name "generated"@en; + shacl:path prov:generated . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Activity.qualifiedassociation"; + shacl:description "Association to the Plan according to which the Activity has been carried out, and possibly to the Agent who played a role in it."@en; + shacl:name "qualified association"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path prov:qualifiedAssociation . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Activity.qualifiedassociation"; + shacl:class prov:Association; + shacl:description "Association to the Plan according to which the Activity has been carried out, and possibly to the Agent who played a role in it."@en; + shacl:name "qualified association"@en; + shacl:path prov:qualifiedAssociation . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Activity.used"; + shacl:class prov:Entity; + shacl:description "The entity (e.g., a Catalogue, a Dataset, a Data Service) which was the subject of the Activity."@en; + shacl:name "used"@en; + shacl:path prov:used . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Activity.used"; + shacl:description "The entity (e.g., a Catalogue, a Dataset, a Data Service) which was the subject of the Activity."@en; + shacl:name "used"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path prov:used . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass prov:Association . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + ; + shacl:targetClass prov:Attribution . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Attribution.agent"; + shacl:description "The Agent to whom the resource is attributed."@en; + shacl:name "agent"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path prov:agent . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Attribution.hadrole"; + shacl:description "A function of an entity or agent with respect to another entity or resource."@en; + shacl:name "had role"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path dcat:hadRole . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Attribution.agent"; + shacl:class foaf:Agent; + shacl:description "The Agent to whom the resource is attributed."@en; + shacl:name "agent"@en; + shacl:path prov:agent . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Attribution.hadrole"; + shacl:class dcat:Role; + shacl:description "A function of an entity or agent with respect to another entity or resource."@en; + shacl:name "had role"@en; + shacl:path dcat:hadRole . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass prov:Entity . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass rdfs:Datatype . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass rdfs:Literal . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass rdfs:Resource . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + ; + shacl:targetClass skos:ConceptScheme . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#ConceptScheme.title"; + shacl:description "A name of the concept scheme."@en; + shacl:name "title"@en; + shacl:nodeKind shacl:Literal; + shacl:path dc:title . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#ConceptScheme.title"; + shacl:description "A name of the concept scheme."@en; + shacl:minCount 1; + shacl:name "title"@en; + shacl:path dc:title . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + ; + shacl:targetClass skos:Concept . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Concept.categoryscheme"; + shacl:description "Relates a resource (for example a concept) to a concept scheme in which it is included."@en; + shacl:name "category scheme"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path skos:inScheme . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Concept.preferredlabel"; + shacl:description "A preferred label of the concept."@en; + shacl:name "preferred label"@en; + shacl:nodeKind shacl:Literal; + shacl:path skos:prefLabel . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Concept.categoryscheme"; + shacl:class skos:ConceptScheme; + shacl:description "Relates a resource (for example a concept) to a concept scheme in which it is included."@en; + shacl:name "category scheme"@en; + shacl:path skos:inScheme . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Concept.preferredlabel"; + shacl:description "A preferred label of the concept."@en; + shacl:minCount 1; + shacl:name "preferred label"@en; + shacl:path skos:prefLabel . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Concept.categoryscheme"; + shacl:description "Relates a resource (for example a concept) to a concept scheme in which it is included."@en; + shacl:maxCount 1; + shacl:name "category scheme"@en; + shacl:path skos:inScheme . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + , + , + ; + shacl:targetClass . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Checksum.checksumvalue"; + shacl:description "A lower case hexadecimal encoded digest value produced using a specific algorithm."@en; + shacl:name "checksum value"@en; + shacl:nodeKind shacl:Literal; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Checksum.checksumvalue"; + shacl:datatype xsd:hexBinary; + shacl:description "A lower case hexadecimal encoded digest value produced using a specific algorithm."@en; + shacl:name "checksum value"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Checksum.checksumvalue"; + shacl:description "A lower case hexadecimal encoded digest value produced using a specific algorithm."@en; + shacl:maxCount 1; + shacl:name "checksum value"@en; + shacl:path . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Checksum.algorithm"; + shacl:description "The algorithm used to produce the subject Checksum."@en; + shacl:name "algorithm"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Checksum.algorithm"; + shacl:description "The algorithm used to produce the subject Checksum."@en; + shacl:maxCount 1; + shacl:name "algorithm"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Checksum.algorithm"; + shacl:description "The algorithm used to produce the subject Checksum."@en; + shacl:minCount 1; + shacl:name "algorithm"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Checksum.checksumvalue"; + shacl:description "A lower case hexadecimal encoded digest value produced using a specific algorithm."@en; + shacl:minCount 1; + shacl:name "checksum value"@en; + shacl:path . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Checksum.algorithm"; + shacl:class ; + shacl:description "The algorithm used to produce the subject Checksum."@en; + shacl:name "algorithm"@en; + shacl:path . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + , + , + , + , + ; + shacl:targetClass vcard:Address . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(vCard).country"; + shacl:description "The country of an Address."@en; + shacl:maxCount 1; + shacl:name "country"@en; + shacl:path vcard:country-name . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(vCard).streetaddress"; + shacl:description "The street name and civic number of an Address."@en; + shacl:name "street address"@en; + shacl:nodeKind shacl:Literal; + shacl:path vcard:street-address . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(vCard).postalcode"; + shacl:description "The postal code of an Address."@en; + shacl:maxCount 1; + shacl:name "postal code"@en; + shacl:path vcard:postal-code . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(vCard).country"; + shacl:description "The country of an Address."@en; + shacl:name "country"@en; + shacl:nodeKind shacl:Literal; + shacl:path vcard:country-name . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(vCard).administrativearea"; + shacl:description "The administrative area of an Address."@en; + shacl:name "administrative area"@en; + shacl:nodeKind shacl:Literal; + shacl:path vcard:region . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(vCard).postalcode"; + shacl:description "The postal code of an Address."@en; + shacl:name "postal code"@en; + shacl:nodeKind shacl:Literal; + shacl:path vcard:postal-code . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(vCard).city"; + shacl:description "The city of an Address."@en; + shacl:maxCount 1; + shacl:name "city"@en; + shacl:path vcard:locality . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(vCard).city"; + shacl:description "The city of an Address."@en; + shacl:name "city"@en; + shacl:nodeKind shacl:Literal; + shacl:path vcard:locality . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(vCard).administrativearea"; + shacl:description "The administrative area of an Address."@en; + shacl:maxCount 1; + shacl:name "administrative area"@en; + shacl:path vcard:region . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Address(vCard).streetaddress"; + shacl:description "The street name and civic number of an Address."@en; + shacl:maxCount 1; + shacl:name "street address"@en; + shacl:path vcard:street-address . 
+ + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass vcard:Email . + + a shacl:NodeShape; + shacl:closed false; + shacl:property , + , + , + , + , + , + , + , + , + , + , + , + , + , + , + ; + shacl:targetClass vcard:Kind . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Kind.address"; + shacl:description "The postal address of the Kind."@en; + shacl:maxCount 1; + shacl:name "address"@en; + shacl:path vcard:hasAddress . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Kind.affiliation"; + shacl:description "The affiliation of the Kind."@en; + shacl:maxCount 1; + shacl:name "affiliation"@en; + shacl:path vcard:organization-name . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Kind.address"; + shacl:description "The postal address of the Kind."@en; + shacl:name "address"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path vcard:hasAddress . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Kind.phone"; + shacl:description "The phone number of the Kind."@en; + shacl:maxCount 1; + shacl:name "phone"@en; + shacl:path vcard:hasTelephone . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Kind.name"; + shacl:description "A name of the Kind."@en; + shacl:maxCount 1; + shacl:name "name"@en; + shacl:path vcard:fn . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Kind.URL"; + shacl:description "A Web site of the Kind."@en; + shacl:name "URL"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path vcard:hasURL . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Kind.address"; + shacl:class vcard:Address; + shacl:description "The postal address of the Kind."@en; + shacl:name "address"@en; + shacl:path vcard:hasAddress . 
+ + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Kind.URL"; + shacl:class rdfs:Resource; + shacl:description "A Web site of the Kind."@en; + shacl:name "URL"@en; + shacl:path vcard:hasURL . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Kind.email"; + shacl:class vcard:Email; + shacl:description "An email address of the Kind."@en; + shacl:name "email"@en; + shacl:path vcard:hasEmail . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Kind.phone"; + shacl:description "The phone number of the Kind."@en; + shacl:name "phone"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path vcard:hasTelephone . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Kind.email"; + shacl:description "An email address of the Kind."@en; + shacl:name "email"@en; + shacl:nodeKind shacl:BlankNodeOrIRI; + shacl:path vcard:hasEmail . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Kind.name"; + shacl:description "A name of the Kind."@en; + shacl:name "name"@en; + shacl:nodeKind shacl:Literal; + shacl:path vcard:fn . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Kind.URL"; + shacl:description "A Web site of the Kind."@en; + shacl:maxCount 1; + shacl:name "URL"@en; + shacl:path vcard:hasURL . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Kind.phone"; + shacl:class rdfs:Resource; + shacl:description "The phone number of the Kind."@en; + shacl:name "phone"@en; + shacl:path vcard:hasTelephone . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Kind.affiliation"; + shacl:description "The affiliation of the Kind."@en; + shacl:name "affiliation"@en; + shacl:nodeKind shacl:Literal; + shacl:path vcard:organization-name . + + rdfs:seeAlso "https://semiceu.github.io/GeoDCAT-AP/releases/3.0.0#Kind.email"; + shacl:description "An email address of the Kind."@en; + shacl:maxCount 1; + shacl:name "email"@en; + shacl:path vcard:hasEmail . 
+ + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass xsd:decimal . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass xsd:duration . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass xsd:hexBinary . + + a shacl:NodeShape; + shacl:closed false; + shacl:targetClass xsd:nonNegativeInteger . diff --git a/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/iso19115-3.2018-eu-geodcat-ap-dataset-core.rdf b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/iso19115-3.2018-eu-geodcat-ap-dataset-core.rdf new file mode 100644 index 000000000000..9142cee2d61d --- /dev/null +++ b/services/src/test/resources/org/fao/geonet/api/records/formatters/shacl/iso19115-3.2018-eu-geodcat-ap-dataset-core.rdf @@ -0,0 +1,636 @@ + + + + + + + Dataset + + + + + + + urn:uuid:{uuid} + 2023-12-08T12:26:19.337626Z + 2019-04-02T12:33:24 + Plan de secteur en vigueur (version coordonnée vectorielle) + Le plan de secteur est un outil réglementaire d'aménagement du territoire et d'urbanisme + régional wallon constitué de plusieurs couches de données spatiales. + + Le plan de secteur organise l'espace territorial wallon et en définit les différentes affectations afin + d'assurer le développement des activités humaines de manière harmonieuse et d'éviter la consommation abusive + d'espace. Il dispose d'une pleine valeur réglementaire et constitue ainsi la colonne vertébrale d’un + développement territorial efficace, cohérent et concerté. Cet aspect est renforcé par la réforme engendrée par + l'entrée en vigueur du Code du Développement Territorial (CoDT). + + La Région wallonne est couverte par 23 plans de secteur, adoptés entre 1977 et 1987. + + Le plan de secteur est divisé en zones destinées à l'urbanisation (zone d'habitat, de loisirs, d'activité + économique, etc.) et en zones non destinées à l'urbanisation (zones agricoles, forestières, espaces verts, + etc.). 
Plusieurs couches de données spatiales constituent le plan de secteur. Elles sont définies dans le + CoDT. Outre la détermination des différentes zones d'affectation du territoire wallon, il contient : + - les limites communales du PdS; + - les révisions (infrastructures en révision, périmètres de révisions partielles du PdS, mesures + d'aménagement, prescriptions supplémentaires); + - les infrastructures (réseau routier, ferroviaire, voies navigables, lignes électriques haute tension, + canalisations); + - les périmètres de protection (périmètres de liaison écologique, d'intérêt paysager, d'intérêt culture, + historique ou esthétique, les points de vue remarquable et leur périmètre, les réservations d'infrastructure + principale, les extension de zone d'extraction); + - la référence au Plan de Secteur d'origine; + - les étiquettes des secteurs d'aménagement de 1978. + + Ces différentes couches de données sont présentées sous format vectoriel (point, ligne ou polygone). + + Si le plan de secteur a valeur réglementaire, il n’est pas figé pour autant. Les modalités de révision sont + formalisées dans des procédures qui ont été simplifiées et rationalisées dans le CoDT. Cette version constitue + la version la plus récente des couches de données et intègre les mises à jour faisant suite à la mise en œuvre + du CoDT. + + A ce jour, la gestion du plan de secteur relève de la Direction de l’Aménagement régional (DAR) qui est en + charge de l'outil "plan de secteur" : évolution au regard des objectifs régionaux, notamment du développement + économique dans une perspective durable, information, sensibilisation, lien avec la planification stratégique + régionale et avec les outils communaux. Les révisions sont instruites par la DAR, à l'exception de celles qui + ont été attribuées à la cellule de développement territorial (CDT), également dénommée "ESPACE", dont la + création a été décidée par le Gouvernement wallon le 19 septembre 2005. 
+ + + + + + + UTF-8 + + + + ISO 19115 + 2003/Cor 1:2006 + + + + + + + + + + Direction de la gestion des informations territoriales (SPW - Territoire, Logement, + Patrimoine, Énergie - Département de l'Aménagement du territoire et de l'Urbanisme - Direction de la + gestion des informations territoriales) + + + + + + + + + + + + + + + + + Complete metadata + All information about the resource + + + Plan de secteur en vigueur (version coordonnée vectorielle) + 2023-03-31 + 2023-02-21 + 1.0 + http://geodata.wallonie.be/id/7fe2f305-1302-4297-b67e-792f55acd834 + + + + DGATLPE__PDS + BE.SPW.INFRASIG.CARTON + + + Le plan de secteur est un outil réglementaire d'aménagement du territoire et d'urbanisme + régional wallon constitué de plusieurs couches de données spatiales. + + Le plan de secteur organise l'espace territorial wallon et en définit les différentes affectations afin + d'assurer le développement des activités humaines de manière harmonieuse et d'éviter la consommation abusive + d'espace. Il dispose d'une pleine valeur réglementaire et constitue ainsi la colonne vertébrale d’un + développement territorial efficace, cohérent et concerté. Cet aspect est renforcé par la réforme engendrée par + l'entrée en vigueur du Code du Développement Territorial (CoDT). + + La Région wallonne est couverte par 23 plans de secteur, adoptés entre 1977 et 1987. + + Le plan de secteur est divisé en zones destinées à l'urbanisation (zone d'habitat, de loisirs, d'activité + économique, etc.) et en zones non destinées à l'urbanisation (zones agricoles, forestières, espaces verts, + etc.). Plusieurs couches de données spatiales constituent le plan de secteur. Elles sont définies dans le + CoDT. 
Outre la détermination des différentes zones d'affectation du territoire wallon, il contient : + - les limites communales du PdS; + - les révisions (infrastructures en révision, périmètres de révisions partielles du PdS, mesures + d'aménagement, prescriptions supplémentaires); + - les infrastructures (réseau routier, ferroviaire, voies navigables, lignes électriques haute tension, + canalisations); + - les périmètres de protection (périmètres de liaison écologique, d'intérêt paysager, d'intérêt culture, + historique ou esthétique, les points de vue remarquable et leur périmètre, les réservations d'infrastructure + principale, les extension de zone d'extraction); + - la référence au Plan de Secteur d'origine; + - les étiquettes des secteurs d'aménagement de 1978. + + Ces différentes couches de données sont présentées sous format vectoriel (point, ligne ou polygone). + + Si le plan de secteur a valeur réglementaire, il n’est pas figé pour autant. Les modalités de révision sont + formalisées dans des procédures qui ont été simplifiées et rationalisées dans le CoDT. Cette version constitue + la version la plus récente des couches de données et intègre les mises à jour faisant suite à la mise en œuvre + du CoDT. + + A ce jour, la gestion du plan de secteur relève de la Direction de l’Aménagement régional (DAR) qui est en + charge de l'outil "plan de secteur" : évolution au regard des objectifs régionaux, notamment du développement + économique dans une perspective durable, information, sensibilisation, lien avec la planification stratégique + régionale et avec les outils communaux. Les révisions sont instruites par la DAR, à l'exception de celles qui + ont été attribuées à la cellule de développement territorial (CDT), également dénommée "ESPACE", dont la + création a été décidée par le Gouvernement wallon le 19 septembre 2005. 
+ + + Mis à jour continue + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + + Helpdesk carto du SPW (SPW - Secrétariat général - SPW Digital - Département de la + Géomatique - Direction de l'Intégration des géodonnées) + + + + + + + + + Thierry Berthet + + + Direction du Développement territorial (SPW - Territoire, Logement, Patrimoine, + Énergie - Département de l'Aménagement du territoire et de l'Urbanisme - Direction du Développement + territorial) + + + + + + + + + Jean Berthet + + + Direction du Développement territorial (SPW - Territoire, Logement, Patrimoine, + Énergie - Département de l'Aménagement du territoire et de l'Urbanisme - Direction du Développement + territorial) + + + + + + + + + + Service public de Wallonie (SPW) + + https://geoportail.wallonie.be + + + + + Agriculture + + + + + Société et activités + + + + + Aménagement du territoire + + + + + Plans et règlements + + + espace + zones naturelles, paysages, écosystèmes + législation + géographie + agriculture + aménagement du paysage + réseau ferroviaire + planification écologique + plan d'aménagement + extraction + habitat rural + gestion et planification rurale + secteur d'activité + infrastructure + plan de gestion + planification rurale + planification économique + plan + développement du territoire + infrastructure routière + plan d'occupation des sols + activité économique + réseau routier + planification urbaine + loisirs + canalisation + habitat urbain + mesure d'aménagement du territoire + territoire + planification régionale + habitat + PanierTelechargementGeoportail + Open Data + WalOnMap + Extraction_DIG + BDInfraSIGNO + aménagement du territoire + plan de secteur + point remarquable + PDS + CoDT + Point de vue 
+ centre d'enfouissement + servitude + Code du Développement Territorial + + + Altitude + + + + + Caractéristiques géographiques + météorologiques + + + + + Caractéristiques géographiques + océanographiques + + + + + Conditions atmosphériques + + + + + Dénominations géographiques + + + + + Géologie + + + + + Hydrographie + + + + + Installations agricoles et aquacoles + + + + + Régions maritimes + + + + + Répartition des espèces + + + + + Ressources minérales + + + + + Santé et sécurité des personnes + + + + + Mobilité + + + + + Observation de la terre et environnement + + + + + + + + + + + + + + + Conditions d'accès et d'utilisation spécifiques + + + + + + + + + + + + + + + + + + + + + + + + INSPIRE Data Specification on Transport Networks – Technical Guidelines, + version 3.2 + 2014-04-17 + + + + + La version numérique vectorielle du plan de secteur se base sur la version papier originale + digitalisée par l'Institut Wallon en juin 1994 (fond de plan au 1/10.000) qui a été complétée en mai 2001 par + ce même institut. La donnée intègre la légende actuellement en vigueur et est mise à jour en continu par la + DGO4 depuis 2001. + + L'intégration des nouveaux dossiers, la correction d'erreurs et la suppression des dossiers abrogés se font au + fur et à mesure de la réception des informations. Les données publiées sont mises à jour mensuellement sur + base des données de travail. + + Depuis leur adoption, les plans de secteur ont fait l’objet de nombreuses révisions. Le Gouvernement wallon a + en effet estimé nécessaire de les adapter pour y inscrire de nouveaux projets: routes, lignes électriques à + haute tension, tracé TGV, nouvelles zones d'activité économique, zones d’extraction, etc. + + La procédure de révision et la légende ont été modifiées à plusieurs reprises. 
+ + Suite à l'entrée en vigueur du CoDT, des changements sont à noter : + - Trois nouvelles zones destinées à l'urbanisation : Zone de dépendance d’extraction destinée à accueillir les + dépôts et dépendances industrielles (transformation des matières) à l’activité d’extraction, la zone d'enjeu + communal (ZEC) et la zone d'enjeu régional (ZER). Les ZEC et ZER sont toutes deux accompagnées d'une carte + d'affectation des sols à valeur indicative + - une nouvelle zone non destinée à l'urbanisation : zone d'extraction (ZE). + + + + + Agriculture, pêche, sylviculture et alimentation + + + + + Économie et finances + + + + + Énergie + + + + + Environnement + + + + + Santé + + + + + Régions et villes + + + + + Population et société + + + + + Science et technologie + + + + + + + + + + + + + + 0.01 + P0Y2M0DT0H0M0S + + + + + + + + + + + + + Région wallonne + + + + + 2023-12-06 + 2023-12-08 + + + + + + + + pds_codt_pic + + + + + 2023-12-08T00:00:00 + + + 10485760 + + + ESRI Shapefile (.shp) + + + + + + + + + + Application de consultation des données de la DGO4 - Plan de secteur + Application dédiée à la consultation des couches de données relatives au Plan de + secteur. Cette application constitue un thème de l'application de consultation des données de la DGO4. + + + + + Application de consultation des données de la DGO4 - Plan de secteur + + + + + + + + Application WalOnMap - Toute la Wallonie à la carte + Application cartographique du Geoportail (WalOnMap) qui permet de découvrir les + données géographiques de la Wallonie. 
+ + + + + Application WalOnMap - Toute la Wallonie à la carte + + + + + + + + Service de visualisation ESRI-REST + Ce service ESRI-REST permet de visualiser la série de couches de données "Plan de + secteur" + + + + + Service de visualisation ESRI-REST + + + + + + + + Service de visualisation WMS + Ce service WMS permet de visualiser la série de couches de données "Plan de + secteur" + + + + + Service de visualisation WMS + + + + + + + + + + + Base de données du Plan de secteur + Site permettant la recherche de Plans de secteur et des modifications dans la base + de données + + + + + Inventaire des données géographiques de la DGO4 + Inventaire des données géographiques produites ou exploitées à la DGO4. + + + + + La Direction de l'Aménagement Régional + Site de la Direction de l'Aménagement Régional (DAR) + + + + + Plan de Secteur au format SHP + Dossier compressé contenant le jeu de données du Plan de Secteur au format + shapefile en coordonnées Lambert 72 + + + + + Légende associée au plan de secteur (sur base du service de visualisation) + + + + + \ No newline at end of file diff --git a/slave/pom.xml b/slave/pom.xml index 77e1821d4bd4..be933f192246 100644 --- a/slave/pom.xml +++ b/slave/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT GeoNetwork Slave diff --git a/translationproviders/pom.xml b/translationproviders/pom.xml index 588219ee6d2f..8064003ccc9a 100644 --- a/translationproviders/pom.xml +++ b/translationproviders/pom.xml @@ -6,7 +6,7 @@ org.geonetwork-opensource geonetwork - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT gn-translationproviders diff --git a/web-ui/pom.xml b/web-ui/pom.xml index e01fc18a23cb..7f07b81a5c89 100644 --- a/web-ui/pom.xml +++ b/web-ui/pom.xml @@ -30,7 +30,7 @@ org.geonetwork-opensource geonetwork - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT org.geonetwork-opensource diff --git a/web-ui/src/main/resources/WEB-INF/classes/web-ui-wro-sources.xml 
b/web-ui/src/main/resources/WEB-INF/classes/web-ui-wro-sources.xml index 3708d2bbf224..b160fe4f051c 100644 --- a/web-ui/src/main/resources/WEB-INF/classes/web-ui-wro-sources.xml +++ b/web-ui/src/main/resources/WEB-INF/classes/web-ui-wro-sources.xml @@ -83,6 +83,26 @@ + + + + + + + + + + + + + + + + + + + + - @@ -157,6 +176,26 @@ + + + + + + + + + + + + + + + + + + + + - diff --git a/web-ui/src/main/resources/catalog/components/admin/schematron/partials/criteria-viewer.html b/web-ui/src/main/resources/catalog/components/admin/schematron/partials/criteria-viewer.html index 618fe3db8b53..c063f14364db 100644 --- a/web-ui/src/main/resources/catalog/components/admin/schematron/partials/criteria-viewer.html +++ b/web-ui/src/main/resources/catalog/components/admin/schematron/partials/criteria-viewer.html @@ -54,19 +54,18 @@

+ {{'save' | translate}}
- {{'cancel' | translate}} + + {{'cancel' | translate}}
diff --git a/web-ui/src/main/resources/catalog/components/admin/uiconfig/partials/uiconfig.html b/web-ui/src/main/resources/catalog/components/admin/uiconfig/partials/uiconfig.html index e4c6199680bd..132fc2c5b367 100644 --- a/web-ui/src/main/resources/catalog/components/admin/uiconfig/partials/uiconfig.html +++ b/web-ui/src/main/resources/catalog/components/admin/uiconfig/partials/uiconfig.html @@ -3,14 +3,12 @@ id="gn-uiconfig-customize" class="col-lg-6 col-lg-offset-6 gn-nopadding-right height-70-px" > -
- - -
+ +

ui/config-help

diff --git a/web-ui/src/main/resources/catalog/components/auditable/AuditableDirective.js b/web-ui/src/main/resources/catalog/components/auditable/AuditableDirective.js new file mode 100644 index 000000000000..b80673301e82 --- /dev/null +++ b/web-ui/src/main/resources/catalog/components/auditable/AuditableDirective.js @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ + +(function () { + goog.provide("gn_auditable_directive"); + goog.require("gn_auditable_service"); + + var module = angular.module("gn_auditable_directive", ["gn_auditable_service"]); + + module.directive("gnAuditableHistory", [ + "gnAuditableService", + "gnConfigService", + "gnConfig", + function (gnAuditableService, gnConfigService, gnConfig) { + return { + restrict: "A", + replace: true, + scope: { + id: "=gnAuditableHistory", + type: "@" + }, + templateUrl: "../../catalog/components/auditable/partials/auditableHistory.html", + link: function (scope, element, attrs) { + scope.history = []; + + gnConfigService.load().then(function (c) { + if (gnConfig["system.auditable.enable"]) { + scope.$watch("id", function (n, o) { + if (n !== o && n !== undefined) { + scope.history = []; + + gnAuditableService + .getEntityHistory(scope.type, scope.id) + .then(function (response) { + scope.history = response.data; + }); + } + }); + } + }); + } + }; + } + ]); +})(); diff --git a/web-ui/src/main/resources/catalog/components/auditable/AuditableModule.js b/web-ui/src/main/resources/catalog/components/auditable/AuditableModule.js new file mode 100644 index 000000000000..c9c653e62778 --- /dev/null +++ b/web-ui/src/main/resources/catalog/components/auditable/AuditableModule.js @@ -0,0 +1,34 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. email: geonetwork@osgeo.org + */ + +(function () { + goog.provide("gn_auditable"); + + goog.require("gn_auditable_directive"); + goog.require("gn_auditable_service"); + + var module = angular.module("gn_auditable", [ + "gn_auditable_directive", + "gn_auditable_service" + ]); +})(); diff --git a/web-ui/src/main/resources/catalog/components/auditable/AuditableService.js b/web-ui/src/main/resources/catalog/components/auditable/AuditableService.js new file mode 100644 index 000000000000..304ef4913ba5 --- /dev/null +++ b/web-ui/src/main/resources/catalog/components/auditable/AuditableService.js @@ -0,0 +1,44 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ + +(function () { + goog.provide("gn_auditable_service"); + + var module = angular.module("gn_auditable_service", []); + + /** + * Service to deal with Auditable entities. + */ + module.service("gnAuditableService", [ + "$http", + function ($http) { + function getEntityHistory(entityType, entityId) { + return $http.get("../api/auditable/" + entityType + "/" + entityId); + } + + return { + getEntityHistory: getEntityHistory + }; + } + ]); +})(); diff --git a/web-ui/src/main/resources/catalog/components/auditable/partials/auditableHistory.html b/web-ui/src/main/resources/catalog/components/auditable/partials/auditableHistory.html new file mode 100644 index 000000000000..7a2366a3761f --- /dev/null +++ b/web-ui/src/main/resources/catalog/components/auditable/partials/auditableHistory.html @@ -0,0 +1,30 @@ +
+

noUserHistory

+ +
    +
  • +

    + {{ 'userHistoryRevision' | translate:{ revisionUser: u.user, revisionDate: u.date + } }} +

    + +
      +
    • {{u.value}}

    • + +
    • +

      + {{ 'userHistoryFieldUpdate' | translate:{ fieldName: c.name, oldValue: + c.oldValue, newValue: c.newValue } }} +

      +

      + {{ 'userHistoryFieldSet' | translate:{ fieldName: c.name, newValue: c.newValue + } }} +

      +

      + {{ 'userHistoryFieldUnset' | translate:{ fieldName: c.name } }} +

      +
    • +
    +
  • +
+
diff --git a/web-ui/src/main/resources/catalog/components/common/alert/AlertDirective.js b/web-ui/src/main/resources/catalog/components/common/alert/AlertDirective.js index 449f8563f6d3..f1406c99c4fe 100644 --- a/web-ui/src/main/resources/catalog/components/common/alert/AlertDirective.js +++ b/web-ui/src/main/resources/catalog/components/common/alert/AlertDirective.js @@ -69,6 +69,12 @@ } } }; + + this.closeAlerts = function () { + if (gnAlertValue.length) { + gnAlertValue.splice(0, gnAlertValue.length); + } + }; } ]); diff --git a/web-ui/src/main/resources/catalog/components/common/map/mapService.js b/web-ui/src/main/resources/catalog/components/common/map/mapService.js index 9c6ebc035997..ab9420fb2a65 100644 --- a/web-ui/src/main/resources/catalog/components/common/map/mapService.js +++ b/web-ui/src/main/resources/catalog/components/common/map/mapService.js @@ -747,10 +747,10 @@ extent[1] + ", " + "East " + - extent[0] + + extent[2] + ", " + "West " + - extent[2]; + extent[0]; if (location) { dc += ". 
" + location; } @@ -1390,7 +1390,7 @@ } else { gnAlertService.addAlert({ msg: $translate.instant("layerCRSNotFound"), - delay: 5000, + delay: 5, type: "warning" }); } @@ -1400,7 +1400,7 @@ msg: $translate.instant("layerNotAvailableInMapProj", { proj: mapProjection }), - delay: 5000, + delay: 5, type: "warning" }); } @@ -1981,7 +1981,7 @@ type: "wmts", url: encodeURIComponent(url) }), - delay: 20000, + delay: 20, type: "warning" }); var o = { @@ -2079,7 +2079,7 @@ type: "wfs", url: encodeURIComponent(url) }), - delay: 20000, + delay: 20, type: "warning" }); var o = { @@ -2159,7 +2159,7 @@ } catch (e) { gnAlertService.addAlert({ msg: $translate.instant("wmtsLayerNoUsableMatrixSet"), - delay: 5000, + delay: 5, type: "danger" }); return; diff --git a/web-ui/src/main/resources/catalog/components/common/needhelp/NeedHelpDirective.js b/web-ui/src/main/resources/catalog/components/common/needhelp/NeedHelpDirective.js index 0ca87cb85c0a..b837e562b05c 100644 --- a/web-ui/src/main/resources/catalog/components/common/needhelp/NeedHelpDirective.js +++ b/web-ui/src/main/resources/catalog/components/common/needhelp/NeedHelpDirective.js @@ -140,6 +140,18 @@ return defer.promise; }; + /** + * Processes an URL removing // characters in the URL path. 
+ * + * @param url + * @returns {string} + */ + var processUrl = function (url) { + var urlToProcess = new URL(url); + urlToProcess.pathname = urlToProcess.pathname.replace(/\/\//g, "/"); + return urlToProcess.toString(); + }; + /** * Get the URL of the corresponding help page and open it in a new tab * @returns {boolean} @@ -152,20 +164,34 @@ if (gnGlobalSettings.lang !== "en") { baseUrl = scope.helpBaseUrl.replace("{{lang}}", gnGlobalSettings.lang); } else { - baseUrl = scope.helpBaseUrl.replace("/{{lang}}", ""); + baseUrl = scope.helpBaseUrl.replace("{{lang}}", ""); } baseUrl = baseUrl.replace("{{version}}", scope.applicationVersion); - var helpPageUrl = baseUrl + "/" + page; + var helpPageUrl; + if (baseUrl.includes("{{section}}")) { + helpPageUrl = baseUrl.replace("{{section}}", page); + } else { + helpPageUrl = baseUrl + "/" + page; + } + + helpPageUrl = processUrl(helpPageUrl); testAndOpen(helpPageUrl).then( function () {}, function () { var baseUrl = scope.helpBaseUrl - .replace("/{{lang}}", "") + .replace("{{lang}}", "") .replace("{{version}}", scope.applicationVersion); - var helpPageUrl = baseUrl + "/" + page; + var helpPageUrl; + if (baseUrl.includes("{{section}}")) { + helpPageUrl = baseUrl.replace("{{section}}", page); + } else { + helpPageUrl = baseUrl + "/" + page; + } + + helpPageUrl = processUrl(helpPageUrl); testAndOpen(helpPageUrl); } diff --git a/web-ui/src/main/resources/catalog/components/common/share/ShareService.js b/web-ui/src/main/resources/catalog/components/common/share/ShareService.js index 4ecd86461ffa..0b7fed29f806 100644 --- a/web-ui/src/main/resources/catalog/components/common/share/ShareService.js +++ b/web-ui/src/main/resources/catalog/components/common/share/ShareService.js @@ -245,9 +245,10 @@ // Do not submit internal groups info // If user is not allowed. 
var allowed = - ($.inArray(g.group, gnShareConstants.internalGroups) !== -1 && + !g.restricted && + (($.inArray(g.group, gnShareConstants.internalGroups) !== -1 && user.isReviewerOrMore()) || - $.inArray(g.group, gnShareConstants.internalGroups) === -1; + $.inArray(g.group, gnShareConstants.internalGroups) === -1); if (allowed) { ops.push({ diff --git a/web-ui/src/main/resources/catalog/components/common/share/partials/panel.html b/web-ui/src/main/resources/catalog/components/common/share/partials/panel.html index 2af0e27141f2..c595acf81f09 100644 --- a/web-ui/src/main/resources/catalog/components/common/share/partials/panel.html +++ b/web-ui/src/main/resources/catalog/components/common/share/partials/panel.html @@ -116,6 +116,7 @@
whoCanAccess
data-ng-repeat="g in privileges | filter:{reserved: false} | filter:pFilterFn | orderBy:sortGroups:sorter.reverse" data-ng-show="(onlyUserGroup == true && g.userGroup == true) || onlyUserGroup == false" + data-ng-class="g.restricted ? 'warning' : ''" > {{::('group-' + g.group) | translate}} @@ -123,7 +124,7 @@
whoCanAccess
type="checkbox" name="{{g.group + '-' + key}}" data-ng-click="selectPrivilege()" - data-ng-disabled="g.privileges[key].disabled" + data-ng-disabled="g.privileges[key].disabled || g.restricted" data-ng-model="g.operations[key]" /> whoCanAccess type="checkbox" data-ng-change="checkAll(g)" data-ng-model="g.isCheckedAll" + data-ng-disabled="g.restricted" /> diff --git a/web-ui/src/main/resources/catalog/components/doi/DoiDirective.js b/web-ui/src/main/resources/catalog/components/doi/DoiDirective.js index 14a49dad0481..364218dd450e 100644 --- a/web-ui/src/main/resources/catalog/components/doi/DoiDirective.js +++ b/web-ui/src/main/resources/catalog/components/doi/DoiDirective.js @@ -44,10 +44,24 @@ scope.response = {}; scope.isUpdate = angular.isDefined(scope.doiUrl); + scope.doiServers = []; + scope.selectedDoiServer = null; + + gnDoiService.getDoiServersForMetadata(scope.uuid).then(function (response) { + scope.doiServers = response.data; + if (scope.doiServers.length > 0) { + scope.selectedDoiServer = scope.doiServers[0].id; + } + }); + + scope.updateDoiServer = function () { + scope.response = {}; + }; + scope.check = function () { scope.response = {}; scope.response["check"] = null; - return gnDoiService.check(scope.uuid).then( + return gnDoiService.check(scope.uuid, scope.selectedDoiServer).then( function (r) { scope.response["check"] = r; scope.isUpdate = angular.isDefined(scope.doiUrl); @@ -60,7 +74,7 @@ }; scope.create = function () { - return gnDoiService.create(scope.uuid).then( + return gnDoiService.create(scope.uuid, scope.selectedDoiServer).then( function (r) { scope.response["create"] = r; delete scope.response["check"]; diff --git a/web-ui/src/main/resources/catalog/components/doi/DoiService.js b/web-ui/src/main/resources/catalog/components/doi/DoiService.js index e21427f8c31d..cee4d27b3ff8 100644 --- a/web-ui/src/main/resources/catalog/components/doi/DoiService.js +++ b/web-ui/src/main/resources/catalog/components/doi/DoiService.js @@ -33,11 +33,39 @@ 
"$http", "gnConfig", function ($http, gnConfig) { - function check(id) { - return $http.get("../api/records/" + id + "/doi/checkPreConditions"); + /** + * Returns a promise to validate a metadata to be published on a DOI server. + * + * @param id + * @param doiServerId + * @returns {*} + */ + function check(id, doiServerId) { + return $http.get( + "../api/records/" + id + "/doi/" + doiServerId + "/checkPreConditions" + ); } - function create(id) { - return $http.put("../api/records/" + id + "/doi"); + + /** + * Returns a promise to publish a metadata on a DOI server. + * + * @param id + * @param doiServerId + * @returns {*} + */ + function create(id, doiServerId) { + return $http.put("../api/records/" + id + "/doi/" + doiServerId); + } + + /** + * Returns a promise to retrieve the list of DOI servers + * where a metadata can be published. + * + * @param metadataId + * @returns {*} + */ + function getDoiServersForMetadata(metadataId) { + return $http.get("../api/doiservers/metadata/" + metadataId); } function isDoiApplicableForMetadata(md) { @@ -73,7 +101,8 @@ check: check, create: create, isDoiApplicableForMetadata: isDoiApplicableForMetadata, - canPublishDoiForResource: canPublishDoiForResource + canPublishDoiForResource: canPublishDoiForResource, + getDoiServersForMetadata: getDoiServersForMetadata }; } ]); diff --git a/web-ui/src/main/resources/catalog/components/doi/partials/doiwidget.html b/web-ui/src/main/resources/catalog/components/doi/partials/doiwidget.html index c1a6a8dc3bc3..2edf7718127c 100644 --- a/web-ui/src/main/resources/catalog/components/doi/partials/doiwidget.html +++ b/web-ui/src/main/resources/catalog/components/doi/partials/doiwidget.html @@ -5,6 +5,16 @@

createDoiForRecord

+ +
diff --git a/web-ui/src/main/resources/catalog/components/edit/validationreport/partials/validationreport.html b/web-ui/src/main/resources/catalog/components/edit/validationreport/partials/validationreport.html index cd08f1454940..5b144314137e 100644 --- a/web-ui/src/main/resources/catalog/components/edit/validationreport/partials/validationreport.html +++ b/web-ui/src/main/resources/catalog/components/edit/validationreport/partials/validationreport.html @@ -23,26 +23,30 @@ @@ -54,7 +58,67 @@ data-ng-repeat="type in ruleTypes" data-ng-class="'schematron-result-list-' + labelImportanceClass(type)" > + +
+
+

+ +  {{(type.label || type.id) | translate}} +

+
+ +
+ + + + + + + + + + + + +
+
+ +
@@ -70,7 +134,7 @@

data-ng-if="!type.schematronVerificationError" > @@ -83,11 +147,30 @@

+ + + + {{type.success}} / {{type.total}} + + +

-
    +
    • @@ -22,6 +23,7 @@ class="input-sm form-control" data-ng-model="range.to" data-ng-model-options="{debounce: 500}" + autocomplete="off" name="end" /> diff --git a/web-ui/src/main/resources/catalog/components/elasticsearch/directives/partials/facet.html b/web-ui/src/main/resources/catalog/components/elasticsearch/directives/partials/facet.html index fc81d1c48781..51963f2f8476 100644 --- a/web-ui/src/main/resources/catalog/components/elasticsearch/directives/partials/facet.html +++ b/web-ui/src/main/resources/catalog/components/elasticsearch/directives/partials/facet.html @@ -33,7 +33,7 @@ > {{::ctrl.item.value | facetTranslator: (ctrl.facet.meta && ctrl.facet.meta.field) - || ctrl.facet.key | capitalize}} + || ctrl.facet.key}} {{('facet-' + facet.key) | facetKeyTranslator}}{{getFacetLabel(facet, 'facet-' + facet.key)}}
      diff --git a/web-ui/src/main/resources/catalog/components/history/partials/recordHistory.html b/web-ui/src/main/resources/catalog/components/history/partials/recordHistory.html index 7bd26fc6311c..09bcd634bc72 100644 --- a/web-ui/src/main/resources/catalog/components/history/partials/recordHistory.html +++ b/web-ui/src/main/resources/catalog/components/history/partials/recordHistory.html @@ -4,7 +4,7 @@ recordHistory
-
+
- +
+
diff --git a/web-ui/src/main/resources/catalog/components/search/resultsview/SelectionDirective.js b/web-ui/src/main/resources/catalog/components/search/resultsview/SelectionDirective.js index 500ece65fd07..ae8fedf50bbd 100644 --- a/web-ui/src/main/resources/catalog/components/search/resultsview/SelectionDirective.js +++ b/web-ui/src/main/resources/catalog/components/search/resultsview/SelectionDirective.js @@ -99,7 +99,7 @@ function (r) { gnAlertService.addAlert({ msg: r.data.message || r.data.description, - delay: 20000, + delay: 20, type: "danger" }); if (r.id) { diff --git a/web-ui/src/main/resources/catalog/components/search/searchfiltertag/SearchFilterTagsDirective.js b/web-ui/src/main/resources/catalog/components/search/searchfiltertag/SearchFilterTagsDirective.js index 24fcd9e925fb..1d47a145a824 100644 --- a/web-ui/src/main/resources/catalog/components/search/searchfiltertag/SearchFilterTagsDirective.js +++ b/web-ui/src/main/resources/catalog/components/search/searchfiltertag/SearchFilterTagsDirective.js @@ -45,7 +45,8 @@ module.directive("searchFilterTags", [ "$location", - function ($location) { + "gnFacetMetaLabel", + function ($location, gnFacetMetaLabel) { return { restrict: "EA", require: "^ngSearchForm", @@ -61,6 +62,12 @@ // key is the raw facet path, value is a valid filter object scope.facetFilterCache = {}; + scope.getFacetLabel = gnFacetMetaLabel.getFacetLabel; + scope.dimensionList = {}; + for (var i = 0; i < scope.dimensions.length; i++) { + var dimension = scope.dimensions[i]; + scope.dimensionList[dimension.key] = dimension; + } function getSearchParams() { if (scope.useLocationParameters) { diff --git a/web-ui/src/main/resources/catalog/components/search/searchfiltertag/partials/searchFilterTagsTemplate.html b/web-ui/src/main/resources/catalog/components/search/searchfiltertag/partials/searchFilterTagsTemplate.html index cbb48b7df867..d1699c50567a 100644 --- 
a/web-ui/src/main/resources/catalog/components/search/searchfiltertag/partials/searchFilterTagsTemplate.html +++ b/web-ui/src/main/resources/catalog/components/search/searchfiltertag/partials/searchFilterTagsTemplate.html @@ -21,7 +21,9 @@ ng-click="removeFilter(filter)" > - {{('facet-' + filter.key) | facetKeyTranslator}} + {{getFacetLabel(dimensionList[filter.key], 'facet-' + filter.key)}} {{filter.key}}
diff --git a/web-ui/src/main/resources/catalog/components/search/searchmanager/LocationService.js b/web-ui/src/main/resources/catalog/components/search/searchmanager/LocationService.js index c04e9b9ff4b5..7d1a9c252e4b 100644 --- a/web-ui/src/main/resources/catalog/components/search/searchmanager/LocationService.js +++ b/web-ui/src/main/resources/catalog/components/search/searchmanager/LocationService.js @@ -77,12 +77,14 @@ return p.indexOf(this.METADATA) == 0 || p.indexOf(this.DRAFT) == 0; }; - this.isMap = function () { - return $location.path() == this.MAP; + this.isMap = function (path) { + var p = path || $location.path(); + return p == this.MAP; }; - this.isHome = function () { - return $location.path() == this.HOME; + this.isHome = function (path) { + var p = path || $location.path(); + return p == this.HOME; }; this.isUndefined = function () { diff --git a/web-ui/src/main/resources/catalog/components/thesaurus/ThesaurusDirective.js b/web-ui/src/main/resources/catalog/components/thesaurus/ThesaurusDirective.js index 203b3b546edd..3b371027283d 100644 --- a/web-ui/src/main/resources/catalog/components/thesaurus/ThesaurusDirective.js +++ b/web-ui/src/main/resources/catalog/components/thesaurus/ThesaurusDirective.js @@ -256,7 +256,8 @@ // on keyword. 
maxTags: "@", thesaurusTitle: "@", - browsable: "@" + browsable: "@", + required: "@" }, templateUrl: "../../catalog/components/thesaurus/" + "partials/keywordselector.html", @@ -661,7 +662,7 @@ scope.orderById = attrs.orderById || "false"; scope.max = gnThesaurusService.DEFAULT_NUMBER_OF_RESULTS; scope.fauxMultilingual = scope.fauxMultilingual === "true"; //default false - scope.showHintsOnFocus = attrs.showHintsOnFocus === "true"; // displays all the values on focus, default shows only the selected value + scope.showHintsOnFocus = attrs.showHintsOnFocus === "true"; // displays all the values on focus and adds a dropdown caret icon, default shows only the selected value // Configuration only required when using the directive in template fields. // diff --git a/web-ui/src/main/resources/catalog/components/thesaurus/ThesaurusService.js b/web-ui/src/main/resources/catalog/components/thesaurus/ThesaurusService.js index 28030bcfaddb..d444e9a1c3f9 100644 --- a/web-ui/src/main/resources/catalog/components/thesaurus/ThesaurusService.js +++ b/web-ui/src/main/resources/catalog/components/thesaurus/ThesaurusService.js @@ -44,7 +44,7 @@ var foundLang = _.find(UILangs, function (l) { return props.values[l] !== undefined; }); - if (foundLang) return props.values[foundLang]; + if (foundLang) return props.values[foundLang] || props.values["eng"]; } return this.props.value["#text"] || this.props.value; } @@ -152,6 +152,11 @@ if (outputLang) { parameters["pLang"] = outputLang; } + if (lang !== "eng") { + // Fallback in english if thesaurus has no translation in current record language + parameters["pLang"] = ["eng", lang]; + } + return gnUrlUtils.append( "../api/registries/vocabularies/search", gnUrlUtils.toKeyValue(parameters) diff --git a/web-ui/src/main/resources/catalog/components/thesaurus/partials/keywordselector.html b/web-ui/src/main/resources/catalog/components/thesaurus/partials/keywordselector.html index 4144fa182c04..ab309a23859c 100644 --- 
a/web-ui/src/main/resources/catalog/components/thesaurus/partials/keywordselector.html +++ b/web-ui/src/main/resources/catalog/components/thesaurus/partials/keywordselector.html @@ -17,7 +17,7 @@
-
+
-
- - {{table.name}}  {{table.loader.getCount()}} - features - -
+ +

+ {{table.name}} +

+
- - - - - - - - - - - - - - -
diff --git a/web-ui/src/main/resources/catalog/components/viewer/owscontext/OwsContextDirective.js b/web-ui/src/main/resources/catalog/components/viewer/owscontext/OwsContextDirective.js index c02aa5d8cac4..edb386ea705f 100644 --- a/web-ui/src/main/resources/catalog/components/viewer/owscontext/OwsContextDirective.js +++ b/web-ui/src/main/resources/catalog/components/viewer/owscontext/OwsContextDirective.js @@ -45,7 +45,7 @@ filters: [ { query_string: { - query: '+resourceType:"map/interactive"' + query: '+resourceType:"map-interactive"' } } ], @@ -140,27 +140,55 @@ scope.mapFileName = getMapFileName(); scope.map.once("postrender", function (event) { - domtoimage.toPng(scope.map.getTargetElement()).then(function (data) { - // resize if necessary - var finalData = data; - - if (scaleFactor !== undefined) { - var img = new Image(); - img.src = data; - img.onload = function () { - var canvas = document.createElement("canvas"); - var size = scope.map.getSize(); - canvas.width = size[0]; - canvas.height = size[1]; - canvas - .getContext("2d") - .drawImage(img, 0, 0, canvas.width, canvas.height); - finalData = canvas.toDataURL("image/png"); - }; + var mapCanvas = document.createElement("canvas"); + var size = scope.map.getSize(); + mapCanvas.width = size[0]; + mapCanvas.height = size[1]; + var mapContext = mapCanvas.getContext("2d"); + Array.prototype.forEach.call( + scope.map + .getViewport() + .querySelectorAll(".ol-layer canvas, canvas.ol-layer"), + function (canvas) { + if (canvas.width > 0) { + var opacity = + canvas.parentNode.style.opacity || canvas.style.opacity; + mapContext.globalAlpha = opacity === "" ? 
1 : Number(opacity); + var matrix; + var transform = canvas.style.transform; + if (transform) { + // Get the transform parameters from the style's transform matrix + matrix = transform + .match(/^matrix\(([^\(]*)\)$/)[1] + .split(",") + .map(Number); + } else { + matrix = [ + parseFloat(canvas.style.width) / canvas.width, + 0, + 0, + parseFloat(canvas.style.height) / canvas.height, + 0, + 0 + ]; + } + // Apply the transform to the export map context + CanvasRenderingContext2D.prototype.setTransform.apply( + mapContext, + matrix + ); + var backgroundColor = canvas.parentNode.style.backgroundColor; + if (backgroundColor) { + mapContext.fillStyle = backgroundColor; + mapContext.fillRect(0, 0, canvas.width, canvas.height); + } + mapContext.drawImage(canvas, 0, 0); + } } - - defer.resolve(finalData); - }); + ); + mapContext.globalAlpha = 1; + mapContext.setTransform(1, 0, 0, 1, 0, 0); + defer.resolve(mapCanvas.toDataURL()); }); scope.map.renderSync(); } else { @@ -208,7 +236,8 @@ title: "", recordAbstract: "", group: null, - publishToAll: false + publishToAll: false, + schema: "iso19115-3.2018" }; scope.mapProps = angular.extend({}, defaultMapProps); diff --git a/web-ui/src/main/resources/catalog/components/viewer/searchlayerformap/SearchLayerForMapDirective.js b/web-ui/src/main/resources/catalog/components/viewer/searchlayerformap/SearchLayerForMapDirective.js index 7deaaa7c25b1..ed4f17c7ea5c 100644 --- a/web-ui/src/main/resources/catalog/components/viewer/searchlayerformap/SearchLayerForMapDirective.js +++ b/web-ui/src/main/resources/catalog/components/viewer/searchlayerformap/SearchLayerForMapDirective.js @@ -75,7 +75,7 @@ } }; if ($scope.mode === "map") { - $scope.searchObj.params.type = "map/interactive"; + $scope.searchObj.params.type = "map-interactive"; } else { $scope.searchObj.params.linkProtocol = "OGC:WMS*"; } diff --git a/web-ui/src/main/resources/catalog/components/viewer/wfsfilter/partials/wfsfilterfacet.html 
b/web-ui/src/main/resources/catalog/components/viewer/wfsfilter/partials/wfsfilterfacet.html index 8c75135b90da..e10fd98de86d 100644 --- a/web-ui/src/main/resources/catalog/components/viewer/wfsfilter/partials/wfsfilterfacet.html +++ b/web-ui/src/main/resources/catalog/components/viewer/wfsfilter/partials/wfsfilterfacet.html @@ -17,21 +17,15 @@
-
- -
+

+ {{count | number}} / {{countTotal | number}} features - {{count | number}} / {{countTotal | number}} features -

- + +
- - - +
+
+ +
+ + + +
+
+
@@ -185,7 +182,7 @@ data-ng-class="{'text-primary': isFilterActive(field.name, field)}" > ") + .attr({ + type: "hidden", + id: name, + name: name, + value: value + }) + .appendTo($(gnCurrentEdit.formId)); + } + $scope.switchToTab( + gnCurrentEdit.tab, + $("#flat")[0].value === "true" ? "flat" : "" + ); + }; + /** * Update the form according to the target tab * properties and save. diff --git a/web-ui/src/main/resources/catalog/js/edit/NewMetadataController.js b/web-ui/src/main/resources/catalog/js/edit/NewMetadataController.js index 8e74109e32a6..724bdded2137 100644 --- a/web-ui/src/main/resources/catalog/js/edit/NewMetadataController.js +++ b/web-ui/src/main/resources/catalog/js/edit/NewMetadataController.js @@ -79,7 +79,7 @@ featureCatalog: "gn-icon-featureCatalog", service: "gn-icon-service", map: "gn-icon-maps", - staticMap: "gn-icon-staticMap", + "map-static": "gn-icon-map-static", dataset: "gn-icon-dataset", series: "gn-icon-series" }; @@ -117,7 +117,7 @@ resourceType: { terms: { field: "resourceType", - exclude: ["map/static", "theme", "place"], + exclude: ["map-static", "theme", "place"], missing: "other" } } diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.az.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.az.min.js new file mode 100644 index 000000000000..aa1edbf4f80f --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.az.min.js @@ -0,0 +1 @@ +!function(a){a.fn.datepicker.dates.az={days:["Bazar","Bazar ertəsi","Çərşənbə axşamı","Çərşənbə","Cümə axşamı","Cümə","Şənbə"],daysShort:["B.","B.e","Ç.a","Ç.","C.a","C.","Ş."],daysMin:["B.","B.e","Ç.a","Ç.","C.a","C.","Ş."],months:["Yanvar","Fevral","Mart","Aprel","May","İyun","İyul","Avqust","Sentyabr","Oktyabr","Noyabr","Dekabr"],monthsShort:["Yan","Fev","Mar","Apr","May","İyun","İyul","Avq","Sen","Okt","Noy","Dek"],today:"Bu 
gün",weekStart:1,clear:"Təmizlə",monthsTitle:"Aylar"}}(jQuery); \ No newline at end of file diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.ca.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.ca.min.js new file mode 100644 index 000000000000..d21351866dc2 --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.ca.min.js @@ -0,0 +1 @@ +!function(a){a.fn.datepicker.dates.ca={days:["diumenge","dilluns","dimarts","dimecres","dijous","divendres","dissabte"],daysShort:["dg.","dl.","dt.","dc.","dj.","dv.","ds."],daysMin:["dg","dl","dt","dc","dj","dv","ds"],months:["gener","febrer","març","abril","maig","juny","juliol","agost","setembre","octubre","novembre","desembre"],monthsShort:["gen.","febr.","març","abr.","maig","juny","jul.","ag.","set.","oct.","nov.","des."],today:"Avui",monthsTitle:"Mesos",clear:"Esborra",weekStart:1,format:"dd/mm/yyyy"}}(jQuery); \ No newline at end of file diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.cs.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.cs.min.js new file mode 100644 index 000000000000..42dfd1a29d83 --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.cs.min.js @@ -0,0 +1 @@ +!function(a){a.fn.datepicker.dates.cs={days:["Neděle","Pondělí","Úterý","Středa","Čtvrtek","Pátek","Sobota"],daysShort:["Ned","Pon","Úte","Stř","Čtv","Pát","Sob"],daysMin:["Ne","Po","Út","St","Čt","Pá","So"],months:["Leden","Únor","Březen","Duben","Květen","Červen","Červenec","Srpen","Září","Říjen","Listopad","Prosinec"],monthsShort:["Led","Úno","Bře","Dub","Kvě","Čer","Čnc","Srp","Zář","Říj","Lis","Pro"],today:"Dnes",clear:"Vymazat",monthsTitle:"Měsíc",weekStart:1,format:"dd.mm.yyyy"}}(jQuery); \ No newline at end of file diff --git 
a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.cy.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.cy.min.js new file mode 100644 index 000000000000..f85ea031dd01 --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.cy.min.js @@ -0,0 +1 @@ +!function(a){a.fn.datepicker.dates.cy={days:["Sul","Llun","Mawrth","Mercher","Iau","Gwener","Sadwrn"],daysShort:["Sul","Llu","Maw","Mer","Iau","Gwe","Sad"],daysMin:["Su","Ll","Ma","Me","Ia","Gwe","Sa"],months:["Ionawr","Chewfror","Mawrth","Ebrill","Mai","Mehefin","Gorfennaf","Awst","Medi","Hydref","Tachwedd","Rhagfyr"],monthsShort:["Ion","Chw","Maw","Ebr","Mai","Meh","Gor","Aws","Med","Hyd","Tach","Rha"],today:"Heddiw"}}(jQuery); \ No newline at end of file diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.da.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.da.min.js new file mode 100644 index 000000000000..53c81805282d --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.da.min.js @@ -0,0 +1 @@ +!function(a){a.fn.datepicker.dates.da={days:["Søndag","Mandag","Tirsdag","Onsdag","Torsdag","Fredag","Lørdag"],daysShort:["Søn","Man","Tir","Ons","Tor","Fre","Lør"],daysMin:["Sø","Ma","Ti","On","To","Fr","Lø"],months:["Januar","Februar","Marts","April","Maj","Juni","Juli","August","September","Oktober","November","December"],monthsShort:["Jan","Feb","Mar","Apr","Maj","Jun","Jul","Aug","Sep","Okt","Nov","Dec"],today:"I Dag",weekStart:1,clear:"Nulstil",format:"dd/mm/yyyy",monthsTitle:"Måneder"}}(jQuery); \ No newline at end of file diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.de.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.de.min.js new file mode 100644 index 
000000000000..c76f75d37f48 --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.de.min.js @@ -0,0 +1 @@ +!function(a){a.fn.datepicker.dates.de={days:["Sonntag","Montag","Dienstag","Mittwoch","Donnerstag","Freitag","Samstag"],daysShort:["So","Mo","Di","Mi","Do","Fr","Sa"],daysMin:["So","Mo","Di","Mi","Do","Fr","Sa"],months:["Januar","Februar","März","April","Mai","Juni","Juli","August","September","Oktober","November","Dezember"],monthsShort:["Jan","Feb","Mär","Apr","Mai","Jun","Jul","Aug","Sep","Okt","Nov","Dez"],today:"Heute",monthsTitle:"Monate",clear:"Löschen",weekStart:1,format:"dd.mm.yyyy"}}(jQuery); \ No newline at end of file diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.es.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.es.min.js new file mode 100644 index 000000000000..f3cef5d2b931 --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.es.min.js @@ -0,0 +1 @@ +!function(a){a.fn.datepicker.dates.es={days:["Domingo","Lunes","Martes","Miércoles","Jueves","Viernes","Sábado"],daysShort:["Dom","Lun","Mar","Mié","Jue","Vie","Sáb"],daysMin:["Do","Lu","Ma","Mi","Ju","Vi","Sa"],months:["Enero","Febrero","Marzo","Abril","Mayo","Junio","Julio","Agosto","Septiembre","Octubre","Noviembre","Diciembre"],monthsShort:["Ene","Feb","Mar","Abr","May","Jun","Jul","Ago","Sep","Oct","Nov","Dic"],today:"Hoy",monthsTitle:"Meses",clear:"Borrar",weekStart:1,format:"dd/mm/yyyy"}}(jQuery); \ No newline at end of file diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.fi.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.fi.min.js new file mode 100644 index 000000000000..33af3d3ebc6f --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.fi.min.js @@ -0,0 +1 
@@ +!function(a){a.fn.datepicker.dates.fi={days:["sunnuntai","maanantai","tiistai","keskiviikko","torstai","perjantai","lauantai"],daysShort:["sun","maa","tii","kes","tor","per","lau"],daysMin:["su","ma","ti","ke","to","pe","la"],months:["tammikuu","helmikuu","maaliskuu","huhtikuu","toukokuu","kesäkuu","heinäkuu","elokuu","syyskuu","lokakuu","marraskuu","joulukuu"],monthsShort:["tammi","helmi","maalis","huhti","touko","kesä","heinä","elo","syys","loka","marras","joulu"],today:"tänään",clear:"Tyhjennä",weekStart:1,format:"d.m.yyyy"}}(jQuery); \ No newline at end of file diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.hy.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.hy.min.js new file mode 100644 index 000000000000..a1cf653d3804 --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.hy.min.js @@ -0,0 +1 @@ +!function(a){a.fn.datepicker.dates.hy={days:["Կիրակի","Երկուշաբթի","Երեքշաբթի","Չորեքշաբթի","Հինգշաբթի","Ուրբաթ","Շաբաթ"],daysShort:["Կիր","Երկ","Երե","Չոր","Հին","Ուրբ","Շաբ"],daysMin:["Կի","Եկ","Եք","Չո","Հի","Ու","Շա"],months:["Հունվար","Փետրվար","Մարտ","Ապրիլ","Մայիս","Հունիս","Հուլիս","Օգոստոս","Սեպտեմբեր","Հոկտեմբեր","Նոյեմբեր","Դեկտեմբեր"],monthsShort:["Հնվ","Փետ","Մար","Ապր","Մայ","Հուն","Հուլ","Օգս","Սեպ","Հոկ","Նոյ","Դեկ"],today:"Այսօր",clear:"Ջնջել",format:"dd.mm.yyyy",weekStart:1,monthsTitle:"Ամիսնէր"}}(jQuery); \ No newline at end of file diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.is.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.is.min.js new file mode 100644 index 000000000000..f49bd18cc23f --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.is.min.js @@ -0,0 +1 @@ 
+!function(a){a.fn.datepicker.dates.is={days:["Sunnudagur","Mánudagur","Þriðjudagur","Miðvikudagur","Fimmtudagur","Föstudagur","Laugardagur"],daysShort:["Sun","Mán","Þri","Mið","Fim","Fös","Lau"],daysMin:["Su","Má","Þr","Mi","Fi","Fö","La"],months:["Janúar","Febrúar","Mars","Apríl","Maí","Júní","Júlí","Ágúst","September","Október","Nóvember","Desember"],monthsShort:["Jan","Feb","Mar","Apr","Maí","Jún","Júl","Ágú","Sep","Okt","Nóv","Des"],today:"Í Dag"}}(jQuery); \ No newline at end of file diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.it.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.it.min.js new file mode 100644 index 000000000000..cc30766ffa05 --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.it.min.js @@ -0,0 +1 @@ +!function(a){a.fn.datepicker.dates.it={days:["Domenica","Lunedì","Martedì","Mercoledì","Giovedì","Venerdì","Sabato"],daysShort:["Dom","Lun","Mar","Mer","Gio","Ven","Sab"],daysMin:["Do","Lu","Ma","Me","Gi","Ve","Sa"],months:["Gennaio","Febbraio","Marzo","Aprile","Maggio","Giugno","Luglio","Agosto","Settembre","Ottobre","Novembre","Dicembre"],monthsShort:["Gen","Feb","Mar","Apr","Mag","Giu","Lug","Ago","Set","Ott","Nov","Dic"],today:"Oggi",monthsTitle:"Mesi",clear:"Cancella",weekStart:1,format:"dd/mm/yyyy"}}(jQuery); \ No newline at end of file diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.ka.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.ka.min.js new file mode 100644 index 000000000000..84f14c0e90e1 --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.ka.min.js @@ -0,0 +1 @@ 
+!function(a){a.fn.datepicker.dates.ka={days:["კვირა","ორშაბათი","სამშაბათი","ოთხშაბათი","ხუთშაბათი","პარასკევი","შაბათი"],daysShort:["კვი","ორშ","სამ","ოთხ","ხუთ","პარ","შაბ"],daysMin:["კვ","ორ","სა","ოთ","ხუ","პა","შა"],months:["იანვარი","თებერვალი","მარტი","აპრილი","მაისი","ივნისი","ივლისი","აგვისტო","სექტემბერი","ოქტომბერი","ნოემბერი","დეკემბერი"],monthsShort:["იან","თებ","მარ","აპრ","მაი","ივნ","ივლ","აგვ","სექ","ოქტ","ნოე","დეკ"],today:"დღეს",clear:"გასუფთავება",weekStart:1,format:"dd.mm.yyyy"}}(jQuery); \ No newline at end of file diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.ko.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.ko.min.js new file mode 100644 index 000000000000..9751ee5c228e --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.ko.min.js @@ -0,0 +1 @@ +!function(a){a.fn.datepicker.dates.ko={days:["일요일","월요일","화요일","수요일","목요일","금요일","토요일"],daysShort:["일","월","화","수","목","금","토"],daysMin:["일","월","화","수","목","금","토"],months:["1월","2월","3월","4월","5월","6월","7월","8월","9월","10월","11월","12월"],monthsShort:["1월","2월","3월","4월","5월","6월","7월","8월","9월","10월","11월","12월"],today:"오늘",clear:"삭제",format:"yyyy-mm-dd",titleFormat:"yyyy년mm월",weekStart:0}}(jQuery); \ No newline at end of file diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.pt.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.pt.min.js new file mode 100644 index 000000000000..e2b4e64d7744 --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.pt.min.js @@ -0,0 +1 @@ 
+!function(a){a.fn.datepicker.dates.pt={days:["Domingo","Segunda","Terça","Quarta","Quinta","Sexta","Sábado"],daysShort:["Dom","Seg","Ter","Qua","Qui","Sex","Sáb"],daysMin:["Do","Se","Te","Qu","Qu","Se","Sa"],months:["Janeiro","Fevereiro","Março","Abril","Maio","Junho","Julho","Agosto","Setembro","Outubro","Novembro","Dezembro"],monthsShort:["Jan","Fev","Mar","Abr","Mai","Jun","Jul","Ago","Set","Out","Nov","Dez"],today:"Hoje",monthsTitle:"Meses",clear:"Limpar",format:"dd/mm/yyyy"}}(jQuery); \ No newline at end of file diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.ro.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.ro.min.js new file mode 100644 index 000000000000..5fff2986df12 --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.ro.min.js @@ -0,0 +1 @@ +!function(a){a.fn.datepicker.dates.ro={days:["Duminică","Luni","Marţi","Miercuri","Joi","Vineri","Sâmbătă"],daysShort:["Dum","Lun","Mar","Mie","Joi","Vin","Sâm"],daysMin:["Du","Lu","Ma","Mi","Jo","Vi","Sâ"],months:["Ianuarie","Februarie","Martie","Aprilie","Mai","Iunie","Iulie","August","Septembrie","Octombrie","Noiembrie","Decembrie"],monthsShort:["Ian","Feb","Mar","Apr","Mai","Iun","Iul","Aug","Sep","Oct","Nov","Dec"],today:"Astăzi",clear:"Șterge",weekStart:1,format:"dd/mm/yyyy"}}(jQuery); \ No newline at end of file diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.ru.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.ru.min.js new file mode 100644 index 000000000000..52bc010b97cf --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.ru.min.js @@ -0,0 +1 @@ 
+!function(a){a.fn.datepicker.dates.ru={days:["Воскресенье","Понедельник","Вторник","Среда","Четверг","Пятница","Суббота"],daysShort:["Вск","Пнд","Втр","Срд","Чтв","Птн","Суб"],daysMin:["Вс","Пн","Вт","Ср","Чт","Пт","Сб"],months:["Январь","Февраль","Март","Апрель","Май","Июнь","Июль","Август","Сентябрь","Октябрь","Ноябрь","Декабрь"],monthsShort:["Янв","Фев","Мар","Апр","Май","Июн","Июл","Авг","Сен","Окт","Ноя","Дек"],today:"Сегодня",clear:"Очистить",format:"dd.mm.yyyy",weekStart:1,monthsTitle:"Месяцы"}}(jQuery); \ No newline at end of file diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.sk.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.sk.min.js new file mode 100644 index 000000000000..79a9267fd52b --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.sk.min.js @@ -0,0 +1 @@ +!function(a){a.fn.datepicker.dates.sk={days:["Nedeľa","Pondelok","Utorok","Streda","Štvrtok","Piatok","Sobota"],daysShort:["Ned","Pon","Uto","Str","Štv","Pia","Sob"],daysMin:["Ne","Po","Ut","St","Št","Pia","So"],months:["Január","Február","Marec","Apríl","Máj","Jún","Júl","August","September","Október","November","December"],monthsShort:["Jan","Feb","Mar","Apr","Máj","Jún","Júl","Aug","Sep","Okt","Nov","Dec"],today:"Dnes",clear:"Vymazať",weekStart:1,format:"d.m.yyyy"}}(jQuery); \ No newline at end of file diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.sv.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.sv.min.js new file mode 100644 index 000000000000..7ab6becb9251 --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.sv.min.js @@ -0,0 +1 @@ 
+!function(a){a.fn.datepicker.dates.sv={days:["söndag","måndag","tisdag","onsdag","torsdag","fredag","lördag"],daysShort:["sön","mån","tis","ons","tor","fre","lör"],daysMin:["sö","må","ti","on","to","fr","lö"],months:["januari","februari","mars","april","maj","juni","juli","augusti","september","oktober","november","december"],monthsShort:["jan","feb","mar","apr","maj","jun","jul","aug","sep","okt","nov","dec"],today:"Idag",format:"yyyy-mm-dd",weekStart:1,clear:"Rensa"}}(jQuery); \ No newline at end of file diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.uk.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.uk.min.js new file mode 100644 index 000000000000..a555be8008ab --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.uk.min.js @@ -0,0 +1 @@ +!function(a){a.fn.datepicker.dates.uk={days:["Неділя","Понеділок","Вівторок","Середа","Четвер","П'ятниця","Субота"],daysShort:["Нед","Пнд","Втр","Срд","Чтв","Птн","Суб"],daysMin:["Нд","Пн","Вт","Ср","Чт","Пт","Сб"],months:["Січень","Лютий","Березень","Квітень","Травень","Червень","Липень","Серпень","Вересень","Жовтень","Листопад","Грудень"],monthsShort:["Січ","Лют","Бер","Кві","Тра","Чер","Лип","Сер","Вер","Жов","Лис","Гру"],today:"Сьогодні",clear:"Очистити",format:"dd.mm.yyyy",weekStart:1}}(jQuery); \ No newline at end of file diff --git a/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.zh.min.js b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.zh.min.js new file mode 100644 index 000000000000..5688b92ea611 --- /dev/null +++ b/web-ui/src/main/resources/catalog/lib/bootstrap.ext/datepicker/bootstrap-datepicker.zh.min.js @@ -0,0 +1 @@ 
+!function(a){a.fn.datepicker.dates["zh"]={days:["星期日","星期一","星期二","星期三","星期四","星期五","星期六"],daysShort:["周日","周一","周二","周三","周四","周五","周六"],daysMin:["日","一","二","三","四","五","六"],months:["一月","二月","三月","四月","五月","六月","七月","八月","九月","十月","十一月","十二月"],monthsShort:["1月","2月","3月","4月","5月","6月","7月","8月","9月","10月","11月","12月"],today:"今天",monthsTitle:"选择月份",clear:"清除",format:"yyyy-mm-dd",titleFormat:"yyyy年mm月",weekStart:1}}(jQuery); diff --git a/web-ui/src/main/resources/catalog/lib/dom-to-image/dom-to-image.min.js b/web-ui/src/main/resources/catalog/lib/dom-to-image/dom-to-image.min.js deleted file mode 100644 index bc73227434d7..000000000000 --- a/web-ui/src/main/resources/catalog/lib/dom-to-image/dom-to-image.min.js +++ /dev/null @@ -1,2 +0,0 @@ -/*! dom-to-image 10-06-2017 */ -!function(a){"use strict";function b(a,b){function c(a){return b.bgcolor&&(a.style.backgroundColor=b.bgcolor),b.width&&(a.style.width=b.width+"px"),b.height&&(a.style.height=b.height+"px"),b.style&&Object.keys(b.style).forEach(function(c){a.style[c]=b.style[c]}),a}return b=b||{},g(b),Promise.resolve(a).then(function(a){return i(a,b.filter,!0)}).then(j).then(k).then(c).then(function(c){return l(c,b.width||q.width(a),b.height||q.height(a))})}function c(a,b){return h(a,b||{}).then(function(b){return b.getContext("2d").getImageData(0,0,q.width(a),q.height(a)).data})}function d(a,b){return h(a,b||{}).then(function(a){return a.toDataURL()})}function e(a,b){return b=b||{},h(a,b).then(function(a){return a.toDataURL("image/jpeg",b.quality||1)})}function f(a,b){return h(a,b||{}).then(q.canvasToBlob)}function g(a){"undefined"==typeof a.imagePlaceholder?v.impl.options.imagePlaceholder=u.imagePlaceholder:v.impl.options.imagePlaceholder=a.imagePlaceholder,"undefined"==typeof a.cacheBust?v.impl.options.cacheBust=u.cacheBust:v.impl.options.cacheBust=a.cacheBust}function h(a,c){function d(a){var b=document.createElement("canvas");if(b.width=c.width||q.width(a),b.height=c.height||q.height(a),c.bgcolor){var 
d=b.getContext("2d");d.fillStyle=c.bgcolor,d.fillRect(0,0,b.width,b.height)}return b}return b(a,c).then(q.makeImage).then(q.delay(100)).then(function(b){var c=d(a);return c.getContext("2d").drawImage(b,0,0),c})}function i(a,b,c){function d(a){return a instanceof HTMLCanvasElement?q.makeImage(a.toDataURL()):a.cloneNode(!1)}function e(a,b,c){function d(a,b,c){var d=Promise.resolve();return b.forEach(function(b){d=d.then(function(){return i(b,c)}).then(function(b){b&&a.appendChild(b)})}),d}var e=a.childNodes;return 0===e.length?Promise.resolve(b):d(b,q.asArray(e),c).then(function(){return b})}function f(a,b){function c(){function c(a,b){function c(a,b){q.asArray(a).forEach(function(c){b.setProperty(c,a.getPropertyValue(c),a.getPropertyPriority(c))})}a.cssText?b.cssText=a.cssText:c(a,b)}c(window.getComputedStyle(a),b.style)}function d(){function c(c){function d(a,b,c){function d(a){var b=a.getPropertyValue("content");return a.cssText+" content: "+b+";"}function e(a){function b(b){return b+": "+a.getPropertyValue(b)+(a.getPropertyPriority(b)?" 
!important":"")}return q.asArray(a).map(b).join("; ")+";"}var f="."+a+":"+b,g=c.cssText?d(c):e(c);return document.createTextNode(f+"{"+g+"}")}var e=window.getComputedStyle(a,c),f=e.getPropertyValue("content");if(""!==f&&"none"!==f){var g=q.uid();b.className=b.className+" "+g;var h=document.createElement("style");h.appendChild(d(g,c,e)),b.appendChild(h)}}[":before",":after"].forEach(function(a){c(a)})}function e(){a instanceof HTMLTextAreaElement&&(b.innerHTML=a.value),a instanceof HTMLInputElement&&b.setAttribute("value",a.value)}function f(){b instanceof SVGElement&&(b.setAttribute("xmlns","http://www.w3.org/2000/svg"),b instanceof SVGRectElement&&["width","height"].forEach(function(a){var c=b.getAttribute(a);c&&b.style.setProperty(a,c)}))}return b instanceof Element?Promise.resolve().then(c).then(d).then(e).then(f).then(function(){return b}):b}return c||!b||b(a)?Promise.resolve(a).then(d).then(function(c){return e(a,c,b)}).then(function(b){return f(a,b)}):Promise.resolve()}function j(a){return s.resolveAll().then(function(b){var c=document.createElement("style");return a.appendChild(c),c.appendChild(document.createTextNode(b)),a})}function k(a){return t.inlineAll(a).then(function(){return a})}function l(a,b,c){return Promise.resolve(a).then(function(a){return a.setAttribute("xmlns","http://www.w3.org/1999/xhtml"),(new XMLSerializer).serializeToString(a)}).then(q.escapeXhtml).then(function(a){return''+a+""}).then(function(a){return''+a+""}).then(function(a){return"data:image/svg+xml;charset=utf-8,"+a})}function m(){function a(){var a="application/font-woff",b="image/jpeg";return{woff:a,woff2:a,ttf:"application/font-truetype",eot:"application/vnd.ms-fontobject",png:"image/png",jpg:b,jpeg:b,gif:"image/gif",tiff:"image/tiff",svg:"image/svg+xml"}}function b(a){var b=/\.([^\.\/]*?)$/g.exec(a);return b?b[1]:""}function c(c){var d=b(c).toLowerCase();return a()[d]||""}function d(a){return a.search(/^(data:)/)!==-1}function e(a){return new Promise(function(b){for(var 
c=window.atob(a.toDataURL().split(",")[1]),d=c.length,e=new Uint8Array(d),f=0;fDataCite website.", - "system/publication/doi/doipublicurl": "The final DOI URL prefix", - "system/publication/doi/doipublicurl-help": "Keep it empty to use the default https://doi.org prefix. Use https://mds.test.datacite.org/doi when using the test API.", - "system/publication/doi/doiurl": "The DataCite API endpoint", - "system/publication/doi/doiurl-help": "Usually https://mds.datacite.org or https://mds.test.datacite.org for testing.", - "system/publication/doi/doiusername": "Your DataCite username", - "system/publication/doi/doipassword": "Your DataCite password", - "system/publication/doi/doipassword-help": "All requests to the MDS API require authentication. For this reason, only traffic via a secure connection (HTTPS) is supported. The DataCite Metadata Store (MDS) uses HTTP Basic authentication. You can obtain an account here.", - "system/publication/doi/doikey": "Your DataCite prefix", - "system/publication/doi/doikey-help": "Usually looks like 10.xxxx. You will be allowed to register DOI names only under the prefixes that have been assigned to you.", - "system/publication/doi/doilandingpagetemplate": "DOI landing page URL template", - "system/publication/doi/doilandingpagetemplate-help": "The URL to use to register the DOI. A good default for GeoNetwork is http://localhost:8080/geonetwork/srv/resources/records/\\{\\{uuid\\}\\}. The landing page URL MUST contains the UUID of the record.", + "system/documentation/url-help": "Base application manual url. Defaults to the official manual page (https://docs.geonetwork-opensource.org/{version}/{lang}) and can be customised to use a self hosted documentation with a custom branding. 
The url can contain \\{\\{lang\\}\\} placeholder, to display the manual in different languages when available, \\{\\{version\\}\\} placeholder to use the application version, and \\{\\{section\\}\\} placeholder to parse sub section url from manual.json of the current page. When the \\{\\{section\\}\\} placeholder is not provided, the sub section value is automatically appended to the end of the url.", "system/csw": "Catalog Service for the Web (CSW)", "system/csw/capabilityRecordUuid": "Record to use for GetCapabilities", "system/csw/capabilityRecordUuid-help": "Choose the record to be used to build custom GetCapabilities document. If none exist, create a service metadata record (using ISO19139 or 19115-3 standards). To have a capabilities document with the main information required, set title, abstract, point of contact, keywords, constraints. If you need INSPIRE support also set properly the record main language and additional languages, INSPIRE themes and INSPIRE conformity.", @@ -866,6 +851,12 @@ "system/xlinkResolver/localXlinkEnable-help": "Local XLinks are using local:/// URL to make references to related sections instead of HTTP URL. Local XLinks are usually faster than HTTP XLinks.", "system/xlinkResolver/ignore": "Elements to ignore by XLink resolution", "system/xlinkResolver/ignore-help": "Comma separated list of elements to ignore by XLink resolution", + "system/banner": "Application banner", + "system/banner/enable": "Enable", + "system/banner/enable-help": "If set, an application banner is displayed with the message configured. 
To configure the message, go to Language and translations and configure a translation with the key application-banner", + "system/auditable": "Audit changes", + "system/auditable/enable": "Allow auditing changes", + "system/auditable/enable-help": "When enabled, audits changes in users configuration", "metadata/workflow": "Metadata workflow", "metadata/workflow/automaticUnpublishInvalidMd": "Automatic unpublication of invalid metadata", "metadata/workflow/automaticUnpublishInvalidMd-help": " Automatically unpublishes metadata that is edited that becomes not valid according to xsd or schematron rules.", @@ -884,7 +875,7 @@ "metadata/workflow/forceValidationOnMdSave-help": "When the metadata is saved force validation check", "metadata/import": "Metadata import", "metadata/import/restrict": "Restrict import to schemas", - "metadata/import/restrict-help": "List of all allowed schemas for metadata to be imported. If the metadata schema is not allowed, then the import is not done. No value means all schemas allowed.", + "metadata/import/restrict-help": "Comma separated list of all allowed schemas for metadata to be imported. If the metadata schema is not allowed, then the import is not done. No value means all schemas allowed.", "metadata/import/userprofile": "Minimum user profile allowed to import metadata", "metadata/import/userprofile-help": "Minimum user profile allowed to import metadata (Editor, Reviewer or Administrator). The default value is Editor.", "metadata/delete": "Metadata delete", @@ -901,6 +892,8 @@ "metadata/history": "Metadata History", "metadata/history/accesslevel": "Select the minimum user profile allowed to view metadata history", "metadata/history/accesslevel-help": "Select the user profile allowed to view metadata history (Registered User, Editor or Administrator). The Registered User configuration can view the history with view permission granted to the metadata record. 
The Editor configuration can view the history with editing permission granted to the metadata record. The default value is Editor.", + "minimumProfileForPrivileges": "Minimum user profile allowed to set privileges", + "minimumProfileForPrivilegesHelp": "Specifies the lowest user profile needed within a group to assign privileges for that group on a record. Users below this profile cannot set privileges for the group, and the group will be disabled in the privilege assignment interface. Default: No Restrictions (any editor for a record can set privileges for this group).", "filterStatusByAuthor":"Status author", "filterStatusByOwner":"Status owner", "filterStatusByRecordId":"Record identifier", @@ -1098,6 +1091,7 @@ "ui-mod-header": "Top toolbar", "ui-mod-footer": "Footer", "ui-mod-cookieWarning": "Cookie warning", + "ui-mod-directory": "Directory", "ui-createPageTpl": "New metadata page layout", "ui-createPageTpl-horizontal": "Horizontal", "ui-createPageTpl-vertical": "Vertical", @@ -1512,7 +1506,40 @@ "fieldTooShort": "The value is too short", "fieldEmailNotValid": "A valid email address is required", "formConfirmExit": "The form has changes, if you exit the changes will be lost. Do you want to exit on the page?", + "manageDoiServers": "DOI servers", + "doiservers": "DOI servers", + "doiservers-description": "A Digital Object Identifier (DOI) is an alphanumeric string assigned to uniquely identify an object. It is tied to a metadata description of the object as well as to a digital location, such as a URL, where all the details about the object are accessible. 
More information available on DataCite website.", + "newDoiServer": "New server", + "updateDoiServer": "Update server", + "doiserver-name": "Server name", + "doiserver-description": "Description", + "doiserver-url": "DataCite API endpoint", + "doiserver-url-help": "Usually https://mds.datacite.org or https://mds.test.datacite.org for testing.", + "doiserver-apiKey": "API Key", + "doiserver-landingPageTemplate": "Landing page URL template", + "doiserver-landingPageTemplate-help": "The URL to use to register the DOI. A good default for GeoNetwork is http://localhost:8080/geonetwork/srv/resources/records/\\{\\{uuid\\}\\}. The landing page URL MUST contains the UUID of the record.", + "doiserver-publicUrl": "Final DOI URL prefix", + "doiserver-publicUrl-help": "Keep it empty to use the default https://doi.org prefix. Use https://mds.test.datacite.org/doi when using the test API.", + "doiserver-username": "DataCite username", + "doiserver-password": "DataCite password", + "doiserver-password-help": "All requests to the MDS API require authentication. For this reason, only traffic via a secure connection (HTTPS) is supported. The DataCite Metadata Store (MDS) uses HTTP Basic authentication. You can obtain an account here.", + "doiserver-pattern": "DOI pattern", + "doiserver-pattern-help": "Default is '\\{\\{uuid\\}\\}' but the DOI structure can be customized with database id and/or record group eg. 'example-\\{\\{groupOwner\\}\\}-\\{\\{id\\}\\}'", + "doiserver-prefix": "DataCite prefix", + "doiserver-prefix-help": "Usually looks like 10.xxxx. You will be allowed to register DOI names only under the prefixes that have been assigned to you.", + "doiserver-recordGroups": "Record groups", + "doiserver-recordGroups-help": "When creating a DOI, only DOI server(s) associated with the record group are proposed. 
If record group is not associated with any DOI servers, then DOI servers with no group are proposed.", + "doiserver-defaultApiText": "DataCite API", + "doiserver-testApiText": "DataCite API test", + "doiserver-euApiText": "Publication Office of the European Union", + "confirmDoiServerDelete": "Are you sure you want to delete this DOI server?", "NoTranslationProvider": "No translation provider", - "LibreTranslate": "Libretranslate" + "LibreTranslate": "Libretranslate", + "userHistory": "User history", + "userHistoryRevision": "Updated by {{revisionUser}} on {{revisionDate}}:", + "userHistoryFieldUpdate": "Field '{{fieldName}}' changed from '{{oldValue}}' to '{{newValue}}'", + "userHistoryFieldSet": "Field '{{fieldName}}' set to '{{newValue}}'", + "userHistoryFieldUnset": "Field '{{fieldName}}' unset", + "noUserHistory": "No user history available" } diff --git a/web-ui/src/main/resources/catalog/locales/en-core.json b/web-ui/src/main/resources/catalog/locales/en-core.json index cd03dca04eb6..f79fa4df581a 100644 --- a/web-ui/src/main/resources/catalog/locales/en-core.json +++ b/web-ui/src/main/resources/catalog/locales/en-core.json @@ -268,7 +268,7 @@ "spatialRepresentationType": "Representation type", "cl_spatialRepresentationType": "Representation type", "state": "State", - "staticMap": "Static map", + "map-static": "Static map", "surname": "Surname", "title": "Title", "to": "To", diff --git a/web-ui/src/main/resources/catalog/locales/en-editor.json b/web-ui/src/main/resources/catalog/locales/en-editor.json index dd6dc4a1fd87..8245a616e85a 100644 --- a/web-ui/src/main/resources/catalog/locales/en-editor.json +++ b/web-ui/src/main/resources/catalog/locales/en-editor.json @@ -236,6 +236,7 @@ "onlineUseDQReport": "Data quality report", "onlineUseDQTOR": "Data quality specification", "onlineUseDQProdReport": "Data quality production report", + "onlineUseMap": "Map", "onlineUseLegend": "Legend for the resource", "onlineUseLegendLYR": "Style for the resource for ArcGIS 
(LYR)", "onlineUseStyleSLD": "Style for the resource using SLD", @@ -441,6 +442,8 @@ "addOnlinesrc#API-help": "eg. view service, REST API", "addOnlinesrc#onlineDownload|localNetwork": "Add download", "addOnlinesrc#onlineDownload|localNetwork-help": "eg. file, download service, local network links", + "addOnlinesrc#onlineUseMap": "Add map", + "addOnlinesrc#onlineUseMap-help": "eg. PDF static maps or OGC Web Map Context interactive maps", "addOnlinesrc#onlineUseLegend": "Add portrayal", "addOnlinesrc#onlineUseLegend-help": "eg. LYR, QML, SLD files", "addOnlinesrc#links": "Add links", @@ -454,5 +457,10 @@ "associated-fcats": "Feature catalog", "associated-siblings": "Associated resources", "associated-hasfeaturecats": "Using this feature catalog", - "associatedResourcesPanel": "Associated resources" + "associatedResourcesPanel": "Associated resources", + "validationSuccessLabel": "success", + "validationErrorLabel": "errors", + "metadataDuplicatedField-title": "The metadata title is used in another metadata record.", + "metadataDuplicatedField-altTitle": "The metadata alternate title is used in another metadata record.", + "metadataDuplicatedField-identifier": "The metadata resource identifier is used in another metadata record." } diff --git a/web-ui/src/main/resources/catalog/locales/en-search.json b/web-ui/src/main/resources/catalog/locales/en-search.json index 65a4d70eba6f..f10d36df37ec 100644 --- a/web-ui/src/main/resources/catalog/locales/en-search.json +++ b/web-ui/src/main/resources/catalog/locales/en-search.json @@ -366,7 +366,8 @@ "shareOnLinkedIn": "Share on LinkedIn", "shareByEmail": "Share by email", "zoomto": "Zoom To", - "recordNotFound": "The record with identifier {{uuid}} was not found or is not shared with you. 
Try to sign in if you've an account.", + "recordNotFound": "The record with identifier {{uuid}} was not found or is not shared with you.", + "trySignIn": "Try to log in if you have an account.", "intersectWith": "Intersects with", "fullyOutsideOf": "Fully outside of", "encloses": "Enclosing", diff --git a/web-ui/src/main/resources/catalog/locales/en-v4.json b/web-ui/src/main/resources/catalog/locales/en-v4.json index e412c79f7007..465796064856 100644 --- a/web-ui/src/main/resources/catalog/locales/en-v4.json +++ b/web-ui/src/main/resources/catalog/locales/en-v4.json @@ -164,12 +164,14 @@ "dropIndexAndRebuild": "Delete index and reindex", "rebuildIndexHelp": "While rebuilding index, search may return incomplete results and the CSW GetRecords operation can be disabled (if you selected the option in the settings). Use this function, when catalog traffic is low. It's recommended to rebuild index manually from here when making changes directly in the database. If you change index mapping (cf. records.json), then you have to click on 'Delete index and reindex'.", "indexInEsDoneError": "There is an error with the index. See the logs for details", - "indexInEsDone": "The indexing operation was successfull", + "indexInEsDone": "The indexing operation was successful", "indexCommit": "Commit index changes", "indexCommit-help": "To use only if indexing task is hanging.", "indexCommitError": "Error while committing index changes.", "ui-moreLikeThisConfig": "More like this configuration", - "ui-moreLikeThisConfig-help": "Configuration must have a more_like_this.like which will be set with the record title to search for similar records.", + "ui-moreLikeThisConfig-help": "Configuration must have a more_like_this.like which will be set with the record title to search for similar records. (See Elasticsearch API).", + "ui-moreLikeThisFilter": "More like this query filter", + "ui-moreLikeThisFilter-help": "Optional filter expression to apply on the more like this query (eg.
-cl_status.key:(obsolete OR historicalArchive OR superseded)
to exclude obsolete records).", "ui-autocompleteConfig": "Autocompletion configuration", "ui-autocompleteConfig-help": "Configuration must have a query.multi_match.query which will be set on autocompletion.", "ui-facetConfig": "Facets configuration", @@ -399,12 +401,17 @@ "setServiceConnectPoint": "Add service connect point", "mimeType": "Format", "uploadedResourceAlreadyExistException": "File {{file}} already exist in this record data store. Remove it first.", + "uploadedResourceSizeExceededException": "File {{file}} too large ({{humanizedSize}}).", + "uploadNetworkErrorException": "File {{file}} failed to upload due to network error or connection reset.", "qualityMeasures": "Quality", "measureType": "Type", "measureName": "Measure", "measureDescription": "Description", "measureValue": "Value", "measureDate": "Date", + "nextUpdateDate": "Next update", + "userDefinedFrequency": "Update frequency", + "maintenanceNote": "Maintenance note", "switchPortals": "Switch to another Portal", "dataPreview": "Discover data", "tableOfContents": "Table of Contents", @@ -414,6 +421,7 @@ "staticPageFormat-TEXT": "Plain text content", "staticPageStatus-HIDDEN": "Visible only to the administrator", "staticPageStatus-PRIVATE": "Visible to logged users", + "staticPageStatus-GROUPS": "Visible to users belonging to the groups", "staticPageStatus-PUBLIC": "Visible to everyone", "pageLink": "Link", "pageSection-help": "Currently, the default UI view only supports TOP and FOOTER values. 
Custom UI views can make use of additional values.", @@ -432,7 +440,7 @@ "overviewUrl": "Overview URL", "restApiUrl": "REST API URL", "filterHelp": "Please click on one of the buttons below to activate the filter", - "selectDOIResource": "Choose a DOI resource", + "selectDOIResource": "Search for a DOI", "httpStatus--200": "Invalid status", "httpStatus-200": "200: Valid status", "httpStatus-404": "404: Not found", @@ -449,5 +457,10 @@ "linkStatus": "Link status", "requestStatus": "Request status", "linkUrl": "Link url", - "associatedUuid": "Associated to metadata UUID" + "associatedUuid": "Associated to metadata UUID", + "mdEmail": "Email", + "mdWebsite": "Website", + "mdOrganization": "Organization", + "mdAddress": "Address", + "mdPhone": "Phone" } diff --git a/web-ui/src/main/resources/catalog/locales/es-core.json b/web-ui/src/main/resources/catalog/locales/es-core.json index 2880dd241432..cbaa915f22c4 100644 --- a/web-ui/src/main/resources/catalog/locales/es-core.json +++ b/web-ui/src/main/resources/catalog/locales/es-core.json @@ -266,7 +266,7 @@ "spatialRepresentationType": "Tipo de Representación", "cl_spatialRepresentationType": "Tipo de Representación", "state": "Estado", - "staticMap": "Mapa estático", + "map-static": "Mapa estático", "surname": "Apellido", "title": "Título", "to": "A", @@ -587,4 +587,4 @@ "quality": "Calidad", "download": "Download", "links": "Enlaces" -} \ No newline at end of file +} diff --git a/web-ui/src/main/resources/catalog/locales/fi-core.json b/web-ui/src/main/resources/catalog/locales/fi-core.json index 760a79bf284c..1ed8f492590d 100644 --- a/web-ui/src/main/resources/catalog/locales/fi-core.json +++ b/web-ui/src/main/resources/catalog/locales/fi-core.json @@ -266,7 +266,7 @@ "spatialRepresentationType": "Esitysmuoto", "cl_spatialRepresentationType": "Esitysmuoto", "state": "Tila", - "staticMap": "Staattinen kartta", + "map-static": "Staattinen kartta", "surname": "Sukunimi", "title": "Titteli", "to": "Vastaanottaja", @@ -587,4 
+587,4 @@ "quality": "Laatu", "download": "Download", "links": "Linkit" -} \ No newline at end of file +} diff --git a/web-ui/src/main/resources/catalog/locales/fr-core.json b/web-ui/src/main/resources/catalog/locales/fr-core.json index 3434b7768e1e..d6cd2a56bb52 100644 --- a/web-ui/src/main/resources/catalog/locales/fr-core.json +++ b/web-ui/src/main/resources/catalog/locales/fr-core.json @@ -266,7 +266,7 @@ "spatialRepresentationType": "Type de représentation", "cl_spatialRepresentationType": "Type de représentation", "state": "Région", - "staticMap": "Carte statique", + "map-static": "Carte statique", "surname": "Nom", "title": "Titre", "to": "à", @@ -587,4 +587,4 @@ "quality": "Qualité", "download": "Téléchargement", "links": "Liens" -} \ No newline at end of file +} diff --git a/web-ui/src/main/resources/catalog/locales/hy-core.json b/web-ui/src/main/resources/catalog/locales/hy-core.json index 93af8974b84b..eef8e06066eb 100644 --- a/web-ui/src/main/resources/catalog/locales/hy-core.json +++ b/web-ui/src/main/resources/catalog/locales/hy-core.json @@ -266,7 +266,7 @@ "spatialRepresentationType": "Ներկայացման տեսակը", "cl_spatialRepresentationType": "Ներկայացման տեսակը", "state": "Պետություն", - "staticMap": "Ստատիկ քարտեզ", + "map-static": "Ստատիկ քարտեզ", "surname": "Ազգանունը", "title": "Կոչում", "to": "Դեպի", @@ -587,4 +587,4 @@ "quality": "Որակ", "download": "Բեռնել", "links": "Հղումներ" -} \ No newline at end of file +} diff --git a/web-ui/src/main/resources/catalog/locales/is-core.json b/web-ui/src/main/resources/catalog/locales/is-core.json index 4e2a6103b54a..ef7c448a2c76 100644 --- a/web-ui/src/main/resources/catalog/locales/is-core.json +++ b/web-ui/src/main/resources/catalog/locales/is-core.json @@ -266,7 +266,7 @@ "spatialRepresentationType": "Framsetning tegund", "cl_spatialRepresentationType": "Framsetning tegund", "state": "Ástand", - "staticMap": "Kyrrstætt kort", + "map-static": "Kyrrstætt kort", "surname": "Eftirnafn", "title": "Titill", 
"to": "Til", @@ -587,4 +587,4 @@ "quality": "Gæði", "download": "Niðurhal", "links": "Links" -} \ No newline at end of file +} diff --git a/web-ui/src/main/resources/catalog/locales/it-core.json b/web-ui/src/main/resources/catalog/locales/it-core.json index 803f562287b1..5c78a605d4a5 100644 --- a/web-ui/src/main/resources/catalog/locales/it-core.json +++ b/web-ui/src/main/resources/catalog/locales/it-core.json @@ -266,7 +266,7 @@ "spatialRepresentationType": "Tipo di rappresentazione", "cl_spatialRepresentationType": "Tipo di rappresentazione", "state": "Stato", - "staticMap": "Mappa statica", + "map-static": "Mappa statica", "surname": "Cognome", "title": "Titolo", "to": "A", @@ -587,4 +587,4 @@ "quality": "Qualità", "download": "Scaricare", "links": "Collegamenti" -} \ No newline at end of file +} diff --git a/web-ui/src/main/resources/catalog/locales/ka-core.json b/web-ui/src/main/resources/catalog/locales/ka-core.json index d37d389498aa..27bd4c9146d7 100644 --- a/web-ui/src/main/resources/catalog/locales/ka-core.json +++ b/web-ui/src/main/resources/catalog/locales/ka-core.json @@ -266,7 +266,7 @@ "spatialRepresentationType": "წარმომადგენლობის ტიპი", "cl_spatialRepresentationType": "წარმომადგენლობის ტიპი", "state": "სახელმწიფო", - "staticMap": "სტატიკური რუკა", + "map-static": "სტატიკური რუკა", "surname": "გვარი", "title": "სათაური", "to": "რომ", @@ -587,4 +587,4 @@ "quality": "ხარისხიანი", "download": "ჩამოტვირთვა", "links": "ბმულები" -} \ No newline at end of file +} diff --git a/web-ui/src/main/resources/catalog/locales/ko-core.json b/web-ui/src/main/resources/catalog/locales/ko-core.json index 17e553255091..c935187d77e6 100644 --- a/web-ui/src/main/resources/catalog/locales/ko-core.json +++ b/web-ui/src/main/resources/catalog/locales/ko-core.json @@ -266,7 +266,7 @@ "spatialRepresentationType": "표현 형태", "cl_spatialRepresentationType": "표현 형태", "state": "주", - "staticMap": "정적 지도", + "map-static": "정적 지도", "surname": "성", "title": "제목", "to": "종료", @@ -587,4 
+587,4 @@ "quality": "품질", "download": "Download", "links": "링크" -} \ No newline at end of file +} diff --git a/web-ui/src/main/resources/catalog/locales/nl-core.json b/web-ui/src/main/resources/catalog/locales/nl-core.json index ca82b2f980bb..073e10f99cef 100644 --- a/web-ui/src/main/resources/catalog/locales/nl-core.json +++ b/web-ui/src/main/resources/catalog/locales/nl-core.json @@ -266,7 +266,7 @@ "spatialRepresentationType": "Representatie type", "cl_spatialRepresentationType": "Representatie type", "state": "Staat", - "staticMap": "Vaste kaart", + "map-static": "Vaste kaart", "surname": "Achternaam", "title": "Titel", "to": "Tot", @@ -587,4 +587,4 @@ "quality": "Kwaliteit", "download": "Download bestand", "links": "Links" -} \ No newline at end of file +} diff --git a/web-ui/src/main/resources/catalog/locales/pt-core.json b/web-ui/src/main/resources/catalog/locales/pt-core.json index 4d168294cef1..7c670bf19092 100644 --- a/web-ui/src/main/resources/catalog/locales/pt-core.json +++ b/web-ui/src/main/resources/catalog/locales/pt-core.json @@ -266,7 +266,7 @@ "spatialRepresentationType": "Tipo de representação", "cl_spatialRepresentationType": "Tipo de representação", "state": "Estado", - "staticMap": "Mapa estático", + "map-static": "Mapa estático", "surname": "Sobrenome", "title": "Título", "to": "Até", @@ -587,4 +587,4 @@ "quality": "Qualidade", "download": "Download", "links": "Links" -} \ No newline at end of file +} diff --git a/web-ui/src/main/resources/catalog/locales/ro-core.json b/web-ui/src/main/resources/catalog/locales/ro-core.json index 692d62290a2d..1461214bc020 100644 --- a/web-ui/src/main/resources/catalog/locales/ro-core.json +++ b/web-ui/src/main/resources/catalog/locales/ro-core.json @@ -266,7 +266,7 @@ "spatialRepresentationType": "Tipul de reprezentare", "cl_spatialRepresentationType": "Tipul de reprezentare", "state": "Stat", - "staticMap": "Harta statica", + "map-static": "Harta statica", "surname": "Nume de familie", "title": "Titlu", 
"to": "La", @@ -587,4 +587,4 @@ "quality": "Calitate", "download": "Descarca", "links": "Legături" -} \ No newline at end of file +} diff --git a/web-ui/src/main/resources/catalog/locales/ru-core.json b/web-ui/src/main/resources/catalog/locales/ru-core.json index 2642a35b831b..de09fb132234 100644 --- a/web-ui/src/main/resources/catalog/locales/ru-core.json +++ b/web-ui/src/main/resources/catalog/locales/ru-core.json @@ -266,7 +266,7 @@ "spatialRepresentationType": "Типы представлений", "cl_spatialRepresentationType": "Типы представлений", "state": "Государство", - "staticMap": "Статическая карта", + "map-static": "Статическая карта", "surname": "Фамилия", "title": "Титул", "to": "К", @@ -587,4 +587,4 @@ "quality": "Качество", "download": "Download", "links": "Ссылки" -} \ No newline at end of file +} diff --git a/web-ui/src/main/resources/catalog/locales/sk-core.json b/web-ui/src/main/resources/catalog/locales/sk-core.json index 6042ab0fc42d..d47127c17c3e 100644 --- a/web-ui/src/main/resources/catalog/locales/sk-core.json +++ b/web-ui/src/main/resources/catalog/locales/sk-core.json @@ -266,7 +266,7 @@ "spatialRepresentationType": "Priestorové typy", "cl_spatialRepresentationType": "Priestorový typ", "state": "Štát", - "staticMap": "Statická mapa", + "map-static": "Statická mapa", "surname": "Priezvisko", "title": "Názov", "to": "Komu", @@ -587,4 +587,4 @@ "quality": "Kvalita", "download": "Stiahnuť", "links": "Webová adresa" -} \ No newline at end of file +} diff --git a/web-ui/src/main/resources/catalog/locales/sv-core.json b/web-ui/src/main/resources/catalog/locales/sv-core.json index 0454efe30750..9ed6113a41c3 100644 --- a/web-ui/src/main/resources/catalog/locales/sv-core.json +++ b/web-ui/src/main/resources/catalog/locales/sv-core.json @@ -266,7 +266,7 @@ "spatialRepresentationType": "Representationstyp", "cl_spatialRepresentationType": "Representationstyp", "state": "Län", - "staticMap": "Statisk karta", + "map-static": "Statisk karta", "surname": "Efternamn", 
"title": "Titel", "to": "Till", @@ -587,4 +587,4 @@ "quality": "Kvalitet", "download": "Download", "links": "Länkar" -} \ No newline at end of file +} diff --git a/web-ui/src/main/resources/catalog/locales/uk-core.json b/web-ui/src/main/resources/catalog/locales/uk-core.json index 8898b6b2c094..e542dfad10b2 100644 --- a/web-ui/src/main/resources/catalog/locales/uk-core.json +++ b/web-ui/src/main/resources/catalog/locales/uk-core.json @@ -266,7 +266,7 @@ "spatialRepresentationType": "Тип представництва", "cl_spatialRepresentationType": "Тип представництва", "state": "Держава", - "staticMap": "Статична карта", + "map-static": "Статична карта", "surname": "Прізвище", "title": "Назва", "to": "до", @@ -587,4 +587,4 @@ "quality": "якість", "download": "Завантажити", "links": "Посилання" -} \ No newline at end of file +} diff --git a/web-ui/src/main/resources/catalog/locales/zh-core.json b/web-ui/src/main/resources/catalog/locales/zh-core.json index 3cc4c0b5ad26..b7f0280f6b32 100644 --- a/web-ui/src/main/resources/catalog/locales/zh-core.json +++ b/web-ui/src/main/resources/catalog/locales/zh-core.json @@ -266,7 +266,7 @@ "spatialRepresentationType": "表示类型", "cl_spatialRepresentationType": "表示类型", "state": "州", - "staticMap": "静态地图", + "map-static": "静态地图", "surname": "姓", "title": "标题", "to": "至", @@ -587,4 +587,4 @@ "quality": "质量", "download": "Download", "links": "链接" -} \ No newline at end of file +} diff --git a/web-ui/src/main/resources/catalog/style/gn.less b/web-ui/src/main/resources/catalog/style/gn.less index c0f4782dfb64..9262198a246e 100644 --- a/web-ui/src/main/resources/catalog/style/gn.less +++ b/web-ui/src/main/resources/catalog/style/gn.less @@ -1604,6 +1604,9 @@ gn-indexing-task-status { .text-large { font-size: 30px; } +.width-auto { + width: auto; +} .width-100 { width: 100%; } diff --git a/web-ui/src/main/resources/catalog/style/gn_admin.less b/web-ui/src/main/resources/catalog/style/gn_admin.less index bdb3df8caebe..df08bc00014d 100644 --- 
a/web-ui/src/main/resources/catalog/style/gn_admin.less +++ b/web-ui/src/main/resources/catalog/style/gn_admin.less @@ -219,7 +219,8 @@ ul.pager { #gn-mapservers-container, #gn-sources-container, #gn-metadatatemplates-container, -#gn-static-pages-container { +#gn-static-pages-container, +#gn-doiservers-container { // Fixes gn-modal windows; TODO: fix this globally in gn-popup style [gn-modal] { max-width: none; diff --git a/web-ui/src/main/resources/catalog/style/gn_editor.less b/web-ui/src/main/resources/catalog/style/gn_editor.less index 649201352078..b32bd5b872bf 100644 --- a/web-ui/src/main/resources/catalog/style/gn_editor.less +++ b/web-ui/src/main/resources/catalog/style/gn_editor.less @@ -1024,3 +1024,9 @@ gn-bounding-polygon { border-radius: 50% !important; } } + +#gn-editor-validation-panel { + button.inactive { + opacity: 0.65; + } +} diff --git a/web-ui/src/main/resources/catalog/style/gn_icons.less b/web-ui/src/main/resources/catalog/style/gn_icons.less index 9425c45040f7..2fd57538a1c1 100644 --- a/web-ui/src/main/resources/catalog/style/gn_icons.less +++ b/web-ui/src/main/resources/catalog/style/gn_icons.less @@ -17,13 +17,11 @@ content: @fa-var-cog; } .gn-icon-map:before, -.gn-icon-staticMap:before, -.gn-icon-maps:before { +.gn-icon-map-static:before, +.gn-icon-maps:before, +.gn-icon-map-interactive:before { content: @fa-var-map; } -.gn-icon-interactiveMap:before { - content: @fa-var-globe; -} .gn-icon-featureCatalog:before { content: @fa-var-table; } diff --git a/web-ui/src/main/resources/catalog/style/gn_metadata.less b/web-ui/src/main/resources/catalog/style/gn_metadata.less index 33ac3d631407..4e788c8e3803 100644 --- a/web-ui/src/main/resources/catalog/style/gn_metadata.less +++ b/web-ui/src/main/resources/catalog/style/gn_metadata.less @@ -1,4 +1,5 @@ @import "gn_search.less"; +@import "gn_variables.less"; .panel-body .gn-metadata-view { width: 100%; @@ -551,8 +552,6 @@ ul.container-list { margin-bottom: 0.5em; } td { - padding-left: 40px; - 
word-break: break-word; ul { padding-left: 0; } @@ -619,24 +618,46 @@ ul.container-list { } .col-md-6, .col-md-4 { - padding-left: 0px; - padding-right: 0px; + padding-left: 0; + padding-right: 0; } - .gn-contact-card-org { + .gn-contact-card-org, + .gn-contact-card-org-group { padding: 5px 0; margin-bottom: 10px; } .gn-contact-card-role { - font-style: italic; - font-weight: bolder; - } - .gn-contact-card-org-group { - font-style: italic; font-weight: bolder; } .gn-contact-card-group-role { padding: 5px; } + @media print { + svg { + max-width: 75px; + margin: 15px; + } + [gn-popover-content] { + .make-md-column-offset(3); + padding: 0 @gn-spacing-lg; + display: block !important; + label, + a { + display: block; + margin-bottom: @gn-spacing; + } + [data-label="focusOnFrom"] { + display: none; + } + .fa { + display: none; + } + [data-ng-href] { + margin-bottom: @gn-spacing; + display: inline-block; + } + } + } } } } diff --git a/web-ui/src/main/resources/catalog/style/gn_search.less b/web-ui/src/main/resources/catalog/style/gn_search.less index 990fb699d595..88a286e9383a 100644 --- a/web-ui/src/main/resources/catalog/style/gn_search.less +++ b/web-ui/src/main/resources/catalog/style/gn_search.less @@ -608,6 +608,15 @@ } } +.application-banner { + background-color: #fff8c5; + border: 1px solid #d4a72c66; + margin: 15px; + padding: 15px; + border-radius: 4px; + color: #000; +} + button:focus [role="tooltip"], button:hover [role="tooltip"] { clip: auto; diff --git a/web-ui/src/main/resources/catalog/style/gn_variables.less b/web-ui/src/main/resources/catalog/style/gn_variables.less new file mode 100644 index 000000000000..7d663982363d --- /dev/null +++ b/web-ui/src/main/resources/catalog/style/gn_variables.less @@ -0,0 +1,6 @@ +// variables used in custom styles +// ------------------------------- + +@gn-spacing: 10px; +@gn-spacing-sm: 5px; +@gn-spacing-lg: 15px; diff --git a/web-ui/src/main/resources/catalog/style/gn_viewer.less 
b/web-ui/src/main/resources/catalog/style/gn_viewer.less index c346e805fef1..e2e1deddcc92 100644 --- a/web-ui/src/main/resources/catalog/style/gn_viewer.less +++ b/web-ui/src/main/resources/catalog/style/gn_viewer.less @@ -265,9 +265,6 @@ } li[gn-layermanager-item] { .fa-arrows-alt, - .gn-layer-ordering { - visibility: hidden; - } input[type="radio"], input[type="checkbox"] { margin-top: 6px; @@ -275,16 +272,19 @@ label { padding: 4px 20px 4px 4px; display: block; - margin-bottom: 0px; + margin-bottom: 0; } .gn-layer-radio { - padding-left: 0px; - padding-right: 0px; + padding-left: 0; + padding-right: 0; } .tab-content { background-color: white; padding: 5px; - margin: 0px; + margin: 0; + } + .text-muted { + color: @btn-default-border; } } .gn-layer-outofrange > label { @@ -307,6 +307,40 @@ } } } + .dropdown-left { + @toggleWidth: 32px; + @toggleHeight: 32px; + + .dropdown-toggle { + width: @toggleWidth; + padding: 5px; + } + .dropdown-menu { + min-width: calc(~"(3 * @{toggleWidth}) + 6px") !important; + width: auto; + padding: 0; + margin: 0; + right: @toggleWidth; + top: 0; + box-shadow: none; + border: 0; + li { + float: left; + .btn { + width: @toggleWidth; + height: @toggleHeight; + padding: 5px; + margin-right: 2px; + border-radius: 3px !important; + &[disabled] { + color: @btn-link-disabled-color; + border-color: @input-bg-disabled; + } + } + } + } + } + .gn-searchlayer-list { margin: 0; padding: 0; @@ -341,11 +375,9 @@ .list-group-item { transition: max-height @transition-params; padding: 5px 15px; - min-height: 42px; &:hover, &:focus { - .fa-arrows-alt, - .gn-layer-ordering { + .fa-arrows-alt { visibility: visible; } } @@ -354,12 +386,16 @@ margin-bottom: 0; } &:focus-within { - .fa-arrows-alt, - .gn-layer-ordering { + .fa-arrows-alt { visibility: visible; } } } + .gn-facet-container { + .list-group-item { + border: 0; + } + } .gn-baselayer-switcher-menu { list-style: none; .list-group-item { @@ -418,10 +454,11 @@ } } h4 { - margin: 1em 0 0.5em 0; + 
margin: 1em 0 0 0; + font-size: 16px; } h5 { - margin: 0.2em 0; + margin: 0.7em 0; } [data-gn-layer-dimensions] { overflow: unset !important; @@ -436,28 +473,6 @@ background-color: @gray-lighter; border-color: @list-group-border; } - .gn-layer-ordering { - margin-right: -10px; - position: absolute; - right: 15px; - .fa { - padding: 0 8px; - } - &.btn-group-xs { - .btn { - white-space: nowrap; - padding: 4px; - opacity: 1; - &[disabled] { - color: @btn-link-disabled-color; - border-color: @input-bg-disabled; - } - .caret { - margin-right: 3px; - } - } - } - } .dropdown { .dropdown-menu { min-width: 16em; @@ -841,11 +856,48 @@ gn-features-tables, .tab-content { background: white; min-height: 5em; - padding: 1em; + padding: 0 15px; } .gn-features-table { - padding: 0.25em; - box-shadow: 0px 0px 4px 2px rgba(0, 0, 0, 0.1); + box-shadow: 0 0 4px 2px rgba(0, 0, 0, 0.1); + .bootstrap-table { + display: grid; + grid-template-columns: auto 0fr; + grid-template-rows: auto; + grid-template-areas: + "main toolbar" + "footer toolbar"; + .fixed-table-toolbar { + grid-area: toolbar; + .columns-right { + margin: 0 0 0 10px; + display: inline-block; + vertical-align: middle; + .btn, + .btn-group { + display: block; + float: none; + width: 100%; + max-width: 100%; + margin-top: -1px; + margin-left: 0; + border-radius: 0; + } + .btn:first-child:not(:last-child) { + border-radius: 4px 4px 0 0; + } + .btn-group:last-child:not(:first-child) > .btn:first-child { + border-radius: 0 0 4px 4px; + } + } + } + .fixed-table-container { + grid-area: main; + } + .fixed-table-pagination { + grid-area: footer; + } + } } .layername { display: inline-block; @@ -878,6 +930,11 @@ gn-features-tables, } .gn-md-view { + .tab-content { + background: white; + min-height: 5em; + padding: 0 0 15px 0; + } gn-features-tables { position: unset; .gn-features-table { @@ -891,6 +948,32 @@ gn-features-tables, [data-gn-wfs-filter-facets] .gn-facet-container { overflow: auto; max-height: 550px; + .list-group { + 
margin-bottom: 0; + .list-group-item { + border: 0; + padding: 8px 15px; + } + } + } +} +.gn-editor-sidebar { + .gn-related-resources { + p { + margin-top: 5px; + } + } + .gn-related-item { + h4 { + font-size: 14px; + } + } + .wfs-filter-group { + margin-top: 10px; + margin-bottom: 0; + .btn { + .btn-xs(); + } } } diff --git a/web-ui/src/main/resources/catalog/templates/admin/harvest/type/webdav.html b/web-ui/src/main/resources/catalog/templates/admin/harvest/type/webdav.html index 8057edba0338..77ea9d96c9c3 100644 --- a/web-ui/src/main/resources/catalog/templates/admin/harvest/type/webdav.html +++ b/web-ui/src/main/resources/catalog/templates/admin/harvest/type/webdav.html @@ -98,6 +98,21 @@
filteringAndProcessing +
+ +
+

geonetwork-xslfilterHelp

+
+
diff --git a/web-ui/src/main/resources/catalog/templates/admin/settings/doiservers.html b/web-ui/src/main/resources/catalog/templates/admin/settings/doiservers.html new file mode 100644 index 000000000000..6b6628751ca8 --- /dev/null +++ b/web-ui/src/main/resources/catalog/templates/admin/settings/doiservers.html @@ -0,0 +1,488 @@ +
+
+
+
doiservers
+
+ + + + + + +
+
+ +
+
+ +
+
+
+ updateDoiServer + newDoiServer + {{doiServerSelected.name}} +
+ + + +
+
+
+
+ + + +
+ + +
+ + +
+

fieldRequired

+
+
+
+ +
+ + +
+ +
+
+ +
+ + +
+ + +
+

fieldRequired

+
+
+ +
+

doiserver-url-help

+
+
+ +
+ + +
+ + +
+

fieldRequired

+
+
+
+ +
+
+ + +
+ + +
+

fieldRequired

+
+
+ +
+

doiserver-password-help

+
+
+
+ +
+ + +
+ + +
+

fieldRequired

+
+
+ +
+

+ doiserver-landingPageTemplate-help +

+
+
+ +
+ + +
+ +
+ +
+

doiserver-publicUrl-help

+
+
+ +
+ + +
+ + +
+

fieldRequired

+
+
+ +
+

doiserver-pattern-help

+
+
+ +
+ + +
+ + +
+

fieldRequired

+
+
+ +
+

doiserver-prefix-help

+
+
+ +
+ +
+
+
+ +
+

doiserver-recordGroups-help

+
+
+
+
+
+
+ + + +
+

confirmDoiServerDelete

+
+
diff --git a/web-ui/src/main/resources/catalog/templates/admin/settings/mapservers.html b/web-ui/src/main/resources/catalog/templates/admin/settings/mapservers.html index 62ee52e5ee7a..671b60bbcc96 100644 --- a/web-ui/src/main/resources/catalog/templates/admin/settings/mapservers.html +++ b/web-ui/src/main/resources/catalog/templates/admin/settings/mapservers.html @@ -51,6 +51,7 @@
-
- useAccount - -
- - -
- -
-
- -
- - -
- -
-
-
-
@@ -222,7 +192,7 @@ class="form-control" required="" data-ng-model="mapserverSelected.wmsurl" - placeholder="http://" + placeholder="http://localhost/geoserer/wms" />
@@ -237,7 +207,7 @@ class="form-control" required="" data-ng-model="mapserverSelected.wfsurl" - placeholder="http://" + placeholder="http://localhost/geoserver/wfs" />
@@ -252,7 +222,7 @@ class="form-control" required="" data-ng-model="mapserverSelected.wcsurl" - placeholder="http://" + placeholder="http://localhost/geoserver/wcs" />
diff --git a/web-ui/src/main/resources/catalog/templates/admin/settings/sources.html b/web-ui/src/main/resources/catalog/templates/admin/settings/sources.html index a0315d50b0f1..56edf2a2bb63 100644 --- a/web-ui/src/main/resources/catalog/templates/admin/settings/sources.html +++ b/web-ui/src/main/resources/catalog/templates/admin/settings/sources.html @@ -50,6 +50,7 @@ {{::s.name}} @@ -169,6 +170,20 @@

+ + + + + +
{{key | translate}} + +
+

sourceFilter-help

@@ -177,130 +192,116 @@ displayInHeaderSwitcher

displayInHeaderSwitcher-help

- -
- + + +

sourceUiConfig-help

-
-
- -
-
- - - -
+
+ + +
+ +

+ {{'system/csw/capabilityRecordUuid-help' | translate}} +

- -
-
- selectExistingLogo -
-
-
-
+
+ +
+ +

subPortalGroupOwnerHelp

- -
- -
-
- addNewLogo +
+ + +
+
+
-
-
-
upload
-
- - - chooseLogos - - -
    -
  • -
    - {{file.name}} ({{file.type}} / {{file.size | formatFileSize}}) - -
  • -
-
-
+
- - -

sourceLogo-help

-
- - - -

sourceUiConfig-help

- -
- - -
+
+ -

- {{'system/csw/capabilityRecordUuid-help' | translate}} -

+ +
+
+ selectExistingLogo +
+
+
+
-
- -
+ +
+ +
+
+ addNewLogo +
+
+
+
upload
+
+ + + chooseLogos + + +
    +
  • +
    + {{file.name}} ({{file.type}} / {{file.size | formatFileSize}}) + +
  • +
+
+
+
+
+
-

subPortalGroupOwnerHelp

-
+

sourceLogo-help

- - - - - - -
{{key | translate}} - -
diff --git a/web-ui/src/main/resources/catalog/templates/admin/settings/static-pages.html b/web-ui/src/main/resources/catalog/templates/admin/settings/static-pages.html index 90eec4c04494..7e469580a40b 100644 --- a/web-ui/src/main/resources/catalog/templates/admin/settings/static-pages.html +++ b/web-ui/src/main/resources/catalog/templates/admin/settings/static-pages.html @@ -306,15 +306,36 @@ class="form-control" required="" data-ng-model="staticPageSelected.status" + data-ng-change="updateGroupSelection()" > +
+ +
+ + +
+ +
+
diff --git a/web-ui/src/main/resources/catalog/templates/admin/settings/system.html b/web-ui/src/main/resources/catalog/templates/admin/settings/system.html index 6adfb7f93a15..cb22bbd21c22 100644 --- a/web-ui/src/main/resources/catalog/templates/admin/settings/system.html +++ b/web-ui/src/main/resources/catalog/templates/admin/settings/system.html @@ -397,7 +397,6 @@

{{section2.name | translate}}

> {{s.value}} - -
+
-
+
-
+
@@ -104,7 +106,10 @@ data-ng-show="uiConfiguration.configuration !== undefined" >
-

ui {{uiConfiguration.id}}

+

+ ui + {{uiConfiguration.id}} +

diff --git a/web-ui/src/main/resources/catalog/templates/admin/usergroup/groups.html b/web-ui/src/main/resources/catalog/templates/admin/usergroup/groups.html index 61741b2208d5..602b99728a88 100644 --- a/web-ui/src/main/resources/catalog/templates/admin/usergroup/groups.html +++ b/web-ui/src/main/resources/catalog/templates/admin/usergroup/groups.html @@ -259,6 +259,24 @@
+
+ + +

minimumProfileForPrivilegesHelp

+
+
+ +
+
+   + userHistory +
+
+
+
+
+
0; }, isApplicable: function (md) { // TODO: Would be good to return why a task is not applicable as tooltip @@ -265,6 +288,14 @@ scope.$watch(attrs.gnMdActionsMenu, function (a) { scope.md = a; + + if (scope.md) { + $http + .get("../api/doiservers/metadata/" + scope.md.id) + .then(function (response) { + scope.doiServers = response.data; + }); + } }); scope.getScope = function () { diff --git a/web-ui/src/main/resources/catalog/views/default/directives/partials/applicationBanner.html b/web-ui/src/main/resources/catalog/views/default/directives/partials/applicationBanner.html new file mode 100644 index 000000000000..06dbba219ad6 --- /dev/null +++ b/web-ui/src/main/resources/catalog/views/default/directives/partials/applicationBanner.html @@ -0,0 +1,3 @@ +
+ application-banner +
diff --git a/web-ui/src/main/resources/catalog/views/default/less/gn_admin_default.less b/web-ui/src/main/resources/catalog/views/default/less/gn_admin_default.less index 461ec2fa5c13..c9b868d0b27b 100644 --- a/web-ui/src/main/resources/catalog/views/default/less/gn_admin_default.less +++ b/web-ui/src/main/resources/catalog/views/default/less/gn_admin_default.less @@ -515,6 +515,16 @@ ul.gn-resultview li.list-group-item { margin-bottom: 10px; border-color: #ccc; } + .col-lg-4, .col-md-6 { + @media (max-width: @screen-sm-max) { + padding-right: 0 !important; + } + } + .col-md-12, #gn-uiconfig-customize { + padding-right: 0 !important; + margin-bottom: @gn-spacing; + } + // checkbox (Bootstrap 5.2) input[type="checkbox"], input[type="radio"] { @@ -656,7 +666,16 @@ ul.gn-resultview li.list-group-item { z-index: 901; } #gn-uiconfig-customize { - margin-top: -85px; + + @media (max-width: @screen-md-max) { + padding-left: 0 !important; + } + + @media (min-width: @screen-lg-min) { + margin-top: -85px; + } + + .dropdown-menu { padding: 0 !important; li { diff --git a/web-ui/src/main/resources/catalog/views/default/less/gn_editor_default.less b/web-ui/src/main/resources/catalog/views/default/less/gn_editor_default.less index 79345fda1afe..f36b116924f7 100644 --- a/web-ui/src/main/resources/catalog/views/default/less/gn_editor_default.less +++ b/web-ui/src/main/resources/catalog/views/default/less/gn_editor_default.less @@ -689,3 +689,20 @@ form.gn-editor.gn-indent-bluescale { content: ""; } } + +// Mock dropdown for typeahead +[data-show-hints-on-focus="true"]:not(.tt-hint) { + background-image: 
url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAA4AAAAMCAYAAABSgIzaAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyJpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMC1jMDYwIDYxLjEzNDc3NywgMjAxMC8wMi8xMi0xNzozMjowMCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNSBNYWNpbnRvc2giIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6NDZFNDEwNjlGNzFEMTFFMkJEQ0VDRTM1N0RCMzMyMkIiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6NDZFNDEwNkFGNzFEMTFFMkJEQ0VDRTM1N0RCMzMyMkIiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo0NkU0MTA2N0Y3MUQxMUUyQkRDRUNFMzU3REIzMzIyQiIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo0NkU0MTA2OEY3MUQxMUUyQkRDRUNFMzU3REIzMzIyQiIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PuGsgwQAAAA5SURBVHjaYvz//z8DOYCJgUxAf42MQIzTk0D/M+KzkRGPoQSdykiKJrBGpOhgJFYTWNEIiEeAAAMAzNENEOH+do8AAAAASUVORK5CYII=); + background-repeat: no-repeat; + background-position: right; + padding-right: 1.5em; +} + +// Move mock dropdown caret in multilingual mode +.gn-multilingual-field { + div:not(:has(.tt-input[data-show-hints-on-focus="true"].hidden)) { + .tt-input[data-show-hints-on-focus="true"] { + background-position-y: 90%; + } + } +} diff --git a/web-ui/src/main/resources/catalog/views/default/less/gn_map_default.less b/web-ui/src/main/resources/catalog/views/default/less/gn_map_default.less index 3fff351c769c..0b44f6fcd49d 100644 --- a/web-ui/src/main/resources/catalog/views/default/less/gn_map_default.less +++ b/web-ui/src/main/resources/catalog/views/default/less/gn_map_default.less 
@@ -97,7 +97,7 @@ margin: 0; padding: 0; position: absolute; - width: 16.5em; + width: 24.5em; // value = maptools panel width - label width - panel tools opener button width (34em - 7em - 2.5em) border-radius: 0; li { width: calc(~"100% -30px"); diff --git a/web-ui/src/main/resources/catalog/views/default/less/gn_print_default.less b/web-ui/src/main/resources/catalog/views/default/less/gn_print_default.less index 52b5c123b9f5..01eecbe8eef2 100644 --- a/web-ui/src/main/resources/catalog/views/default/less/gn_print_default.less +++ b/web-ui/src/main/resources/catalog/views/default/less/gn_print_default.less @@ -1,4 +1,6 @@ @import "../../../lib/style/bootstrap/less/variables.less"; +@import "../../../lib/style/bootstrap/less/mixins/grid.less"; +@import "gn_view.less"; // special print classes .gn-new-page { @@ -105,6 +107,26 @@ a[href]:after { svg { max-width: 100px; } + [gn-popover-content] { + .make-md-column-offset(3); + padding: 0 @gn-spacing-lg; + display: block !important; + label, + a { + display: block; + margin-bottom: @gn-spacing; + } + [data-label="focusOnFrom"] { + display: none; + } + .fa { + display: none; + } + [data-ng-href] { + margin-bottom: @gn-spacing; + display: inline-block; + } + } } .gn-md-side-crs { a { diff --git a/web-ui/src/main/resources/catalog/views/default/less/gn_result_default.less b/web-ui/src/main/resources/catalog/views/default/less/gn_result_default.less index 4e26b7506c46..df97e05a5178 100644 --- a/web-ui/src/main/resources/catalog/views/default/less/gn_result_default.less +++ b/web-ui/src/main/resources/catalog/views/default/less/gn_result_default.less @@ -1,4 +1,5 @@ @import "../../../style/gn_search.less"; +@import "../../../style/gn_variables.less"; // variables for manipulating the theme @import "gn_variables_default.less"; // must be last @@ -97,6 +98,39 @@ background-color: @brand-danger !important; } } + // address card + .panel-address { + padding-bottom: 0; + box-shadow: none; + border: 1px solid 
@panel-default-border; + .panel-heading { + padding: @gn-spacing-lg; + border-bottom: 1px solid @panel-default-border; + background-color: @panel-default-heading-bg; + height: auto; + line-height: normal; + h3 { + margin: 0; + padding: 0; + font-size: 14px; + font-weight: bold; + } + } + .panel-body { + padding: 0; + address { + padding: @gn-spacing-lg; + + border-radius: @panel-border-radius; + margin: 0; + label { + min-width: 20%; + margin-right: @gn-spacing; + } + } + } + + } // card .gn-card-view-header { padding: 15px; @@ -147,7 +181,7 @@ .img-thumbnail { padding: 0; border: none; - max-height: 300px; + max-height: 500px; min-height: 150px; max-width: 100%; } @@ -258,14 +292,20 @@ display: inline; line-break: auto; word-break: break-word; + letter-spacing: -1em; + * { + letter-spacing: normal; + } a { line-break: normal; } &:after { content: ", "; + letter-spacing: normal; } &:last-child:after { content: ""; + letter-spacing: normal; } } } @@ -325,6 +365,9 @@ } } // keywords + .gn-thesaurus:not(:has(button)) { + display: none !important; + } [data-gn-keyword-badges] { .btn { word-break: break-word; diff --git a/web-ui/src/main/resources/catalog/views/default/less/gn_view.less b/web-ui/src/main/resources/catalog/views/default/less/gn_view.less index 7d2cbf3c9cc9..65696fb2894b 100644 --- a/web-ui/src/main/resources/catalog/views/default/less/gn_view.less +++ b/web-ui/src/main/resources/catalog/views/default/less/gn_view.less @@ -1,13 +1,10 @@ @import "../../../lib/style/bootstrap/less/variables.less"; +@import "../../../style/gn_variables.less"; /* Defined here any custom style for the view which has to be applied for all apps (ie. admin, search, login, editor). 
*/ -@gn-spacing: 10px; -@gn-spacing-sm: 5px; -@gn-spacing-lg: 15px; - // padding .gn-padding-top { padding-top: @gn-spacing !important; diff --git a/web-ui/src/main/resources/catalog/views/default/module.js b/web-ui/src/main/resources/catalog/views/default/module.js index a3e72354245c..380d6b1ed5dc 100644 --- a/web-ui/src/main/resources/catalog/views/default/module.js +++ b/web-ui/src/main/resources/catalog/views/default/module.js @@ -95,7 +95,7 @@ filters: [ { query_string: { - query: '+resourceType:"map/interactive"' + query: '+resourceType:"map-interactive"' } } ], @@ -412,7 +412,7 @@ msg: $translate.instant("layerProtocolNotSupported", { type: link.protocol }), - delay: 20000, + delay: 20, type: "warning" }); return; @@ -536,7 +536,7 @@ setActiveTab(); $scope.$on("$locationChangeSuccess", setActiveTab); - $scope.$on("$locationChangeSuccess", function (next, current) { + $scope.$on("$locationChangeSuccess", function (event, next, current) { if ( gnSearchLocation.isSearch() && (!angular.isArray(searchMap.getSize()) || searchMap.getSize()[0] < 0) @@ -545,6 +545,15 @@ searchMap.updateSize(); }, 0); } + + // Changing from the map to search pages, hide alerts + var currentUrlHash = + current.indexOf("#") > -1 ? current.slice(current.indexOf("#") + 1) : ""; + if (gnSearchLocation.isMap(currentUrlHash)) { + setTimeout(function () { + gnAlertService.closeAlerts(); + }, 0); + } }); var sortConfig = gnSearchSettings.sortBy.split("#"); diff --git a/web-ui/src/main/resources/catalog/views/default/templates/home.html b/web-ui/src/main/resources/catalog/views/default/templates/home.html index c6aa22348aad..5893def2802b 100644 --- a/web-ui/src/main/resources/catalog/views/default/templates/home.html +++ b/web-ui/src/main/resources/catalog/views/default/templates/home.html @@ -119,11 +119,15 @@

topMaps

>
-
+

- browseBy + browseBy + + {{::getFacetLabel(searchInfo.aggregations[homeFacet.list[0]], 'facet-' + + homeFacet.list[0])}} +

-
+
@@ -154,7 +158,8 @@

- {{('facet-' + homeFacet.lastKey) | facetKeyTranslator}} + {{getFacetLabel(searchInfo.aggregations[homeFacet.lastKey], 'facet-' + + homeFacet.lastKey)}}

diff --git a/web-ui/src/main/resources/catalog/views/default/templates/index.html b/web-ui/src/main/resources/catalog/views/default/templates/index.html index 326f7e809d00..03af24598242 100644 --- a/web-ui/src/main/resources/catalog/views/default/templates/index.html +++ b/web-ui/src/main/resources/catalog/views/default/templates/index.html @@ -38,6 +38,8 @@ xmlns="http://www.w3.org/1999/html" > + +
diff --git a/web-ui/src/main/resources/catalog/views/default/templates/recordView/featurecatalogue.html b/web-ui/src/main/resources/catalog/views/default/templates/recordView/featurecatalogue.html index 73cf1f7aa285..c9cdc743f88c 100644 --- a/web-ui/src/main/resources/catalog/views/default/templates/recordView/featurecatalogue.html +++ b/web-ui/src/main/resources/catalog/views/default/templates/recordView/featurecatalogue.html @@ -49,7 +49,7 @@

featureAliases - {{featureType.aliases}} + {{featureType.aliases.join(", ")}} lineage

-
+

sourceDescription

-

+ +

diff --git a/web-ui/src/main/resources/catalog/views/default/templates/recordView/maintenance.html b/web-ui/src/main/resources/catalog/views/default/templates/recordView/maintenance.html new file mode 100644 index 000000000000..a2672e5df091 --- /dev/null +++ b/web-ui/src/main/resources/catalog/views/default/templates/recordView/maintenance.html @@ -0,0 +1,38 @@ +
+
+ + + +
+

updateFrequency

+

{{maintenance.frequency | translate}}

+
+
+
+ + + +
+

maintenanceNote

+

{{maintenance.noteObject.default}}

+
+
+
+ + + +
+

nextUpdateDate

+

{{maintenance.nextUpdateDate}}

+
+
+
+ + + +
+

userDefinedFrequency

+

+
+
+
diff --git a/web-ui/src/main/resources/catalog/views/default/templates/recordView/metadata.html b/web-ui/src/main/resources/catalog/views/default/templates/recordView/metadata.html index 2823fdaf407a..92c79a4dd8a2 100644 --- a/web-ui/src/main/resources/catalog/views/default/templates/recordView/metadata.html +++ b/web-ui/src/main/resources/catalog/views/default/templates/recordView/metadata.html @@ -20,7 +20,9 @@

updatedOn

metadataLanguage

    -
  • {{l | translate}}
  • +
  • + {{l | translate}} +
diff --git a/web-ui/src/main/resources/catalog/views/default/templates/recordView/recordView.html b/web-ui/src/main/resources/catalog/views/default/templates/recordView/recordView.html index 846ef56531af..091570e1dc58 100644 --- a/web-ui/src/main/resources/catalog/views/default/templates/recordView/recordView.html +++ b/web-ui/src/main/resources/catalog/views/default/templates/recordView/recordView.html @@ -35,10 +35,20 @@
- recordNotFound + + recordNotFound + + + trySignIn +
diff --git a/web-ui/src/main/resources/catalog/views/default/templates/recordView/spatial.html b/web-ui/src/main/resources/catalog/views/default/templates/recordView/spatial.html index b99638e5c766..8f75ae1d8d5e 100644 --- a/web-ui/src/main/resources/catalog/views/default/templates/recordView/spatial.html +++ b/web-ui/src/main/resources/catalog/views/default/templates/recordView/spatial.html @@ -35,7 +35,7 @@

scale

data-ng-repeat="d in mdView.current.record.resolutionScaleDenominator" class="gn-scale" > - {{d}} + {{d}}
@@ -51,7 +51,9 @@

scale

resolution

    -
  • {{r}}
  • +
  • + {{r}} +
diff --git a/web-ui/src/main/resources/catalog/views/default/templates/recordView/technical.html b/web-ui/src/main/resources/catalog/views/default/templates/recordView/technical.html index a6bfc157cba9..0fdc21a87378 100644 --- a/web-ui/src/main/resources/catalog/views/default/templates/recordView/technical.html +++ b/web-ui/src/main/resources/catalog/views/default/templates/recordView/technical.html @@ -22,6 +22,10 @@

{{date.type | translate}}

+
+
@@ -49,21 +53,6 @@

cl_couplingType

{{c.default}}

- -
- - - -
-

updateFrequency

-

- {{c.default}} -

-
-
@@ -73,7 +62,7 @@

updateFrequency

&& viewConfig.internalThesaurus && viewConfig.internalThesaurus.indexOf(key) !== -1) || highlightedThesaurus.indexOf(key) === -1" - class="gn-margin-bottom flex-row" + class="gn-margin-bottom flex-row gn-thesaurus" > @@ -133,11 +122,8 @@

resourceEdition

language

    -
  • - {{l}} +
  • + {{l}}
diff --git a/web-ui/src/main/resources/catalog/views/default/templates/recordView/thumbnails.html b/web-ui/src/main/resources/catalog/views/default/templates/recordView/thumbnails.html index 87fab96123fc..6338bf0f4d5f 100644 --- a/web-ui/src/main/resources/catalog/views/default/templates/recordView/thumbnails.html +++ b/web-ui/src/main/resources/catalog/views/default/templates/recordView/thumbnails.html @@ -1,13 +1,22 @@
  • {{'overview' | translate}} -

    {{img.name}}

    + {{'overview' | translate}}
diff --git a/web/pom.xml b/web/pom.xml index 960e20c07729..cf4d1abe4e25 100644 --- a/web/pom.xml +++ b/web/pom.xml @@ -30,7 +30,7 @@ org.geonetwork-opensource geonetwork - 4.4.6-SNAPSHOT + 4.4.7-SNAPSHOT @@ -417,6 +417,12 @@ ${project.version} + + ${project.groupId} + gn-auditable + ${project.version} + + dlib diff --git a/web/src/main/java/org/fao/geonet/proxy/URITemplateProxyServlet.java b/web/src/main/java/org/fao/geonet/proxy/URITemplateProxyServlet.java index b9a78fccdf18..ecc0bb8a06be 100644 --- a/web/src/main/java/org/fao/geonet/proxy/URITemplateProxyServlet.java +++ b/web/src/main/java/org/fao/geonet/proxy/URITemplateProxyServlet.java @@ -72,6 +72,8 @@ import java.util.regex.PatternSyntaxException; import java.util.stream.Collectors; +import static org.apache.commons.lang3.StringUtils.isBlank; + /** * This is a class extending the real proxy to make sure we can tweak specifics like removing the CSRF token on requests * @@ -95,12 +97,16 @@ public class URITemplateProxyServlet extends ProxyServlet { private static final long serialVersionUID = 4847856943273604410L; private static final String P_SECURITY_MODE = "securityMode"; private static final String P_IS_SECURED = "isSecured"; + private static final String P_DISALLOW_HEADERS = "disallowHeaders"; + private static final String TARGET_URI_NAME = "targetUri"; private static final String P_EXCLUDE_HOSTS = "excludeHosts"; private static final String P_ALLOW_PORTS = "allowPorts"; private static final String ATTR_QUERY_STRING = URITemplateProxyServlet.class.getSimpleName() + ".queryString"; + protected List disallowHeaders = new ArrayList<>(); + /* * These are the "hop-by-hop" headers that should not be copied. 
* http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html Overriding @@ -133,6 +139,15 @@ public class URITemplateProxyServlet extends ProxyServlet { // Allowed ports allowed to access through the proxy private Set allowPorts = new HashSet<>(Arrays.asList(80, 443)); + @Override + protected void copyRequestHeader(HttpServletRequest servletRequest, HttpRequest proxyRequest, + String headerName) { + if (disallowHeaders.contains(headerName)) { + return; // dont copy + } + super.copyRequestHeader(servletRequest,proxyRequest,headerName); + } + /** * Init some properties from the servlet's init parameters. They try to be resolved the same way other GeoNetwork * configuration properties are resolved. If after checking externally no configuration can be found it relies into @@ -159,6 +174,11 @@ public class URITemplateProxyServlet extends ProxyServlet { */ @Override protected void initTarget() throws ServletException { + //parse the disallowHeaders + if (!isBlank(getConfigParam(P_DISALLOW_HEADERS))) { + disallowHeaders = Arrays.asList(getConfigParam(P_DISALLOW_HEADERS).split(",")); + } + securityMode = SECURITY_MODE.parse(getConfigParam(P_SECURITY_MODE)); String doForwardHostString = getConfigParam(P_FORWARDEDHOST); if (doForwardHostString != null) { @@ -173,7 +193,7 @@ protected void initTarget() throws ServletException { targetUriTemplate = getConfigValue(TARGET_URI_NAME); // If not set externally try to use the value from web.xml - if (StringUtils.isBlank(targetUriTemplate)) { + if (isBlank(targetUriTemplate)) { targetUriTemplate = getConfigParam(P_TARGET_URI); if (targetUriTemplate == null) { throw new ServletException(P_TARGET_URI + " is required in web.xml or set externally"); @@ -185,7 +205,7 @@ protected void initTarget() throws ServletException { this.username = getConfigValue("username"); this.password = getConfigValue("password"); - if (StringUtils.isBlank(this.username)) { + if (isBlank(this.username)) { this.username = getConfigParam("username"); this.password = 
getConfigParam("password"); } @@ -196,7 +216,7 @@ protected void initTarget() throws ServletException { } String excludeHosts = getConfigValue(P_EXCLUDE_HOSTS); - if (StringUtils.isBlank(excludeHosts)) { + if (isBlank(excludeHosts)) { excludeHosts = getConfigParam(P_EXCLUDE_HOSTS); } @@ -209,7 +229,7 @@ protected void initTarget() throws ServletException { } String additionalAllowPorts = getConfigValue(P_ALLOW_PORTS); - if (StringUtils.isBlank(additionalAllowPorts)) { + if (isBlank(additionalAllowPorts)) { additionalAllowPorts = getConfigParam(P_ALLOW_PORTS); } @@ -236,7 +256,7 @@ private String getConfigValue(String suffix) { result = resolveConfigValue(webappName + "." + getServletName() + "." + suffix); - if (StringUtils.isBlank(result)) { + if (isBlank(result)) { // GEONETWORK is the default prefix LOGGER.info( @@ -464,7 +484,7 @@ protected void service(HttpServletRequest servletRequest, HttpServletResponse se private boolean isUrlAllowed(HttpServletRequest servletRequest) { String url = servletRequest.getParameter("url"); - if (StringUtils.isBlank(url)) { + if (isBlank(url)) { return true; } diff --git a/web/src/main/webResources/WEB-INF/config-db/database_migration.xml b/web/src/main/webResources/WEB-INF/config-db/database_migration.xml index 0352f3a49830..aa87fe9adcb1 100644 --- a/web/src/main/webResources/WEB-INF/config-db/database_migration.xml +++ b/web/src/main/webResources/WEB-INF/config-db/database_migration.xml @@ -391,6 +391,7 @@ + java:v445.DoiServerDatabaseMigration WEB-INF/classes/setup/sql/migrate/v445/migrate- @@ -399,5 +400,10 @@ WEB-INF/classes/setup/sql/migrate/v446/migrate- + + + WEB-INF/classes/setup/sql/migrate/v447/migrate- + + diff --git a/web/src/main/webResources/WEB-INF/config.properties b/web/src/main/webResources/WEB-INF/config.properties index 6a623b39a774..6704931e4734 100644 --- a/web/src/main/webResources/WEB-INF/config.properties +++ b/web/src/main/webResources/WEB-INF/config.properties @@ -76,4 +76,5 @@ analytics.web.jscode= 
#analytics.web.service=matomo #analytics.web.jscode=var _paq = _paq || [];_paq.push(['trackPageView']);_paq.push(['enableLinkTracking']);(function() {var u="//localhost/";_paq.push(['setTrackerUrl', u+'piwik.php']);_paq.push(['setSiteId', '1']);var d=document, g=d.createElement('script'), s=d.getElementsByTagName('script')[0];g.type='text/javascript'; g.async=true; g.defer=true; g.src=u+'piwik.js'; s.parentNode.insertBefore(g,s);})();var currentUrl = location.href; window.addEventListener('hashchange', function() {_paq.push(['setReferrerUrl', currentUrl]);currentUrl = window.location.href;_paq.push(['setCustomUrl', currentUrl]);_paq.push(['setDocumentTitle', currentUrl]);_paq.push(['deleteCustomVariables', 'page']);_paq.push(['trackPageView']);var content = document.getElementsByTagName('body')[0];_paq.push(['MediaAnalytics::scanForMedia', content]);_paq.push(['FormAnalytics::scanForForms', content]);_paq.push(['trackContentImpressionsWithinNode', content]);_paq.push(['enableLinkTracking']);}); - +# Configure the metadata publication notification mails to be sent as HTML (true) or TEXT (false) +metadata.publicationmail.format.html=true diff --git a/web/src/main/webResources/WEB-INF/data/config/index/records.json b/web/src/main/webResources/WEB-INF/data/config/index/records.json index f52626268668..28febb0667d0 100644 --- a/web/src/main/webResources/WEB-INF/data/config/index/records.json +++ b/web/src/main/webResources/WEB-INF/data/config/index/records.json @@ -1361,7 +1361,6 @@ { "tag": { "match": "th_*", - "match_mapping_type": "object", "mapping": { "properties": { "default": { diff --git a/web/src/main/webResources/WEB-INF/web.xml b/web/src/main/webResources/WEB-INF/web.xml index d75022d9890d..cc6ce33f60bf 100644 --- a/web/src/main/webResources/WEB-INF/web.xml +++ b/web/src/main/webResources/WEB-INF/web.xml @@ -404,6 +404,15 @@ log false + + disallowHeaders + + + gn5.to.gn4.trusted.json.auth + http.protocol.handle-redirects true diff --git 
a/web/src/main/webapp/WEB-INF/classes/ESAPI.properties b/web/src/main/webapp/WEB-INF/classes/ESAPI.properties index 68f7366c7116..d943542e9b4a 100644 --- a/web/src/main/webapp/WEB-INF/classes/ESAPI.properties +++ b/web/src/main/webapp/WEB-INF/classes/ESAPI.properties @@ -68,11 +68,12 @@ ESAPI.HTTPUtilities=org.owasp.esapi.reference.DefaultHTTPUtilities ESAPI.IntrusionDetector=org.owasp.esapi.reference.DefaultIntrusionDetector # Log4JFactory Requires log4j.xml or log4j.properties in classpath - http://www.laliluna.de/log4j-tutorial.html # Note that this is now considered deprecated! -ESAPI.Logger=org.owasp.esapi.logging.log4j.Log4JLogFactory +#ESAPI.Logger=org.owasp.esapi.logging.log4j.Log4JLogFactory #ESAPI.Logger=org.owasp.esapi.logging.java.JavaLogFactory # To use the new SLF4J logger in ESAPI (see GitHub issue #129), set # ESAPI.Logger=org.owasp.esapi.logging.slf4j.Slf4JLogFactory # and do whatever other normal SLF4J configuration that you normally would do for your application. +ESAPI.Logger=org.owasp.esapi.logging.slf4j.Slf4JLogFactory ESAPI.Randomizer=org.owasp.esapi.reference.DefaultRandomizer ESAPI.Validator=org.owasp.esapi.reference.DefaultValidator diff --git a/web/src/main/webapp/WEB-INF/classes/org/fao/geonet/api/Messages.properties b/web/src/main/webapp/WEB-INF/classes/org/fao/geonet/api/Messages.properties index 10b43f9b2259..accb35b96327 100644 --- a/web/src/main/webapp/WEB-INF/classes/org/fao/geonet/api/Messages.properties +++ b/web/src/main/webapp/WEB-INF/classes/org/fao/geonet/api/Messages.properties @@ -54,8 +54,7 @@ user_password_changed='%s' password was updated. user_password_notchanged=A problem occurred trying to change '%s' password. Contact the helpdesk. user_password_invalid_changekey='%s' is an invalid change key for '%s'. Change keys are only valid for one day. user_registered=User '%s' registered. -user_with_that_email_found=A user with this email or username already exists. 
-user_with_that_username_found=A user with this email or username already exists. +user_with_that_email_username_found=A user with this email or username already exists. register_email_admin_subject=%s / New account for %s as %s register_email_admin_message=Dear Admin,\n\ Newly registered user %s has requested %s access for %s.\n\ @@ -182,6 +181,9 @@ api.exception.resourceAlreadyExists=Resource already exists api.exception.resourceAlreadyExists.description=Resource already exists. api.exception.unsatisfiedRequestParameter=Unsatisfied request parameter api.exception.unsatisfiedRequestParameter.description=Unsatisfied request parameter. +exception.maxUploadSizeExceeded=Maximum upload size of {0} exceeded. +exception.maxUploadSizeExceeded.description=The request was rejected because its size ({0}) exceeds the configured maximum ({1}). +exception.maxUploadSizeExceededUnknownSize.description=The request was rejected because its size exceeds the configured maximum ({0}). exception.resourceNotFound.metadata=Metadata not found exception.resourceNotFound.metadata.description=Metadata with UUID ''{0}'' not found. exception.resourceNotFound.resource=Metadata resource ''{0}'' not found @@ -218,8 +220,10 @@ exception.doi.serverErrorDelete=Error deleting DOI exception.doi.serverErrorDelete.description=Error deleting DOI: {0} exception.doi.serverErrorUnregister=Error unregistering DOI exception.doi.serverErrorUnregister.description=Error unregistering DOI: {0} -exception.doi.configurationMissing=DOI configuration is not complete -exception.doi.configurationMissing.description=DOI configuration is not complete. Check System Configuration and set the DOI configuration. 
+exception.doi.serverCanNotHandleRecord=DOI server can not handle the metadata +exception.doi.serverCanNotHandleRecord.description=DOI server ''{0}'' can not handle the metadata with UUID ''{1}'' +exception.doi.configurationMissing=DOI server configuration is not complete +exception.doi.configurationMissing.description=DOI server configuration is not complete. Check the DOI server configuration to complete it exception.doi.notSupportedOperationError=Operation not supported exception.doi.notSupportedOperationError.description={0} api.metadata.import.importedWithId=Metadata imported with ID '%s' @@ -250,3 +254,9 @@ api.metadata.status.errorGetStatusNotAllowed=Only the owner of the metadata can api.metadata.status.errorSetStatusNotAllowed=Only the owner of the metadata can set the status of this record. User is not the owner of the metadata. feedback_subject_userFeedback=User feedback + +audit.revision=Updated by %s on %s:\n\ +%s +audit.revision.field.set=- Field '%s' set to '%s' +audit.revision.field.unset=- Field '%s' unset +audit.revision.field.updated=- Field '%s' changed from '%s' to '%s' diff --git a/web/src/main/webapp/WEB-INF/classes/org/fao/geonet/api/Messages_fre.properties b/web/src/main/webapp/WEB-INF/classes/org/fao/geonet/api/Messages_fre.properties index d78c5cb26ec7..a52fec3dc978 100644 --- a/web/src/main/webapp/WEB-INF/classes/org/fao/geonet/api/Messages_fre.properties +++ b/web/src/main/webapp/WEB-INF/classes/org/fao/geonet/api/Messages_fre.properties @@ -53,8 +53,8 @@ user_password_sent=Si l''utilisateur existe, vous recevrez un courriel contenant user_password_changed=Le mot de passe de %s a \u00E9t\u00E9 mis \u00E0 jour. user_password_notchanged=\u00C9chec lors du changement de mot de passe de %s. Contactez le support. user_password_invalid_changekey=%s est une cl\u00E9 invalide pour %s. Les cl\u00E9s ne sont valides que pendant une journ\u00E9e. 
-user_with_that_email_found=Un utilisateur avec cette adresse email ou ce nom d''utilisateur existe d\u00E9j\u00E0. -user_with_that_username_found=Un utilisateur avec cette adresse email ou ce nom d''utilisateur existe d\u00E9j\u00E0. +user_registered=Utilisateur '%s' enregistr\u00E9. +user_with_that_email_username_found=Un utilisateur avec cette adresse email ou ce nom d''utilisateur existe d\u00E9j\u00E0. register_email_admin_subject=%s / Cr\u00E9ation de compte pour %s en tant que %s register_email_admin_message=Cher administrateur,\n\ L'utilisateur %s vient de demander une cr\u00E9ation de compte pour %s.\n\ @@ -176,6 +176,9 @@ api.exception.resourceAlreadyExists=La ressource existe d\u00E9j\u00E0 api.exception.resourceAlreadyExists.description=La ressource existe d\u00E9j\u00E0. api.exception.unsatisfiedRequestParameter=Param\u00E8tre de demande non satisfait api.exception.unsatisfiedRequestParameter.description=Param\u00E8tre de demande non satisfait. +exception.maxUploadSizeExceeded=La taille maximale du t\u00E9l\u00E9chargement de {0} a \u00E9t\u00E9 exc\u00E9d\u00E9e. +exception.maxUploadSizeExceeded.description=La demande a \u00E9t\u00E9 refus\u00E9e car sa taille ({0}) exc\u00E8de le maximum configur\u00E9 ({1}). +exception.maxUploadSizeExceededUnknownSize.description=La demande a \u00E9t\u00E9 refus\u00E9e car sa taille exc\u00E8de le maximum configur\u00E9 ({0}). exception.resourceNotFound.metadata=Fiches introuvables exception.resourceNotFound.metadata.description=La fiche ''{0}'' est introuvable. exception.resourceNotFound.resource=Ressource ''{0}'' introuvable @@ -200,17 +203,21 @@ exception.doi.recordNotConformantMissingInfo=La fiche n''est pas conforme au for exception.doi.recordNotConformantMissingInfo.description=La fiche ''{0}'' n''est pas conforme au format DataCite. {1} champ(s) obligatoire(s) manquant(s). 
{2} exception.doi.recordNotConformantMissingMandatory=La fiche n''est pas conforme aux r\u00E8gles de validation DataCite pour les champs obligatoires exception.doi.recordNotConformantMissingMandatory.description=La fiche ''{0}'' n''est pas conforme aux r\u00E8gles de validation DataCite pour les champs obligatoires. L''erreur est: {1}. Les champs obligatoires dans DataCite sont : identifiant, cr\u00E9ateurs, titres, \u00E9diteur, publicationYear, resourceType. V\u00E9rifiez la sortie au format DataCite et adaptez le contenu de la fiche pour ajouter les informations manquantes. -exception.doi.recordInvalid=Le fiche converti n''est pas conforme au format DataCite -exception.doi.recordInvalid.description=Le fiche ''{0}'' converti n''est pas conforme au format DataCite. L''erreur est: {1}. Les champs obligatoires dans DataCite sont : identifiant, cr\u00E9ateurs, titres, \u00E9diteur, ann\u00E9e de publication, type de ressource. V\u00E9rifier la sortie au format DataCite et adapter le contenu de la fiche pour ajouter les informations manquantes. -exception.doi.serverErrorCreate=Error creating DOI -exception.doi.serverErrorCreate.description=Error creating DOI: {0} -exception.doi.serverErrorRetrieve=Error retrieving DOI -exception.doi.serverErrorRetrieve.description=Error retrieving DOI: {0} -exception.doi.serverErrorDelete=Error deleting DOI -exception.doi.serverErrorDelete.description=Error deleting DOI: {0} -exception.doi.serverErrorUnregister=Error unregistering DOI -exception.doi.serverErrorUnregister.description=Error unregistering DOI: {0} -exception.doi.notSupportedOperationError=Operation not supported +exception.doi.recordInvalid=La fiche n''est pas conforme au format DataCite +exception.doi.recordInvalid.description=La fiche ''{0}'' n''est pas conforme au format DataCite. L''erreur est: {1}. Les champs obligatoires dans DataCite sont : identifiant, cr\u00E9ateurs, titres, \u00E9diteur, ann\u00E9e de publication, type de ressource. 
V\u00E9rifier la sortie au format DataCite et adapter le contenu de la fiche pour ajouter les informations manquantes.\ +exception.doi.serverErrorCreate=Erreur lors de la cr\u00E9ation du DOI +exception.doi.serverErrorCreate.description=Erreur lors de la cr\u00E9ation du DOI : {0} +exception.doi.serverErrorRetrieve=Erreur lors de la r\u00E9cup\u00E9ration du DOI +exception.doi.serverErrorRetrieve.description=Erreur lors de la r\u00E9cup\u00E9ration du DOI : {0} +exception.doi.serverErrorDelete=Erreur lors de la suppression du DOI +exception.doi.serverErrorDelete.description=Erreur lors de la suppression du DOI : {0} +exception.doi.serverErrorUnregister=Erreur lors de la d\u00E9sinscription du DOI +exception.doi.serverErrorUnregister.description=Erreur lors de la d\u00E9sinscription du DOI {0} +exception.doi.serverCanNotHandleRecord=DOI server can not handle the metadata +exception.doi.serverCanNotHandleRecord.description=DOI server ''{0}'' can not handle the metadata with UUID ''{1}'' +exception.doi.configurationMissing=DOI server configuration is not complete +exception.doi.configurationMissing.description=DOI server configuration is not complete. Check the DOI server configuration to complete it +exception.doi.notSupportedOperationError=Op\u00E9ration non prise en charge exception.doi.notSupportedOperationError.description={0} api.metadata.import.importedWithId=Fiche import\u00E9e avec l'ID '%s' api.metadata.import.importedWithUuid=Fiche import\u00E9e avec l'UUID '%s' @@ -240,3 +247,9 @@ api.metadata.status.errorGetStatusNotAllowed=Seul le propri\u00E9taire des m\u00 api.metadata.status.errorSetStatusNotAllowed=Seul le propri\u00E9taire des m\u00E9tadonn\u00E9es peut d\u00E9finir le statut de cet enregistrement. 
L'utilisateur n'est pas le propri\u00E9taire des m\u00E9tadonn\u00E9es feedback_subject_userFeedback=Commentaire de l'utilisateur + +audit.revision=Mise \u00E0 jour par %s le %s:\n\ +%s +audit.revision.field.set=- Champ '%s' d\u00E9fini \u00E0 '%s' +audit.revision.field.unset=- Champ '%s' d\u00E9sactiv\u00E9 +audit.revision.field.updated=- Champ '%s' modifi\u00E9 de '%s' \u00E0 '%s' diff --git a/web/src/main/webapp/WEB-INF/classes/setup/sql/data/data-db-default.sql b/web/src/main/webapp/WEB-INF/classes/setup/sql/data/data-db-default.sql index 1545d0e57e4d..d9a267e3d93b 100644 --- a/web/src/main/webapp/WEB-INF/classes/setup/sql/data/data-db-default.sql +++ b/web/src/main/webapp/WEB-INF/classes/setup/sql/data/data-db-default.sql @@ -574,7 +574,7 @@ INSERT INTO Operations (id, name) VALUES (6,'featured'); INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/site/name', 'My GeoNetwork catalogue', 0, 110, 'n'); INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/site/siteId', '', 0, 120, 'n'); INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/site/organization', 'My organization', 0, 130, 'n'); -INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/platform/version', '4.4.6', 0, 150, 'n'); +INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/platform/version', '4.4.7', 0, 150, 'n'); INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/platform/subVersion', 'SNAPSHOT', 0, 160, 'n'); INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/site/svnUuid', '', 0, 170, 'y'); @@ -735,19 +735,18 @@ INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/userSelfRegistration/domainsAllowed', '', 0, 1911, 'y'); INSERT INTO Settings (name, value, datatype, position, internal) VALUES 
('system/publication/doi/doienabled', 'false', 2, 100191, 'n'); -INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/publication/doi/doiurl', '', 0, 100192, 'n'); -INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/publication/doi/doiusername', '', 0, 100193, 'n'); -INSERT INTO Settings (name, value, datatype, position, internal, encrypted) VALUES ('system/publication/doi/doipassword', '', 0, 100194, 'y', 'y'); -INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/publication/doi/doikey', '', 0, 110095, 'n'); -INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/publication/doi/doilandingpagetemplate', 'http://localhost:8080/geonetwork/srv/resources/records/{{uuid}}', 0, 100195, 'n'); -INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/publication/doi/doipublicurl', '', 0, 100196, 'n'); -INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/publication/doi/doipattern', '{{uuid}}', 0, 100197, 'n'); INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/security/passwordEnforcement/minLength', '6', 1, 12000, 'n'); INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/security/passwordEnforcement/maxLength', '20', 1, 12001, 'n'); INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/security/passwordEnforcement/usePattern', 'true', 2, 12002, 'n'); INSERT INTO Settings (name, value, datatype, position, internal, editable) VALUES ('system/security/passwordEnforcement/pattern', '^((?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*(_|[^\w])).*)$', 0, 12003, 'n', 'n'); +INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/banner/enable', 'false', 2, 1920, 'n'); + +INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/auditable/enable', 'false', 2, 12010, 'n'); + + + -- WARNING: Security 
/ Add this settings only if you need to allow admin -- users to be able to reset user password. If you have mail server configured -- user can reset password directly. If not, then you may want to add that settings diff --git a/web/src/main/webapp/WEB-INF/classes/setup/sql/migrate/v445/DoiServerDatabaseMigration.java b/web/src/main/webapp/WEB-INF/classes/setup/sql/migrate/v445/DoiServerDatabaseMigration.java new file mode 100644 index 000000000000..b2a80efedc4f --- /dev/null +++ b/web/src/main/webapp/WEB-INF/classes/setup/sql/migrate/v445/DoiServerDatabaseMigration.java @@ -0,0 +1,150 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ + +package v445; + +import org.fao.geonet.DatabaseMigrationTask; +import org.fao.geonet.constants.Geonet; +import org.fao.geonet.migration.DatabaseMigrationException; +import org.fao.geonet.utils.Log; +import org.springframework.util.StringUtils; + + +import java.sql.*; + +public class DoiServerDatabaseMigration extends DatabaseMigrationTask { + @Override + public void update(Connection connection) throws SQLException, DatabaseMigrationException { + Log.debug(Geonet.DB, "DoiServerDatabaseMigration"); + + boolean doiEnabled = false; + String doiUrl = ""; + String doiUsername = ""; + String doiPassword = ""; + String doiKey = ""; + String doiLandingPageTemplate = ""; + String doiPublicUrl = ""; + String doiPattern = ""; + + try (Statement statement = connection.createStatement()) { + final String selectDoiSerttingsSQL = "SELECT name, value FROM Settings WHERE name LIKE 'system/publication/doi%'"; + + String columnForName = "name"; + String columnForValue = "value"; + + final ResultSet resultSet = statement.executeQuery(selectDoiSerttingsSQL); + while (resultSet.next()) { + if (resultSet.getString(columnForName).equalsIgnoreCase("system/publication/doi/doienabled")) { + doiEnabled = resultSet.getString(columnForValue).equalsIgnoreCase("true"); + } else if (resultSet.getString(columnForName).equalsIgnoreCase("system/publication/doi/doiurl")) { + doiUrl = resultSet.getString(columnForValue); + } else if (resultSet.getString(columnForName).equalsIgnoreCase("system/publication/doi/doiusername")) { + doiUsername = resultSet.getString(columnForValue); + } else if (resultSet.getString(columnForName).equalsIgnoreCase("system/publication/doi/doipassword")) { + doiPassword = resultSet.getString(columnForValue); + } else if (resultSet.getString(columnForName).equalsIgnoreCase("system/publication/doi/doikey")) { + doiKey = resultSet.getString(columnForValue); + } else if 
(resultSet.getString(columnForName).equalsIgnoreCase("system/publication/doi/doilandingpagetemplate")) { + doiLandingPageTemplate = resultSet.getString(columnForValue); + } else if (resultSet.getString(columnForName).equalsIgnoreCase("system/publication/doi/doipublicurl")) { + doiPublicUrl = resultSet.getString(columnForValue); + } else if (resultSet.getString(columnForName).equalsIgnoreCase("system/publication/doi/doipattern")) { + doiPattern = resultSet.getString(columnForValue); + } + + } + } + + if (doiEnabled) { + + // Check the information is filled + boolean createDoiServer = StringUtils.hasLength(doiUrl) && + StringUtils.hasLength(doiUsername) && + StringUtils.hasLength(doiPassword) && + StringUtils.hasLength(doiKey) && + StringUtils.hasLength(doiPattern); + + if (createDoiServer) { + try (PreparedStatement update = connection.prepareStatement( + "INSERT INTO doiservers " + + "(id, isdefault, landingpagetemplate, name, url, username, password, pattern, prefix, publicurl) " + + "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)") + ) { + + update.setInt(1, 1); + update.setString(2, "y"); + update.setString(3, doiLandingPageTemplate); + update.setString(4, "Default DOI server"); + update.setString(5, doiUrl); + update.setString(6, doiUsername); + update.setString(7, doiPassword); + update.setString(8, doiPattern); + update.setString(9, doiKey); + update.setString(10, doiPublicUrl); + + update.execute(); + + } catch (java.sql.BatchUpdateException e) { + connection.rollback(); + Log.error(Geonet.GEONETWORK, "Error occurred while creating the DOI server:" + e.getMessage(), e); + SQLException next = e.getNextException(); + while (next != null) { + Log.error(Geonet.GEONETWORK, "Next error: " + next.getMessage(), next); + next = e.getNextException(); + } + + throw new RuntimeException(e); + } catch (Exception e) { + connection.rollback(); + + throw new Error(e); + } + + + try (PreparedStatement delete = connection.prepareStatement( + "DELETE FROM Settings WHERE name LIKE 
'system/publication/doi%' and name != 'system/publication/doi/doienabled'") + ) { + delete.execute(); + } catch (java.sql.BatchUpdateException e) { + connection.rollback(); + Log.error(Geonet.GEONETWORK, "Error occurred while creating the DOI server:" + e.getMessage(), e); + SQLException next = e.getNextException(); + while (next != null) { + Log.error(Geonet.GEONETWORK, "Next error: " + next.getMessage(), next); + next = e.getNextException(); + } + + throw new RuntimeException(e); + } catch (Exception e) { + connection.rollback(); + + throw new Error(e); + } + + connection.commit(); + + Log.info(Geonet.DB, "Migration: migrated DOI server"); + } + } + } +} diff --git a/web/src/main/webapp/WEB-INF/classes/setup/sql/migrate/v445/migrate-default.sql b/web/src/main/webapp/WEB-INF/classes/setup/sql/migrate/v445/migrate-default.sql index 7f56bfa6c6cc..1fbdff7dbd0f 100644 --- a/web/src/main/webapp/WEB-INF/classes/setup/sql/migrate/v445/migrate-default.sql +++ b/web/src/main/webapp/WEB-INF/classes/setup/sql/migrate/v445/migrate-default.sql @@ -7,3 +7,4 @@ INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system INSERT INTO Settings (name, value, datatype, position, internal) SELECT distinct 'system/feedback/languages', '', 0, 646, 'n' from settings WHERE NOT EXISTS (SELECT name FROM Settings WHERE name = 'system/feedback/languages'); INSERT INTO Settings (name, value, datatype, position, internal) SELECT distinct 'system/feedback/translationFollowsText', '', 0, 647, 'n' from settings WHERE NOT EXISTS (SELECT name FROM Settings WHERE name = 'system/feedback/translationFollowsText'); + diff --git a/web/src/main/webapp/WEB-INF/classes/setup/sql/migrate/v446/migrate-default.sql b/web/src/main/webapp/WEB-INF/classes/setup/sql/migrate/v446/migrate-default.sql index cdac905504d3..58db297c2f83 100644 --- a/web/src/main/webapp/WEB-INF/classes/setup/sql/migrate/v446/migrate-default.sql +++ 
b/web/src/main/webapp/WEB-INF/classes/setup/sql/migrate/v446/migrate-default.sql @@ -1,4 +1,4 @@ UPDATE Settings SET value='4.4.6' WHERE name='system/platform/version'; -UPDATE Settings SET value='SNAPSHOT' WHERE name='system/platform/subVersion'; +UPDATE Settings SET value='0' WHERE name='system/platform/subVersion'; INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/userSelfRegistration/domainsAllowed', '', 0, 1911, 'y'); diff --git a/web/src/main/webapp/WEB-INF/classes/setup/sql/migrate/v447/migrate-default.sql b/web/src/main/webapp/WEB-INF/classes/setup/sql/migrate/v447/migrate-default.sql new file mode 100644 index 000000000000..a2a75e37d549 --- /dev/null +++ b/web/src/main/webapp/WEB-INF/classes/setup/sql/migrate/v447/migrate-default.sql @@ -0,0 +1,5 @@ +UPDATE Settings SET value='4.4.7' WHERE name='system/platform/version'; +UPDATE Settings SET value='SNAPSHOT' WHERE name='system/platform/subVersion'; + +INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/banner/enable', 'false', 2, 1920, 'n'); +INSERT INTO Settings (name, value, datatype, position, internal) VALUES ('system/auditable/enable', 'false', 2, 12010, 'n'); diff --git a/web/src/main/webapp/WEB-INF/config-security/config-security-core.xml b/web/src/main/webapp/WEB-INF/config-security/config-security-core.xml index f83fa3e0bc98..c769833cefa3 100644 --- a/web/src/main/webapp/WEB-INF/config-security/config-security-core.xml +++ b/web/src/main/webapp/WEB-INF/config-security/config-security-core.xml @@ -65,8 +65,14 @@ + + + + + diff --git a/web/src/main/webapp/WEB-INF/config-security/config-security-gn5-overrides.properties b/web/src/main/webapp/WEB-INF/config-security/config-security-gn5-overrides.properties new file mode 100644 index 000000000000..4a40a8c6c400 --- /dev/null +++ b/web/src/main/webapp/WEB-INF/config-security/config-security-gn5-overrides.properties @@ -0,0 +1,51 @@ +# Copyright (C) 2024 Food and Agriculture Organization of the 
+# United Nations (FAO-UN), United Nations World Food Programme (WFP) +# and United Nations Environment Programme (UNEP) +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or (at +# your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA +# +# Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, +# Rome - Italy. email: geonetwork@osgeo.org +jwtheadersConfiguration.JwtConfiguration.userNameHeaderAttributeName=${JWTHEADERS_UserNameHeaderName:gn5.to.gn4.trusted.json.auth} +jwtheadersConfiguration.JwtConfiguration.userNameFormatChoice=${JWTHEADERS_UserNameFormat:JSON} + +jwtheadersConfiguration.JwtConfiguration.UserNameJsonPath=${JWTHEADERS_UserNameJsonPath:username} + + +jwtheadersConfiguration.JwtConfiguration.rolesJsonPath=${JWTHEADERS_RolesJsonPath:resource_access.live-key2.roles} +jwtheadersConfiguration.JwtConfiguration.rolesHeaderName=${JWTHEADERS_RolesHeaderName:OIDC_id_token_payload} +jwtheadersConfiguration.JwtConfiguration.jwtHeaderRoleSource=${JWTHEADERS_JwtHeaderRoleSource:DB} + +jwtheadersConfiguration.JwtConfiguration.roleConverterString=${JWTHEADERS_RoleConverterString:"GeonetworkAdministrator=ADMINISTRATOR"} +jwtheadersConfiguration.JwtConfiguration.onlyExternalListedRoles=${JWTHEADERS_OnlyExternalListedRoles:false} + +jwtheadersConfiguration.JwtConfiguration.validateToken=${JWTHEADERS_ValidateToken:false} + 
+jwtheadersConfiguration.JwtConfiguration.validateTokenExpiry=${JWTHEADERS_ValidateTokenExpiry:false} + + +jwtheadersConfiguration.JwtConfiguration.validateTokenAgainstURL=${JWTHEADERS_ValidateTokenAgainstURL:true} +jwtheadersConfiguration.JwtConfiguration.validateTokenAgainstURLEndpoint=${JWTHEADERS_ValidateTokenAgainstURLEndpoint:} +jwtheadersConfiguration.JwtConfiguration.validateSubjectWithEndpoint=${JWTHEADERS_ValidateSubjectWithEndpoint:true} + +jwtheadersConfiguration.JwtConfiguration.validateTokenAudience=${JWTHEADERS_ValidateTokenAudience:true} +jwtheadersConfiguration.JwtConfiguration.validateTokenAudienceClaimName=${JWTHEADERS_ValidateTokenAudienceClaimName:""} +jwtheadersConfiguration.JwtConfiguration.validateTokenAudienceClaimValue=${JWTHEADERS_ValidateTokenAudienceClaimValue:""} + +jwtheadersConfiguration.JwtConfiguration.validateTokenSignature=${JWTHEADERS_ValidateTokenSignature:true} +jwtheadersConfiguration.JwtConfiguration.validateTokenSignatureURL=${JWTHEADERS_ValidateTokenSignatureURL:""} + +jwtHeadersSecurityConfig.UpdateProfile=${JWTHEADERS_UpdateProfile:false} +jwtHeadersSecurityConfig.UpdateGroup=${JWTHEADERS_UpdateGroup:false} diff --git a/web/src/main/webapp/WEB-INF/config-security/config-security-gn5.xml b/web/src/main/webapp/WEB-INF/config-security/config-security-gn5.xml new file mode 100644 index 000000000000..2838c0eda775 --- /dev/null +++ b/web/src/main/webapp/WEB-INF/config-security/config-security-gn5.xml @@ -0,0 +1,36 @@ + + + + + + + diff --git a/web/src/main/webapp/WEB-INF/config-security/config-security-jwt-headers-base.xml b/web/src/main/webapp/WEB-INF/config-security/config-security-jwt-headers-base.xml new file mode 100644 index 000000000000..8e2545deb14f --- /dev/null +++ b/web/src/main/webapp/WEB-INF/config-security/config-security-jwt-headers-base.xml @@ -0,0 +1,73 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git 
a/web/src/main/webapp/WEB-INF/config-security/config-security-jwt-headers-multi-overrides.properties b/web/src/main/webapp/WEB-INF/config-security/config-security-jwt-headers-multi-overrides.properties new file mode 100644 index 000000000000..43c21e07fbee --- /dev/null +++ b/web/src/main/webapp/WEB-INF/config-security/config-security-jwt-headers-multi-overrides.properties @@ -0,0 +1,85 @@ +# Copyright (C) 2024 Food and Agriculture Organization of the +# United Nations (FAO-UN), United Nations World Food Programme (WFP) +# and United Nations Environment Programme (UNEP) +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or (at +# your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA +# +# Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, +# Rome - Italy. email: geonetwork@osgeo.org + +## This contains configuration options for TWO Jwt-Headers auth filters. 
+ + +## configuration for the FIRST filter + + +jwtheadersConfiguration.JwtConfiguration.userNameHeaderAttributeName=${JWTHEADERS_UserNameHeaderName:OIDC_id_token_payload} +jwtheadersConfiguration.JwtConfiguration.userNameFormatChoice=${JWTHEADERS_UserNameFormat:JSON} + +jwtheadersConfiguration.JwtConfiguration.UserNameJsonPath=${JWTHEADERS_UserNameJsonPath:preferred_username} + +jwtheadersConfiguration.JwtConfiguration.rolesJsonPath=${JWTHEADERS_RolesJsonPath:resource_access.live-key2.roles} +jwtheadersConfiguration.JwtConfiguration.rolesHeaderName=${JWTHEADERS_RolesHeaderName:OIDC_id_token_payload} +jwtheadersConfiguration.JwtConfiguration.jwtHeaderRoleSource=${JWTHEADERS_JwtHeaderRoleSource:JSON} + +jwtheadersConfiguration.JwtConfiguration.roleConverterString=${JWTHEADERS_RoleConverterString:"GeonetworkAdministrator=ADMINISTRATOR"} +jwtheadersConfiguration.JwtConfiguration.onlyExternalListedRoles=${JWTHEADERS_OnlyExternalListedRoles:false} + +jwtheadersConfiguration.JwtConfiguration.validateToken=${JWTHEADERS_ValidateToken:false} + +jwtheadersConfiguration.JwtConfiguration.validateTokenExpiry=${JWTHEADERS_ValidateTokenExpiry:false} + +jwtheadersConfiguration.JwtConfiguration.validateTokenAgainstURL=${JWTHEADERS_ValidateTokenAgainstURL:true} +jwtheadersConfiguration.JwtConfiguration.validateTokenAgainstURLEndpoint=${JWTHEADERS_ValidateTokenAgainstURLEndpoint:} +jwtheadersConfiguration.JwtConfiguration.validateSubjectWithEndpoint=${JWTHEADERS_ValidateSubjectWithEndpoint:true} + +jwtheadersConfiguration.JwtConfiguration.validateTokenAudience=${JWTHEADERS_ValidateTokenAudience:true} +jwtheadersConfiguration.JwtConfiguration.validateTokenAudienceClaimName=${JWTHEADERS_ValidateTokenAudienceClaimName:""} +jwtheadersConfiguration.JwtConfiguration.validateTokenAudienceClaimValue=${JWTHEADERS_ValidateTokenAudienceClaimValue:""} + +jwtheadersConfiguration.JwtConfiguration.validateTokenSignature=${JWTHEADERS_ValidateTokenSignature:true} 
+jwtheadersConfiguration.JwtConfiguration.validateTokenSignatureURL=${JWTHEADERS_ValidateTokenSignatureURL:""} + + +## configuration for the SECOND filter. The only diffence between this and the above (first filter) is that +## this is configuring the 2nd filter configuration (jwtheadersConfiguration2) +## all the environment variables are the same EXCEPT they end in "2" + +jwtheadersConfiguration2.JwtConfiguration.userNameHeaderAttributeName=${JWTHEADERS_UserNameHeaderFormat2:OIDC_id_token_payload} +jwtheadersConfiguration2.JwtConfiguration.userNameFormatChoice=${JWTHEADERS_UserNameFormat2:JSON} + +jwtheadersConfiguration2.JwtConfiguration.UserNameJsonPath=${JWTHEADERS_UserNameJsonPath2:preferred_username} + +jwtheadersConfiguration2.JwtConfiguration.rolesJsonPath=${JWTHEADERS_RolesJsonPath2:resource_access.live-key2.roles} +jwtheadersConfiguration2.JwtConfiguration.rolesHeaderName=${JWTHEADERS_RolesHeaderName2:OIDC_id_token_payload} +jwtheadersConfiguration2.JwtConfiguration.jwtHeaderRoleSource=${JWTHEADERS_JwtHeaderRoleSource2:JSON} + +jwtheadersConfiguration2.JwtConfiguration.roleConverterString=${JWTHEADERS_RoleConverterString2:"GeonetworkAdministrator=ADMINISTRATOR"} +jwtheadersConfiguration2.JwtConfiguration.onlyExternalListedRoles=${JWTHEADERS_OnlyExternalListedRoles2:false} + +jwtheadersConfiguration2.JwtConfiguration.validateToken=${JWTHEADERS_ValidateToken2:false} + +jwtheadersConfiguration2.JwtConfiguration.validateTokenExpiry=${JWTHEADERS_ValidateTokenExpiry2:false} + +jwtheadersConfiguration2.JwtConfiguration.validateTokenAgainstURL=${JWTHEADERS_ValidateTokenAgainstURL2:true} +jwtheadersConfiguration2.JwtConfiguration.validateTokenAgainstURLEndpoint=${JWTHEADERS_ValidateTokenAgainstURLEndpoint2:} +jwtheadersConfiguration2.JwtConfiguration.validateSubjectWithEndpoint=${JWTHEADERS_ValidateSubjectWithEndpoint2:true} + +jwtheadersConfiguration2.JwtConfiguration.validateTokenAudience=${JWTHEADERS_ValidateTokenAudience2:true} 
+jwtheadersConfiguration2.JwtConfiguration.validateTokenAudienceClaimName=${JWTHEADERS_ValidateTokenAudienceClaimName2:""} +jwtheadersConfiguration2.JwtConfiguration.validateTokenAudienceClaimValue=${JWTHEADERS_ValidateTokenAudienceClaimValue2:""} + +jwtheadersConfiguration2.JwtConfiguration.validateTokenSignature=${JWTHEADERS_ValidateTokenSignature2:true} +jwtheadersConfiguration2.JwtConfiguration.validateTokenSignatureURL=${JWTHEADERS_ValidateTokenSignatureURL2:""} diff --git a/web/src/main/webapp/WEB-INF/config-security/config-security-jwt-headers-multi.xml b/web/src/main/webapp/WEB-INF/config-security/config-security-jwt-headers-multi.xml new file mode 100644 index 000000000000..4f388202810c --- /dev/null +++ b/web/src/main/webapp/WEB-INF/config-security/config-security-jwt-headers-multi.xml @@ -0,0 +1,108 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/web/src/main/webapp/WEB-INF/config-security/config-security-jwt-headers-overrides.properties b/web/src/main/webapp/WEB-INF/config-security/config-security-jwt-headers-overrides.properties new file mode 100644 index 000000000000..9b23af78bed0 --- /dev/null +++ b/web/src/main/webapp/WEB-INF/config-security/config-security-jwt-headers-overrides.properties @@ -0,0 +1,51 @@ +# Copyright (C) 2024 Food and Agriculture Organization of the +# United Nations (FAO-UN), United Nations World Food Programme (WFP) +# and United Nations Environment Programme (UNEP) +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or (at +# your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA +# +# Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, +# Rome - Italy. email: geonetwork@osgeo.org +jwtheadersConfiguration.JwtConfiguration.userNameHeaderAttributeName=${JWTHEADERS_UserNameHeaderName:OIDC_id_token_payload} +jwtheadersConfiguration.JwtConfiguration.userNameFormatChoice=${JWTHEADERS_UserNameFormat:JSON} + +jwtheadersConfiguration.JwtConfiguration.UserNameJsonPath=${JWTHEADERS_UserNameJsonPath:preferred_username} + + +jwtheadersConfiguration.JwtConfiguration.rolesJsonPath=${JWTHEADERS_RolesJsonPath:resource_access.live-key2.roles} +jwtheadersConfiguration.JwtConfiguration.rolesHeaderName=${JWTHEADERS_RolesHeaderName:OIDC_id_token_payload} +jwtheadersConfiguration.JwtConfiguration.jwtHeaderRoleSource=${JWTHEADERS_JwtHeaderRoleSource:JSON} + +jwtheadersConfiguration.JwtConfiguration.roleConverterString=${JWTHEADERS_RoleConverterString:"GeonetworkAdministrator=ADMINISTRATOR"} +jwtheadersConfiguration.JwtConfiguration.onlyExternalListedRoles=${JWTHEADERS_OnlyExternalListedRoles:false} + +jwtheadersConfiguration.JwtConfiguration.validateToken=${JWTHEADERS_ValidateToken:false} + +jwtheadersConfiguration.JwtConfiguration.validateTokenExpiry=${JWTHEADERS_ValidateTokenExpiry:false} + + +jwtheadersConfiguration.JwtConfiguration.validateTokenAgainstURL=${JWTHEADERS_ValidateTokenAgainstURL:true} +jwtheadersConfiguration.JwtConfiguration.validateTokenAgainstURLEndpoint=${JWTHEADERS_ValidateTokenAgainstURLEndpoint:} +jwtheadersConfiguration.JwtConfiguration.validateSubjectWithEndpoint=${JWTHEADERS_ValidateSubjectWithEndpoint:true} + +jwtheadersConfiguration.JwtConfiguration.validateTokenAudience=${JWTHEADERS_ValidateTokenAudience:true} 
+jwtheadersConfiguration.JwtConfiguration.validateTokenAudienceClaimName=${JWTHEADERS_ValidateTokenAudienceClaimName:""} +jwtheadersConfiguration.JwtConfiguration.validateTokenAudienceClaimValue=${JWTHEADERS_ValidateTokenAudienceClaimValue:""} + +jwtheadersConfiguration.JwtConfiguration.validateTokenSignature=${JWTHEADERS_ValidateTokenSignature:true} +jwtheadersConfiguration.JwtConfiguration.validateTokenSignatureURL=${JWTHEADERS_ValidateTokenSignatureURL:""} + +jwtHeadersSecurityConfig.UpdateProfile=${JWTHEADERS_UpdateProfile:true} +jwtHeadersSecurityConfig.UpdateGroup=${JWTHEADERS_UpdateGroup:true} diff --git a/web/src/main/webapp/WEB-INF/config-security/config-security-jwt-headers.xml b/web/src/main/webapp/WEB-INF/config-security/config-security-jwt-headers.xml new file mode 100644 index 000000000000..b0b03ef0423a --- /dev/null +++ b/web/src/main/webapp/WEB-INF/config-security/config-security-jwt-headers.xml @@ -0,0 +1,36 @@ + + + + + + + diff --git a/web/src/main/webapp/WEB-INF/config-security/config-security.xml b/web/src/main/webapp/WEB-INF/config-security/config-security.xml index 817d79df5ede..c99f3b6a244f 100644 --- a/web/src/main/webapp/WEB-INF/config-security/config-security.xml +++ b/web/src/main/webapp/WEB-INF/config-security/config-security.xml @@ -54,6 +54,12 @@ keycloak - Keycloak security (see config-security-keycloak.xml for more details) openidconnect - OAUTH2 Open ID Connect (see config-security-openidconnect.xml and -overrides.properties for details) + openidconnect can be used instead of the keycloak provider + jwt-headers - Support for JSON/JWT headers for username & roles + Access Token validation + + (see config-security-jwt-headers.xml and -overrides.properties for details) + jwt-headers-multi - This adds two configurable jwt-headers filters. This is useful for BOTH OIDC and OAUTH2 (access token) + at the same time. 
+ + (see config-security-jwt-headers-multi.xml and -overrides.properties for details) + gn5 - GeoNetwork 5 security based on jwt-headers ldap - ldap security (see config-security-ldap.xml for more details) ldap-recursive - ldap-recursive security (see config-security-ldap-recursive.xml for more details) ecas - ecas security (see config-security-ecas.xml for more details) diff --git a/web/src/main/webapp/WEB-INF/data/config/codelist/local/thesauri/theme/codelist_unit_distance.rdf b/web/src/main/webapp/WEB-INF/data/config/codelist/local/thesauri/theme/codelist_unit_distance.rdf new file mode 100644 index 000000000000..30132db524a0 --- /dev/null +++ b/web/src/main/webapp/WEB-INF/data/config/codelist/local/thesauri/theme/codelist_unit_distance.rdf @@ -0,0 +1,44 @@ + + + + Units of measurements / Distance + Unités de mesure / Distance + theme + codelist_unit_distance + + + + + km + km + Kilomètre + Kilometer + + + + + m + m + Mètre + Meter + + + + + cm + cm + Centimètre + Centimeter + + + + + ° + ° + Degré + Degree + + + + diff --git a/web/src/main/webapp/WEB-INF/data/config/codelist/local/thesauri/theme/codelist_unit_time.rdf b/web/src/main/webapp/WEB-INF/data/config/codelist/local/thesauri/theme/codelist_unit_time.rdf new file mode 100644 index 000000000000..a0eb0b4b3c86 --- /dev/null +++ b/web/src/main/webapp/WEB-INF/data/config/codelist/local/thesauri/theme/codelist_unit_time.rdf @@ -0,0 +1,59 @@ + + + + Units of measurements / Time + Unités de mesure / Temps + theme + codelist_unit_time + + + + y + y + Année + Year + + + + + M + M + Month + Mois + + + + + d + d + Day + Jour + + + + + h + h + Hour + Heure + + + + + min + min + Minute + Minute + + + + + s + s + Second + Seconde + + + + diff --git a/web/src/main/webapp/xsl/xml/harvesting/webdav.xsl b/web/src/main/webapp/xsl/xml/harvesting/webdav.xsl index bc04b113524d..2f6c3b9d2010 100644 --- a/web/src/main/webapp/xsl/xml/harvesting/webdav.xsl +++ b/web/src/main/webapp/xsl/xml/harvesting/webdav.xsl @@ -17,6 +17,9 @@ + 
+ + diff --git a/web/src/main/webapp/xslt/base-layout-cssjs-loader.xsl b/web/src/main/webapp/xslt/base-layout-cssjs-loader.xsl index a06f9d6948db..627d639e8f2c 100644 --- a/web/src/main/webapp/xslt/base-layout-cssjs-loader.xsl +++ b/web/src/main/webapp/xslt/base-layout-cssjs-loader.xsl @@ -187,6 +187,48 @@ src="{$uiResourcesPath}lib/bootstrap.ext/datepicker/bootstrap-datepicker.fr.min.js?v={$buildNumber}"> + + + + + + + + + + + + + + + + + + + + + + @@ -200,7 +242,6 @@ - diff --git a/web/src/main/webapp/xslt/common/base-variables-metadata.xsl b/web/src/main/webapp/xslt/common/base-variables-metadata.xsl index 1ea889b82356..73b2f683f623 100644 --- a/web/src/main/webapp/xslt/common/base-variables-metadata.xsl +++ b/web/src/main/webapp/xslt/common/base-variables-metadata.xsl @@ -106,10 +106,11 @@ then /root/request/currTab else if (/root/gui/currTab != '') then /root/gui/currTab - else $editorConfig/editor/views/view[@default]/tab[ + else ($editorConfig/editor/views/view[@default and + gn-fn-metadata:check-elementandsession-visibility($schema, $metadata, $serviceInfo, @displayIfRecord, @displayIfServiceInfo)]/tab[ @default and gn-fn-metadata:check-elementandsession-visibility($schema, $metadata, $serviceInfo, @displayIfRecord, @displayIfServiceInfo) - ]/@id"/> + ]/@id)[1]"/> diff --git a/web/src/main/webapp/xslt/common/base-variables.xsl b/web/src/main/webapp/xslt/common/base-variables.xsl index 0385a3252f70..e66718bab731 100644 --- a/web/src/main/webapp/xslt/common/base-variables.xsl +++ b/web/src/main/webapp/xslt/common/base-variables.xsl @@ -44,6 +44,7 @@ + diff --git a/web/src/main/webapp/xslt/common/index-utils.xsl b/web/src/main/webapp/xslt/common/index-utils.xsl index 41f73c406f7d..100389f8936c 100644 --- a/web/src/main/webapp/xslt/common/index-utils.xsl +++ b/web/src/main/webapp/xslt/common/index-utils.xsl @@ -244,24 +244,30 @@ + gn-fn-index:add-field languages + gn-fn-index:add-field mainLanguage --> - + + + + - - + + - @@ -707,10 +713,10 @@ - + diff --git 
a/web/src/main/webapp/xslt/common/utility-tpl.xsl b/web/src/main/webapp/xslt/common/utility-tpl.xsl index bd470fe94dac..6b9d15bb6537 100644 --- a/web/src/main/webapp/xslt/common/utility-tpl.xsl +++ b/web/src/main/webapp/xslt/common/utility-tpl.xsl @@ -22,6 +22,7 @@ --> @@ -125,8 +126,10 @@ + ((http|https|ftp)://[^\s()]+[^\s\[\]`!(){};:'\\".,?«»“”‘’]) + + regex="{$regex}"> diff --git a/web/src/main/webapp/xslt/services/thesaurus/ldregistry-to-skos.xsl b/web/src/main/webapp/xslt/services/thesaurus/ldregistry-to-skos.xsl index 1cec16ae7309..02cdb7aa395b 100644 --- a/web/src/main/webapp/xslt/services/thesaurus/ldregistry-to-skos.xsl +++ b/web/src/main/webapp/xslt/services/thesaurus/ldregistry-to-skos.xsl @@ -71,7 +71,6 @@ - diff --git a/web/src/main/webapp/xslt/services/thesaurus/owl-to-skos.xsl b/web/src/main/webapp/xslt/services/thesaurus/owl-to-skos.xsl index 449ee6d70d51..b7a713803310 100644 --- a/web/src/main/webapp/xslt/services/thesaurus/owl-to-skos.xsl +++ b/web/src/main/webapp/xslt/services/thesaurus/owl-to-skos.xsl @@ -24,9 +24,9 @@ @@ -40,12 +40,38 @@ - + + + + + - + + + + + + + + + + + + + + + + + + - + + + + + + + + diff --git a/web/src/main/webapp/xslt/services/thesaurus/registry-to-skos.xsl b/web/src/main/webapp/xslt/services/thesaurus/registry-to-skos.xsl index 10ee275efd3c..8f12f4bedbc3 100644 --- a/web/src/main/webapp/xslt/services/thesaurus/registry-to-skos.xsl +++ b/web/src/main/webapp/xslt/services/thesaurus/registry-to-skos.xsl @@ -126,7 +126,6 @@ - diff --git a/web/src/main/webapp/xslt/services/thesaurus/to-jskos.xsl b/web/src/main/webapp/xslt/services/thesaurus/to-jskos.xsl index e8f4e575c7e4..8493f48ca600 100644 --- a/web/src/main/webapp/xslt/services/thesaurus/to-jskos.xsl +++ b/web/src/main/webapp/xslt/services/thesaurus/to-jskos.xsl @@ -22,7 +22,10 @@ ~ Rome - Italy. 
email: geonetwork@osgeo.org --> - + @@ -45,8 +48,8 @@ - - + + diff --git a/web/src/main/webapp/xslt/ui-metadata/form-builder.xsl b/web/src/main/webapp/xslt/ui-metadata/form-builder.xsl index c37260a275d8..576061a5fd43 100644 --- a/web/src/main/webapp/xslt/ui-metadata/form-builder.xsl +++ b/web/src/main/webapp/xslt/ui-metadata/form-builder.xsl @@ -78,6 +78,10 @@ Rendered hidden in a block below the input. --> + + + @@ -106,6 +110,8 @@ + + + + + + + + - - - @@ -273,20 +282,39 @@ - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ +
+
+
+ + + +
+
@@ -562,7 +590,7 @@ true false - +   @@ -929,9 +957,9 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ +
+ +
+
+ + + +
+
+
+
@@ -1635,7 +1701,7 @@ id="gn-attr-add-button-{$fieldName}" data-gn-click-and-spin="add('{$ref}', '{@name}', '{$insertRef}', null, true)" title="{$attributeLabel/description}"> - + @@ -1654,7 +1720,7 @@ data-params="{$process-params}" data-icon="{$btnClass}" data-name="{normalize-space($strings/*[name() = $process-label-key])}" - data-help="{normalize-space($strings/*[name() = concat($process-name, 'Help')])}"/> + data-help="{normalize-space($strings/*[name() = concat($process-name, 'Help')])}">

@@ -1671,7 +1737,7 @@ data-params="{$process-params}" data-icon="{$btnClass}" data-name="{normalize-space($strings/*[name() = $process-name])}" - data-help="{normalize-space($strings/*[name() = concat($process-name, 'Help')])}"/> + data-help="{normalize-space($strings/*[name() = concat($process-name, 'Help')])}">
@@ -1770,6 +1836,14 @@ + + + + + + + diff --git a/web/src/main/webapp/xslt/ui-metadata/form-configurator.xsl b/web/src/main/webapp/xslt/ui-metadata/form-configurator.xsl index 2e423b7ea752..d342960844c3 100644 --- a/web/src/main/webapp/xslt/ui-metadata/form-configurator.xsl +++ b/web/src/main/webapp/xslt/ui-metadata/form-configurator.xsl @@ -26,6 +26,7 @@ xmlns:gn-fn-metadata="http://geonetwork-opensource.org/xsl/functions/metadata" xmlns:gn="http://www.fao.org/geonetwork" xmlns:xs="http://www.w3.org/2001/XMLSchema" + xmlns:util="java:org.fao.geonet.util.XslUtil" xmlns:saxon="http://saxon.sf.net/" extension-element-prefixes="saxon" exclude-result-prefixes="#all" version="2.0"> @@ -262,6 +263,120 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + +
+
+
diff --git a/web/src/main/webapp/xslt/ui-metadata/menu-builder.xsl b/web/src/main/webapp/xslt/ui-metadata/menu-builder.xsl index 682ec672407b..5fa12d2ca5c8 100644 --- a/web/src/main/webapp/xslt/ui-metadata/menu-builder.xsl +++ b/web/src/main/webapp/xslt/ui-metadata/menu-builder.xsl @@ -40,7 +40,7 @@
+ data-all-depth="{if ($isFlatMode) then 'true' else 'false'}">