From 48753188901bfbe4e8fbe1a78760653456a2f277 Mon Sep 17 00:00:00 2001 From: Paul Latzelsperger Date: Thu, 7 May 2026 12:22:41 +0200 Subject: [PATCH 01/17] fix(build): use stable GW CRDs for Traefik --- .github/workflows/run-e2e-tests.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml index e7b1c645..fceaf2df 100644 --- a/.github/workflows/run-e2e-tests.yml +++ b/.github/workflows/run-e2e-tests.yml @@ -74,6 +74,9 @@ jobs: - name: "Install Traefik Gateway controller" run: |- + # install Gateway API CRDs + kubectl apply --server-side --force-conflicts -f https://github.com/kubernetes-sigs/gateway-api/releases/download/v1.5.1/standard-install.yaml + helm repo add traefik https://traefik.github.io/charts helm repo update helm upgrade --install --namespace traefik traefik traefik/traefik --create-namespace -f values.yaml @@ -84,10 +87,7 @@ jobs: # forward port 80 -> 8080 kubectl -n traefik port-forward svc/traefik 8080:80 & - # install Gateway API CRDs - kubectl apply --server-side --force-conflicts -f https://github.com/kubernetes-sigs/gateway-api/releases/download/v1.4.1/experimental-install.yaml - - sleep 5 # to be safe` + sleep 5 # to be safe - name: "Deploy MVD" run: |- From 2ed832a8837b132f3a1296479737b0bd0e336d51 Mon Sep 17 00:00:00 2001 From: Paul Latzelsperger Date: Thu, 7 May 2026 13:03:33 +0200 Subject: [PATCH 02/17] use standard channel --- .github/workflows/run-e2e-tests.yml | 6 +++--- values.yaml | 1 - 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml index fceaf2df..1390cc42 100644 --- a/.github/workflows/run-e2e-tests.yml +++ b/.github/workflows/run-e2e-tests.yml @@ -74,9 +74,6 @@ jobs: - name: "Install Traefik Gateway controller" run: |- - # install Gateway API CRDs - kubectl apply --server-side --force-conflicts -f 
https://github.com/kubernetes-sigs/gateway-api/releases/download/v1.5.1/standard-install.yaml - helm repo add traefik https://traefik.github.io/charts helm repo update helm upgrade --install --namespace traefik traefik traefik/traefik --create-namespace -f values.yaml @@ -84,6 +81,9 @@ jobs: # Wait for traefik to be ready kubectl rollout status deployment/traefik -n traefik --timeout=120s + # install Gateway API CRDs + kubectl apply --server-side --force-conflicts -f https://github.com/kubernetes-sigs/gateway-api/releases/download/v1.5.1/standard-install.yaml + # forward port 80 -> 8080 kubectl -n traefik port-forward svc/traefik 8080:80 & diff --git a/values.yaml b/values.yaml index ec91f027..6115c796 100644 --- a/values.yaml +++ b/values.yaml @@ -46,7 +46,6 @@ ports: providers: kubernetesGateway: enabled: true - experimentalChannel: true kubernetesCRD: # -- Load Kubernetes IngressRoute provider enabled: true From 4bcb97f025e4fa08227dc984097968196d2f44ef Mon Sep 17 00:00:00 2001 From: Paul Latzelsperger Date: Thu, 7 May 2026 18:24:15 +0200 Subject: [PATCH 03/17] add dataplane for consumer --- .../application/controlplane-seed.yaml | 30 +++- .../application/dataplane-config.yaml | 45 ++++++ k8s/consumer/application/dataplane.yaml | 150 ++++++++++++++++++ k8s/consumer/kustomization.yml | 2 + .../src/test/resources/transfer-request.json | 1 - 5 files changed, 220 insertions(+), 8 deletions(-) create mode 100644 k8s/consumer/application/dataplane-config.yaml create mode 100644 k8s/consumer/application/dataplane.yaml diff --git a/k8s/consumer/application/controlplane-seed.yaml b/k8s/consumer/application/controlplane-seed.yaml index 57a50c5c..42f16158 100644 --- a/k8s/consumer/application/controlplane-seed.yaml +++ b/k8s/consumer/application/controlplane-seed.yaml @@ -53,16 +53,19 @@ spec: set -e MGMT_URL="http://controlplane.consumer.svc.cluster.local:8081/api/mgmt/v5beta/celexpressions" + 
DATAPLANE_URL="http://controlplane.consumer.svc.cluster.local:8081/api/mgmt/v4beta/dataplanes" - # Posts to the management API, treating 409 (already exists) as success. - create_cel() { - HTTP_STATUS=$(curl -sS -o /dev/null -w "%{http_code}" -X POST "${MGMT_URL}" \ + post() { + RESPONSE=$(curl -sS -w "\n%{http_code}" -X "$2" "$1" \ -H "Content-Type: application/json" \ - -d "$1") + -d "$3") + HTTP_STATUS=$(echo "${RESPONSE}" | tail -n1) + BODY=$(echo "${RESPONSE}" | sed '$d') if [ "${HTTP_STATUS}" -eq 409 ]; then - echo "⚠ CEL Expression already exists (HTTP 409), skipping." + echo "⚠ Already exists (HTTP 409), skipping." elif [ "${HTTP_STATUS}" -lt 200 ] || [ "${HTTP_STATUS}" -ge 300 ]; then echo "✗ Unexpected HTTP status: ${HTTP_STATUS}" + echo "Response body: ${BODY}" exit 1 fi } @@ -71,7 +74,7 @@ spec: echo "Step 1: Create Membership CEL Expression " echo "================================================" - create_cel '{ + post "${MGMT_URL}" "POST" '{ "@context": [ "https://w3id.org/edc/connector/management/v2" ], @@ -94,7 +97,7 @@ spec: echo "Step 2: Create ManufacturerCredential CEL Expression" echo "================================================" - create_cel '{ + post "${MGMT_URL}" "POST" '{ "@context": [ "https://w3id.org/edc/connector/management/v2" ], @@ -112,6 +115,19 @@ spec: echo "✓ ManufacturerCredential CEL Expression done" + echo "" + echo "================================================" + echo "Step 3: Register dataplane" + echo "================================================" + + post "${DATAPLANE_URL}" "POST" '{ + "allowedSourceTypes": [ "HttpData", "HttpCertData" ], + "allowedTransferTypes": [ "HttpData-PULL" ], + "url": "http://dataplane.consumer.svc.cluster.local:8083/api/control/v1/dataflows" + }' + + echo "✓ dataplane registered" + echo "" echo "================================================" echo "Controlplane seeding completed successfully!" 
diff --git a/k8s/consumer/application/dataplane-config.yaml b/k8s/consumer/application/dataplane-config.yaml new file mode 100644 index 00000000..3216fec9 --- /dev/null +++ b/k8s/consumer/application/dataplane-config.yaml @@ -0,0 +1,45 @@ +# +# Copyright (c) 2026 Metaform Systems, Inc. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License, Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# +# Contributors: +# Metaform Systems, Inc. - initial API and implementation +# + +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: dataplane-config + namespace: consumer +data: + edc.hostname: "dataplane.consumer.svc.cluster.local" + + edc.transfer.proxy.token.verifier.publickey.alias: "dataplane-private" + edc.transfer.proxy.token.signer.privatekey.alias: "dataplane-public" + + edc.dpf.selector.url: "http://controlplane.consumer.svc.cluster.local:8083/api/control/v1/dataplanes" + + web.http.port: "8080" + web.http.path: "/api" + web.http.control.port: "8083" + web.http.control.path: "/api/control" + web.http.public.port: "11002" + web.http.public.path: "/api/public" + web.http.certs.port: "8186" + web.http.certs.path: "/api/data" + + edc.vault.hashicorp.url: "http://vault.consumer.svc.cluster.local:8200" + edc.vault.hashicorp.token: "root" + + JAVA_TOOL_OPTIONS: "-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=1044" + + edc.datasource.default.url: "jdbc:postgresql://postgres.consumer.svc.cluster.local:5432/dataplane" + edc.datasource.default.user: "dp" + edc.datasource.default.password: "dp" + edc.sql.schema.autocreate: "true" diff --git a/k8s/consumer/application/dataplane.yaml b/k8s/consumer/application/dataplane.yaml new file mode 100644 index 00000000..4bad1c67 --- /dev/null +++ b/k8s/consumer/application/dataplane.yaml @@ -0,0 +1,150 @@ +# +# Copyright (c) 2026 Metaform Systems, Inc. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License, Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# +# Contributors: +# Metaform Systems, Inc. - initial API and implementation +# + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: dataplane + namespace: consumer + labels: + app: dataplane + type: edcv-app +spec: + replicas: 1 + selector: + matchLabels: + app: dataplane + template: + metadata: + labels: + app: dataplane + platform: edcv + type: edcv-app + spec: + containers: + - name: dataplane + image: ghcr.io/eclipse-edc/minimumviabledataspace/dataplane:latest + imagePullPolicy: Always + envFrom: + - configMapRef: + name: dataplane-config + ports: + - containerPort: 1044 + name: debug-port + livenessProbe: + httpGet: + path: /api/check/liveness + port: 8080 + failureThreshold: 10 + periodSeconds: 5 + timeoutSeconds: 30 + readinessProbe: + httpGet: + path: /api/check/readiness + port: 8080 + failureThreshold: 10 + periodSeconds: 5 + timeoutSeconds: 30 + startupProbe: + httpGet: + path: /api/check/startup + port: 8080 + failureThreshold: 10 + periodSeconds: 5 + timeoutSeconds: 30 + +--- +apiVersion: v1 +kind: Service +metadata: + name: dataplane + namespace: consumer +spec: + type: ClusterIP + selector: + app: dataplane + ports: + - name: health + port: 8080 + targetPort: 8080 + - name: control + port: 8083 + targetPort: 8083 + - name: certs + port: 8186 + targetPort: 8186 + - name: public + port: 11002 + targetPort: 11002 + +--- +apiVersion: gateway.networking.k8s.io/v1 +kind: HTTPRoute +metadata: + name: httproute-dataplane + namespace: consumer +spec: + parentRefs: + - name: consumer-gateway + namespace: consumer + hostnames: + - dp.consumer.localhost + rules: + # /dp/public → public port 11002 + - matches: + - path: + type: PathPrefix + value: /public + filters: + - type: URLRewrite + urlRewrite: + path: + type: 
ReplacePrefixMatch + replacePrefixMatch: / + backendRefs: + - name: dataplane + port: 11002 + weight: 1 + + # /app/internal → control port 8083 + - matches: + - path: + type: PathPrefix + value: /app/internal + filters: + - type: URLRewrite + urlRewrite: + path: + type: ReplacePrefixMatch + replacePrefixMatch: / + backendRefs: + - name: dataplane + port: 8083 + weight: 1 + + # /app/public → certs port 8186 + - matches: + - path: + type: PathPrefix + value: /app/public + filters: + - type: URLRewrite + urlRewrite: + path: + type: ReplacePrefixMatch + replacePrefixMatch: / + backendRefs: + - name: dataplane + port: 8186 + weight: 1 \ No newline at end of file diff --git a/k8s/consumer/kustomization.yml b/k8s/consumer/kustomization.yml index 89a29f07..bf02deca 100644 --- a/k8s/consumer/kustomization.yml +++ b/k8s/consumer/kustomization.yml @@ -19,6 +19,8 @@ resources: - application/controlplane-config.yaml - application/controlplane.yaml - application/controlplane-seed.yaml + - application/dataplane-config.yaml + - application/dataplane.yaml - application/identityhub-config.yaml - application/identityhub.yaml - application/identityhub-seed.yaml \ No newline at end of file diff --git a/tests/end2end/src/test/resources/transfer-request.json b/tests/end2end/src/test/resources/transfer-request.json index f4806ce2..ec73698a 100644 --- a/tests/end2end/src/test/resources/transfer-request.json +++ b/tests/end2end/src/test/resources/transfer-request.json @@ -3,7 +3,6 @@ "odrl": "http://www.w3.org/ns/odrl/2/" }, "@type": "TransferRequest", - "assetId": "asset-1", "counterPartyAddress": "{{PROVIDER_DSP_URL}}", "connectorId": "{{PROVIDER_ID}}", "contractId": "{{CONTRACT_ID}}", From 55eed08dbfa083644f0991fe017f1388b651b07c Mon Sep 17 00:00:00 2001 From: Paul Latzelsperger Date: Thu, 7 May 2026 18:28:51 +0200 Subject: [PATCH 04/17] fix url paths in Bruno --- .../Get EDR DataAddress for TransferId.bru | 2 +- Requests/ControlPlane Management/Get cached EDRs.bru | 2 +- 2 files 
changed, 2 insertions(+), 2 deletions(-) diff --git a/Requests/ControlPlane Management/Get EDR DataAddress for TransferId.bru b/Requests/ControlPlane Management/Get EDR DataAddress for TransferId.bru index 115d1b24..3558f782 100644 --- a/Requests/ControlPlane Management/Get EDR DataAddress for TransferId.bru +++ b/Requests/ControlPlane Management/Get EDR DataAddress for TransferId.bru @@ -5,7 +5,7 @@ meta { } get { - url: {{CONSUMER_CP}}/api/mgmt/v4/edrs/{{TRANSFER_PROCESS_ID}}/dataaddress + url: {{CONSUMER_CP}}/api/mgmt/v3/edrs/{{TRANSFER_PROCESS_ID}}/dataaddress body: none auth: inherit } diff --git a/Requests/ControlPlane Management/Get cached EDRs.bru b/Requests/ControlPlane Management/Get cached EDRs.bru index efd93cad..ca77e9ac 100644 --- a/Requests/ControlPlane Management/Get cached EDRs.bru +++ b/Requests/ControlPlane Management/Get cached EDRs.bru @@ -5,7 +5,7 @@ meta { } post { - url: {{CONSUMER_CP}}/api/mgmt/v4/edrs/request + url: {{CONSUMER_CP}}/api/mgmt/v3/edrs/request body: json auth: inherit } From 330f46d6cbd5adfa53e99133e1dcd5366cef7356 Mon Sep 17 00:00:00 2001 From: Paul Latzelsperger Date: Thu, 7 May 2026 21:35:24 +0200 Subject: [PATCH 05/17] patch DP registration --- .../controller/DataplaneRegistrationApiController.java | 9 +++++++-- k8s/consumer/application/controlplane-seed.yaml | 2 +- k8s/provider/application/controlplane-seed.yaml | 2 +- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/extensions/data-plane-registration/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataplaneRegistrationApiController.java b/extensions/data-plane-registration/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataplaneRegistrationApiController.java index 641f7327..ec0f2043 100644 --- a/extensions/data-plane-registration/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataplaneRegistrationApiController.java +++ 
b/extensions/data-plane-registration/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataplaneRegistrationApiController.java @@ -17,10 +17,13 @@ import jakarta.ws.rs.Consumes; import jakarta.ws.rs.POST; import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; import jakarta.ws.rs.Produces; import org.eclipse.edc.connector.dataplane.selector.spi.DataPlaneSelectorService; import org.eclipse.edc.connector.dataplane.selector.spi.instance.DataPlaneInstance; +import java.util.Base64; + import static jakarta.ws.rs.core.MediaType.APPLICATION_JSON; import static org.eclipse.edc.web.spi.exception.ServiceResultHandler.exceptionMapper; @@ -36,8 +39,10 @@ public DataplaneRegistrationApiController(DataPlaneSelectorService dataPlaneSele } @POST - public void registerDataplane(DataPlaneInstance instance) { - dataPlaneSelectorService.register(instance.toBuilder().build()) // ugly, but will initialize all internal objects e.g. clock + @Path("/{participantContextId}") + public void registerDataplane(DataPlaneInstance instance, @PathParam("participantContextId") String participantContextId) { + var decoded = new String(Base64.getUrlDecoder().decode(participantContextId)); + dataPlaneSelectorService.register(instance.toBuilder().participantContextId(decoded).build()) // ugly, but will initialize all internal objects e.g. 
clock .orElseThrow(exceptionMapper(DataPlaneInstance.class)); } diff --git a/k8s/consumer/application/controlplane-seed.yaml b/k8s/consumer/application/controlplane-seed.yaml index 42f16158..97e8a5dd 100644 --- a/k8s/consumer/application/controlplane-seed.yaml +++ b/k8s/consumer/application/controlplane-seed.yaml @@ -53,7 +53,7 @@ spec: set -e MGMT_URL="http://controlplane.consumer.svc.cluster.local:8081/api/mgmt/v5beta/celexpressions" - DATAPLANE_URL="http://controlplane.consumer.svc.cluster.local:8081/api/mgmt/v4beta/dataplanes" + DATAPLANE_URL="http://controlplane.consumer.svc.cluster.local:8081/api/mgmt/v4beta/dataplanes/ZGlkOndlYjppZGVudGl0eWh1Yi5jb25zdW1lci5zdmMuY2x1c3Rlci5sb2NhbCUzQTcwODM6Y29uc3VtZXI=" post() { RESPONSE=$(curl -sS -w "\n%{http_code}" -X "$2" "$1" \ diff --git a/k8s/provider/application/controlplane-seed.yaml b/k8s/provider/application/controlplane-seed.yaml index 8de7101b..32ee3231 100644 --- a/k8s/provider/application/controlplane-seed.yaml +++ b/k8s/provider/application/controlplane-seed.yaml @@ -57,7 +57,7 @@ spec: ASSETS_URL="http://controlplane.provider.svc.cluster.local:8081/api/mgmt/v4/assets" POLICIES_URL="http://controlplane.provider.svc.cluster.local:8081/api/mgmt/v4/policydefinitions" CONTRACTDEFS_URL="http://controlplane.provider.svc.cluster.local:8081/api/mgmt/v4/contractdefinitions" - DATAPLANE_URL="http://controlplane.provider.svc.cluster.local:8081/api/mgmt/v4beta/dataplanes" + DATAPLANE_URL="http://controlplane.provider.svc.cluster.local:8081/api/mgmt/v4beta/dataplanes/ZGlkOndlYjppZGVudGl0eWh1Yi5wcm92aWRlci5zdmMuY2x1c3Rlci5sb2NhbCUzQTcwODM6cHJvdmlkZXI=" CEL_URL="http://controlplane.provider.svc.cluster.local:8081/api/mgmt/v5beta/celexpressions" # Posts to the management API, treating 409 (already exists) as success. 
From b6fa21ef76c9557cdd46dc02fa60b044e1470db2 Mon Sep 17 00:00:00 2001 From: Paul Latzelsperger Date: Mon, 11 May 2026 10:47:14 +0200 Subject: [PATCH 06/17] add registration to seed jobs --- gradle/libs.versions.toml | 4 + .../application/controlplane-config.yaml | 2 + .../application/controlplane-seed.yaml | 30 +- .../application/dataplane-config.yaml | 5 +- k8s/consumer/application/dataplane.yaml | 5 + .../application/controlplane-config.yaml | 2 + .../application/controlplane-seed.yaml | 28 +- .../application/dataplane-config.yaml | 3 + k8s/provider/application/dataplane.yaml | 5 + launchers/dataplane/build.gradle.kts | 17 +- .../dataplane/keyseed/KeySeedExtension.java | 4 +- .../SignalingDataPlaneRuntimeExtension.java | 262 ++++++++++++++++++ ...rg.eclipse.edc.spi.system.ServiceExtension | 3 +- 13 files changed, 347 insertions(+), 23 deletions(-) create mode 100644 launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/SignalingDataPlaneRuntimeExtension.java diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index 278dbd93..a6cd917f 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -3,10 +3,12 @@ format.version = "1.1" [versions] awaitility = "4.3.0" +dataplane-sdk = "0.0.10-SNAPSHOT" edc = "0.18.0-SNAPSHOT" edc-build = "1.5.0" jackson = "2.21.2" jakarta-json = "2.1.3" +nimbus = "10.8" parsson = "1.1.6" postgres = "42.7.10" restAssured = "6.0.0" @@ -108,6 +110,8 @@ jakarta-rsApi = { module = "jakarta.ws.rs:jakarta.ws.rs-api", version.ref = "rsA tink = { module = "com.google.crypto.tink:tink", version = "1.21.0" } jackson-datatype-jakarta-jsonp = { module = "com.fasterxml.jackson.datatype:jackson-datatype-jakarta-jsonp", version.ref = "jackson" } parsson = { module = "org.eclipse.parsson:parsson", version.ref = "parsson" } +dataplane-sdk = { module = "org.eclipse.dataplane-core:dataplane-sdk", version.ref = "dataplane-sdk" } +nimbus-jwt = { module = "com.nimbusds:nimbus-jose-jwt", version.ref = "nimbus" 
} # BOM modules edc-bom-controlplane = { module = "org.eclipse.edc:controlplane-dcp-bom", version.ref = "edc" } diff --git a/k8s/consumer/application/controlplane-config.yaml b/k8s/consumer/application/controlplane-config.yaml index c69107b2..6cc44823 100644 --- a/k8s/consumer/application/controlplane-config.yaml +++ b/k8s/consumer/application/controlplane-config.yaml @@ -39,6 +39,8 @@ data: web.http.management.path: "/api/mgmt" web.http.control.port: "8083" web.http.control.path: "/api/control" + web.http.signaling.port: "8182" + web.http.signaling.path: "/api/signaling" # dataplane config edc.datasource.default.url: "jdbc:postgresql://postgres.consumer.svc.cluster.local:5432/controlplane" diff --git a/k8s/consumer/application/controlplane-seed.yaml b/k8s/consumer/application/controlplane-seed.yaml index 97e8a5dd..38955dd5 100644 --- a/k8s/consumer/application/controlplane-seed.yaml +++ b/k8s/consumer/application/controlplane-seed.yaml @@ -53,8 +53,6 @@ spec: set -e MGMT_URL="http://controlplane.consumer.svc.cluster.local:8081/api/mgmt/v5beta/celexpressions" - DATAPLANE_URL="http://controlplane.consumer.svc.cluster.local:8081/api/mgmt/v4beta/dataplanes/ZGlkOndlYjppZGVudGl0eWh1Yi5jb25zdW1lci5zdmMuY2x1c3Rlci5sb2NhbCUzQTcwODM6Y29uc3VtZXI=" - post() { RESPONSE=$(curl -sS -w "\n%{http_code}" -X "$2" "$1" \ -H "Content-Type: application/json" \ @@ -117,16 +115,32 @@ spec: echo "" echo "================================================" - echo "Step 3: Register dataplane" + echo "Step 3a: Register dataplane with controlplane" + echo "================================================" + + REG_URL="http://controlplane.consumer.svc.cluster.local:8081/api/mgmt/v4/dataplanes" + post "${REG_URL}" "PUT" '{ + "dataplaneId": "consumer-participant-dataplane", + "endpoint": "http://dataplane.consumer.svc.cluster.local:8182/api/signaling", + "transferTypes": [ "HttpData-PULL" ], + "labels": [] + }' + + echo "✓ dataplane registered with controlplane" + + echo "" + echo 
"================================================" + echo "Step 3b: Register controlplane with dataplane" echo "================================================" - post "${DATAPLANE_URL}" "POST" '{ - "allowedSourceTypes": [ "HttpData", "HttpCertData" ], - "allowedTransferTypes": [ "HttpData-PULL" ], - "url": "http://dataplane.consumer.svc.cluster.local:8083/api/control/v1/dataflows" + REG_URL="http://dataplane.consumer.svc.cluster.local:8182/api/signaling/v1/controlplanes" + post "${REG_URL}" "PUT" '{ + "controlplaneId": "consumer-participant-controlplane", + "endpoint": "http://controlplane.consumer.svc.cluster.local:8182/api/signaling", + "authorizationProfile": null }' - echo "✓ dataplane registered" + echo "✓ controlplane registered with dataplane" echo "" echo "================================================" diff --git a/k8s/consumer/application/dataplane-config.yaml b/k8s/consumer/application/dataplane-config.yaml index 3216fec9..a1977380 100644 --- a/k8s/consumer/application/dataplane-config.yaml +++ b/k8s/consumer/application/dataplane-config.yaml @@ -19,7 +19,7 @@ metadata: namespace: consumer data: edc.hostname: "dataplane.consumer.svc.cluster.local" - + dataplane.id: "consumer-dataplane" edc.transfer.proxy.token.verifier.publickey.alias: "dataplane-private" edc.transfer.proxy.token.signer.privatekey.alias: "dataplane-public" @@ -27,6 +27,9 @@ data: web.http.port: "8080" web.http.path: "/api" + web.http.signaling.port: "8182" + web.http.signaling.path: "/api/signaling" + web.http.control.port: "8083" web.http.control.path: "/api/control" web.http.public.port: "11002" diff --git a/k8s/consumer/application/dataplane.yaml b/k8s/consumer/application/dataplane.yaml index 4bad1c67..6ceb5bdf 100644 --- a/k8s/consumer/application/dataplane.yaml +++ b/k8s/consumer/application/dataplane.yaml @@ -42,6 +42,8 @@ spec: ports: - containerPort: 1044 name: debug-port + - containerPort: 8182 + name: signaling-port livenessProbe: httpGet: path: /api/check/liveness @@ 
-78,6 +80,9 @@ spec: - name: health port: 8080 targetPort: 8080 + - name: signaling + port: 8182 + targetPort: 8182 - name: control port: 8083 targetPort: 8083 diff --git a/k8s/provider/application/controlplane-config.yaml b/k8s/provider/application/controlplane-config.yaml index cb756c65..d26123d8 100644 --- a/k8s/provider/application/controlplane-config.yaml +++ b/k8s/provider/application/controlplane-config.yaml @@ -38,6 +38,8 @@ data: web.http.management.path: "/api/mgmt" web.http.control.port: "8083" web.http.control.path: "/api/control" + web.http.signaling.port: "8182" + web.http.signaling.path: "/api/signaling" # dataplane config edc.datasource.default.url: "jdbc:postgresql://postgres.provider.svc.cluster.local:5432/controlplane" diff --git a/k8s/provider/application/controlplane-seed.yaml b/k8s/provider/application/controlplane-seed.yaml index 32ee3231..d6875aa4 100644 --- a/k8s/provider/application/controlplane-seed.yaml +++ b/k8s/provider/application/controlplane-seed.yaml @@ -305,16 +305,32 @@ spec: echo "" echo "================================================" - echo "Step 8: Register dataplane" + echo "Step 3a: Register dataplane with controlplane" echo "================================================" - post "${DATAPLANE_URL}" "POST" '{ - "allowedSourceTypes": [ "HttpData", "HttpCertData" ], - "allowedTransferTypes": [ "HttpData-PULL" ], - "url": "http://dataplane.provider.svc.cluster.local:8083/api/control/v1/dataflows" + REG_URL="http://controlplane.provider.svc.cluster.local:8081/api/mgmt/v4/dataplanes" + post "${REG_URL}" "PUT" '{ + "dataplaneId": "provider-participant-dataplane", + "endpoint": "http://dataplane.provider.svc.cluster.local:8182/api/signaling", + "transferTypes": [ "HttpData-PULL" ], + "labels": [] }' - echo "✓ dataplane registered" + echo "✓ dataplane registered with controlplane" + + echo "" + echo "================================================" + echo "Step 3b: Register controlplane with dataplane" + echo 
"================================================" + + REG_URL="http://dataplane.provider.svc.cluster.local:8182/api/signaling/v1/controlplanes" + post "${REG_URL}" "PUT" '{ + "controlplaneId": "provider-participant-controlplane", + "endpoint": "http://controlplane.provider.svc.cluster.local:8182/api/signaling", + "authorizationProfile": null + }' + + echo "✓ controlplane registered with dataplane" echo "" echo "================================================" diff --git a/k8s/provider/application/dataplane-config.yaml b/k8s/provider/application/dataplane-config.yaml index 42ddce06..dea70bc9 100644 --- a/k8s/provider/application/dataplane-config.yaml +++ b/k8s/provider/application/dataplane-config.yaml @@ -19,6 +19,7 @@ metadata: namespace: provider data: edc.hostname: "dataplane.provider.svc.cluster.local" + dataplane.id: "provider-dataplane" edc.transfer.proxy.token.verifier.publickey.alias: "dataplane-private" edc.transfer.proxy.token.signer.privatekey.alias: "dataplane-public" @@ -33,6 +34,8 @@ data: web.http.public.path: "/api/public" web.http.certs.port: "8186" web.http.certs.path: "/api/data" + web.http.signaling.port: "8182" + web.http.signaling.path: "/api/signaling" edc.vault.hashicorp.url: "http://vault.provider.svc.cluster.local:8200" edc.vault.hashicorp.token: "root" diff --git a/k8s/provider/application/dataplane.yaml b/k8s/provider/application/dataplane.yaml index d1d35cfe..a2a4cd6b 100644 --- a/k8s/provider/application/dataplane.yaml +++ b/k8s/provider/application/dataplane.yaml @@ -40,6 +40,8 @@ spec: - configMapRef: name: dataplane-config ports: + - containerPort: 8182 + name: signaling-port - containerPort: 1044 name: debug-port livenessProbe: @@ -87,6 +89,9 @@ spec: - name: public port: 11002 targetPort: 11002 + - name: signaling + port: 8182 + targetPort: 8182 --- apiVersion: gateway.networking.k8s.io/v1 diff --git a/launchers/dataplane/build.gradle.kts b/launchers/dataplane/build.gradle.kts index 3bd0b7f8..e09de33b 100644 --- 
a/launchers/dataplane/build.gradle.kts +++ b/launchers/dataplane/build.gradle.kts @@ -19,14 +19,21 @@ plugins { } dependencies { + implementation(libs.edc.boot) + implementation(libs.edc.core.runtime) + implementation(libs.edc.ext.http) + implementation(libs.dataplane.sdk) + + // needed for the key seed extension runtimeOnly(libs.tink) - implementation(libs.edc.bom.dataplane) { - exclude("org.eclipse.edc", "data-plane-self-registration") - } - runtimeOnly(project(":extensions:data-plane-public-api-v2")) + implementation(libs.nimbus.jwt) + + // for kubernetes probes + runtimeOnly(libs.edc.api.observability) +// runtimeOnly(project(":extensions:data-plane-public-api-v2")) runtimeOnly(libs.edc.core.participantcontext.config) - runtimeOnly(libs.edc.vault.hashicorp) +// runtimeOnly(libs.edc.vault.hashicorp) runtimeOnly(libs.edc.bom.dataplane.sql) } diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/keyseed/KeySeedExtension.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/keyseed/KeySeedExtension.java index beb51972..67495dc8 100644 --- a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/keyseed/KeySeedExtension.java +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/keyseed/KeySeedExtension.java @@ -28,10 +28,10 @@ public class KeySeedExtension implements ServiceExtension { - @Setting(key = "edc.transfer.proxy.token.signer.privatekey.alias") + @Setting(key = "edc.transfer.proxy.token.signer.privatekey.alias", defaultValue = "privatekey-alias") private String tokenSignerPrivateKeyAlias; - @Setting(key = "edc.transfer.proxy.token.verifier.publickey.alias") + @Setting(key = "edc.transfer.proxy.token.verifier.publickey.alias", defaultValue = "publickey-alias") private String tokenVerifierPublicKeyAlias; @Inject diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/SignalingDataPlaneRuntimeExtension.java 
b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/SignalingDataPlaneRuntimeExtension.java new file mode 100644 index 00000000..ba6bdeb3 --- /dev/null +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/SignalingDataPlaneRuntimeExtension.java @@ -0,0 +1,262 @@ +/* + * Copyright (c) 2025 Think-it GmbH + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Think-it GmbH - initial API and implementation + * + */ + +package org.eclipse.edc.mvd.dataplane.signaling; + +import dev.failsafe.Failsafe; +import dev.failsafe.RetryPolicy; +import org.eclipse.dataplane.Dataplane; +import org.eclipse.dataplane.domain.DataAddress; +import org.eclipse.dataplane.domain.Result; +import org.eclipse.dataplane.domain.dataflow.DataFlow; +import org.eclipse.dataplane.domain.registration.Oauth2ClientCredentialsAuthorization; +import org.eclipse.dataplane.logic.OnPrepare; +import org.eclipse.edc.runtime.metamodel.annotation.Configuration; +import org.eclipse.edc.runtime.metamodel.annotation.Extension; +import org.eclipse.edc.runtime.metamodel.annotation.Inject; +import org.eclipse.edc.runtime.metamodel.annotation.Setting; +import org.eclipse.edc.runtime.metamodel.annotation.Settings; +import org.eclipse.edc.spi.monitor.Monitor; +import org.eclipse.edc.spi.system.ServiceExtension; +import org.eclipse.edc.spi.system.ServiceExtensionContext; +import org.eclipse.edc.web.spi.WebService; +import org.eclipse.edc.web.spi.configuration.ApiContext; +import org.eclipse.edc.web.spi.configuration.PortMapping; +import org.eclipse.edc.web.spi.configuration.PortMappingRegistry; +import org.eclipse.edc.web.spi.exception.InvalidRequestException; +import org.jetbrains.annotations.NotNull; + +import java.net.URI; +import java.net.http.HttpClient; 
+import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.time.Duration; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; + +import static java.net.http.HttpResponse.BodyHandlers.ofString; +import static java.util.Collections.emptyList; + +@Extension(value = SignalingDataPlaneRuntimeExtension.NAME) +public class SignalingDataPlaneRuntimeExtension implements ServiceExtension { + + public static final String NAME = "Signaling pata plane Extension"; + @Setting(key = "dataplane.id") + private String dataplaneId; + @Configuration + private ApiConfiguration apiConfiguration; + + @Inject + private WebService webService; + @Inject + private Monitor monitor; + + private final Map> ongoingNonFiniteTransfers = new HashMap<>(); + private Dataplane dataplane; + @Inject + private PortMappingRegistry portMappingRegistry; + + @Override + public String name() { + return NAME; + } + + @Override + public void initialize(ServiceExtensionContext context) { + var builder = Dataplane.newInstance() + .id(dataplaneId) + .registerAuthorization(new Oauth2ClientCredentialsAuthorization()) + .endpoint(apiConfiguration.dataFlowEndpoint()) + .transferType("Finite-PUSH") + .transferType("Finite-PULL") + .transferType("NonFinite-PUSH") + .transferType("NonFinite-PULL") + .transferType("AsyncPrepare-PUSH") + .transferType("AsyncStart-PULL") + .onPrepare(new DataplaneOnPrepare()) + .onStart(this::startDataFlow) + .onStarted(this::receiveDataFlow) + .onSuspend(this::stopDataFlow) + .onResume(flow -> flow.getType() == DataFlow.Type.PROVIDER + ? 
startDataFlow(flow) + : receiveDataFlow(flow)) + .onCompleted(Result::success) + .onTerminate(this::stopDataFlow); + + dataplane = builder.build(); + + var portMapping = new PortMapping(ApiContext.SIGNALING, apiConfiguration.port(), apiConfiguration.path()); + portMappingRegistry.register(portMapping); + +// var typeTransformerRegistry = transformerRegistry.forContext("signaling-api"); +// typeTransformerRegistry.register(new DataAddressToDspDataAddressTransformer()); +// typeTransformerRegistry.register(new DataFlowStatusMessageToDataFlowResponseTransformer()); +// typeTransformerRegistry.register(new DspDataAddressToDataAddressTransformer()); + webService.registerResource(ApiContext.SIGNALING, dataplane.controller()); + webService.registerResource(ApiContext.SIGNALING, dataplane.registrationController()); + // webService.registerResource(new DataController(monitor)); + // webService.registerResource(new ControlController(monitor, dataplane, apiConfiguration)); + } + + private @NotNull Result startDataFlow(DataFlow dataFlow) { + switch (dataFlow.getTransferType()) { + case "NonFinite-PUSH" -> { + if (dataFlow.getDataAddress() == null) { + return Result.failure(new InvalidRequestException("DataAddress should not be null for PUSH transfers")); + } + + var future = Executors.newSingleThreadScheduledExecutor() + .scheduleAtFixedRate(() -> pushData(dataFlow), 0, 200, TimeUnit.MILLISECONDS); + + ongoingNonFiniteTransfers.put(dataFlow.getId(), future); + + return Result.success(dataFlow); + } + case "Finite-PUSH", "AsyncPrepare-PUSH" -> { + if (dataFlow.getDataAddress() == null) { + return Result.failure(new InvalidRequestException("DataAddress should not be null for PUSH transfers")); + } + + pushData(dataFlow) + .whenComplete((response, throwable) -> notifyCompletion(dataFlow, response, throwable)); + + return Result.success(dataFlow); + } + case "NonFinite-PULL", "Finite-PULL" -> { + var dataAddress = new DataAddress(dataFlow.getTransferType(), "http", 
apiConfiguration.dataSourceEndpoint(), emptyList()); + dataFlow.setDataAddress(dataAddress); + return Result.success(dataFlow); + } + case "AsyncStart-PULL" -> { + dataFlow.transitionToStarting(); + return Result.success(dataFlow); + } + default -> { + return Result.failure(new RuntimeException("TransferType %s not supported".formatted(dataFlow.getTransferType()))); + } + } + } + + private CompletableFuture> pushData(DataFlow dataFlow) { + var destinationUri = URI.create(dataFlow.getDataAddress().endpoint()); + var request = HttpRequest.newBuilder(destinationUri).POST(HttpRequest.BodyPublishers.ofString("test-data")).build(); + return HttpClient.newHttpClient().sendAsync(request, HttpResponse.BodyHandlers.discarding()); + } + + private Result receiveDataFlow(DataFlow dataFlow) { + switch (dataFlow.getTransferType()) { + case "NonFinite-PULL" -> { + var sourceUri = URI.create(dataFlow.getDataAddress().endpoint()); + var future = Executors.newSingleThreadScheduledExecutor() + .scheduleAtFixedRate(() -> requestData(dataFlow, sourceUri), 0, 200, TimeUnit.MILLISECONDS); + + ongoingNonFiniteTransfers.put(dataFlow.getId(), future); + } + case "Finite-PULL", "AsyncStart-PULL" -> { + var sourceUri = URI.create(dataFlow.getDataAddress().endpoint()); + requestData(dataFlow, sourceUri) + .whenComplete((response, throwable) -> notifyCompletion(dataFlow, response, throwable)); + } + default -> { + } + } + + return Result.success(dataFlow); + } + + private CompletableFuture> requestData(DataFlow dataFlow, URI sourceUri) { + var request = HttpRequest.newBuilder(sourceUri).GET().build(); + return HttpClient.newHttpClient().sendAsync(request, ofString()).whenComplete((response, throwable) -> { + if (throwable == null) { + if (response.statusCode() == 200) { + var body = response.body(); + monitor.info("Received data for data flow %s: %s".formatted(dataFlow.getId(), body)); + } else { + monitor.severe("Error retrieving data: %s: %s".formatted(response.statusCode(), response.body())); 
+ } + } else { + monitor.severe("Error retrieving data", throwable); + } + }); + } + + private class DataplaneOnPrepare implements OnPrepare { + @Override + public Result action(DataFlow dataFlow) { + if (dataFlow.getTransferType().startsWith("AsyncPrepare-")) { + dataFlow.transitionToPreparing(); + return Result.success(dataFlow); + } + var destination = new DataAddress("Finite-PUSH", "http", apiConfiguration.receiveDataEndpoint(), emptyList()); + dataFlow.setDataAddress(destination); + return Result.success(dataFlow); + } + } + + private @NotNull Result stopDataFlow(DataFlow dataFlow) { + var future = ongoingNonFiniteTransfers.get(dataFlow.getId()); + if (future != null) { + future.cancel(true); + ongoingNonFiniteTransfers.remove(dataFlow.getId()); + monitor.info("Ongoing flow %s terminated".formatted(dataFlow.getId())); + } + return Result.success(dataFlow); + } + + private void notifyCompletion(DataFlow dataFlow, HttpResponse response, Throwable throwable) { + if (throwable == null) { + var statusCode = response.statusCode(); + if (statusCode >= 200 && statusCode < 300) { + notifyCompleted(dataFlow); + } else { + dataplane.notifyErrored(dataFlow.getId(), new RuntimeException("Data source/destination endpoint responded with " + statusCode)); + } + } else { + dataplane.notifyErrored(dataFlow.getId(), throwable); + } + } + + private void notifyCompleted(DataFlow dataFlow) { + var retryPolicy = RetryPolicy.builder().withMaxRetries(5).withDelay(Duration.ofSeconds(1)).build(); + Failsafe.with(retryPolicy).run(context -> { + var notifyCompleted = dataplane.notifyCompleted(dataFlow.getId()); + if (notifyCompleted.failed()) { + throw new RuntimeException("Notify Completed failed: " + notifyCompleted); + } + }); + } + + @Settings + public record ApiConfiguration( + @Setting(key = "web.http." + ApiContext.SIGNALING + ".path") String path, + @Setting(key = "web.http." 
+ ApiContext.SIGNALING + ".port") int port + ) { + + public String receiveDataEndpoint() { + return "http://localhost:%d%s/data/receive".formatted(port, path); + } + + public String dataSourceEndpoint() { + return "http://localhost:%d%s/data/source".formatted(port, path); + } + + public URI dataFlowEndpoint() { + return URI.create("http://localhost:%d%s/v1/dataflows".formatted(port, path)); + } + } +} diff --git a/launchers/dataplane/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension b/launchers/dataplane/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension index 8db5d734..bd60813a 100644 --- a/launchers/dataplane/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension +++ b/launchers/dataplane/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension @@ -13,4 +13,5 @@ # -org.eclipse.edc.mvd.dataplane.keyseed.KeySeedExtension \ No newline at end of file +org.eclipse.edc.mvd.dataplane.keyseed.KeySeedExtension +org.eclipse.edc.mvd.dataplane.signaling.SignalingDataPlaneRuntimeExtension \ No newline at end of file From 8bc62abfd0469984a9cb0217762676d140fac88d Mon Sep 17 00:00:00 2001 From: Paul Latzelsperger Date: Mon, 11 May 2026 13:14:25 +0200 Subject: [PATCH 07/17] transfer process works --- .../ControlPlane Management/Get Assets.bru | 2 +- .../Initiate Transfer.bru | 1 + Requests/collection.bru | 1 + .../signaling-auth-none/build.gradle.kts | 24 ++++++ .../auth/SignalingAuthNoneExtension.java | 83 +++++++++++++++++++ ...rg.eclipse.edc.spi.system.ServiceExtension | 15 ++++ gradle/libs.versions.toml | 2 + .../application/controlplane-seed.yaml | 15 ++-- .../application/controlplane-seed.yaml | 15 ++-- launchers/controlplane/build.gradle.kts | 2 +- launchers/dataplane/build.gradle.kts | 1 + .../SignalingDataPlaneRuntimeExtension.java | 54 ++++++------ settings.gradle.kts | 1 + 13 files changed, 177 insertions(+), 39 deletions(-) create mode 100644 
extensions/signaling-auth-none/build.gradle.kts create mode 100644 extensions/signaling-auth-none/src/main/java/org/eclipse/edc/mvd/signaling/auth/SignalingAuthNoneExtension.java create mode 100644 extensions/signaling-auth-none/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension diff --git a/Requests/ControlPlane Management/Get Assets.bru b/Requests/ControlPlane Management/Get Assets.bru index 798234e0..21df3373 100644 --- a/Requests/ControlPlane Management/Get Assets.bru +++ b/Requests/ControlPlane Management/Get Assets.bru @@ -5,7 +5,7 @@ meta { } post { - url: {{CONSUMER_CP}}/api/mgmt/v4/assets/request + url: {{PROVIDER_CP}}/api/mgmt/v4/assets/request body: json auth: inherit } diff --git a/Requests/ControlPlane Management/Initiate Transfer.bru b/Requests/ControlPlane Management/Initiate Transfer.bru index 7b6801eb..b03f859e 100644 --- a/Requests/ControlPlane Management/Initiate Transfer.bru +++ b/Requests/ControlPlane Management/Initiate Transfer.bru @@ -27,6 +27,7 @@ body:json { "protocol": "dataspace-protocol-http:2025-1", "transferType": "HttpData-PULL" } + } script:pre-request { diff --git a/Requests/collection.bru b/Requests/collection.bru index 3b58a45b..8b5e2ddb 100644 --- a/Requests/collection.bru +++ b/Requests/collection.bru @@ -21,4 +21,5 @@ vars:pre-request { PROVIDER_PUBLIC_URL: http://dp.provider.localhost/public PARTICIPANT_CONTEXT_ID: consumer-participant ISSUER_CONTEXT_ID: issuer + PROVIDER_CP: http://cp.provider.localhost } diff --git a/extensions/signaling-auth-none/build.gradle.kts b/extensions/signaling-auth-none/build.gradle.kts new file mode 100644 index 00000000..5caf9e78 --- /dev/null +++ b/extensions/signaling-auth-none/build.gradle.kts @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2025 Metaform Systems, Inc. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Metaform Systems, Inc. - initial API and implementation + * + */ + +plugins { + `java-library` +} + +dependencies { + api(libs.edc.spi.boot) + api(libs.edc.spi.dataplane.signaling) +} + + diff --git a/extensions/signaling-auth-none/src/main/java/org/eclipse/edc/mvd/signaling/auth/SignalingAuthNoneExtension.java b/extensions/signaling-auth-none/src/main/java/org/eclipse/edc/mvd/signaling/auth/SignalingAuthNoneExtension.java new file mode 100644 index 00000000..be00b379 --- /dev/null +++ b/extensions/signaling-auth-none/src/main/java/org/eclipse/edc/mvd/signaling/auth/SignalingAuthNoneExtension.java @@ -0,0 +1,83 @@ +/* + * Copyright (c) 2026 Metaform Systems, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Metaform Systems, Inc. 
- initial API and implementation + * + */ + +package org.eclipse.edc.mvd.signaling.auth; + +import org.eclipse.edc.connector.dataplane.selector.spi.instance.AuthorizationProfile; +import org.eclipse.edc.runtime.metamodel.annotation.Inject; +import org.eclipse.edc.runtime.metamodel.annotation.Provides; +import org.eclipse.edc.signaling.spi.authorization.Header; +import org.eclipse.edc.signaling.spi.authorization.SignalingAuthorization; +import org.eclipse.edc.signaling.spi.authorization.SignalingAuthorizationRegistry; +import org.eclipse.edc.spi.system.ServiceExtension; +import org.eclipse.edc.spi.system.ServiceExtensionContext; + +import java.util.Collection; +import java.util.List; +import java.util.function.Function; + +import static org.eclipse.edc.spi.result.Result.success; + +@Provides(SignalingAuthorizationRegistry.class) +public class SignalingAuthNoneExtension implements ServiceExtension { + + private static final SignalingAuthorization NONE_AUTH = new SignalingAuthorization() { + @Override + public String getType() { + return "none"; + } + + @Override + public org.eclipse.edc.spi.result.Result isAuthorized(Function headerGetter, AuthorizationProfile authorizationProfile) { + return success("dummy-token"); + } + + @Override + public org.eclipse.edc.spi.result.Result
evaluate(AuthorizationProfile authorizationProfile) { + return success(new Header("Authorization", "Bearer dummy-token")); + } + }; + + @Inject(required = false) + private SignalingAuthorizationRegistry signalingAuthorizationRegistry; + + @Override + public void initialize(ServiceExtensionContext context) { + if (signalingAuthorizationRegistry == null) { + var reg = new NoneAuthRegistry(); + context.registerService(SignalingAuthorizationRegistry.class, reg); + signalingAuthorizationRegistry = reg; + } + + signalingAuthorizationRegistry.register(NONE_AUTH); + } + + + private static class NoneAuthRegistry implements SignalingAuthorizationRegistry { + @Override + public void register(SignalingAuthorization signalingAuthorization) { + + } + + @Override + public Collection getAll() { + return List.of(NONE_AUTH); + } + + @Override + public SignalingAuthorization findByType(String type) { + return type.equals(NONE_AUTH.getType()) ? NONE_AUTH : null; + } + } +} diff --git a/extensions/signaling-auth-none/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension b/extensions/signaling-auth-none/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension new file mode 100644 index 00000000..5c142071 --- /dev/null +++ b/extensions/signaling-auth-none/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension @@ -0,0 +1,15 @@ +# +# Copyright (c) 2026 Metaform Systems, Inc. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License, Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# +# Contributors: +# Metaform Systems, Inc. 
- initial API and implementation +# +# + +org.eclipse.edc.mvd.signaling.auth.SignalingAuthNoneExtension \ No newline at end of file diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index a6cd917f..b7a9d3f4 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -49,6 +49,7 @@ edc-decentralized-claims-cel = { module = "org.eclipse.edc:decentralized-claims- edc-dcp-core = { module = "org.eclipse.edc:decentralized-claims-core", version.ref = "edc" } edc-vault-hashicorp = { module = "org.eclipse.edc:vault-hashicorp", version.ref = "edc" } edc-spi-identity-trust = { module = "org.eclipse.edc:decentralized-claims-spi", version.ref = "edc" } +edc-spi-boot = { module = "org.eclipse.edc:boot-spi", version.ref = "edc" } edc-spi-transform = { module = "org.eclipse.edc:transform-spi", version.ref = "edc" } edc-spi-catalog = { module = "org.eclipse.edc:catalog-spi", version.ref = "edc" } edc-spi-identity-did = { module = "org.eclipse.edc:identity-did-spi", version.ref = "edc" } @@ -56,6 +57,7 @@ edc-spi-http = { module = "org.eclipse.edc:http-spi", version.ref = "edc" } edc-spi-web = { module = "org.eclipse.edc:web-spi", version.ref = "edc" } edc-spi-dataplane = { module = "org.eclipse.edc:data-plane-spi", version.ref = "edc" } edc-spi-dataplane-selector = { module = "org.eclipse.edc:data-plane-selector-spi", version.ref = "edc" } +edc-spi-dataplane-signaling = { module = "org.eclipse.edc:data-plane-signaling-spi", version.ref = "edc" } # EDC lib dependencies edc-lib-jws2020 = { module = "org.eclipse.edc:jws2020-lib", version.ref = "edc" } diff --git a/k8s/consumer/application/controlplane-seed.yaml b/k8s/consumer/application/controlplane-seed.yaml index 38955dd5..5173e854 100644 --- a/k8s/consumer/application/controlplane-seed.yaml +++ b/k8s/consumer/application/controlplane-seed.yaml @@ -120,10 +120,13 @@ spec: REG_URL="http://controlplane.consumer.svc.cluster.local:8081/api/mgmt/v4/dataplanes" post "${REG_URL}" "PUT" '{ - "dataplaneId": 
"consumer-participant-dataplane", - "endpoint": "http://dataplane.consumer.svc.cluster.local:8182/api/signaling", + "dataplaneId": "anonymous", + "endpoint": "http://dataplane.consumer.svc.cluster.local:8182/api/signaling/v1/dataflows", "transferTypes": [ "HttpData-PULL" ], - "labels": [] + "labels": [], + "authorization": { + "type": "none" + } }' echo "✓ dataplane registered with controlplane" @@ -135,9 +138,11 @@ spec: REG_URL="http://dataplane.consumer.svc.cluster.local:8182/api/signaling/v1/controlplanes" post "${REG_URL}" "PUT" '{ - "controlplaneId": "consumer-participant-controlplane", + "controlplaneId": "anonymous", "endpoint": "http://controlplane.consumer.svc.cluster.local:8182/api/signaling", - "authorizationProfile": null + "authorization": { + "type": "none" + } }' echo "✓ controlplane registered with dataplane" diff --git a/k8s/provider/application/controlplane-seed.yaml b/k8s/provider/application/controlplane-seed.yaml index d6875aa4..5d2a9e65 100644 --- a/k8s/provider/application/controlplane-seed.yaml +++ b/k8s/provider/application/controlplane-seed.yaml @@ -310,10 +310,13 @@ spec: REG_URL="http://controlplane.provider.svc.cluster.local:8081/api/mgmt/v4/dataplanes" post "${REG_URL}" "PUT" '{ - "dataplaneId": "provider-participant-dataplane", - "endpoint": "http://dataplane.provider.svc.cluster.local:8182/api/signaling", + "dataplaneId": "anonymous", + "endpoint": "http://dataplane.provider.svc.cluster.local:8182/api/signaling/v1/dataflows", "transferTypes": [ "HttpData-PULL" ], - "labels": [] + "labels": [], + "authorization": { + "type": "none" + } }' echo "✓ dataplane registered with controlplane" @@ -325,9 +328,11 @@ spec: REG_URL="http://dataplane.provider.svc.cluster.local:8182/api/signaling/v1/controlplanes" post "${REG_URL}" "PUT" '{ - "controlplaneId": "provider-participant-controlplane", + "controlplaneId": "anonymous", "endpoint": "http://controlplane.provider.svc.cluster.local:8182/api/signaling", - "authorizationProfile": null + 
"authorization": { + "type": "none" + } }' echo "✓ controlplane registered with dataplane" diff --git a/launchers/controlplane/build.gradle.kts b/launchers/controlplane/build.gradle.kts index 92c99262..e8596465 100644 --- a/launchers/controlplane/build.gradle.kts +++ b/launchers/controlplane/build.gradle.kts @@ -19,7 +19,7 @@ plugins { } dependencies { - runtimeOnly(project(":extensions:data-plane-registration")) + runtimeOnly(project(":extensions:signaling-auth-none")) runtimeOnly(libs.edc.api.cel.v5) runtimeOnly(libs.edc.core.cel) runtimeOnly(libs.edc.decentralized.claims.cel) diff --git a/launchers/dataplane/build.gradle.kts b/launchers/dataplane/build.gradle.kts index e09de33b..64769d48 100644 --- a/launchers/dataplane/build.gradle.kts +++ b/launchers/dataplane/build.gradle.kts @@ -19,6 +19,7 @@ plugins { } dependencies { + implementation(project(":extensions:signaling-auth-none")) implementation(libs.edc.boot) implementation(libs.edc.core.runtime) implementation(libs.edc.ext.http) diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/SignalingDataPlaneRuntimeExtension.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/SignalingDataPlaneRuntimeExtension.java index ba6bdeb3..d772c8ac 100644 --- a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/SignalingDataPlaneRuntimeExtension.java +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/SignalingDataPlaneRuntimeExtension.java @@ -20,13 +20,19 @@ import org.eclipse.dataplane.domain.DataAddress; import org.eclipse.dataplane.domain.Result; import org.eclipse.dataplane.domain.dataflow.DataFlow; +import org.eclipse.dataplane.domain.registration.Authorization; import org.eclipse.dataplane.domain.registration.Oauth2ClientCredentialsAuthorization; import org.eclipse.dataplane.logic.OnPrepare; +import org.eclipse.edc.connector.dataplane.selector.spi.instance.AuthorizationProfile; import 
org.eclipse.edc.runtime.metamodel.annotation.Configuration; import org.eclipse.edc.runtime.metamodel.annotation.Extension; import org.eclipse.edc.runtime.metamodel.annotation.Inject; +import org.eclipse.edc.runtime.metamodel.annotation.Provider; import org.eclipse.edc.runtime.metamodel.annotation.Setting; import org.eclipse.edc.runtime.metamodel.annotation.Settings; +import org.eclipse.edc.signaling.spi.authorization.Header; +import org.eclipse.edc.signaling.spi.authorization.SignalingAuthorization; +import org.eclipse.edc.signaling.spi.authorization.SignalingAuthorizationRegistry; import org.eclipse.edc.spi.monitor.Monitor; import org.eclipse.edc.spi.system.ServiceExtension; import org.eclipse.edc.spi.system.ServiceExtensionContext; @@ -42,15 +48,19 @@ import java.net.http.HttpRequest; import java.net.http.HttpResponse; import java.time.Duration; +import java.util.Collection; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; +import java.util.function.Function; import static java.net.http.HttpResponse.BodyHandlers.ofString; import static java.util.Collections.emptyList; +import static org.eclipse.edc.spi.result.Result.*; @Extension(value = SignalingDataPlaneRuntimeExtension.NAME) public class SignalingDataPlaneRuntimeExtension implements ServiceExtension { @@ -81,6 +91,22 @@ public void initialize(ServiceExtensionContext context) { var builder = Dataplane.newInstance() .id(dataplaneId) .registerAuthorization(new Oauth2ClientCredentialsAuthorization()) + .registerAuthorization(new Authorization() { + @Override + public String type() { + return "none"; + } + + @Override + public Result authorizationHeader(org.eclipse.dataplane.domain.registration.AuthorizationProfile profile) { + return Result.success("dummy-token"); + } + + @Override + public Result extractCallerId(String 
authorizationHeader) { + return Result.success("anonymous"); + } + }) .endpoint(apiConfiguration.dataFlowEndpoint()) .transferType("Finite-PUSH") .transferType("Finite-PULL") @@ -115,37 +141,11 @@ public void initialize(ServiceExtensionContext context) { private @NotNull Result startDataFlow(DataFlow dataFlow) { switch (dataFlow.getTransferType()) { - case "NonFinite-PUSH" -> { - if (dataFlow.getDataAddress() == null) { - return Result.failure(new InvalidRequestException("DataAddress should not be null for PUSH transfers")); - } - - var future = Executors.newSingleThreadScheduledExecutor() - .scheduleAtFixedRate(() -> pushData(dataFlow), 0, 200, TimeUnit.MILLISECONDS); - - ongoingNonFiniteTransfers.put(dataFlow.getId(), future); - - return Result.success(dataFlow); - } - case "Finite-PUSH", "AsyncPrepare-PUSH" -> { - if (dataFlow.getDataAddress() == null) { - return Result.failure(new InvalidRequestException("DataAddress should not be null for PUSH transfers")); - } - - pushData(dataFlow) - .whenComplete((response, throwable) -> notifyCompletion(dataFlow, response, throwable)); - - return Result.success(dataFlow); - } - case "NonFinite-PULL", "Finite-PULL" -> { + case "NonFinite-PULL", "Finite-PULL", "HttpData-PULL" -> { var dataAddress = new DataAddress(dataFlow.getTransferType(), "http", apiConfiguration.dataSourceEndpoint(), emptyList()); dataFlow.setDataAddress(dataAddress); return Result.success(dataFlow); } - case "AsyncStart-PULL" -> { - dataFlow.transitionToStarting(); - return Result.success(dataFlow); - } default -> { return Result.failure(new RuntimeException("TransferType %s not supported".formatted(dataFlow.getTransferType()))); } diff --git a/settings.gradle.kts b/settings.gradle.kts index 750069f6..aedbb608 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -29,6 +29,7 @@ include(":tests:end2end") include(":extensions:data-plane-public-api-v2") include(":extensions:data-plane-registration") +include(":extensions:signaling-auth-none") // 
launcher modules include(":launchers:identity-hub") From 575460fa9510db613b95b31f0e42e92a2ca0ac46 Mon Sep 17 00:00:00 2001 From: Paul Latzelsperger Date: Mon, 11 May 2026 16:22:11 +0200 Subject: [PATCH 08/17] cleanup, remove Asset.dataAddress --- k8s/consumer/application/dataplane.yaml | 6 - .../application/controlplane-seed.yaml | 14 -- k8s/provider/application/dataplane.yaml | 6 - .../data/DataPlanePublicApiController.java | 66 ++++++++++ .../SignalingDataPlaneRuntimeExtension.java | 123 +++++++----------- 5 files changed, 113 insertions(+), 102 deletions(-) create mode 100644 launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java diff --git a/k8s/consumer/application/dataplane.yaml b/k8s/consumer/application/dataplane.yaml index 6ceb5bdf..bdf5b37d 100644 --- a/k8s/consumer/application/dataplane.yaml +++ b/k8s/consumer/application/dataplane.yaml @@ -83,12 +83,6 @@ spec: - name: signaling port: 8182 targetPort: 8182 - - name: control - port: 8083 - targetPort: 8083 - - name: certs - port: 8186 - targetPort: 8186 - name: public port: 11002 targetPort: 11002 diff --git a/k8s/provider/application/controlplane-seed.yaml b/k8s/provider/application/controlplane-seed.yaml index 5d2a9e65..a5dafe62 100644 --- a/k8s/provider/application/controlplane-seed.yaml +++ b/k8s/provider/application/controlplane-seed.yaml @@ -88,13 +88,6 @@ spec: "@type": "Asset", "properties": { "description": "This asset requires Membership to view and Manufacturer (part_types=non_critical) to negotiate." - }, - "dataAddress": { - "@type": "DataAddress", - "type": "HttpData", - "baseUrl": "https://jsonplaceholder.typicode.com/todos", - "proxyPath": "true", - "proxyQueryParams": "true" } }' @@ -113,13 +106,6 @@ spec: "@type": "Asset", "properties": { "description": "This asset requires Membership to view and Manufacturer (part_types=all) to negotiate." 
- }, - "dataAddress": { - "@type": "DataAddress", - "type": "HttpData", - "baseUrl": "https://jsonplaceholder.typicode.com/todos", - "proxyPath": "true", - "proxyQueryParams": "true" } }' diff --git a/k8s/provider/application/dataplane.yaml b/k8s/provider/application/dataplane.yaml index a2a4cd6b..333a9261 100644 --- a/k8s/provider/application/dataplane.yaml +++ b/k8s/provider/application/dataplane.yaml @@ -80,12 +80,6 @@ spec: - name: health port: 8080 targetPort: 8080 - - name: control - port: 8083 - targetPort: 8083 - - name: certs - port: 8186 - targetPort: 8186 - name: public port: 11002 targetPort: 11002 diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java new file mode 100644 index 00000000..6e427e16 --- /dev/null +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2026 Metaform Systems, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Metaform Systems, Inc. - initial API and implementation + * + */ + +package org.eclipse.edc.mvd.dataplane.data; + +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.QueryParam; +import okhttp3.Request; +import org.eclipse.edc.http.spi.EdcHttpClient; +import org.eclipse.edc.spi.monitor.Monitor; + +/** + * This controller serves as the public endpoint for the data plane. 
Its endpoints are intended for consumers to download data + */ +@Path("/data") +public class DataPlanePublicApiController { + + private final EdcHttpClient client; + private final Monitor monitor; + + public DataPlanePublicApiController(EdcHttpClient client, Monitor monitor) { + this.client = client; + this.monitor = monitor; + } + + @GET + @Path("/source") + public String dataSource(@QueryParam("resource") String resource, @QueryParam("id") String id) { + // Use default values if parameters are not provided + if (resource == null || resource.isEmpty()) { + resource = "posts"; + } + + monitor.info("Data requested for resource: " + resource + ", id: " + id); + + var formatted = "https://jsonplaceholder.typicode.com/%s".formatted(resource); + if (id != null && !id.isEmpty()) { + formatted += "/%s".formatted(id); + } + var request = new Request.Builder() + .url(formatted) + .get() + .build(); + try (var response = client.execute(request)) { + if (response.isSuccessful()) { + return response.body().string(); + } + throw new IllegalArgumentException("Failed to fetch data from typicode: %d".formatted(response.code())); + } catch (Exception e) { + monitor.severe("Error fetching data from typicode", e); + return "error: " + e.getMessage(); + } + } +} diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/SignalingDataPlaneRuntimeExtension.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/SignalingDataPlaneRuntimeExtension.java index d772c8ac..80124e55 100644 --- a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/SignalingDataPlaneRuntimeExtension.java +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/SignalingDataPlaneRuntimeExtension.java @@ -23,24 +23,21 @@ import org.eclipse.dataplane.domain.registration.Authorization; import org.eclipse.dataplane.domain.registration.Oauth2ClientCredentialsAuthorization; import org.eclipse.dataplane.logic.OnPrepare; 
-import org.eclipse.edc.connector.dataplane.selector.spi.instance.AuthorizationProfile; +import org.eclipse.edc.http.spi.EdcHttpClient; +import org.eclipse.edc.mvd.dataplane.data.DataPlanePublicApiController; import org.eclipse.edc.runtime.metamodel.annotation.Configuration; import org.eclipse.edc.runtime.metamodel.annotation.Extension; import org.eclipse.edc.runtime.metamodel.annotation.Inject; -import org.eclipse.edc.runtime.metamodel.annotation.Provider; import org.eclipse.edc.runtime.metamodel.annotation.Setting; import org.eclipse.edc.runtime.metamodel.annotation.Settings; -import org.eclipse.edc.signaling.spi.authorization.Header; -import org.eclipse.edc.signaling.spi.authorization.SignalingAuthorization; -import org.eclipse.edc.signaling.spi.authorization.SignalingAuthorizationRegistry; import org.eclipse.edc.spi.monitor.Monitor; +import org.eclipse.edc.spi.system.Hostname; import org.eclipse.edc.spi.system.ServiceExtension; import org.eclipse.edc.spi.system.ServiceExtensionContext; import org.eclipse.edc.web.spi.WebService; import org.eclipse.edc.web.spi.configuration.ApiContext; import org.eclipse.edc.web.spi.configuration.PortMapping; import org.eclipse.edc.web.spi.configuration.PortMappingRegistry; -import org.eclipse.edc.web.spi.exception.InvalidRequestException; import org.jetbrains.annotations.NotNull; import java.net.URI; @@ -48,19 +45,10 @@ import java.net.http.HttpRequest; import java.net.http.HttpResponse; import java.time.Duration; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; import java.util.concurrent.CompletableFuture; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledFuture; -import java.util.concurrent.TimeUnit; -import java.util.function.Function; import static java.net.http.HttpResponse.BodyHandlers.ofString; import static java.util.Collections.emptyList; -import static org.eclipse.edc.spi.result.Result.*; @Extension(value = 
SignalingDataPlaneRuntimeExtension.NAME) public class SignalingDataPlaneRuntimeExtension implements ServiceExtension { @@ -69,18 +57,26 @@ public class SignalingDataPlaneRuntimeExtension implements ServiceExtension { @Setting(key = "dataplane.id") private String dataplaneId; @Configuration - private ApiConfiguration apiConfiguration; + private SignalingApiConfig signalingApiConfig; + + @Configuration + private PublicApiConfig publicApiConfig; @Inject private WebService webService; @Inject private Monitor monitor; - private final Map> ongoingNonFiniteTransfers = new HashMap<>(); private Dataplane dataplane; @Inject private PortMappingRegistry portMappingRegistry; + @Inject + private Hostname hostname; + + @Inject + private EdcHttpClient httpClient; + @Override public String name() { return NAME; @@ -99,7 +95,7 @@ public String type() { @Override public Result authorizationHeader(org.eclipse.dataplane.domain.registration.AuthorizationProfile profile) { - return Result.success("dummy-token"); + return Result.success("Bearer dummy-token"); } @Override @@ -107,13 +103,8 @@ public Result extractCallerId(String authorizationHeader) { return Result.success("anonymous"); } }) - .endpoint(apiConfiguration.dataFlowEndpoint()) - .transferType("Finite-PUSH") - .transferType("Finite-PULL") - .transferType("NonFinite-PUSH") - .transferType("NonFinite-PULL") - .transferType("AsyncPrepare-PUSH") - .transferType("AsyncStart-PULL") + .endpoint(signalingApiConfig.dataFlowEndpoint(hostname.get())) + .transferType("HttpData-PULL") .onPrepare(new DataplaneOnPrepare()) .onStart(this::startDataFlow) .onStarted(this::receiveDataFlow) @@ -126,23 +117,27 @@ public Result extractCallerId(String authorizationHeader) { dataplane = builder.build(); - var portMapping = new PortMapping(ApiContext.SIGNALING, apiConfiguration.port(), apiConfiguration.path()); + var portMapping = new PortMapping(ApiContext.SIGNALING, signalingApiConfig.port(), signalingApiConfig.path()); 
portMappingRegistry.register(portMapping); -// var typeTransformerRegistry = transformerRegistry.forContext("signaling-api"); -// typeTransformerRegistry.register(new DataAddressToDspDataAddressTransformer()); -// typeTransformerRegistry.register(new DataFlowStatusMessageToDataFlowResponseTransformer()); -// typeTransformerRegistry.register(new DspDataAddressToDataAddressTransformer()); + var publicPortMapping = new PortMapping("public", publicApiConfig.port(), publicApiConfig.path()); + portMappingRegistry.register(publicPortMapping); + + // var typeTransformerRegistry = transformerRegistry.forContext("signaling-api"); + // typeTransformerRegistry.register(new DataAddressToDspDataAddressTransformer()); + // typeTransformerRegistry.register(new DataFlowStatusMessageToDataFlowResponseTransformer()); + // typeTransformerRegistry.register(new DspDataAddressToDataAddressTransformer()); + webService.registerResource(ApiContext.SIGNALING, dataplane.controller()); webService.registerResource(ApiContext.SIGNALING, dataplane.registrationController()); - // webService.registerResource(new DataController(monitor)); + webService.registerResource("public", new DataPlanePublicApiController(httpClient, monitor)); // webService.registerResource(new ControlController(monitor, dataplane, apiConfiguration)); } private @NotNull Result startDataFlow(DataFlow dataFlow) { switch (dataFlow.getTransferType()) { case "NonFinite-PULL", "Finite-PULL", "HttpData-PULL" -> { - var dataAddress = new DataAddress(dataFlow.getTransferType(), "http", apiConfiguration.dataSourceEndpoint(), emptyList()); + var dataAddress = new DataAddress(dataFlow.getTransferType(), "http", publicApiConfig.dataSourceEndpoint(hostname.get()), emptyList()); dataFlow.setDataAddress(dataAddress); return Result.success(dataFlow); } @@ -152,28 +147,13 @@ public Result extractCallerId(String authorizationHeader) { } } - private CompletableFuture> pushData(DataFlow dataFlow) { - var destinationUri = 
URI.create(dataFlow.getDataAddress().endpoint()); - var request = HttpRequest.newBuilder(destinationUri).POST(HttpRequest.BodyPublishers.ofString("test-data")).build(); - return HttpClient.newHttpClient().sendAsync(request, HttpResponse.BodyHandlers.discarding()); - } - private Result receiveDataFlow(DataFlow dataFlow) { - switch (dataFlow.getTransferType()) { - case "NonFinite-PULL" -> { - var sourceUri = URI.create(dataFlow.getDataAddress().endpoint()); - var future = Executors.newSingleThreadScheduledExecutor() - .scheduleAtFixedRate(() -> requestData(dataFlow, sourceUri), 0, 200, TimeUnit.MILLISECONDS); - - ongoingNonFiniteTransfers.put(dataFlow.getId(), future); - } - case "Finite-PULL", "AsyncStart-PULL" -> { - var sourceUri = URI.create(dataFlow.getDataAddress().endpoint()); - requestData(dataFlow, sourceUri) - .whenComplete((response, throwable) -> notifyCompletion(dataFlow, response, throwable)); - } - default -> { - } + if (dataFlow.getTransferType().equals("HttpData-PULL")) { + var sourceUri = URI.create(dataFlow.getDataAddress().endpoint()); + requestData(dataFlow, sourceUri) + .whenComplete((response, throwable) -> notifyCompletion(dataFlow, response, throwable)); + } else { + monitor.warning("TransferType %s not supported".formatted(dataFlow.getTransferType())); } return Result.success(dataFlow); @@ -195,26 +175,16 @@ private CompletableFuture> requestData(DataFlow dataFlow, U }); } - private class DataplaneOnPrepare implements OnPrepare { + private static class DataplaneOnPrepare implements OnPrepare { @Override public Result action(DataFlow dataFlow) { - if (dataFlow.getTransferType().startsWith("AsyncPrepare-")) { - dataFlow.transitionToPreparing(); - return Result.success(dataFlow); - } - var destination = new DataAddress("Finite-PUSH", "http", apiConfiguration.receiveDataEndpoint(), emptyList()); - dataFlow.setDataAddress(destination); - return Result.success(dataFlow); + return dataFlow.getTransferType().equals("HttpData-PULL") + ? 
Result.success(dataFlow) + : Result.failure(new UnsupportedOperationException("unsupported transfer type: " + dataFlow.getTransferType())); } } private @NotNull Result stopDataFlow(DataFlow dataFlow) { - var future = ongoingNonFiniteTransfers.get(dataFlow.getId()); - if (future != null) { - future.cancel(true); - ongoingNonFiniteTransfers.remove(dataFlow.getId()); - monitor.info("Ongoing flow %s terminated".formatted(dataFlow.getId())); - } return Result.success(dataFlow); } @@ -242,21 +212,22 @@ private void notifyCompleted(DataFlow dataFlow) { } @Settings - public record ApiConfiguration( + public record SignalingApiConfig( @Setting(key = "web.http." + ApiContext.SIGNALING + ".path") String path, @Setting(key = "web.http." + ApiContext.SIGNALING + ".port") int port ) { - public String receiveDataEndpoint() { - return "http://localhost:%d%s/data/receive".formatted(port, path); - } - - public String dataSourceEndpoint() { - return "http://localhost:%d%s/data/source".formatted(port, path); + public URI dataFlowEndpoint(String hostname) { + return URI.create("http://%s:%d%s/v1/dataflows".formatted(hostname, port, path)); } + } - public URI dataFlowEndpoint() { - return URI.create("http://localhost:%d%s/v1/dataflows".formatted(port, path)); + @Settings + public record PublicApiConfig( + @Setting(key = "web.http.public.path", defaultValue = "/api/public") String path, + @Setting(key = "web.http.public.port", defaultValue = "11002") int port) { + public String dataSourceEndpoint(String hostname) { + return "http://%s:%d%s/data/source".formatted(hostname, port, path); } } } From 4f2c194eb55ebc2eadb35845e6a5cf85298de525 Mon Sep 17 00:00:00 2001 From: Paul Latzelsperger Date: Mon, 11 May 2026 17:41:26 +0200 Subject: [PATCH 09/17] add proxy to download data --- .../Download Data from Public API.bru | 10 +- .../Get EDR DataAddress for TransferId.bru | 33 --- .../Get cached EDRs.bru | 50 ---- .../Get open dataflows.bru | 37 +++ Requests/collection.bru | 3 +- 
.../application/dataplane-config.yaml | 7 +- k8s/consumer/application/dataplane.yaml | 40 +-- k8s/provider/application/dataplane.yaml | 39 +-- .../SignalingDataPlaneRuntimeExtension.java | 173 +++++++++++++ .../data/ConsumerProxyController.java | 47 ++++ .../data/DataPlanePublicApiController.java | 30 ++- .../signaling/ConsumerDataHandler.java | 81 ++++++ .../signaling/NoneAuthorization.java | 35 +++ .../SignalingDataPlaneRuntimeExtension.java | 233 ------------------ ...rg.eclipse.edc.spi.system.ServiceExtension | 2 +- 15 files changed, 411 insertions(+), 409 deletions(-) delete mode 100644 Requests/ControlPlane Management/Get EDR DataAddress for TransferId.bru delete mode 100644 Requests/ControlPlane Management/Get cached EDRs.bru create mode 100644 Requests/ControlPlane Management/Get open dataflows.bru create mode 100644 launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/SignalingDataPlaneRuntimeExtension.java create mode 100644 launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/ConsumerProxyController.java create mode 100644 launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java create mode 100644 launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/NoneAuthorization.java delete mode 100644 launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/SignalingDataPlaneRuntimeExtension.java diff --git a/Requests/ControlPlane Management/Download Data from Public API.bru b/Requests/ControlPlane Management/Download Data from Public API.bru index 0222ce06..76675831 100644 --- a/Requests/ControlPlane Management/Download Data from Public API.bru +++ b/Requests/ControlPlane Management/Download Data from Public API.bru @@ -1,11 +1,11 @@ meta { name: Download Data from Public API type: http - seq: 9 + seq: 8 } get { - url: {{PROVIDER_PUBLIC_URL}}/api/public + url: {{CONSUMER_DP}}/api/proxy/flows/{{flowId}} body: none auth: none } @@ -14,12 +14,6 @@ headers { 
Authorization: {{AUTHORIZATION}} } -script:pre-request { - if(!(bru.getVar("AUTHORIZATION") !== undefined && bru.getVar("AUTHORIZATION") !== null)){ - throw new Error(' The authorization token is not yet available, please execute request "Get EDR DataAddress for TransferId" first!'); - } -} - script:post-response { test("Status code is >=200 and <300", function () { expect(res.getStatus() < 300 && res.getStatus() >= 200).to.be.true diff --git a/Requests/ControlPlane Management/Get EDR DataAddress for TransferId.bru b/Requests/ControlPlane Management/Get EDR DataAddress for TransferId.bru deleted file mode 100644 index 3558f782..00000000 --- a/Requests/ControlPlane Management/Get EDR DataAddress for TransferId.bru +++ /dev/null @@ -1,33 +0,0 @@ -meta { - name: Get EDR DataAddress for TransferId - type: http - seq: 8 -} - -get { - url: {{CONSUMER_CP}}/api/mgmt/v3/edrs/{{TRANSFER_PROCESS_ID}}/dataaddress - body: none - auth: inherit -} - -script:pre-request { - if(!(bru.getVar("TRANSFER_PROCESS_ID") !== undefined && bru.getVar("TRANSFER_PROCESS_ID") !== null)){ - throw new Error('Transfer Process ID is not yet available, please execute request "Get Transfer Processes" first!'); - } -} - -script:post-response { - // get the authorization token and save it as an environment variable - if(res.getStatus() < 300 && res.getStatus() >= 200){ - //using the first authorization token found - const authorization = res.getBody()["authorization"]; - bru.setVar("AUTHORIZATION", authorization); - } - - -} - -settings { - encodeUrl: true - timeout: 0 -} diff --git a/Requests/ControlPlane Management/Get cached EDRs.bru b/Requests/ControlPlane Management/Get cached EDRs.bru deleted file mode 100644 index ca77e9ac..00000000 --- a/Requests/ControlPlane Management/Get cached EDRs.bru +++ /dev/null @@ -1,50 +0,0 @@ -meta { - name: Get cached EDRs - type: http - seq: 7 -} - -post { - url: {{CONSUMER_CP}}/api/mgmt/v3/edrs/request - body: json - auth: inherit -} - -body:json { - { - 
"@context": [ - "https://w3id.org/edc/connector/management/v2" - ], - "@type": "QuerySpec" - } -} - -body:text { - { - "@context": [ - "https://w3id.org/edc/connector/management/v2" - ], - "@type": "QuerySpec" - } -} - -script:post-response { - // get the transfer process id of "asset-1" and save it as an environment variable if the response body is not empty - if(res.getStatus() < 300 && res.getStatus() >= 200 && res.getBody().length > 0){ - const transferProcessId = res.getBody()[0]["transferProcessId"]; - bru.setVar("TRANSFER_PROCESS_ID", transferProcessId); - } - - test("Status code is >=200 and <300", function () { - expect(res.getStatus() < 300 && res.getStatus() >= 200).to.be.true - }); - test("Transfer process id is set", function(){ - expect(bru.getVar("TRANSFER_PROCESS_ID")).not.to.be.undefined - }) - -} - -settings { - encodeUrl: true - timeout: 0 -} diff --git a/Requests/ControlPlane Management/Get open dataflows.bru b/Requests/ControlPlane Management/Get open dataflows.bru new file mode 100644 index 00000000..d12e9121 --- /dev/null +++ b/Requests/ControlPlane Management/Get open dataflows.bru @@ -0,0 +1,37 @@ +meta { + name: Get open dataflows + type: http + seq: 7 +} + +get { + url: {{CONSUMER_DP}}/api/proxy/flows + body: json + auth: none +} + +body:text { + { + "@context": [ + "https://w3id.org/edc/connector/management/v2" + ], + "@type": "QuerySpec" + } +} + +script:post-response { + const body = res.getBody() + const map = new Map(Object.entries(body)); + // just use the first entry in the map + const [flowId, address] = map.entries().next().value; + + expect(flowId).not.to.be.undefined + bru.setVar("flowId",flowId) + + +} + +settings { + encodeUrl: true + timeout: 0 +} diff --git a/Requests/collection.bru b/Requests/collection.bru index 8b5e2ddb..0a21fd7e 100644 --- a/Requests/collection.bru +++ b/Requests/collection.bru @@ -18,8 +18,9 @@ vars:pre-request { CS_URL: http://ih.consumer.localhost/cs ISSUER_ADMIN_URL: http://issuer.localhost/admin 
ISSUER_DID: did:web:localhost%3A10100 - PROVIDER_PUBLIC_URL: http://dp.provider.localhost/public + PROVIDER_PUBLIC_URL: http://dp.provider.localhost PARTICIPANT_CONTEXT_ID: consumer-participant ISSUER_CONTEXT_ID: issuer PROVIDER_CP: http://cp.provider.localhost + CONSUMER_DP: http://dp.consumer.localhost } diff --git a/k8s/consumer/application/dataplane-config.yaml b/k8s/consumer/application/dataplane-config.yaml index a1977380..c22ea263 100644 --- a/k8s/consumer/application/dataplane-config.yaml +++ b/k8s/consumer/application/dataplane-config.yaml @@ -30,12 +30,11 @@ data: web.http.signaling.port: "8182" web.http.signaling.path: "/api/signaling" - web.http.control.port: "8083" - web.http.control.path: "/api/control" web.http.public.port: "11002" web.http.public.path: "/api/public" - web.http.certs.port: "8186" - web.http.certs.path: "/api/data" + + web.http.proxy.port: "11003" + web.http.proxy.path: "/api/proxy" edc.vault.hashicorp.url: "http://vault.consumer.svc.cluster.local:8200" edc.vault.hashicorp.token: "root" diff --git a/k8s/consumer/application/dataplane.yaml b/k8s/consumer/application/dataplane.yaml index bdf5b37d..e66d45ff 100644 --- a/k8s/consumer/application/dataplane.yaml +++ b/k8s/consumer/application/dataplane.yaml @@ -86,6 +86,9 @@ spec: - name: public port: 11002 targetPort: 11002 + - name: proxy + port: 11003 + targetPort: 11003 --- apiVersion: gateway.networking.k8s.io/v1 @@ -100,11 +103,10 @@ spec: hostnames: - dp.consumer.localhost rules: - # /dp/public → public port 11002 - matches: - path: type: PathPrefix - value: /public + value: / filters: - type: URLRewrite urlRewrite: @@ -113,37 +115,5 @@ spec: replacePrefixMatch: / backendRefs: - name: dataplane - port: 11002 + port: 11003 weight: 1 - - # /app/internal → control port 8083 - - matches: - - path: - type: PathPrefix - value: /app/internal - filters: - - type: URLRewrite - urlRewrite: - path: - type: ReplacePrefixMatch - replacePrefixMatch: / - backendRefs: - - name: dataplane - port: 
8083 - weight: 1 - - # /app/public → certs port 8186 - - matches: - - path: - type: PathPrefix - value: /app/public - filters: - - type: URLRewrite - urlRewrite: - path: - type: ReplacePrefixMatch - replacePrefixMatch: / - backendRefs: - - name: dataplane - port: 8186 - weight: 1 \ No newline at end of file diff --git a/k8s/provider/application/dataplane.yaml b/k8s/provider/application/dataplane.yaml index 333a9261..4c695086 100644 --- a/k8s/provider/application/dataplane.yaml +++ b/k8s/provider/application/dataplane.yaml @@ -86,6 +86,9 @@ spec: - name: signaling port: 8182 targetPort: 8182 + - name: proxy + port: 11003 + targetPort: 11003 --- apiVersion: gateway.networking.k8s.io/v1 @@ -100,11 +103,11 @@ spec: hostnames: - dp.provider.localhost rules: - # /dp/public → public port 11002 + # /api/public → public port 11002 - matches: - path: type: PathPrefix - value: /public + value: / filters: - type: URLRewrite urlRewrite: @@ -114,36 +117,4 @@ spec: backendRefs: - name: dataplane port: 11002 - weight: 1 - - # /app/internal → control port 8083 - - matches: - - path: - type: PathPrefix - value: /app/internal - filters: - - type: URLRewrite - urlRewrite: - path: - type: ReplacePrefixMatch - replacePrefixMatch: / - backendRefs: - - name: dataplane - port: 8083 - weight: 1 - - # /app/public → certs port 8186 - - matches: - - path: - type: PathPrefix - value: /app/public - filters: - - type: URLRewrite - urlRewrite: - path: - type: ReplacePrefixMatch - replacePrefixMatch: / - backendRefs: - - name: dataplane - port: 8186 weight: 1 \ No newline at end of file diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/SignalingDataPlaneRuntimeExtension.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/SignalingDataPlaneRuntimeExtension.java new file mode 100644 index 00000000..e56527d8 --- /dev/null +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/SignalingDataPlaneRuntimeExtension.java @@ -0,0 +1,173 @@ +/* + * 
Copyright (c) 2026 Metaform Systems, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Metaform Systems, Inc. - initial API and implementation + * + */ + +package org.eclipse.edc.mvd.dataplane; + +import org.eclipse.dataplane.Dataplane; +import org.eclipse.dataplane.domain.DataAddress; +import org.eclipse.dataplane.domain.Result; +import org.eclipse.dataplane.domain.dataflow.DataFlow; +import org.eclipse.dataplane.domain.registration.Oauth2ClientCredentialsAuthorization; +import org.eclipse.edc.http.spi.EdcHttpClient; +import org.eclipse.edc.mvd.dataplane.data.ConsumerProxyController; +import org.eclipse.edc.mvd.dataplane.data.DataPlanePublicApiController; +import org.eclipse.edc.mvd.dataplane.signaling.ConsumerDataHandler; +import org.eclipse.edc.mvd.dataplane.signaling.NoneAuthorization; +import org.eclipse.edc.runtime.metamodel.annotation.Configuration; +import org.eclipse.edc.runtime.metamodel.annotation.Extension; +import org.eclipse.edc.runtime.metamodel.annotation.Inject; +import org.eclipse.edc.runtime.metamodel.annotation.Setting; +import org.eclipse.edc.runtime.metamodel.annotation.Settings; +import org.eclipse.edc.spi.monitor.Monitor; +import org.eclipse.edc.spi.system.Hostname; +import org.eclipse.edc.spi.system.ServiceExtension; +import org.eclipse.edc.spi.system.ServiceExtensionContext; +import org.eclipse.edc.web.spi.WebService; +import org.eclipse.edc.web.spi.configuration.ApiContext; +import org.eclipse.edc.web.spi.configuration.PortMapping; +import org.eclipse.edc.web.spi.configuration.PortMappingRegistry; +import org.jetbrains.annotations.NotNull; + +import java.net.URI; + +import static java.util.Collections.emptyList; + +@Extension(value = SignalingDataPlaneRuntimeExtension.NAME) +public class SignalingDataPlaneRuntimeExtension 
implements ServiceExtension { + + public static final String NAME = "Signaling pata plane Extension"; + public static final String APICONTEXT_PUBLIC = "public"; + public static final String APICONTEXT_PROXY = "proxy"; + + @Setting(key = "dataplane.id") + private String dataplaneId; + @Configuration + private SignalingApiConfig signalingApiConfig; + + @Configuration + private PublicApiConfig publicApiConfig; + + @Configuration + private ProxyApiConfig proxyApiConfig; + + @Inject + private WebService webService; + @Inject + private Monitor monitor; + + private Dataplane dataplane; + private ConsumerDataHandler dataFetcher; + @Inject + private PortMappingRegistry portMappingRegistry; + + @Inject + private Hostname hostname; + + @Inject + private EdcHttpClient httpClient; + + @Override + public String name() { + return NAME; + } + + @Override + public void initialize(ServiceExtensionContext context) { + dataFetcher = new ConsumerDataHandler(httpClient, monitor); + + var builder = Dataplane.newInstance() + .id(dataplaneId) + .registerAuthorization(new Oauth2ClientCredentialsAuthorization()) + .registerAuthorization(new NoneAuthorization()) + .endpoint(signalingApiConfig.dataFlowEndpoint(hostname.get())) + .transferType("HttpData-PULL") + .onPrepare(this::prepare) + .onStart(this::startDataFlow) + .onStarted(this::requestData) + .onSuspend(Result::success) // suspension not implemented + .onResume(flow -> flow.getType() == DataFlow.Type.PROVIDER + ? 
startDataFlow(flow) + : dataFetcher.storeDataflow(flow)) + .onCompleted(df -> dataplane.notifyCompleted(df.getId()).map(u -> df)) + .onTerminate(dataFetcher::removeDataflow); + + dataplane = builder.build(); + + var portMapping = new PortMapping(ApiContext.SIGNALING, signalingApiConfig.port(), signalingApiConfig.path()); + portMappingRegistry.register(portMapping); + + var publicPortMapping = new PortMapping(APICONTEXT_PUBLIC, publicApiConfig.port(), publicApiConfig.path()); + portMappingRegistry.register(publicPortMapping); + + var proxyPortMapping = new PortMapping(APICONTEXT_PROXY, proxyApiConfig.port(), proxyApiConfig.path()); + portMappingRegistry.register(proxyPortMapping); + + webService.registerResource(ApiContext.SIGNALING, dataplane.controller()); + webService.registerResource(ApiContext.SIGNALING, dataplane.registrationController()); + webService.registerResource(APICONTEXT_PUBLIC, new DataPlanePublicApiController(httpClient, monitor)); + webService.registerResource(APICONTEXT_PROXY, new ConsumerProxyController(dataFetcher)); + } + + private Result requestData(DataFlow dataFlow) { + return dataFetcher.storeDataflow(dataFlow) + .onFailure(throwable -> { + dataplane.notifyErrored(dataFlow.getId(), throwable); + }); + } + + private Result prepare(DataFlow dataFlow) { + return dataFlow.getTransferType().equals("HttpData-PULL") + ? 
Result.success(dataFlow) + : Result.failure(new UnsupportedOperationException("unsupported transfer type: " + dataFlow.getTransferType())); + } + + private @NotNull Result startDataFlow(DataFlow dataFlow) { + switch (dataFlow.getTransferType()) { + case "NonFinite-PULL", "Finite-PULL", "HttpData-PULL" -> { + var dataAddress = new DataAddress(dataFlow.getTransferType(), "http", publicApiConfig.dataSourceEndpoint(hostname.get()), emptyList()); + dataFlow.setDataAddress(dataAddress); + return Result.success(dataFlow); + } + default -> { + return Result.failure(new RuntimeException("TransferType %s not supported".formatted(dataFlow.getTransferType()))); + } + } + } + + @Settings + public record SignalingApiConfig( + @Setting(key = "web.http." + ApiContext.SIGNALING + ".path") String path, + @Setting(key = "web.http." + ApiContext.SIGNALING + ".port") int port + ) { + + public URI dataFlowEndpoint(String hostname) { + return URI.create("http://%s:%d%s/v1/dataflows".formatted(hostname, port, path)); + } + } + + @Settings + public record PublicApiConfig( + @Setting(key = "web.http." + APICONTEXT_PUBLIC + ".path", defaultValue = "/api/public") String path, + @Setting(key = "web.http." + APICONTEXT_PUBLIC + ".port", defaultValue = "11002") int port) { + public String dataSourceEndpoint(String hostname) { + return "http://%s:%d%s/data/source".formatted(hostname, port, path); + } + } + + @Settings + public record ProxyApiConfig( + @Setting(key = "web.http." + APICONTEXT_PROXY + ".path", defaultValue = "/api/proxy") String path, + @Setting(key = "web.http." 
+ APICONTEXT_PROXY + ".port", defaultValue = "11003") int port) { + } +} diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/ConsumerProxyController.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/ConsumerProxyController.java new file mode 100644 index 00000000..fcc8ac0f --- /dev/null +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/ConsumerProxyController.java @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2026 Metaform Systems, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Metaform Systems, Inc. - initial API and implementation + * + */ + +package org.eclipse.edc.mvd.dataplane.data; + +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import org.eclipse.dataplane.domain.DataAddress; +import org.eclipse.dataplane.port.ExceptionMapper; +import org.eclipse.edc.mvd.dataplane.signaling.ConsumerDataHandler; + +import java.util.Map; + +@Path("/flows") +@Produces("application/json") +public class ConsumerProxyController { + + private final ConsumerDataHandler consumerDataHandler; + + public ConsumerProxyController(ConsumerDataHandler consumerDataHandler) { + this.consumerDataHandler = consumerDataHandler; + } + + @GET + @Path("/{flowId}") + public String handleDataFlow(@PathParam("flowId") String flowId) { + return consumerDataHandler.downloadData(flowId).orElseThrow(ExceptionMapper.MAP_TO_WSRS); + } + + @GET + public Map getAll() { + return consumerDataHandler.getAllFlows(); + } +} diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java index 
6e427e16..b40da0f3 100644 --- a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java @@ -16,10 +16,11 @@ import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; -import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.PathParam; import okhttp3.Request; import org.eclipse.edc.http.spi.EdcHttpClient; import org.eclipse.edc.spi.monitor.Monitor; +import org.jetbrains.annotations.NotNull; /** * This controller serves as the public endpoint for the data plane. Its endpoints are intended for consumers to download data @@ -37,18 +38,27 @@ public DataPlanePublicApiController(EdcHttpClient client, Monitor monitor) { @GET @Path("/source") - public String dataSource(@QueryParam("resource") String resource, @QueryParam("id") String id) { - // Use default values if parameters are not provided - if (resource == null || resource.isEmpty()) { - resource = "posts"; - } + public String dataSource() { + return downloadJsonFromUrl("https://jsonplaceholder.typicode.com/todos"); + } + - monitor.info("Data requested for resource: " + resource + ", id: " + id); + @GET + @Path("/source/{resource}") + public String dataSource(@PathParam("resource") String resource) { var formatted = "https://jsonplaceholder.typicode.com/%s".formatted(resource); - if (id != null && !id.isEmpty()) { - formatted += "/%s".formatted(id); - } + return downloadJsonFromUrl(formatted); + } + + @GET + @Path("/source/{resource}/{id}") + public String dataSource(@PathParam("resource") String resource, @PathParam("id") String id) { + var formatted = "https://jsonplaceholder.typicode.com/%s/%s".formatted(resource, id); + return downloadJsonFromUrl(formatted); + } + + private @NotNull String downloadJsonFromUrl(String formatted) { var request = new Request.Builder() .url(formatted) .get() diff --git 
a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java new file mode 100644 index 00000000..89b748b1 --- /dev/null +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2025 Think-it GmbH + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Think-it GmbH - initial API and implementation + * + */ + +package org.eclipse.edc.mvd.dataplane.signaling; + +import okhttp3.Request; +import org.eclipse.dataplane.domain.DataAddress; +import org.eclipse.dataplane.domain.Result; +import org.eclipse.dataplane.domain.dataflow.DataFlow; +import org.eclipse.edc.http.spi.EdcHttpClient; +import org.eclipse.edc.spi.monitor.Monitor; + +import java.net.URI; +import java.util.HashMap; +import java.util.Map; + +public class ConsumerDataHandler { + + private final EdcHttpClient httpClient; + private final Monitor monitor; + private final Map ongoingTransfers = new HashMap<>(); + + public ConsumerDataHandler(EdcHttpClient httpClient, Monitor monitor) { + this.httpClient = httpClient; + this.monitor = monitor; + } + + public Result storeDataflow(DataFlow dataFlow) { + if (dataFlow.getTransferType().equals("HttpData-PULL")) { + ongoingTransfers.put(dataFlow.getId(), dataFlow.getDataAddress()); + return Result.success(dataFlow); + } else { + monitor.warning("TransferType %s not supported".formatted(dataFlow.getTransferType())); + return Result.failure(new UnsupportedOperationException("TransferType %s not supported".formatted(dataFlow.getTransferType()))); + } + } + + public Result removeDataflow(DataFlow flow) { + 
ongoingTransfers.remove(flow.getId()); + return Result.success(null); + } + + public Result downloadData(String flowId) { + var dataAddress = ongoingTransfers.get(flowId); + + if (dataAddress == null) { + return Result.failure(new IllegalArgumentException("No data flow found for id %s".formatted(flowId))); + } + var sourceUri = URI.create(dataAddress.endpoint()); + var request = new Request.Builder().url(sourceUri.toString()).get().build(); + try (var response = httpClient.execute(request)) { + if (response.isSuccessful()) { + var body = response.body().string(); + monitor.info("Received data for data flow %s: %s".formatted(flowId, body)); + return Result.success(body); + } else { + return Result.failure(new IllegalArgumentException("Data source endpoint responded with %d".formatted(response.code()))); + } + } catch (RuntimeException e) { + return Result.failure(e); + } catch (Exception e) { + return Result.failure(new RuntimeException("Error retrieving data for data flow %s".formatted(flowId), e)); + } + + } + + public Map getAllFlows() { + return ongoingTransfers; + } +} diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/NoneAuthorization.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/NoneAuthorization.java new file mode 100644 index 00000000..90bee35b --- /dev/null +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/NoneAuthorization.java @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2026 Metaform Systems, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Metaform Systems, Inc. 
- initial API and implementation + * + */ + +package org.eclipse.edc.mvd.dataplane.signaling; + +import org.eclipse.dataplane.domain.Result; +import org.eclipse.dataplane.domain.registration.Authorization; + +public class NoneAuthorization implements Authorization { + @Override + public String type() { + return "none"; + } + + @Override + public Result authorizationHeader(org.eclipse.dataplane.domain.registration.AuthorizationProfile profile) { + return Result.success("Bearer dummy-token"); + } + + @Override + public Result extractCallerId(String authorizationHeader) { + return Result.success("anonymous"); + } +} diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/SignalingDataPlaneRuntimeExtension.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/SignalingDataPlaneRuntimeExtension.java deleted file mode 100644 index 80124e55..00000000 --- a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/SignalingDataPlaneRuntimeExtension.java +++ /dev/null @@ -1,233 +0,0 @@ -/* - * Copyright (c) 2025 Think-it GmbH - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0 - * - * SPDX-License-Identifier: Apache-2.0 - * - * Contributors: - * Think-it GmbH - initial API and implementation - * - */ - -package org.eclipse.edc.mvd.dataplane.signaling; - -import dev.failsafe.Failsafe; -import dev.failsafe.RetryPolicy; -import org.eclipse.dataplane.Dataplane; -import org.eclipse.dataplane.domain.DataAddress; -import org.eclipse.dataplane.domain.Result; -import org.eclipse.dataplane.domain.dataflow.DataFlow; -import org.eclipse.dataplane.domain.registration.Authorization; -import org.eclipse.dataplane.domain.registration.Oauth2ClientCredentialsAuthorization; -import org.eclipse.dataplane.logic.OnPrepare; -import org.eclipse.edc.http.spi.EdcHttpClient; -import 
org.eclipse.edc.mvd.dataplane.data.DataPlanePublicApiController; -import org.eclipse.edc.runtime.metamodel.annotation.Configuration; -import org.eclipse.edc.runtime.metamodel.annotation.Extension; -import org.eclipse.edc.runtime.metamodel.annotation.Inject; -import org.eclipse.edc.runtime.metamodel.annotation.Setting; -import org.eclipse.edc.runtime.metamodel.annotation.Settings; -import org.eclipse.edc.spi.monitor.Monitor; -import org.eclipse.edc.spi.system.Hostname; -import org.eclipse.edc.spi.system.ServiceExtension; -import org.eclipse.edc.spi.system.ServiceExtensionContext; -import org.eclipse.edc.web.spi.WebService; -import org.eclipse.edc.web.spi.configuration.ApiContext; -import org.eclipse.edc.web.spi.configuration.PortMapping; -import org.eclipse.edc.web.spi.configuration.PortMappingRegistry; -import org.jetbrains.annotations.NotNull; - -import java.net.URI; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpResponse; -import java.time.Duration; -import java.util.concurrent.CompletableFuture; - -import static java.net.http.HttpResponse.BodyHandlers.ofString; -import static java.util.Collections.emptyList; - -@Extension(value = SignalingDataPlaneRuntimeExtension.NAME) -public class SignalingDataPlaneRuntimeExtension implements ServiceExtension { - - public static final String NAME = "Signaling pata plane Extension"; - @Setting(key = "dataplane.id") - private String dataplaneId; - @Configuration - private SignalingApiConfig signalingApiConfig; - - @Configuration - private PublicApiConfig publicApiConfig; - - @Inject - private WebService webService; - @Inject - private Monitor monitor; - - private Dataplane dataplane; - @Inject - private PortMappingRegistry portMappingRegistry; - - @Inject - private Hostname hostname; - - @Inject - private EdcHttpClient httpClient; - - @Override - public String name() { - return NAME; - } - - @Override - public void initialize(ServiceExtensionContext context) { - var builder = 
Dataplane.newInstance() - .id(dataplaneId) - .registerAuthorization(new Oauth2ClientCredentialsAuthorization()) - .registerAuthorization(new Authorization() { - @Override - public String type() { - return "none"; - } - - @Override - public Result authorizationHeader(org.eclipse.dataplane.domain.registration.AuthorizationProfile profile) { - return Result.success("Bearer dummy-token"); - } - - @Override - public Result extractCallerId(String authorizationHeader) { - return Result.success("anonymous"); - } - }) - .endpoint(signalingApiConfig.dataFlowEndpoint(hostname.get())) - .transferType("HttpData-PULL") - .onPrepare(new DataplaneOnPrepare()) - .onStart(this::startDataFlow) - .onStarted(this::receiveDataFlow) - .onSuspend(this::stopDataFlow) - .onResume(flow -> flow.getType() == DataFlow.Type.PROVIDER - ? startDataFlow(flow) - : receiveDataFlow(flow)) - .onCompleted(Result::success) - .onTerminate(this::stopDataFlow); - - dataplane = builder.build(); - - var portMapping = new PortMapping(ApiContext.SIGNALING, signalingApiConfig.port(), signalingApiConfig.path()); - portMappingRegistry.register(portMapping); - - var publicPortMapping = new PortMapping("public", publicApiConfig.port(), publicApiConfig.path()); - portMappingRegistry.register(publicPortMapping); - - // var typeTransformerRegistry = transformerRegistry.forContext("signaling-api"); - // typeTransformerRegistry.register(new DataAddressToDspDataAddressTransformer()); - // typeTransformerRegistry.register(new DataFlowStatusMessageToDataFlowResponseTransformer()); - // typeTransformerRegistry.register(new DspDataAddressToDataAddressTransformer()); - - webService.registerResource(ApiContext.SIGNALING, dataplane.controller()); - webService.registerResource(ApiContext.SIGNALING, dataplane.registrationController()); - webService.registerResource("public", new DataPlanePublicApiController(httpClient, monitor)); - // webService.registerResource(new ControlController(monitor, dataplane, apiConfiguration)); - } - - 
private @NotNull Result startDataFlow(DataFlow dataFlow) { - switch (dataFlow.getTransferType()) { - case "NonFinite-PULL", "Finite-PULL", "HttpData-PULL" -> { - var dataAddress = new DataAddress(dataFlow.getTransferType(), "http", publicApiConfig.dataSourceEndpoint(hostname.get()), emptyList()); - dataFlow.setDataAddress(dataAddress); - return Result.success(dataFlow); - } - default -> { - return Result.failure(new RuntimeException("TransferType %s not supported".formatted(dataFlow.getTransferType()))); - } - } - } - - private Result receiveDataFlow(DataFlow dataFlow) { - if (dataFlow.getTransferType().equals("HttpData-PULL")) { - var sourceUri = URI.create(dataFlow.getDataAddress().endpoint()); - requestData(dataFlow, sourceUri) - .whenComplete((response, throwable) -> notifyCompletion(dataFlow, response, throwable)); - } else { - monitor.warning("TransferType %s not supported".formatted(dataFlow.getTransferType())); - } - - return Result.success(dataFlow); - } - - private CompletableFuture> requestData(DataFlow dataFlow, URI sourceUri) { - var request = HttpRequest.newBuilder(sourceUri).GET().build(); - return HttpClient.newHttpClient().sendAsync(request, ofString()).whenComplete((response, throwable) -> { - if (throwable == null) { - if (response.statusCode() == 200) { - var body = response.body(); - monitor.info("Received data for data flow %s: %s".formatted(dataFlow.getId(), body)); - } else { - monitor.severe("Error retrieving data: %s: %s".formatted(response.statusCode(), response.body())); - } - } else { - monitor.severe("Error retrieving data", throwable); - } - }); - } - - private static class DataplaneOnPrepare implements OnPrepare { - @Override - public Result action(DataFlow dataFlow) { - return dataFlow.getTransferType().equals("HttpData-PULL") - ? 
Result.success(dataFlow) - : Result.failure(new UnsupportedOperationException("unsupported transfer type: " + dataFlow.getTransferType())); - } - } - - private @NotNull Result stopDataFlow(DataFlow dataFlow) { - return Result.success(dataFlow); - } - - private void notifyCompletion(DataFlow dataFlow, HttpResponse response, Throwable throwable) { - if (throwable == null) { - var statusCode = response.statusCode(); - if (statusCode >= 200 && statusCode < 300) { - notifyCompleted(dataFlow); - } else { - dataplane.notifyErrored(dataFlow.getId(), new RuntimeException("Data source/destination endpoint responded with " + statusCode)); - } - } else { - dataplane.notifyErrored(dataFlow.getId(), throwable); - } - } - - private void notifyCompleted(DataFlow dataFlow) { - var retryPolicy = RetryPolicy.builder().withMaxRetries(5).withDelay(Duration.ofSeconds(1)).build(); - Failsafe.with(retryPolicy).run(context -> { - var notifyCompleted = dataplane.notifyCompleted(dataFlow.getId()); - if (notifyCompleted.failed()) { - throw new RuntimeException("Notify Completed failed: " + notifyCompleted); - } - }); - } - - @Settings - public record SignalingApiConfig( - @Setting(key = "web.http." + ApiContext.SIGNALING + ".path") String path, - @Setting(key = "web.http." 
+ ApiContext.SIGNALING + ".port") int port - ) { - - public URI dataFlowEndpoint(String hostname) { - return URI.create("http://%s:%d%s/v1/dataflows".formatted(hostname, port, path)); - } - } - - @Settings - public record PublicApiConfig( - @Setting(key = "web.http.public.path", defaultValue = "/api/public") String path, - @Setting(key = "web.http.public.port", defaultValue = "11002") int port) { - public String dataSourceEndpoint(String hostname) { - return "http://%s:%d%s/data/source".formatted(hostname, port, path); - } - } -} diff --git a/launchers/dataplane/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension b/launchers/dataplane/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension index bd60813a..31e0807f 100644 --- a/launchers/dataplane/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension +++ b/launchers/dataplane/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension @@ -14,4 +14,4 @@ org.eclipse.edc.mvd.dataplane.keyseed.KeySeedExtension -org.eclipse.edc.mvd.dataplane.signaling.SignalingDataPlaneRuntimeExtension \ No newline at end of file +org.eclipse.edc.mvd.dataplane.SignalingDataPlaneRuntimeExtension \ No newline at end of file From 39fa86e57d78682046414fb4bc3621fe5ee34270 Mon Sep 17 00:00:00 2001 From: Paul Latzelsperger Date: Mon, 11 May 2026 18:01:33 +0200 Subject: [PATCH 10/17] improve e2e test --- .../Get open dataflow by ID.bru | 37 ++++++++++++ .../application/issuerservice-seed-job.yaml | 4 +- .../data/ConsumerProxyController.java | 8 ++- .../signaling/ConsumerDataHandler.java | 4 ++ .../tests/transfer/TransferEndToEndTest.java | 57 +++++++++---------- 5 files changed, 77 insertions(+), 33 deletions(-) create mode 100644 Requests/ControlPlane Management/Get open dataflow by ID.bru diff --git a/Requests/ControlPlane Management/Get open dataflow by ID.bru b/Requests/ControlPlane Management/Get open dataflow by ID.bru new file mode 100644 
index 00000000..8d452183 --- /dev/null +++ b/Requests/ControlPlane Management/Get open dataflow by ID.bru @@ -0,0 +1,37 @@ +meta { + name: Get open dataflow by ID + type: http + seq: 9 +} + +get { + url: {{CONSUMER_DP}}/api/proxy/flows + body: json + auth: none +} + +body:text { + { + "@context": [ + "https://w3id.org/edc/connector/management/v2" + ], + "@type": "QuerySpec" + } +} + +script:post-response { + const body = res.getBody() + const map = new Map(Object.entries(body)); + // just use the first entry in the map + const [flowId, address] = map.entries().next().value; + + expect(flowId).not.to.be.undefined + bru.setVar("flowId",flowId) + + +} + +settings { + encodeUrl: true + timeout: 0 +} diff --git a/k8s/issuer/application/issuerservice-seed-job.yaml b/k8s/issuer/application/issuerservice-seed-job.yaml index fabcfcf5..32e24f35 100644 --- a/k8s/issuer/application/issuerservice-seed-job.yaml +++ b/k8s/issuer/application/issuerservice-seed-job.yaml @@ -22,7 +22,7 @@ metadata: labels: app: issuerservice-seed platform: edcv - type: edcv-job + type: edc-job spec: backoffLimit: 5 template: @@ -30,7 +30,7 @@ spec: labels: app: issuerservice-seed platform: edcv - type: edcv-job + type: edc-job spec: restartPolicy: OnFailure initContainers: diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/ConsumerProxyController.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/ConsumerProxyController.java index fcc8ac0f..c55a7615 100644 --- a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/ConsumerProxyController.java +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/ConsumerProxyController.java @@ -35,11 +35,17 @@ public ConsumerProxyController(ConsumerDataHandler consumerDataHandler) { } @GET - @Path("/{flowId}") + @Path("/{flowId}/data") public String handleDataFlow(@PathParam("flowId") String flowId) { return 
consumerDataHandler.downloadData(flowId).orElseThrow(ExceptionMapper.MAP_TO_WSRS); } + @GET + @Path("/{flowId}") + public DataAddress getFlowMetadata(@PathParam("flowId") String flowId) { + return consumerDataHandler.getFlow(flowId); + } + @GET public Map getAll() { return consumerDataHandler.getAllFlows(); diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java index 89b748b1..0521b217 100644 --- a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java @@ -78,4 +78,8 @@ public Result downloadData(String flowId) { public Map getAllFlows() { return ongoingTransfers; } + + public DataAddress getFlow(String flowId) { + return ongoingTransfers.get(flowId); + } } diff --git a/tests/end2end/src/test/java/org/eclipse/edc/demo/tests/transfer/TransferEndToEndTest.java b/tests/end2end/src/test/java/org/eclipse/edc/demo/tests/transfer/TransferEndToEndTest.java index 26ad37e8..b43de67f 100644 --- a/tests/end2end/src/test/java/org/eclipse/edc/demo/tests/transfer/TransferEndToEndTest.java +++ b/tests/end2end/src/test/java/org/eclipse/edc/demo/tests/transfer/TransferEndToEndTest.java @@ -45,14 +45,11 @@ public class TransferEndToEndTest { // Management API base URL of the consumer connector, goes through Ingress controller private static final String CONSUMER_MANAGEMENT_URL = "http://cp.consumer.localhost:8080"; - // Catalog Query API URL of the consumer connector, goes through ingress controller - private static final String CONSUMER_CATALOG_URL = "http://127.0.0.1/consumer/fc"; // DSP service URL of the provider, not reachable outside the cluster private static final String PROVIDER_DSP_URL = "http://controlplane.provider.svc.cluster.local:8082/api/dsp/2025-1"; // DID of the 
provider company private static final String PROVIDER_ID = "did:web:identityhub.provider.svc.cluster.local%3A7083:provider"; - // public API endpoint of the provider-qna connector, goes through the ingress controller - private static final String PROVIDER_PUBLIC_URL = "http://dp.provider.localhost:8080/public"; + private static final String CONSUMER_PROXY_URL = "http://dp.consumer.localhost:8080/api/proxy"; private static final String PROVIDER_MANAGEMENT_URL = "http://cp.provider.localhost:8080"; private final TypeTransformerRegistry transformerRegistry = new TypeTransformerRegistryImpl(); @@ -69,23 +66,23 @@ private static RequestSpecification baseRequest() { @DisplayName("Tests a successful End-to-End contract negotiation and data transfer") @Test void transferData_hasPermission_shouldTransferData() { - System.out.println("Waiting for Provider dataplane to come online"); - // wait until provider's dataplane is available - await().atMost(TEST_TIMEOUT_DURATION) - .pollDelay(TEST_POLL_DELAY) - .untilAsserted(() -> { - var jp = baseRequest() - .get(PROVIDER_MANAGEMENT_URL + "/api/mgmt/v4/dataplanes") - .then() - .statusCode(200) - .log().ifValidationFails() - .extract().body().jsonPath(); - - var state = jp.getString("state"); - assertThat(state).isEqualTo("[REGISTERED]"); - }); - - System.out.println("Provider dataplane is online, fetching catalog"); +// System.out.println("Waiting for Provider dataplane to come online"); +// // wait until provider's dataplane is available +// await().atMost(TEST_TIMEOUT_DURATION) +// .pollDelay(TEST_POLL_DELAY) +// .untilAsserted(() -> { +// var jp = baseRequest() +// .get(PROVIDER_MANAGEMENT_URL + "/api/mgmt/v4/dataplanes") +// .then() +// .statusCode(200) +// .log().ifValidationFails() +// .extract().body().jsonPath(); +// +// var state = jp.getString("state"); +// assertThat(state).isEqualTo("[REGISTERED]"); +// }); +// +// System.out.println("Provider dataplane is online, fetching catalog"); var catalogRequestBody = 
Json.createObjectBuilder() .add("@context", Json.createObjectBuilder().add("edc", "https://w3id.org/edc/connector/management/v2")) @@ -178,7 +175,7 @@ void transferData_hasPermission_shouldTransferData() { assertThat(jp.getString("state")).contains("STARTED"); }); - System.out.printf("Fetch EDR with ID %s%n", transferProcessId); + System.out.printf("Fetch dataflow with ID %s from custom proxy%n", transferProcessId); // fetch EDR for transfer processs var endpoint = new AtomicReference(); var token = new AtomicReference(); @@ -186,28 +183,28 @@ void transferData_hasPermission_shouldTransferData() { .pollDelay(TEST_POLL_DELAY) .untilAsserted(() -> { var jp = baseRequest() - .get(CONSUMER_MANAGEMENT_URL + "/api/mgmt/v3/edrs/%s/dataaddress".formatted(transferProcessId)) + .get(CONSUMER_PROXY_URL + "/flows/%s".formatted(transferProcessId)) .then() .log().ifValidationFails() .statusCode(200) - .onFailMessage("Expected to find an EDR with transfer ID %s but did not!".formatted(transferProcessId)) + .onFailMessage("Expected to find a DataFlow ID %s but did not!".formatted(transferProcessId)) .extract().body().jsonPath(); endpoint.set(jp.getString("endpoint")); - token.set(jp.getString("authorization")); +// token.set(jp.getString("authorization")); - assertThat(endpoint.get()).isNotNull().endsWith("/api/public"); - assertThat(token.get()).isNotNull(); + assertThat(endpoint.get()).isNotNull().endsWith("/api/public/data/source"); +// assertThat(token.get()).isNotNull(); }); //download exemplary JSON data from public endpoint var response = given() - .header("Authorization", token.get()) - .get(PROVIDER_PUBLIC_URL + "/api/public") +// .header("Authorization", token.get()) + .get(CONSUMER_PROXY_URL + "/flows/%s/data".formatted(transferProcessId)) .then() .log().ifError() .statusCode(200) - .extract().body().asString(); + .extract().body().as(Object[].class); assertThat(response).isNotEmpty(); } From 0f0f1f6297db2ad52cba2ff94f79cc3efd0bdc85 Mon Sep 17 00:00:00 2001 From: Paul 
Latzelsperger Date: Mon, 11 May 2026 18:30:34 +0200 Subject: [PATCH 11/17] add simple token auth --- .../Download Data from Public API.bru | 6 +-- .../Get open dataflow by ID.bru | 14 +++---- .../Get open dataflows.bru | 8 +++- .../SignalingDataPlaneRuntimeExtension.java | 7 +++- .../data/DataPlanePublicApiController.java | 33 +++++++++++---- .../signaling/ConsumerDataHandler.java | 10 ++++- .../tests/transfer/TransferEndToEndTest.java | 42 ++----------------- 7 files changed, 60 insertions(+), 60 deletions(-) diff --git a/Requests/ControlPlane Management/Download Data from Public API.bru b/Requests/ControlPlane Management/Download Data from Public API.bru index 76675831..ed9bb3f9 100644 --- a/Requests/ControlPlane Management/Download Data from Public API.bru +++ b/Requests/ControlPlane Management/Download Data from Public API.bru @@ -1,17 +1,17 @@ meta { name: Download Data from Public API type: http - seq: 8 + seq: 9 } get { - url: {{CONSUMER_DP}}/api/proxy/flows/{{flowId}} + url: {{CONSUMER_DP}}/api/proxy/flows/{{flowId}}/data body: none auth: none } headers { - Authorization: {{AUTHORIZATION}} + ~Authorization: {{accessToken}} } script:post-response { diff --git a/Requests/ControlPlane Management/Get open dataflow by ID.bru b/Requests/ControlPlane Management/Get open dataflow by ID.bru index 8d452183..4ef66e70 100644 --- a/Requests/ControlPlane Management/Get open dataflow by ID.bru +++ b/Requests/ControlPlane Management/Get open dataflow by ID.bru @@ -1,11 +1,11 @@ meta { name: Get open dataflow by ID type: http - seq: 9 + seq: 8 } get { - url: {{CONSUMER_DP}}/api/proxy/flows + url: {{CONSUMER_DP}}/api/proxy/flows/{{flowId}} body: json auth: none } @@ -21,12 +21,12 @@ body:text { script:post-response { const body = res.getBody() - const map = new Map(Object.entries(body)); - // just use the first entry in the map - const [flowId, address] = map.entries().next().value; - expect(flowId).not.to.be.undefined - bru.setVar("flowId",flowId) + const token = 
body.endpointProperties.find(obj => obj.name === "access_token"); + expect(token).not.to.be.undefined + console.log(token.value) + bru.setVar("accessToken", token.value) + } diff --git a/Requests/ControlPlane Management/Get open dataflows.bru b/Requests/ControlPlane Management/Get open dataflows.bru index d12e9121..cc8a0490 100644 --- a/Requests/ControlPlane Management/Get open dataflows.bru +++ b/Requests/ControlPlane Management/Get open dataflows.bru @@ -26,9 +26,15 @@ script:post-response { const [flowId, address] = map.entries().next().value; expect(flowId).not.to.be.undefined - bru.setVar("flowId",flowId) + bru.setVar("flowId", flowId) + console.log(flowId) + const token = address.endpointProperties.find(obj => obj.name === "access_token"); + expect(token).not.to.be.undefined + console.log(token.value) + bru.setVar("accessToken", token.value) + } settings { diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/SignalingDataPlaneRuntimeExtension.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/SignalingDataPlaneRuntimeExtension.java index e56527d8..f37f0d88 100644 --- a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/SignalingDataPlaneRuntimeExtension.java +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/SignalingDataPlaneRuntimeExtension.java @@ -40,6 +40,7 @@ import org.jetbrains.annotations.NotNull; import java.net.URI; +import java.util.List; import static java.util.Collections.emptyList; @@ -115,7 +116,7 @@ public void initialize(ServiceExtensionContext context) { webService.registerResource(ApiContext.SIGNALING, dataplane.controller()); webService.registerResource(ApiContext.SIGNALING, dataplane.registrationController()); - webService.registerResource(APICONTEXT_PUBLIC, new DataPlanePublicApiController(httpClient, monitor)); + webService.registerResource(APICONTEXT_PUBLIC, new DataPlanePublicApiController(httpClient, monitor, "foobar")); 
webService.registerResource(APICONTEXT_PROXY, new ConsumerProxyController(dataFetcher)); } @@ -135,7 +136,9 @@ private Result prepare(DataFlow dataFlow) { private @NotNull Result startDataFlow(DataFlow dataFlow) { switch (dataFlow.getTransferType()) { case "NonFinite-PULL", "Finite-PULL", "HttpData-PULL" -> { - var dataAddress = new DataAddress(dataFlow.getTransferType(), "http", publicApiConfig.dataSourceEndpoint(hostname.get()), emptyList()); + var dataAddress = new DataAddress(dataFlow.getTransferType(), "http", + publicApiConfig.dataSourceEndpoint(hostname.get()), + List.of(new DataAddress.EndpointProperty("authorization", "access_token", "foobar"))); dataFlow.setDataAddress(dataAddress); return Result.success(dataFlow); } diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java index b40da0f3..9476f5e3 100644 --- a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java @@ -17,6 +17,9 @@ import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.HttpHeaders; +import jakarta.ws.rs.core.Response; import okhttp3.Request; import org.eclipse.edc.http.spi.EdcHttpClient; import org.eclipse.edc.spi.monitor.Monitor; @@ -30,32 +33,48 @@ public class DataPlanePublicApiController { private final EdcHttpClient client; private final Monitor monitor; + private final String expectedAuthHeader; - public DataPlanePublicApiController(EdcHttpClient client, Monitor monitor) { + public DataPlanePublicApiController(EdcHttpClient client, Monitor monitor, String expectedAuthHeader) { this.client = client; this.monitor = monitor; + this.expectedAuthHeader = expectedAuthHeader; } @GET 
@Path("/source") - public String dataSource() { - return downloadJsonFromUrl("https://jsonplaceholder.typicode.com/todos"); + public Response dataSource(@Context HttpHeaders headers) { + if (!isAuthorized(headers)) { + return Response.status(Response.Status.UNAUTHORIZED).build(); + } + return Response.ok(downloadJsonFromUrl("https://jsonplaceholder.typicode.com/todos")).build(); } @GET @Path("/source/{resource}") - public String dataSource(@PathParam("resource") String resource) { + public Response dataSource(@PathParam("resource") String resource, @Context HttpHeaders headers) { + if (!isAuthorized(headers)) { + return Response.status(Response.Status.UNAUTHORIZED).build(); + } var formatted = "https://jsonplaceholder.typicode.com/%s".formatted(resource); - return downloadJsonFromUrl(formatted); + return Response.ok(downloadJsonFromUrl(formatted)).build(); } @GET @Path("/source/{resource}/{id}") - public String dataSource(@PathParam("resource") String resource, @PathParam("id") String id) { + public Response dataSource(@PathParam("resource") String resource, @PathParam("id") String id, @Context HttpHeaders headers) { + if (!isAuthorized(headers)) { + return Response.status(Response.Status.UNAUTHORIZED).build(); + } var formatted = "https://jsonplaceholder.typicode.com/%s/%s".formatted(resource, id); - return downloadJsonFromUrl(formatted); + return Response.ok(downloadJsonFromUrl(formatted)).build(); + } + + private boolean isAuthorized(HttpHeaders headers) { + var authHeader = headers.getHeaderString(HttpHeaders.AUTHORIZATION); + return expectedAuthHeader.equals(authHeader); } private @NotNull String downloadJsonFromUrl(String formatted) { diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java index 0521b217..5e610b81 100644 --- 
a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java @@ -58,7 +58,15 @@ public Result downloadData(String flowId) { return Result.failure(new IllegalArgumentException("No data flow found for id %s".formatted(flowId))); } var sourceUri = URI.create(dataAddress.endpoint()); - var request = new Request.Builder().url(sourceUri.toString()).get().build(); + var token = dataAddress.endpointProperties().stream().filter(ep -> ep.name().equals("access_token")) + .findFirst() + .map(DataAddress.EndpointProperty::value) + .orElseThrow(() -> new IllegalArgumentException("No access token found in data address properties")); + var request = new Request.Builder() + .addHeader("Authorization", token) + .url(sourceUri.toString()) + .get() + .build(); try (var response = httpClient.execute(request)) { if (response.isSuccessful()) { var body = response.body().string(); diff --git a/tests/end2end/src/test/java/org/eclipse/edc/demo/tests/transfer/TransferEndToEndTest.java b/tests/end2end/src/test/java/org/eclipse/edc/demo/tests/transfer/TransferEndToEndTest.java index b43de67f..cc6bc626 100644 --- a/tests/end2end/src/test/java/org/eclipse/edc/demo/tests/transfer/TransferEndToEndTest.java +++ b/tests/end2end/src/test/java/org/eclipse/edc/demo/tests/transfer/TransferEndToEndTest.java @@ -66,24 +66,6 @@ private static RequestSpecification baseRequest() { @DisplayName("Tests a successful End-to-End contract negotiation and data transfer") @Test void transferData_hasPermission_shouldTransferData() { -// System.out.println("Waiting for Provider dataplane to come online"); -// // wait until provider's dataplane is available -// await().atMost(TEST_TIMEOUT_DURATION) -// .pollDelay(TEST_POLL_DELAY) -// .untilAsserted(() -> { -// var jp = baseRequest() -// .get(PROVIDER_MANAGEMENT_URL + "/api/mgmt/v4/dataplanes") -// .then() -// .statusCode(200) 
-// .log().ifValidationFails() -// .extract().body().jsonPath(); -// -// var state = jp.getString("state"); -// assertThat(state).isEqualTo("[REGISTERED]"); -// }); -// -// System.out.println("Provider dataplane is online, fetching catalog"); - var catalogRequestBody = Json.createObjectBuilder() .add("@context", Json.createObjectBuilder().add("edc", "https://w3id.org/edc/connector/management/v2")) .add("@type", "CatalogRequest") @@ -191,15 +173,15 @@ void transferData_hasPermission_shouldTransferData() { .extract().body().jsonPath(); endpoint.set(jp.getString("endpoint")); -// token.set(jp.getString("authorization")); + token.set(jp.get("endpointProperties.find { it.name == 'access_token' }.value")); assertThat(endpoint.get()).isNotNull().endsWith("/api/public/data/source"); -// assertThat(token.get()).isNotNull(); + assertThat(token.get()).isNotNull(); }); //download exemplary JSON data from public endpoint var response = given() -// .header("Authorization", token.get()) + .header("Authorization", token.get()) .get(CONSUMER_PROXY_URL + "/flows/%s/data".formatted(transferProcessId)) .then() .log().ifError() @@ -212,24 +194,6 @@ void transferData_hasPermission_shouldTransferData() { @DisplayName("Tests a failing End-to-End contract negotiation because of an unfulfilled policy") @Test void transferData_doesNotHavePermission_shouldTerminate() { - System.out.println("Waiting for Provider dataplane to come online"); - // wait until provider's dataplane is available - await().atMost(TEST_TIMEOUT_DURATION) - .pollDelay(TEST_POLL_DELAY) - .untilAsserted(() -> { - var jp = baseRequest() - .get(PROVIDER_MANAGEMENT_URL + "/api/mgmt/v4/dataplanes") - .then() - .statusCode(200) - .log().ifValidationFails() - .extract().body().jsonPath(); - - var state = jp.getString("state"); - assertThat(state).isEqualTo("[REGISTERED]"); - }); - - System.out.println("Provider dataplane is online, fetching catalog"); - var catalogRequestBody = Json.createObjectBuilder() .add("@context", 
Json.createObjectBuilder().add("edc", "https://w3id.org/edc/connector/management/v2")) .add("@type", "CatalogRequest") From 0ac7dacee9b754ad416eb47ac4b1eb1600a890bb Mon Sep 17 00:00:00 2001 From: Paul Latzelsperger Date: Mon, 11 May 2026 19:28:23 +0200 Subject: [PATCH 12/17] remove unneeded modules --- .../data-plane-public-api-v2/build.gradle.kts | 40 ---- .../api/DataPlanePublicApiV2Extension.java | 113 --------- .../ContainerRequestContextApi.java | 68 ------ .../ContainerRequestContextApiImpl.java | 106 -------- .../controller/DataFlowRequestSupplier.java | 73 ------ .../api/controller/DataPlanePublicApiV2.java | 90 ------- .../DataPlanePublicApiV2Controller.java | 184 -------------- ...rg.eclipse.edc.spi.system.ServiceExtension | 1 - .../main/resources/public-api-version.json | 8 - .../DataFlowStartMessageSupplierTest.java | 94 -------- .../DataPlanePublicApiV2ControllerTest.java | 226 ------------------ .../data-plane-registration/build.gradle.kts | 36 --- .../api/DataplaneRegistrationExtension.java | 48 ---- .../DataplaneRegistrationApiController.java | 49 ---- ...rg.eclipse.edc.spi.system.ServiceExtension | 1 - .../SignalingDataPlaneRuntimeExtension.java | 2 - settings.gradle.kts | 2 - 17 files changed, 1141 deletions(-) delete mode 100644 extensions/data-plane-public-api-v2/build.gradle.kts delete mode 100644 extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/DataPlanePublicApiV2Extension.java delete mode 100644 extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/ContainerRequestContextApi.java delete mode 100644 extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/ContainerRequestContextApiImpl.java delete mode 100644 extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataFlowRequestSupplier.java delete mode 100644 
extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataPlanePublicApiV2.java delete mode 100644 extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataPlanePublicApiV2Controller.java delete mode 100644 extensions/data-plane-public-api-v2/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension delete mode 100644 extensions/data-plane-public-api-v2/src/main/resources/public-api-version.json delete mode 100644 extensions/data-plane-public-api-v2/src/test/java/org/eclipse/edc/connector/dataplane/api/controller/DataFlowStartMessageSupplierTest.java delete mode 100644 extensions/data-plane-public-api-v2/src/test/java/org/eclipse/edc/connector/dataplane/api/controller/DataPlanePublicApiV2ControllerTest.java delete mode 100644 extensions/data-plane-registration/build.gradle.kts delete mode 100644 extensions/data-plane-registration/src/main/java/org/eclipse/edc/connector/dataplane/api/DataplaneRegistrationExtension.java delete mode 100644 extensions/data-plane-registration/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataplaneRegistrationApiController.java delete mode 100644 extensions/data-plane-registration/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension diff --git a/extensions/data-plane-public-api-v2/build.gradle.kts b/extensions/data-plane-public-api-v2/build.gradle.kts deleted file mode 100644 index 78db2ebe..00000000 --- a/extensions/data-plane-public-api-v2/build.gradle.kts +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2025 Metaform Systems, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0 - * - * SPDX-License-Identifier: Apache-2.0 - * - * Contributors: - * Metaform Systems, Inc. 
- initial API and implementation - * - */ - -plugins { - `java-library` - id(libs.plugins.swagger.get().pluginId) -} - -dependencies { - api(libs.edc.spi.http) - api(libs.edc.spi.web) - api(libs.edc.spi.dataplane) - implementation(libs.edc.lib.util) - implementation(libs.edc.lib.util.dataplane) - implementation(libs.jakarta.rsApi) - - testImplementation(libs.edc.lib.http) - testImplementation(libs.edc.junit) - testImplementation(libs.restAssured) - testImplementation(testFixtures(libs.edc.core.jersey)) - -} -edcBuild { - swagger { - apiGroup.set("public-api") - } -} - - diff --git a/extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/DataPlanePublicApiV2Extension.java b/extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/DataPlanePublicApiV2Extension.java deleted file mode 100644 index 612e0256..00000000 --- a/extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/DataPlanePublicApiV2Extension.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright (c) 2025 Metaform Systems, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0 - * - * SPDX-License-Identifier: Apache-2.0 - * - * Contributors: - * Metaform Systems, Inc. 
- initial API and implementation - * - */ - -package org.eclipse.edc.connector.dataplane.api; - -import org.eclipse.edc.connector.dataplane.api.controller.DataPlanePublicApiV2Controller; -import org.eclipse.edc.connector.dataplane.spi.Endpoint; -import org.eclipse.edc.connector.dataplane.spi.iam.DataPlaneAuthorizationService; -import org.eclipse.edc.connector.dataplane.spi.iam.PublicEndpointGeneratorService; -import org.eclipse.edc.connector.dataplane.spi.pipeline.PipelineService; -import org.eclipse.edc.runtime.metamodel.annotation.Configuration; -import org.eclipse.edc.runtime.metamodel.annotation.Extension; -import org.eclipse.edc.runtime.metamodel.annotation.Inject; -import org.eclipse.edc.runtime.metamodel.annotation.Setting; -import org.eclipse.edc.runtime.metamodel.annotation.Settings; -import org.eclipse.edc.spi.system.ExecutorInstrumentation; -import org.eclipse.edc.spi.system.Hostname; -import org.eclipse.edc.spi.system.ServiceExtension; -import org.eclipse.edc.spi.system.ServiceExtensionContext; -import org.eclipse.edc.web.spi.WebService; -import org.eclipse.edc.web.spi.configuration.PortMapping; -import org.eclipse.edc.web.spi.configuration.PortMappingRegistry; - -import java.util.concurrent.Executors; - -/** - * This extension provides generic endpoints which are open to public participants of the Dataspace to execute - * requests on the actual data source. - */ -@Extension(value = DataPlanePublicApiV2Extension.NAME) -public class DataPlanePublicApiV2Extension implements ServiceExtension { - public static final String NAME = "Data Plane Public API"; - - public static final String API_CONTEXT = "public"; - private static final int DEFAULT_PUBLIC_PORT = 8185; - private static final String DEFAULT_PUBLIC_PATH = "/api/public"; - private static final int DEFAULT_THREAD_POOL = 10; - @Setting(description = "Base url of the public API endpoint without the trailing slash. 
This should point to the public endpoint configured.", - required = false, - key = "edc.dataplane.api.public.baseurl", warnOnMissingConfig = true) - private String publicBaseUrl; - @Setting(description = "Optional base url of the response channel endpoint without the trailing slash. A common practice is to use /responseChannel", key = "edc.dataplane.api.public.response.baseurl", required = false) - private String publicApiResponseUrl; - @Configuration - private PublicApiConfiguration apiConfiguration; - @Inject - private PortMappingRegistry portMappingRegistry; - @Inject - private PipelineService pipelineService; - @Inject - private WebService webService; - @Inject - private ExecutorInstrumentation executorInstrumentation; - @Inject - private DataPlaneAuthorizationService authorizationService; - @Inject - private PublicEndpointGeneratorService generatorService; - @Inject - private Hostname hostname; - - @Override - public String name() { - return NAME; - } - - @Override - public void initialize(ServiceExtensionContext context) { - context.getMonitor().warning("The `data-plane-public-api-v2` has been deprecated, please provide an" + - "alternative implementation for Http Proxy if needed"); - - var portMapping = new PortMapping(API_CONTEXT, apiConfiguration.port(), apiConfiguration.path()); - portMappingRegistry.register(portMapping); - var executorService = executorInstrumentation.instrument( - Executors.newFixedThreadPool(DEFAULT_THREAD_POOL), - "Data plane proxy transfers" - ); - - if (publicBaseUrl == null) { - publicBaseUrl = "http://%s:%d%s".formatted(hostname.get(), portMapping.port(), portMapping.path()); - context.getMonitor().warning("The public API endpoint was not explicitly configured, the default '%s' will be used.".formatted(publicBaseUrl)); - } - var endpoint = Endpoint.url(publicBaseUrl); - generatorService.addGeneratorFunction("HttpData", dataAddress -> endpoint); - - if (publicApiResponseUrl != null) { - 
generatorService.addResponseGeneratorFunction("HttpData", () -> Endpoint.url(publicApiResponseUrl)); - } - - var publicApiController = new DataPlanePublicApiV2Controller(pipelineService, executorService, authorizationService); - webService.registerResource(API_CONTEXT, publicApiController); - } - - @Settings - record PublicApiConfiguration( - @Setting(key = "web.http." + API_CONTEXT + ".port", description = "Port for " + API_CONTEXT + " api context", defaultValue = DEFAULT_PUBLIC_PORT + "") - int port, - @Setting(key = "web.http." + API_CONTEXT + ".path", description = "Path for " + API_CONTEXT + " api context", defaultValue = DEFAULT_PUBLIC_PATH) - String path - ) { - - } -} diff --git a/extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/ContainerRequestContextApi.java b/extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/ContainerRequestContextApi.java deleted file mode 100644 index 5bc60505..00000000 --- a/extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/ContainerRequestContextApi.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2025 Metaform Systems, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0 - * - * SPDX-License-Identifier: Apache-2.0 - * - * Contributors: - * Metaform Systems, Inc. - initial API and implementation - * - */ - -package org.eclipse.edc.connector.dataplane.api.controller; - -import jakarta.ws.rs.container.ContainerRequestContext; - -import java.util.Map; - -/** - * Wrapper around {@link ContainerRequestContext} enabling mocking. - */ -public interface ContainerRequestContextApi { - - /** - * Get the request headers. Note that if more than one value is associated to a specific header, - * only the first one is retained. 
- * - * @return Headers map. - */ - Map headers(); - - /** - * Format query of the request as string, e.g. "hello=world\&foo=bar". - * - * @return Query param string. - */ - String queryParams(); - - /** - * Format the request body into a string. - * - * @return Request body. - */ - String body(); - - /** - * Get the media type from incoming request. - * - * @return Media type. - */ - String mediaType(); - - /** - * Return request path, e.g. "hello/world/foo/bar". - * - * @return Path string. - */ - String path(); - - /** - * Get http method from the incoming request, e.g. "GET", "POST"... - * - * @return Http method. - */ - String method(); -} diff --git a/extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/ContainerRequestContextApiImpl.java b/extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/ContainerRequestContextApiImpl.java deleted file mode 100644 index 6bcb5ba6..00000000 --- a/extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/ContainerRequestContextApiImpl.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright (c) 2025 Metaform Systems, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0 - * - * SPDX-License-Identifier: Apache-2.0 - * - * Contributors: - * Metaform Systems, Inc. 
- initial API and implementation - * - */ - -package org.eclipse.edc.connector.dataplane.api.controller; - -import jakarta.ws.rs.container.ContainerRequestContext; -import jakarta.ws.rs.core.MediaType; -import org.eclipse.edc.spi.EdcException; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.util.Map; -import java.util.Optional; -import java.util.stream.Collectors; - -/** - * This class provides a set of API wrapping a {@link ContainerRequestContext}. - */ -public class ContainerRequestContextApiImpl implements ContainerRequestContextApi { - - private static final String QUERY_PARAM_SEPARATOR = "&"; - - private final ContainerRequestContext context; - - public ContainerRequestContextApiImpl(ContainerRequestContext context) { - this.context = context; - } - - @Override - public Map headers() { - return context.getHeaders().entrySet() - .stream() - .filter(entry -> !entry.getValue().isEmpty()) - .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().get(0))); - } - - @Override - public String queryParams() { - return context.getUriInfo().getQueryParameters().entrySet() - .stream() - .flatMap(entry -> entry.getValue().stream().map(val -> new QueryParam(entry.getKey(), val))) - .map(QueryParam::toString) - .collect(Collectors.joining(QUERY_PARAM_SEPARATOR)); - } - - @Override - public String body() { - try (BufferedReader br = new BufferedReader(new InputStreamReader(context.getEntityStream()))) { - return br.lines().collect(Collectors.joining("\n")); - } catch (IOException e) { - throw new EdcException("Failed to read request body: " + e.getMessage()); - } - } - - @Override - public String mediaType() { - return Optional.ofNullable(context.getMediaType()) - .map(MediaType::toString) - .orElse(null); - } - - @Override - public String path() { - var pathInfo = context.getUriInfo().getPath(); - return pathInfo.startsWith("/") ? 
pathInfo.substring(1) : pathInfo; - } - - @Override - public String method() { - return context.getMethod(); - } - - private static final class QueryParam { - - private final String key; - private final String values; - private final boolean valid; - - private QueryParam(String key, String values) { - this.key = key; - this.values = values; - this.valid = key != null && values != null && !values.isEmpty(); - } - - public boolean isValid() { - return valid; - } - - @Override - public String toString() { - return valid ? key + "=" + values : ""; - } - } -} diff --git a/extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataFlowRequestSupplier.java b/extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataFlowRequestSupplier.java deleted file mode 100644 index c3c1aa7b..00000000 --- a/extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataFlowRequestSupplier.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2025 Metaform Systems, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0 - * - * SPDX-License-Identifier: Apache-2.0 - * - * Contributors: - * Metaform Systems, Inc. 
- initial API and implementation - * - */ - -package org.eclipse.edc.connector.dataplane.api.controller; - -import org.eclipse.edc.connector.dataplane.util.sink.AsyncStreamingDataSink; -import org.eclipse.edc.spi.types.domain.DataAddress; -import org.eclipse.edc.spi.types.domain.transfer.DataFlowStartMessage; -import org.eclipse.edc.spi.types.domain.transfer.FlowType; - -import java.util.HashMap; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import java.util.function.BiFunction; - -import static org.eclipse.edc.connector.dataplane.spi.schema.DataFlowRequestSchema.BODY; -import static org.eclipse.edc.connector.dataplane.spi.schema.DataFlowRequestSchema.MEDIA_TYPE; -import static org.eclipse.edc.connector.dataplane.spi.schema.DataFlowRequestSchema.METHOD; -import static org.eclipse.edc.connector.dataplane.spi.schema.DataFlowRequestSchema.PATH; -import static org.eclipse.edc.connector.dataplane.spi.schema.DataFlowRequestSchema.QUERY_PARAMS; - -public class DataFlowRequestSupplier implements BiFunction { - - /** - * Put all properties of the incoming request (method, request body, query params...) into a map. - */ - private static Map createProps(ContainerRequestContextApi contextApi) { - var props = new HashMap(); - props.put(METHOD, contextApi.method()); - props.put(QUERY_PARAMS, contextApi.queryParams()); - props.put(PATH, contextApi.path()); - Optional.ofNullable(contextApi.mediaType()) - .ifPresent(mediaType -> { - props.put(MEDIA_TYPE, mediaType); - props.put(BODY, contextApi.body()); - }); - return props; - } - - /** - * Create a {@link DataFlowStartMessage} based on incoming request and claims decoded from the access token. - * - * @param contextApi Api for accessing request properties. - * @param dataAddress Source data address. 
- * @return DataFlowRequest - */ - @Override - public DataFlowStartMessage apply(ContainerRequestContextApi contextApi, DataAddress dataAddress) { - var props = createProps(contextApi); - return DataFlowStartMessage.Builder.newInstance() - .processId(UUID.randomUUID().toString()) - .sourceDataAddress(dataAddress) - .flowType(FlowType.PULL) // if a request hits the public DP API, we can assume a PULL transfer - .destinationDataAddress(DataAddress.Builder.newInstance() - .type(AsyncStreamingDataSink.TYPE) - .build()) - .id(UUID.randomUUID().toString()) - .properties(props) - .build(); - } -} diff --git a/extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataPlanePublicApiV2.java b/extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataPlanePublicApiV2.java deleted file mode 100644 index aea4942e..00000000 --- a/extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataPlanePublicApiV2.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2025 Metaform Systems, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0 - * - * SPDX-License-Identifier: Apache-2.0 - * - * Contributors: - * Metaform Systems, Inc. 
- initial API and implementation - * - */ - -package org.eclipse.edc.connector.dataplane.api.controller; - -import io.swagger.v3.oas.annotations.OpenAPIDefinition; -import io.swagger.v3.oas.annotations.Operation; -import io.swagger.v3.oas.annotations.responses.ApiResponse; -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.ws.rs.container.AsyncResponse; -import jakarta.ws.rs.container.ContainerRequestContext; - -@OpenAPIDefinition -@Tag(name = "Data Plane public API", - description = "The public API of the Data Plane is a data proxy enabling a data consumer to actively query" + - "data from the provider data source (e.g. backend Rest API, internal database...) through its Data Plane" + - "instance. Thus the Data Plane is the only entry/output door for the data, which avoids the provider to expose" + - "directly its data externally." + - "The Data Plane public API being a proxy, it supports all verbs (i.e. GET, POST, PUT, PATCH, DELETE), which" + - "can then conveyed until the data source is required. This is especially useful when the actual data source" + - "is a Rest API itself." 
+ - "In the same manner, any set of arbitrary query parameters, path parameters and request body are supported " + - "(in the limits fixed by the HTTP server) and can also conveyed to the actual data source.") -public interface DataPlanePublicApiV2 { - - @Operation(description = "Send `GET` data query to the Data Plane.", - responses = { - @ApiResponse(responseCode = "400", description = "Missing access token"), - @ApiResponse(responseCode = "403", description = "Access token is expired or invalid"), - @ApiResponse(responseCode = "500", description = "Failed to transfer data") - } - ) - void get(ContainerRequestContext context, AsyncResponse response); - - @Operation(description = "Send `HEAD` data query to the Data Plane.", - responses = { - @ApiResponse(responseCode = "400", description = "Missing access token"), - @ApiResponse(responseCode = "403", description = "Access token is expired or invalid"), - @ApiResponse(responseCode = "500", description = "Failed to transfer data") - } - ) - void head(ContainerRequestContext context, AsyncResponse response); - - @Operation(description = "Send `POST` data query to the Data Plane.", - responses = { - @ApiResponse(responseCode = "400", description = "Missing access token"), - @ApiResponse(responseCode = "403", description = "Access token is expired or invalid"), - @ApiResponse(responseCode = "500", description = "Failed to transfer data") - } - ) - void post(ContainerRequestContext context, AsyncResponse response); - - @Operation(description = "Send `PUT` data query to the Data Plane.", - responses = { - @ApiResponse(responseCode = "400", description = "Missing access token"), - @ApiResponse(responseCode = "403", description = "Access token is expired or invalid"), - @ApiResponse(responseCode = "500", description = "Failed to transfer data") - } - ) - void put(ContainerRequestContext context, AsyncResponse response); - - @Operation(description = "Send `DELETE` data query to the Data Plane.", - responses = { - 
@ApiResponse(responseCode = "400", description = "Missing access token"), - @ApiResponse(responseCode = "403", description = "Access token is expired or invalid"), - @ApiResponse(responseCode = "500", description = "Failed to transfer data") - } - ) - void delete(ContainerRequestContext context, AsyncResponse response); - - @Operation(description = "Send `PATCH` data query to the Data Plane.", - responses = { - @ApiResponse(responseCode = "400", description = "Missing access token"), - @ApiResponse(responseCode = "403", description = "Access token is expired or invalid"), - @ApiResponse(responseCode = "500", description = "Failed to transfer data") - } - ) - void patch(ContainerRequestContext context, AsyncResponse response); -} diff --git a/extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataPlanePublicApiV2Controller.java b/extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataPlanePublicApiV2Controller.java deleted file mode 100644 index 1502fe8a..00000000 --- a/extensions/data-plane-public-api-v2/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataPlanePublicApiV2Controller.java +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Copyright (c) 2025 Metaform Systems, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0 - * - * SPDX-License-Identifier: Apache-2.0 - * - * Contributors: - * Metaform Systems, Inc. 
- initial API and implementation - * - */ - -package org.eclipse.edc.connector.dataplane.api.controller; - -import jakarta.ws.rs.DELETE; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.HEAD; -import jakarta.ws.rs.PATCH; -import jakarta.ws.rs.POST; -import jakarta.ws.rs.PUT; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.container.AsyncResponse; -import jakarta.ws.rs.container.ContainerRequestContext; -import jakarta.ws.rs.container.Suspended; -import jakarta.ws.rs.core.Context; -import jakarta.ws.rs.core.HttpHeaders; -import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.StreamingOutput; -import org.eclipse.edc.connector.dataplane.spi.iam.DataPlaneAuthorizationService; -import org.eclipse.edc.connector.dataplane.spi.pipeline.PipelineService; -import org.eclipse.edc.connector.dataplane.spi.response.TransferErrorResponse; -import org.eclipse.edc.connector.dataplane.util.sink.AsyncStreamingDataSink; -import org.eclipse.edc.spi.types.domain.transfer.DataFlowStartMessage; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ExecutorService; - -import static jakarta.ws.rs.core.MediaType.APPLICATION_JSON; -import static jakarta.ws.rs.core.MediaType.WILDCARD; -import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; -import static jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; -import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED; -import static jakarta.ws.rs.core.Response.status; - -@Path("{any:.*}") -@Produces(WILDCARD) -public class DataPlanePublicApiV2Controller implements DataPlanePublicApiV2 { - - private final PipelineService pipelineService; - private final DataFlowRequestSupplier requestSupplier; - private final ExecutorService executorService; - private final DataPlaneAuthorizationService authorizationService; - - public DataPlanePublicApiV2Controller(PipelineService pipelineService, - ExecutorService executorService, - 
DataPlaneAuthorizationService authorizationService) { - this.pipelineService = pipelineService; - this.authorizationService = authorizationService; - this.requestSupplier = new DataFlowRequestSupplier(); - this.executorService = executorService; - } - - private static Response error(Response.Status status, List errors) { - return status(status).type(APPLICATION_JSON).entity(new TransferErrorResponse(errors)).build(); - } - - @GET - @Override - public void get(@Context ContainerRequestContext requestContext, @Suspended AsyncResponse response) { - handle(requestContext, response); - } - - @HEAD - @Override - public void head(@Context ContainerRequestContext requestContext, @Suspended AsyncResponse response) { - handle(requestContext, response); - } - - /** - * Sends a {@link POST} request to the data source and returns data. - * - * @param requestContext Request context. - * @param response Data fetched from the data source. - */ - @POST - @Override - public void post(@Context ContainerRequestContext requestContext, @Suspended AsyncResponse response) { - handle(requestContext, response); - } - - /** - * Sends a {@link PUT} request to the data source and returns data. - * - * @param requestContext Request context. - * @param response Data fetched from the data source. - */ - @PUT - @Override - public void put(@Context ContainerRequestContext requestContext, @Suspended AsyncResponse response) { - handle(requestContext, response); - } - - /** - * Sends a {@link DELETE} request to the data source and returns data. - * - * @param requestContext Request context. - * @param response Data fetched from the data source. - */ - @DELETE - @Override - public void delete(@Context ContainerRequestContext requestContext, @Suspended AsyncResponse response) { - handle(requestContext, response); - } - - /** - * Sends a {@link PATCH} request to the data source and returns data. - * - * @param requestContext Request context. - * @param response Data fetched from the data source. 
- */ - @PATCH - @Override - public void patch(@Context ContainerRequestContext requestContext, @Suspended AsyncResponse response) { - handle(requestContext, response); - } - - private void handle(ContainerRequestContext requestContext, AsyncResponse response) { - var contextApi = new ContainerRequestContextApiImpl(requestContext); - - var token = contextApi.headers().get(HttpHeaders.AUTHORIZATION); - if (token == null) { - response.resume(error(UNAUTHORIZED, List.of("Missing Authorization Header"))); - return; - } - - var sourceDataAddress = authorizationService.authorize(token, buildRequestData(requestContext)); - if (sourceDataAddress.failed()) { - response.resume(error(FORBIDDEN, sourceDataAddress.getFailureMessages())); - return; - } - - var startMessage = requestSupplier.apply(contextApi, sourceDataAddress.getContent()); - - processRequest(startMessage, response); - } - - private Map buildRequestData(ContainerRequestContext requestContext) { - var requestData = new HashMap(); - requestData.put("headers", requestContext.getHeaders()); - requestData.put("path", requestContext.getUriInfo()); - requestData.put("method", requestContext.getMethod()); - requestData.put("content-type", requestContext.getMediaType()); - return requestData; - } - - private void processRequest(DataFlowStartMessage dataFlowStartMessage, AsyncResponse response) { - - AsyncStreamingDataSink.AsyncResponseContext asyncResponseContext = callback -> { - StreamingOutput output = t -> callback.outputStreamConsumer().accept(t); - var resp = Response.ok(output).type(callback.mediaType()).build(); - return response.resume(resp); - }; - - var sink = new AsyncStreamingDataSink(asyncResponseContext, executorService); - - pipelineService.transfer(dataFlowStartMessage, sink) - .whenComplete((result, throwable) -> { - if (throwable == null) { - if (result.failed()) { - response.resume(error(INTERNAL_SERVER_ERROR, result.getFailureMessages())); - } - } else { - var error = "Unhandled exception occurred 
during data transfer: " + throwable.getMessage(); - response.resume(error(INTERNAL_SERVER_ERROR, List.of(error))); - } - }); - } - -} diff --git a/extensions/data-plane-public-api-v2/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension b/extensions/data-plane-public-api-v2/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension deleted file mode 100644 index 985fe60e..00000000 --- a/extensions/data-plane-public-api-v2/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension +++ /dev/null @@ -1 +0,0 @@ -org.eclipse.edc.connector.dataplane.api.DataPlanePublicApiV2Extension \ No newline at end of file diff --git a/extensions/data-plane-public-api-v2/src/main/resources/public-api-version.json b/extensions/data-plane-public-api-v2/src/main/resources/public-api-version.json deleted file mode 100644 index 72890702..00000000 --- a/extensions/data-plane-public-api-v2/src/main/resources/public-api-version.json +++ /dev/null @@ -1,8 +0,0 @@ -[ - { - "version": "2.0.1", - "urlPath": "/v2", - "lastUpdated": "2024-07-10T08:56:00Z", - "maturity": "stable" - } -] diff --git a/extensions/data-plane-public-api-v2/src/test/java/org/eclipse/edc/connector/dataplane/api/controller/DataFlowStartMessageSupplierTest.java b/extensions/data-plane-public-api-v2/src/test/java/org/eclipse/edc/connector/dataplane/api/controller/DataFlowStartMessageSupplierTest.java deleted file mode 100644 index 8961e2e4..00000000 --- a/extensions/data-plane-public-api-v2/src/test/java/org/eclipse/edc/connector/dataplane/api/controller/DataFlowStartMessageSupplierTest.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright (c) 2022 Amadeus - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0 - * - * SPDX-License-Identifier: Apache-2.0 - * - * Contributors: - * Amadeus - initial API and 
implementation - * - */ - -package org.eclipse.edc.connector.dataplane.api.controller; - -import jakarta.ws.rs.HttpMethod; -import jakarta.ws.rs.core.MediaType; -import org.eclipse.edc.connector.dataplane.spi.schema.DataFlowRequestSchema; -import org.eclipse.edc.connector.dataplane.util.sink.AsyncStreamingDataSink; -import org.eclipse.edc.spi.types.domain.DataAddress; -import org.junit.jupiter.api.Test; - -import java.util.Map; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -class DataFlowStartMessageSupplierTest { - - - private final DataFlowRequestSupplier supplier = new DataFlowRequestSupplier(); - - private static DataAddress createDataAddress() { - return DataAddress.Builder.newInstance().type("test-type").build(); - } - - @Test - void verifyMapping_noInputBody() { - var contextApi = mock(ContainerRequestContextApi.class); - var address = createDataAddress(); - - var method = HttpMethod.GET; - var queryParams = "test-query-param"; - var path = "test-path"; - - when(contextApi.method()).thenReturn(method); - when(contextApi.queryParams()).thenReturn(queryParams); - when(contextApi.path()).thenReturn(path); - - var request = supplier.apply(contextApi, address); - - assertThat(request.getId()).isNotBlank(); - assertThat(request.getDestinationDataAddress().getType()).isEqualTo(AsyncStreamingDataSink.TYPE); - assertThat(request.getSourceDataAddress().getType()).isEqualTo(address.getType()); - assertThat(request.getProperties()).containsExactlyInAnyOrderEntriesOf(Map.of( - DataFlowRequestSchema.PATH, path, - DataFlowRequestSchema.METHOD, method, - DataFlowRequestSchema.QUERY_PARAMS, queryParams - - )); - } - - @Test - void verifyMapping_withInputBody() { - var contextApi = mock(ContainerRequestContextApi.class); - var address = createDataAddress(); - - var method = HttpMethod.GET; - var queryParams = "test-query-param"; - var path = "test-path"; - var body = "Test 
request body"; - - when(contextApi.method()).thenReturn(method); - when(contextApi.queryParams()).thenReturn(queryParams); - when(contextApi.path()).thenReturn(path); - when(contextApi.mediaType()).thenReturn(MediaType.TEXT_PLAIN); - when(contextApi.body()).thenReturn(body); - - var request = supplier.apply(contextApi, address); - - assertThat(request.getId()).isNotBlank(); - assertThat(request.getDestinationDataAddress().getType()).isEqualTo(AsyncStreamingDataSink.TYPE); - assertThat(request.getSourceDataAddress().getType()).isEqualTo(address.getType()); - assertThat(request.getProperties()).containsExactlyInAnyOrderEntriesOf(Map.of( - DataFlowRequestSchema.PATH, path, - DataFlowRequestSchema.METHOD, method, - DataFlowRequestSchema.QUERY_PARAMS, queryParams, - DataFlowRequestSchema.BODY, body, - DataFlowRequestSchema.MEDIA_TYPE, MediaType.TEXT_PLAIN - )); - } -} diff --git a/extensions/data-plane-public-api-v2/src/test/java/org/eclipse/edc/connector/dataplane/api/controller/DataPlanePublicApiV2ControllerTest.java b/extensions/data-plane-public-api-v2/src/test/java/org/eclipse/edc/connector/dataplane/api/controller/DataPlanePublicApiV2ControllerTest.java deleted file mode 100644 index 05aba40c..00000000 --- a/extensions/data-plane-public-api-v2/src/test/java/org/eclipse/edc/connector/dataplane/api/controller/DataPlanePublicApiV2ControllerTest.java +++ /dev/null @@ -1,226 +0,0 @@ -/* - * Copyright (c) 2022 Amadeus - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0 - * - * SPDX-License-Identifier: Apache-2.0 - * - * Contributors: - * Amadeus - initial API and implementation - * Bayerische Motoren Werke Aktiengesellschaft (BMW AG) - improvements - * - */ - -package org.eclipse.edc.connector.dataplane.api.controller; - -import io.restassured.specification.RequestSpecification; -import jakarta.ws.rs.core.Response; -import 
org.eclipse.edc.connector.dataplane.spi.iam.DataPlaneAuthorizationService; -import org.eclipse.edc.connector.dataplane.spi.pipeline.DataSource; -import org.eclipse.edc.connector.dataplane.spi.pipeline.PipelineService; -import org.eclipse.edc.connector.dataplane.spi.pipeline.StreamFailure; -import org.eclipse.edc.connector.dataplane.spi.pipeline.StreamResult; -import org.eclipse.edc.connector.dataplane.spi.resolver.DataAddressResolver; -import org.eclipse.edc.connector.dataplane.util.sink.AsyncStreamingDataSink; -import org.eclipse.edc.junit.annotations.ApiTest; -import org.eclipse.edc.spi.result.Result; -import org.eclipse.edc.spi.types.domain.DataAddress; -import org.eclipse.edc.spi.types.domain.transfer.DataFlowStartMessage; -import org.eclipse.edc.web.jersey.testfixtures.RestControllerTestBase; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.ArgumentCaptor; - -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.util.List; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.Executors; -import java.util.stream.Stream; - -import static io.restassured.RestAssured.given; -import static io.restassured.http.ContentType.JSON; -import static jakarta.ws.rs.core.HttpHeaders.AUTHORIZATION; -import static java.util.concurrent.CompletableFuture.completedFuture; -import static java.util.concurrent.CompletableFuture.failedFuture; -import static org.assertj.core.api.Assertions.assertThat; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.isA; -import static org.hamcrest.CoreMatchers.not; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyMap; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - 
-@ApiTest -class DataPlanePublicApiV2ControllerTest extends RestControllerTestBase { - - private final PipelineService pipelineService = mock(); - private final DataAddressResolver dataAddressResolver = mock(); - private final DataPlaneAuthorizationService authorizationService = mock(); - - @BeforeEach - void setup() { - when(authorizationService.authorize(anyString(), anyMap())) - .thenReturn(Result.success(testDestAddress())); - } - - @Test - void should_returnBadRequest_if_missingAuthorizationHeader() { - baseRequest() - .post("/any") - .then() - .statusCode(Response.Status.UNAUTHORIZED.getStatusCode()) - .body("errors[0]", is("Missing Authorization Header")); - } - - @Test - void shouldNotReturn302_whenUrlWithoutTrailingSlash() { - baseRequest() - .post("") - .then() - .statusCode(not(302)); - } - - @Test - void should_returnForbidden_if_tokenValidationFails() { - var token = UUID.randomUUID().toString(); - when(authorizationService.authorize(anyString(), anyMap())).thenReturn(Result.failure("token is not valid")); - - baseRequest() - .header(AUTHORIZATION, token) - .post("/any") - .then() - .statusCode(Response.Status.FORBIDDEN.getStatusCode()) - .contentType(JSON) - .body("errors.size()", is(1)); - - verify(authorizationService).authorize(eq(token), anyMap()); - } - - @Test - void should_returnInternalServerError_if_transferFails() { - var token = UUID.randomUUID().toString(); - var errorMsg = UUID.randomUUID().toString(); - when(dataAddressResolver.resolve(any())).thenReturn(Result.success(testDestAddress())); - when(pipelineService.transfer(any(), any())) - .thenReturn(completedFuture(StreamResult.error(errorMsg))); - - baseRequest() - .header(AUTHORIZATION, token) - .when() - .post("/any") - .then() - .statusCode(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()) - .contentType(JSON) - .body("errors[0]", is(errorMsg)); - } - - @Test - void should_returnListOfErrorsAsResponse_if_anythingFails() { - var token = UUID.randomUUID().toString(); - var 
firstErrorMsg = UUID.randomUUID().toString(); - var secondErrorMsg = UUID.randomUUID().toString(); - - when(dataAddressResolver.resolve(any())).thenReturn(Result.success(testDestAddress())); - when(pipelineService.transfer(any(), any())) - .thenReturn(completedFuture(StreamResult.failure(new StreamFailure(List.of(firstErrorMsg, secondErrorMsg), StreamFailure.Reason.GENERAL_ERROR)))); - - baseRequest() - .header(AUTHORIZATION, token) - .when() - .post("/any") - .then() - .statusCode(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()) - .contentType(JSON) - .body("errors", isA(List.class)) - .body("errors[0]", is(firstErrorMsg)) - .body("errors[1]", is(secondErrorMsg)); - } - - @Test - void should_returnInternalServerError_if_transferThrows() { - var token = UUID.randomUUID().toString(); - var errorMsg = UUID.randomUUID().toString(); - when(dataAddressResolver.resolve(any())).thenReturn(Result.success(testDestAddress())); - when(pipelineService.transfer(any(DataFlowStartMessage.class), any())) - .thenReturn(failedFuture(new RuntimeException(errorMsg))); - - baseRequest() - .header(AUTHORIZATION, token) - .when() - .post("/any") - .then() - .statusCode(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()) - .contentType(JSON) - .body("errors[0]", is("Unhandled exception occurred during data transfer: " + errorMsg)); - } - - @Test - void shouldStreamSourceToResponse() { - when(dataAddressResolver.resolve(any())).thenReturn(Result.success(testDestAddress())); - when(pipelineService.transfer(any(), any())).thenAnswer(i -> { - ((AsyncStreamingDataSink) i.getArgument(1)).transfer(new TestDataSource("application/something", "data")); - return CompletableFuture.completedFuture(StreamResult.success()); - }); - - var responseBody = baseRequest() - .header(AUTHORIZATION, UUID.randomUUID().toString()) - .when() - .post("/any?foo=bar") - .then() - .log().ifError() - .statusCode(Response.Status.OK.getStatusCode()) - .contentType("application/something") - 
.extract().body().asString(); - - assertThat(responseBody).isEqualTo("data"); - var requestCaptor = ArgumentCaptor.forClass(DataFlowStartMessage.class); - verify(pipelineService).transfer(requestCaptor.capture(), any()); - var request = requestCaptor.getValue(); - assertThat(request.getDestinationDataAddress().getType()).isEqualTo(AsyncStreamingDataSink.TYPE); - assertThat(request.getSourceDataAddress().getType()).isEqualTo("test"); - assertThat(request.getProperties()).containsEntry("method", "POST").containsEntry("pathSegments", "any").containsEntry("queryParams", "foo=bar"); - } - - @Override - protected Object controller() { - return new DataPlanePublicApiV2Controller(pipelineService, Executors.newSingleThreadExecutor(), authorizationService); - } - - private RequestSpecification baseRequest() { - return given() - .baseUri("http://localhost:" + port) - .when(); - } - - private DataAddress testDestAddress() { - return DataAddress.Builder.newInstance().type("test").build(); - } - - private record TestDataSource(String mediaType, String data) implements DataSource, DataSource.Part { - - @Override - public StreamResult> openPartStream() { - return StreamResult.success(Stream.of(this)); - } - - @Override - public String name() { - return "test"; - } - - @Override - public InputStream openStream() { - return new ByteArrayInputStream(data.getBytes()); - } - - } - -} diff --git a/extensions/data-plane-registration/build.gradle.kts b/extensions/data-plane-registration/build.gradle.kts deleted file mode 100644 index f0336909..00000000 --- a/extensions/data-plane-registration/build.gradle.kts +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2025 Metaform Systems, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0 - * - * SPDX-License-Identifier: Apache-2.0 - * - * Contributors: - * Metaform Systems, Inc. 
- initial API and implementation - * - */ - -plugins { - `java-library` - id(libs.plugins.swagger.get().pluginId) -} - -dependencies { - api(libs.edc.spi.http) - api(libs.edc.spi.web) - api(libs.edc.spi.dataplane) - implementation(libs.edc.spi.dataplane.selector) - implementation(libs.edc.lib.util) - implementation(libs.edc.lib.util.dataplane) - implementation(libs.jakarta.rsApi) - -} -edcBuild { - swagger { - apiGroup.set("public-api") - } -} - - diff --git a/extensions/data-plane-registration/src/main/java/org/eclipse/edc/connector/dataplane/api/DataplaneRegistrationExtension.java b/extensions/data-plane-registration/src/main/java/org/eclipse/edc/connector/dataplane/api/DataplaneRegistrationExtension.java deleted file mode 100644 index c3c492a8..00000000 --- a/extensions/data-plane-registration/src/main/java/org/eclipse/edc/connector/dataplane/api/DataplaneRegistrationExtension.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2025 Metaform Systems, Inc. - * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0 - * - * SPDX-License-Identifier: Apache-2.0 - * - * Contributors: - * Metaform Systems, Inc. 
- initial API and implementation - * - */ - -package org.eclipse.edc.connector.dataplane.api; - -import org.eclipse.edc.connector.dataplane.api.controller.DataplaneRegistrationApiController; -import org.eclipse.edc.connector.dataplane.selector.spi.DataPlaneSelectorService; -import org.eclipse.edc.runtime.metamodel.annotation.Extension; -import org.eclipse.edc.runtime.metamodel.annotation.Inject; -import org.eclipse.edc.spi.system.ServiceExtension; -import org.eclipse.edc.spi.system.ServiceExtensionContext; -import org.eclipse.edc.web.spi.WebService; -import org.eclipse.edc.web.spi.configuration.ApiContext; - -/** - * This extension provides generic endpoints which are open to public participants of the Dataspace to execute - * requests on the actual data source. - */ -@Extension(value = DataplaneRegistrationExtension.NAME) -public class DataplaneRegistrationExtension implements ServiceExtension { - public static final String NAME = "Data Plane Registration API"; - - @Inject - private WebService webService; - @Inject - private DataPlaneSelectorService selectorService; - - @Override - public String name() { - return NAME; - } - - @Override - public void initialize(ServiceExtensionContext context) { - webService.registerResource(ApiContext.MANAGEMENT, new DataplaneRegistrationApiController(selectorService)); - } -} diff --git a/extensions/data-plane-registration/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataplaneRegistrationApiController.java b/extensions/data-plane-registration/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataplaneRegistrationApiController.java deleted file mode 100644 index ec0f2043..00000000 --- a/extensions/data-plane-registration/src/main/java/org/eclipse/edc/connector/dataplane/api/controller/DataplaneRegistrationApiController.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2025 Metaform Systems, Inc. 
- * - * This program and the accompanying materials are made available under the - * terms of the Apache License, Version 2.0 which is available at - * https://www.apache.org/licenses/LICENSE-2.0 - * - * SPDX-License-Identifier: Apache-2.0 - * - * Contributors: - * Metaform Systems, Inc. - initial API and implementation - * - */ - -package org.eclipse.edc.connector.dataplane.api.controller; - -import jakarta.ws.rs.Consumes; -import jakarta.ws.rs.POST; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import org.eclipse.edc.connector.dataplane.selector.spi.DataPlaneSelectorService; -import org.eclipse.edc.connector.dataplane.selector.spi.instance.DataPlaneInstance; - -import java.util.Base64; - -import static jakarta.ws.rs.core.MediaType.APPLICATION_JSON; -import static org.eclipse.edc.web.spi.exception.ServiceResultHandler.exceptionMapper; - -@Consumes(APPLICATION_JSON) -@Produces(APPLICATION_JSON) -@Path("/v4beta/dataplanes") -public class DataplaneRegistrationApiController { - - private final DataPlaneSelectorService dataPlaneSelectorService; - - public DataplaneRegistrationApiController(DataPlaneSelectorService dataPlaneSelectorService) { - this.dataPlaneSelectorService = dataPlaneSelectorService; - } - - @POST - @Path("/{participantContextId}") - public void registerDataplane(DataPlaneInstance instance, @PathParam("participantContextId") String participantContextId) { - var decoded = new String(Base64.getUrlDecoder().decode(participantContextId)); - dataPlaneSelectorService.register(instance.toBuilder().participantContextId(decoded).build()) // ugly, but will initialize all internal objects e.g. 
clock - .orElseThrow(exceptionMapper(DataPlaneInstance.class)); - - } -} diff --git a/extensions/data-plane-registration/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension b/extensions/data-plane-registration/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension deleted file mode 100644 index b9313e30..00000000 --- a/extensions/data-plane-registration/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension +++ /dev/null @@ -1 +0,0 @@ -org.eclipse.edc.connector.dataplane.api.DataplaneRegistrationExtension diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/SignalingDataPlaneRuntimeExtension.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/SignalingDataPlaneRuntimeExtension.java index f37f0d88..49e49004 100644 --- a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/SignalingDataPlaneRuntimeExtension.java +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/SignalingDataPlaneRuntimeExtension.java @@ -42,8 +42,6 @@ import java.net.URI; import java.util.List; -import static java.util.Collections.emptyList; - @Extension(value = SignalingDataPlaneRuntimeExtension.NAME) public class SignalingDataPlaneRuntimeExtension implements ServiceExtension { diff --git a/settings.gradle.kts b/settings.gradle.kts index aedbb608..cf009142 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -27,8 +27,6 @@ pluginManagement { rootProject.name = "mvd" include(":tests:end2end") -include(":extensions:data-plane-public-api-v2") -include(":extensions:data-plane-registration") include(":extensions:signaling-auth-none") // launcher modules From 79ef30e3aebf90de74e23f3c36c0ce6539c9a31d Mon Sep 17 00:00:00 2001 From: Paul Latzelsperger Date: Tue, 12 May 2026 06:52:56 +0200 Subject: [PATCH 13/17] split deployment --- .github/workflows/run-e2e-tests.yml | 29 +++++++++++++++++++++++++---- 1 file changed, 25 insertions(+), 4 
deletions(-) diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml index 1390cc42..71dfa155 100644 --- a/.github/workflows/run-e2e-tests.yml +++ b/.github/workflows/run-e2e-tests.yml @@ -89,16 +89,37 @@ jobs: sleep 5 # to be safe - - name: "Deploy MVD" + - name: "Deploy MVD common resources" run: |- - kubectl apply -k k8s/ + kubectl apply -k k8s/common + kubectl wait -A \ + --for=condition=ready pod \ + --selector=type=edc-infra \ + --timeout=120s || { kubectl get pods -A; exit 1; } + + - name: "Deploy MVD issuer resources" + run: |- + kubectl apply -k k8s/issuer + kubectl wait -A \ + --selector=type=edc-job \ + --for=condition=complete job --all \ + --timeout=240s || { kubectl get pods -A; exit 1; } + + - name: "Deploy MVD consumer resources" + run: |- + kubectl apply -k k8s/consumer + kubectl wait -A \ + --selector=type=edc-job \ + --for=condition=complete job --all \ + --timeout=240s || { kubectl get pods -A; exit 1; } - - name: "Wait for MVD to be ready" + - name: "Deploy MVD provider resources" run: |- + kubectl apply -k k8s/provider kubectl wait -A \ --selector=type=edc-job \ --for=condition=complete job --all \ - --timeout=300s || kubectl get pods -A + --timeout=240s || { kubectl get pods -A; exit 1; } - name: "Run E2E Test" From 95cd004ffe3132f942ca11b5c1bd9b9d4abb9512 Mon Sep 17 00:00:00 2001 From: Paul Latzelsperger Date: Tue, 12 May 2026 09:19:46 +0200 Subject: [PATCH 14/17] add auth --- .../Initiate Transfer.bru | 1 + gradle/libs.versions.toml | 2 + .../application/dataplane-config.yaml | 4 +- k8s/provider/application/dataplane.yaml | 4 +- launchers/dataplane/build.gradle.kts | 7 ++- .../SignalingDataPlaneRuntimeExtension.java | 58 ++++++++++++++++--- .../data/DataPlanePublicApiController.java | 47 ++++++++++----- .../signaling/ConsumerDataHandler.java | 2 +- 8 files changed, 97 insertions(+), 28 deletions(-) diff --git a/Requests/ControlPlane Management/Initiate Transfer.bru b/Requests/ControlPlane Management/Initiate 
Transfer.bru index b03f859e..9b2969f1 100644 --- a/Requests/ControlPlane Management/Initiate Transfer.bru +++ b/Requests/ControlPlane Management/Initiate Transfer.bru @@ -20,6 +20,7 @@ body:json { "counterPartyAddress": "{{PROVIDER_DSP}}", "connectorId": "{{PROVIDER_ID}}", "contractId": "{{CONTRACT_AGREEMENT_ID}}", + "dataDestination": { "@type": "DataAddress", "type": "HttpProxy" diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index b7a9d3f4..4fe8e032 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -52,6 +52,7 @@ edc-spi-identity-trust = { module = "org.eclipse.edc:decentralized-claims-spi", edc-spi-boot = { module = "org.eclipse.edc:boot-spi", version.ref = "edc" } edc-spi-transform = { module = "org.eclipse.edc:transform-spi", version.ref = "edc" } edc-spi-catalog = { module = "org.eclipse.edc:catalog-spi", version.ref = "edc" } +edc-spi-token = { module = "org.eclipse.edc:token-spi", version.ref = "edc" } edc-spi-identity-did = { module = "org.eclipse.edc:identity-did-spi", version.ref = "edc" } edc-spi-http = { module = "org.eclipse.edc:http-spi", version.ref = "edc" } edc-spi-web = { module = "org.eclipse.edc:web-spi", version.ref = "edc" } @@ -67,6 +68,7 @@ edc-lib-keys = { module = "org.eclipse.edc:keys-lib", version.ref = "edc" } edc-lib-jsonld = { module = "org.eclipse.edc:json-ld-lib", version.ref = "edc" } edc-lib-http = { module = "org.eclipse.edc:http-lib", version.ref = "edc" } edc-lib-util = { module = "org.eclipse.edc:util-lib", version.ref = "edc" } +edc-lib-token = { module = "org.eclipse.edc:token-lib", version.ref = "edc" } edc-lib-sql = { module = "org.eclipse.edc:sql-lib", version.ref = "edc" } edc-lib-util-dataplane = { module = "org.eclipse.edc:data-plane-util", version.ref = "edc" } edc-lib-oauth2-authn = { module = "org.eclipse.edc:auth-authentication-oauth2-lib", version.ref = "edc" } diff --git a/k8s/provider/application/dataplane-config.yaml b/k8s/provider/application/dataplane-config.yaml 
index dea70bc9..fb44b544 100644 --- a/k8s/provider/application/dataplane-config.yaml +++ b/k8s/provider/application/dataplane-config.yaml @@ -21,8 +21,8 @@ data: edc.hostname: "dataplane.provider.svc.cluster.local" dataplane.id: "provider-dataplane" - edc.transfer.proxy.token.verifier.publickey.alias: "dataplane-private" - edc.transfer.proxy.token.signer.privatekey.alias: "dataplane-public" + edc.transfer.proxy.token.verifier.publickey.alias: "dataplane-public" + edc.transfer.proxy.token.signer.privatekey.alias: "dataplane-private" edc.dpf.selector.url: "http://controlplane.provider.svc.cluster.local:8083/api/control/v1/dataplanes" diff --git a/k8s/provider/application/dataplane.yaml b/k8s/provider/application/dataplane.yaml index 4c695086..7157f701 100644 --- a/k8s/provider/application/dataplane.yaml +++ b/k8s/provider/application/dataplane.yaml @@ -40,10 +40,10 @@ spec: - configMapRef: name: dataplane-config ports: - - containerPort: 8182 - name: signaling-port - containerPort: 1044 name: debug-port + - containerPort: 8182 + name: signaling-port livenessProbe: httpGet: path: /api/check/liveness diff --git a/launchers/dataplane/build.gradle.kts b/launchers/dataplane/build.gradle.kts index 64769d48..175137e2 100644 --- a/launchers/dataplane/build.gradle.kts +++ b/launchers/dataplane/build.gradle.kts @@ -24,6 +24,10 @@ dependencies { implementation(libs.edc.core.runtime) implementation(libs.edc.ext.http) implementation(libs.dataplane.sdk) + implementation(libs.edc.spi.token) + implementation(libs.edc.lib.token) + implementation(libs.edc.lib.crypto) + implementation(libs.edc.lib.keys) // needed for the key seed extension runtimeOnly(libs.tink) @@ -32,9 +36,8 @@ dependencies { // for kubernetes probes runtimeOnly(libs.edc.api.observability) -// runtimeOnly(project(":extensions:data-plane-public-api-v2")) runtimeOnly(libs.edc.core.participantcontext.config) -// runtimeOnly(libs.edc.vault.hashicorp) + runtimeOnly(libs.edc.vault.hashicorp) 
runtimeOnly(libs.edc.bom.dataplane.sql) } diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/SignalingDataPlaneRuntimeExtension.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/SignalingDataPlaneRuntimeExtension.java index 49e49004..e754c8ca 100644 --- a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/SignalingDataPlaneRuntimeExtension.java +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/SignalingDataPlaneRuntimeExtension.java @@ -14,12 +14,17 @@ package org.eclipse.edc.mvd.dataplane; +import com.fasterxml.jackson.databind.ObjectMapper; import org.eclipse.dataplane.Dataplane; import org.eclipse.dataplane.domain.DataAddress; import org.eclipse.dataplane.domain.Result; import org.eclipse.dataplane.domain.dataflow.DataFlow; import org.eclipse.dataplane.domain.registration.Oauth2ClientCredentialsAuthorization; import org.eclipse.edc.http.spi.EdcHttpClient; +import org.eclipse.edc.keys.KeyParserRegistryImpl; +import org.eclipse.edc.keys.LocalPublicKeyServiceImpl; +import org.eclipse.edc.keys.VaultPrivateKeyResolver; +import org.eclipse.edc.keys.keyparsers.JwkParser; import org.eclipse.edc.mvd.dataplane.data.ConsumerProxyController; import org.eclipse.edc.mvd.dataplane.data.DataPlanePublicApiController; import org.eclipse.edc.mvd.dataplane.signaling.ConsumerDataHandler; @@ -29,10 +34,15 @@ import org.eclipse.edc.runtime.metamodel.annotation.Inject; import org.eclipse.edc.runtime.metamodel.annotation.Setting; import org.eclipse.edc.runtime.metamodel.annotation.Settings; +import org.eclipse.edc.security.token.jwt.DefaultJwsSignerProvider; import org.eclipse.edc.spi.monitor.Monitor; +import org.eclipse.edc.spi.security.Vault; import org.eclipse.edc.spi.system.Hostname; import org.eclipse.edc.spi.system.ServiceExtension; import org.eclipse.edc.spi.system.ServiceExtensionContext; +import org.eclipse.edc.token.JwtGenerationService; +import org.eclipse.edc.token.TokenValidationServiceImpl; 
+import org.eclipse.edc.token.spi.TokenGenerationService; import org.eclipse.edc.web.spi.WebService; import org.eclipse.edc.web.spi.configuration.ApiContext; import org.eclipse.edc.web.spi.configuration.PortMapping; @@ -51,6 +61,10 @@ public class SignalingDataPlaneRuntimeExtension implements ServiceExtension { @Setting(key = "dataplane.id") private String dataplaneId; + @Setting(key = "edc.transfer.proxy.token.signer.privatekey.alias") + private String privateKeyId; + @Setting(key = "edc.transfer.proxy.token.verifier.publickey.alias") + private String publicKeyId; @Configuration private SignalingApiConfig signalingApiConfig; @@ -76,6 +90,11 @@ public class SignalingDataPlaneRuntimeExtension implements ServiceExtension { @Inject private EdcHttpClient httpClient; + @Inject + private Vault vault; + + private TokenGenerationService tokenGenerationService; + @Override public String name() { return NAME; @@ -103,6 +122,14 @@ public void initialize(ServiceExtensionContext context) { dataplane = builder.build(); + //initialize token services + var registry = new KeyParserRegistryImpl(); + registry.register(new JwkParser(new ObjectMapper(), context.getMonitor())); + var signerProvider = new DefaultJwsSignerProvider(new VaultPrivateKeyResolver(registry, vault, context.getMonitor(), context.getConfig())); + tokenGenerationService = new JwtGenerationService(signerProvider); + var tokenValidationService = new TokenValidationServiceImpl(); + var publicKeyResolver = new LocalPublicKeyServiceImpl(vault, registry); + var portMapping = new PortMapping(ApiContext.SIGNALING, signalingApiConfig.port(), signalingApiConfig.path()); portMappingRegistry.register(portMapping); @@ -114,7 +141,7 @@ public void initialize(ServiceExtensionContext context) { webService.registerResource(ApiContext.SIGNALING, dataplane.controller()); webService.registerResource(ApiContext.SIGNALING, dataplane.registrationController()); - webService.registerResource(APICONTEXT_PUBLIC, new 
DataPlanePublicApiController(httpClient, monitor, "foobar")); + webService.registerResource(APICONTEXT_PUBLIC, new DataPlanePublicApiController(httpClient, monitor, tokenValidationService, publicKeyResolver)); webService.registerResource(APICONTEXT_PROXY, new ConsumerProxyController(dataFetcher)); } @@ -134,11 +161,26 @@ private Result prepare(DataFlow dataFlow) { private @NotNull Result startDataFlow(DataFlow dataFlow) { switch (dataFlow.getTransferType()) { case "NonFinite-PULL", "Finite-PULL", "HttpData-PULL" -> { - var dataAddress = new DataAddress(dataFlow.getTransferType(), "http", - publicApiConfig.dataSourceEndpoint(hostname.get()), - List.of(new DataAddress.EndpointProperty("authorization", "access_token", "foobar"))); - dataFlow.setDataAddress(dataAddress); - return Result.success(dataFlow); + + var token = tokenGenerationService.generate(privateKeyId, + tokenParameters -> tokenParameters.header("kid", publicKeyId), + tokenParameters -> tokenParameters.claims("sub", dataFlow.getId()), + tokenParameters -> tokenParameters.claims("scope", "dataflow"), + tokenParameters -> tokenParameters.claims("aud", dataplaneId), + tokenParameters -> tokenParameters.claims("iss", dataplaneId) + ); + + if (token.succeeded()) { + var dataAddress = new DataAddress(dataFlow.getTransferType(), "http", + publicApiConfig.dataSourceEndpoint(hostname.get(), dataFlow.getId()), + List.of(new DataAddress.EndpointProperty("authorization", "access_token", token.getContent().getToken()))); + dataFlow.setDataAddress(dataAddress); + return Result.success(dataFlow); + } else { + return Result.failure(new RuntimeException("Token generation failed for data flow: " + dataFlow.getId())); + } + + } default -> { return Result.failure(new RuntimeException("TransferType %s not supported".formatted(dataFlow.getTransferType()))); @@ -161,8 +203,8 @@ public URI dataFlowEndpoint(String hostname) { public record PublicApiConfig( @Setting(key = "web.http." 
+ APICONTEXT_PUBLIC + ".path", defaultValue = "/api/public") String path, @Setting(key = "web.http." + APICONTEXT_PUBLIC + ".port", defaultValue = "11002") int port) { - public String dataSourceEndpoint(String hostname) { - return "http://%s:%d%s/data/source".formatted(hostname, port, path); + public String dataSourceEndpoint(String hostname, String dataFlowId) { + return "http://%s:%d%s/%s/data/source".formatted(hostname, port, path, dataFlowId); } } diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java index 9476f5e3..dd24733f 100644 --- a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java @@ -22,40 +22,49 @@ import jakarta.ws.rs.core.Response; import okhttp3.Request; import org.eclipse.edc.http.spi.EdcHttpClient; +import org.eclipse.edc.keys.spi.PublicKeyResolver; +import org.eclipse.edc.spi.iam.ClaimToken; import org.eclipse.edc.spi.monitor.Monitor; +import org.eclipse.edc.spi.result.Result; +import org.eclipse.edc.token.spi.TokenValidationRule; +import org.eclipse.edc.token.spi.TokenValidationService; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +import java.util.Map; /** * This controller serves as the public endpoint for the data plane. 
Its endpoints are intended for consumers to download data */ -@Path("/data") +@Path("/{dataflowId}/data") public class DataPlanePublicApiController { private final EdcHttpClient client; private final Monitor monitor; - private final String expectedAuthHeader; + private final TokenValidationService tokenValidationService; + private final PublicKeyResolver publicKeyResolver; - public DataPlanePublicApiController(EdcHttpClient client, Monitor monitor, String expectedAuthHeader) { + public DataPlanePublicApiController(EdcHttpClient client, Monitor monitor, TokenValidationService tokenValidationService, PublicKeyResolver publicKeyResolver) { this.client = client; this.monitor = monitor; - this.expectedAuthHeader = expectedAuthHeader; + this.tokenValidationService = tokenValidationService; + this.publicKeyResolver = publicKeyResolver; } @GET @Path("/source") - public Response dataSource(@Context HttpHeaders headers) { - if (!isAuthorized(headers)) { + public Response dataSource(@PathParam("dataflowId") String dataflowId, @Context HttpHeaders headers) { + if (isAuthorized(headers, dataflowId).failed()) { return Response.status(Response.Status.UNAUTHORIZED).build(); } return Response.ok(downloadJsonFromUrl("https://jsonplaceholder.typicode.com/todos")).build(); } - @GET @Path("/source/{resource}") - public Response dataSource(@PathParam("resource") String resource, @Context HttpHeaders headers) { - if (!isAuthorized(headers)) { + public Response dataSource(@PathParam("dataflowId") String dataflowId, @PathParam("resource") String resource, @Context HttpHeaders headers) { + if (isAuthorized(headers, dataflowId).failed()) { return Response.status(Response.Status.UNAUTHORIZED).build(); } var formatted = "https://jsonplaceholder.typicode.com/%s".formatted(resource); @@ -64,17 +73,29 @@ public Response dataSource(@PathParam("resource") String resource, @Context Http @GET @Path("/source/{resource}/{id}") - public Response dataSource(@PathParam("resource") String resource, 
@PathParam("id") String id, @Context HttpHeaders headers) { - if (!isAuthorized(headers)) { + public Response dataSource(@PathParam("dataflowId") String dataflowId, @PathParam("resource") String resource, @PathParam("id") String id, @Context HttpHeaders headers) { + if (isAuthorized(headers, dataflowId).failed()) { return Response.status(Response.Status.UNAUTHORIZED).build(); } var formatted = "https://jsonplaceholder.typicode.com/%s/%s".formatted(resource, id); return Response.ok(downloadJsonFromUrl(formatted)).build(); } - private boolean isAuthorized(HttpHeaders headers) { + private Result isAuthorized(HttpHeaders headers, String dataflowId) { var authHeader = headers.getHeaderString(HttpHeaders.AUTHORIZATION); - return expectedAuthHeader.equals(authHeader); + if (authHeader == null) { + return Result.failure("No Authorization header"); + } + if (!authHeader.startsWith("Bearer ")) { + return Result.failure("Authorization header is not a Bearer token"); + } + var token = authHeader.replace("Bearer ", ""); + return tokenValidationService.validate(token, publicKeyResolver, new TokenValidationRule() { + @Override + public Result checkRule(@NotNull ClaimToken toVerify, @Nullable Map additional) { + return toVerify.getStringClaim("sub").equals(dataflowId) ? 
Result.success() : Result.failure("Not authorized"); + } + }); } private @NotNull String downloadJsonFromUrl(String formatted) { diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java index 5e610b81..92d664a8 100644 --- a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java @@ -63,7 +63,7 @@ public Result downloadData(String flowId) { .map(DataAddress.EndpointProperty::value) .orElseThrow(() -> new IllegalArgumentException("No access token found in data address properties")); var request = new Request.Builder() - .addHeader("Authorization", token) + .addHeader("Authorization", "Bearer " + token) .url(sourceUri.toString()) .get() .build(); From 3ae767dc035386b55c478630fddc878c6a55850b Mon Sep 17 00:00:00 2001 From: Paul Latzelsperger Date: Tue, 12 May 2026 09:28:29 +0200 Subject: [PATCH 15/17] fix tests --- .../eclipse/edc/demo/tests/transfer/TransferEndToEndTest.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/end2end/src/test/java/org/eclipse/edc/demo/tests/transfer/TransferEndToEndTest.java b/tests/end2end/src/test/java/org/eclipse/edc/demo/tests/transfer/TransferEndToEndTest.java index cc6bc626..e8c3b36a 100644 --- a/tests/end2end/src/test/java/org/eclipse/edc/demo/tests/transfer/TransferEndToEndTest.java +++ b/tests/end2end/src/test/java/org/eclipse/edc/demo/tests/transfer/TransferEndToEndTest.java @@ -158,7 +158,6 @@ void transferData_hasPermission_shouldTransferData() { }); System.out.printf("Fetch dataflow with ID %s from custom proxy%n", transferProcessId); - // fetch EDR for transfer processs var endpoint = new AtomicReference(); var token = new AtomicReference(); await().atMost(TEST_TIMEOUT_DURATION) @@ -175,7 +174,7 @@ 
void transferData_hasPermission_shouldTransferData() { endpoint.set(jp.getString("endpoint")); token.set(jp.get("endpointProperties.find { it.name == 'access_token' }.value")); - assertThat(endpoint.get()).isNotNull().endsWith("/api/public/data/source"); + assertThat(endpoint.get()).isNotNull().matches(".*/api/public/.*/data/source"); assertThat(token.get()).isNotNull(); }); From 105ed4fa8b390a65dfa557a3cb056569a55dde6b Mon Sep 17 00:00:00 2001 From: Paul Latzelsperger Date: Tue, 12 May 2026 09:35:58 +0200 Subject: [PATCH 16/17] some source doc [skip ci] --- .../edc/mvd/dataplane/data/ConsumerProxyController.java | 4 ++++ .../edc/mvd/dataplane/data/DataPlanePublicApiController.java | 3 ++- .../edc/mvd/dataplane/signaling/ConsumerDataHandler.java | 4 ++++ .../edc/mvd/dataplane/signaling/NoneAuthorization.java | 3 +++ 4 files changed, 13 insertions(+), 1 deletion(-) diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/ConsumerProxyController.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/ConsumerProxyController.java index c55a7615..b6a86f6c 100644 --- a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/ConsumerProxyController.java +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/ConsumerProxyController.java @@ -24,6 +24,10 @@ import java.util.Map; +/** + * Controller that allows consumers to transfer data from the provider by simply providing a data flow ID. This is typically + * used on consumer data planes. 
+ */ @Path("/flows") @Produces("application/json") public class ConsumerProxyController { diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java index dd24733f..f69fd32f 100644 --- a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/data/DataPlanePublicApiController.java @@ -34,7 +34,8 @@ import java.util.Map; /** - * This controller serves as the public endpoint for the data plane. Its endpoints are intended for consumers to download data + * This controller serves as the public endpoint for the data plane. Its endpoints are intended for consumers to download data. + * This is typically exposed by provider data planes, and consumed by consumer data planes. */ @Path("/{dataflowId}/data") public class DataPlanePublicApiController { diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java index 92d664a8..d2c2bd4f 100644 --- a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/ConsumerDataHandler.java @@ -25,6 +25,10 @@ import java.util.HashMap; import java.util.Map; +/** + * Small helper class that receives authorization and endpoint information when a data flow enters the "STARTED" state on + * data planes. It records that information and uses it later to download the data. 
+ */ public class ConsumerDataHandler { private final EdcHttpClient httpClient; diff --git a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/NoneAuthorization.java b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/NoneAuthorization.java index 90bee35b..05ce0b38 100644 --- a/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/NoneAuthorization.java +++ b/launchers/dataplane/src/main/java/org/eclipse/edc/mvd/dataplane/signaling/NoneAuthorization.java @@ -17,6 +17,9 @@ import org.eclipse.dataplane.domain.Result; import org.eclipse.dataplane.domain.registration.Authorization; +/** + * NOOP authorization between control plane and data plane of a single participant. + */ public class NoneAuthorization implements Authorization { @Override public String type() { From 1712358710afa68591ee46ef7cce7496cd0d183d Mon Sep 17 00:00:00 2001 From: Paul Latzelsperger Date: Tue, 12 May 2026 09:47:56 +0200 Subject: [PATCH 17/17] update README --- README.md | 149 +++++++++++++++++++++++++++++------------------------- 1 file changed, 80 insertions(+), 69 deletions(-) diff --git a/README.md b/README.md index 5d4784ae..12eb3623 100644 --- a/README.md +++ b/README.md @@ -1,39 +1,41 @@ # Minimum Viable Dataspace Demo + * [Minimum Viable Dataspace Demo](#minimum-viable-dataspace-demo) - * [1. Introduction](#1-introduction) - * [2. Purpose of this Demo](#2-purpose-of-this-demo) - * [2.1 Version stability and backwards compatibility guarantees](#21-version-stability-and-backwards-compatibility-guarantees) - * [2.2 Which version should I use?](#22-which-version-should-i-use) - * [3. The Scenario](#3-the-scenario) - * [3.1 Participants](#31-participants) - * [3.2 Data setup](#32-data-setup) - * [3.3 Access control](#33-access-control) - * [3.4 DIDs, participant lists, and VerifiableCredentials](#34-dids-participant-lists-and-verifiablecredentials) - * [4. 
Running the Demo (Kubernetes)](#4-running-the-demo-kubernetes) - * [4.1 Create the K8S cluster](#41-create-the-k8s-cluster) - * [4.2 Deploy the MVD components](#42-deploy-the-mvd-components) - * [4.2.1 Using pre-built images](#421-using-pre-built-images) - * [4.2.2 Building from source](#422-building-from-source) - * [4.2.3 Deploy MVD components](#423-deploy-mvd-components) - * [4.3 Seed the dataspace](#43-seed-the-dataspace) - * [4.4 Debugging MVD in Kubernetes](#44-debugging-mvd-in-kubernetes) - * [5. Executing REST requests using Bruno](#5-executing-rest-requests-using-bruno) - * [5.1 Get the catalog](#51-get-the-catalog) - * [5.2 Initiate the contract negotiation](#52-initiate-the-contract-negotiation) - * [5.3 Query negotiation status](#53-query-negotiation-status) - * [5.4 Initiate data transfer](#54-initiate-data-transfer) - * [5.5 Query data transfers](#55-query-data-transfers) - * [5.6 Get EndpointDataReference](#56-get-endpointdatareference) - * [5.7 Get access token for EDR](#57-get-access-token-for-edr) - * [5.8 Fetch data](#58-fetch-data) - * [6. Custom extensions in MVD](#6-custom-extensions-in-mvd) - * [7. Advanced topics](#7-advanced-topics) - * [7.2 Regenerating key pairs](#72-regenerating-key-pairs) - * [8. Other caveats, shortcuts, and workarounds](#8-other-caveats-shortcuts-and-workarounds) - * [8.2 DID resolution for participants](#82-did-resolution-for-participants) - * [8.3 Seed Jobs](#83-seed-jobs) + * [1. Introduction](#1-introduction) + * [2. Purpose of this Demo](#2-purpose-of-this-demo) + * [2.1 Version stability and backwards compatibility guarantees](#21-version-stability-and-backwards-compatibility-guarantees) + * [2.2 Which version should I use?](#22-which-version-should-i-use) + * [3. 
The Scenario](#3-the-scenario) + * [3.1 Participants](#31-participants) + * [3.2 Data setup](#32-data-setup) + * [3.3 Access control](#33-access-control) + * [3.4 DIDs, participant lists, and VerifiableCredentials](#34-dids-participant-lists-and-verifiablecredentials) + * [4. Running the Demo (Kubernetes)](#4-running-the-demo-kubernetes) + * [4.1 Create the K8S cluster](#41-create-the-k8s-cluster) + * [4.2 Deploy the MVD components](#42-deploy-the-mvd-components) + * [4.2.1 Using pre-built images](#421-using-pre-built-images) + * [4.2.2 Building from source](#422-building-from-source) + * [4.2.3 Deploy MVD components](#423-deploy-mvd-components) + * [4.3 Seed the dataspace](#43-seed-the-dataspace) + * [4.4 Debugging MVD in Kubernetes](#44-debugging-mvd-in-kubernetes) + * [5. Executing REST requests using Bruno](#5-executing-rest-requests-using-bruno) + * [5.1 Get the catalog](#51-get-the-catalog) + * [5.2 Initiate the contract negotiation](#52-initiate-the-contract-negotiation) + * [5.3 Query negotiation status](#53-query-negotiation-status) + * [5.4 Initiate data transfer](#54-initiate-data-transfer) + * [5.5 Query data transfers](#55-query-data-transfers) + * [5.6 Get EndpointDataReference](#56-get-endpointdatareference) + * [5.7 Get access token for EDR](#57-get-access-token-for-edr) + * [5.8 Fetch data](#58-fetch-data) + * [6. Custom extensions in MVD](#6-custom-extensions-in-mvd) + * [7. Advanced topics](#7-advanced-topics) + * [7.2 Regenerating key pairs](#72-regenerating-key-pairs) + * [8. Other caveats, shortcuts, and workarounds](#8-other-caveats-shortcuts-and-workarounds) + * [8.2 DID resolution for participants](#82-did-resolution-for-participants) + * [8.3 Seed Jobs](#83-seed-jobs) + ## 1. 
Introduction @@ -218,8 +220,10 @@ All commands are executed from the **repository's root folder** unless stated ot > Since this is not a production deployment, all applications are deployed _in the same cluster_ and in the same > namespace, plainly for the sake of simplicity. -This builds the runtime images and creates the following docker images: `ghcr.io/eclipse-edc/minimumviabledataspace/controlplane:latest`, -`ghcr.io/eclipse-edc/minimumviabledataspace/dataplane:latest`, `ghcr.io/eclipse-edc/minimumviabledataspace/issuerservice:latest` and +This builds the runtime images and creates the following docker images: +`ghcr.io/eclipse-edc/minimumviabledataspace/controlplane:latest`, +`ghcr.io/eclipse-edc/minimumviabledataspace/dataplane:latest`, +`ghcr.io/eclipse-edc/minimumviabledataspace/issuerservice:latest` and `ghcr.io/eclipse-edc/minimumviabledataspace/identity-hub:latest` in the local docker image cache. PostgreSQL and Hashicorp Vault obviously require additional configuration, which is handled by the Kubernetes manifests @@ -238,11 +242,12 @@ kind create cluster -n mvd ### 4.2 Deploy the MVD components -The following commands deploy the MVD components to the cluster. +The following commands deploy the MVD components to the cluster. #### 4.2.1 Using pre-built images -If you are using the Docker images from the GitHub container registry, no further action is required. Process to step [4.2.3](#423-deploy-mvd-components) +If you are using the Docker images from the GitHub container registry, no further action is required. 
Proceed to
+step [4.2.3](#423-deploy-mvd-components)

#### 4.2.2 Building from source

@@ -331,7 +336,8 @@ Remote Debugging is possible, but Kubernetes port-forwards of port 1044 are nece

### 4.3 Seed the dataspace

-Once all pods are up and running, and all seed jobs have completed, all necessary demo data is already in place, **no need
+Once all pods are up and running, and all seed jobs have completed, all necessary demo data is already in place, **no
+need
to execute scripts or manually invoke the REST API**. This includes:

@@ -495,49 +501,54 @@ response, we can move on.

The type of data transfer that we are using here (`HttpData-PULL`) means that we can fetch data from the provider
dataplane's public endpoint, as we would query any other REST API. However, an access token is needed to authenticate
-the request. This access token is provided to the consumer in the form of an EndpointDataReference (EDR). We must thus
-query the consumer's EDR endpoint to obtain the token.
-
-### 5.6 Get EndpointDataReference
-
-Using the `ControlPlane Management/Get Cached EDRs` request, we fetch the EDR and note down the value of the `@id`
-field, for example `392d1767-e546-4b54-ab6e-6fb20a3dc12a`. This should be identical to the value of the
-`transferProcessId` field.
-
-With that value, we can obtain the access token for this particular EDR.
+the request. This access token is provided to the consumer in the form of an EndpointProperty which we can obtain by
+using the `ControlPlane Management/Get Open Dataflows` request.

-### 5.7 Get access token for EDR
+### 5.6 Get Open Dataflows

-In the `ControlPlane Management/Get EDR DataAddress for TransferId` request we have to paste the `transferProcessId`
-value from the previous step in the URL path, for example:
+Using the `ControlPlane Management/Get Open Dataflows` request, we fetch the data flow and note down the value of the
+key entry in the map, for example `392d1767-e546-4b54-ab6e-6fb20a3dc12a`. 
This should be identical to the value of +the `transferProcessId` field. -``` -{{HOST}}/api/management/v3/edrs/392d1767-e546-4b54-ab6e-6fb20a3dc12a/dataaddress -``` - -Executing this request produces a response that contains both the endpoint where we can fetch the data, and the -authorization token: +The access token is conveyed in the `endpointProperties` field of the response, in particular the `"authorization"` +field: ```json { - //... - "endpoint": "http://provider-qna-dataplane:11002/api/public", - "authType": "bearer", - "endpointType": "https://w3id.org/idsa/v4.1/HTTP", - "authorization": "eyJra.....PbovoypJGtWJst30vD9zy5w" - //... + "f9f3ee24-6968-4603-867f-71cbf2bf9152": { + "@type": "DataAddress", + "endpointType": "http", + "endpoint": "http://dataplane.provider.svc.cluster.local:11002/api/public/63423d1f-846c-4fa4-bfc6-e034f9ad6a85/data/source", + "endpointProperties": [ + { + "type": null, + "name": "access_token", + "value": "eyJraWQiOiJkYXRhcGxhbmUtcHVibGljIiwiYWxnIjoiRWQyNTUxOSJ9.eyJpc3MiOiJwcm92aWRlci1kYXRhcGxhbmUiLCJzdWIiOiI2MzQyM2QxZi04NDZjLTRmYTQtYmZjNi1lMDM0ZjlhZDZhODUiLCJhdWQiOiJwcm92aWRlci1kYXRhcGxhbmUiLCJzY29wZSI6ImRhdGFmbG93In0.gjmSsp5eROrvW98_a0G1QwfCSi7d6yy7Bcye4mT2vhRwWjXPXcJyX0I8T1SE7fh1i3Ab5hKcPAvrvdYkrSI8Cw" + }, + { + "type": null, + "name": "https://w3id.org/edc/v0.0.1/ns/type", + "value": "http" + }, + { + "type": null, + "name": "https://w3id.org/edc/v0.0.1/ns/endpoint", + "value": "http://dataplane.provider.svc.cluster.local:11002/api/public/63423d1f-846c-4fa4-bfc6-e034f9ad6a85/data/source" + } + ] + } } ``` -Note that the token was abbreviated for legibility. - -### 5.8 Fetch data +### 5.7 Download the data -Using the endpoint and the authorization token from the previous step, we can then download data using the `ControlPlane -Management/Download Data from Public API` request. To do that, the token must be copied into the request's -`Authorization` header. 
+
The data plane comes with a convenience API that allows us to download data directly from the provider's public
+endpoint. Using that "proxy" API, we can simply invoke the `ControlPlane Management/Download Data from Public API`
+providing the dataflow ID in the path.

-Important: do not prepend a `bearer` prefix!

+Thus, the request travels Bruno → Consumer Dataplane (Proxy API) → Provider Dataplane (Public API), and since the
+consumer dataplane maintains a list of ongoing dataflows plus auth tokens, all we need to do is provide the dataflow ID
+to the proxy API. This will return some dummy JSON data.