From 21efecdbc73e4b4d1755e4b14db97a2b9e3bbc74 Mon Sep 17 00:00:00 2001 From: Akshay Date: Sun, 7 Nov 2021 18:57:20 +0100 Subject: [PATCH 01/25] Issue-#808: Improve C2D docs --- content/concepts/compute-to-data.md | 90 ++---------------- content/tutorials/compute-to-data-overview.md | 83 ++++++++++++++++ .../images/Starting New Compute Job.png | Bin data/sidebars/concepts.yml | 2 +- data/sidebars/tutorials.yml | 2 + 5 files changed, 94 insertions(+), 83 deletions(-) create mode 100644 content/tutorials/compute-to-data-overview.md rename content/{concepts => tutorials}/images/Starting New Compute Job.png (100%) diff --git a/content/concepts/compute-to-data.md b/content/concepts/compute-to-data.md index a19d9a90..bb524088 100644 --- a/content/concepts/compute-to-data.md +++ b/content/concepts/compute-to-data.md @@ -5,6 +5,11 @@ slug: /concepts/compute-to-data/ section: concepts --- +## Quick Start + +- [Compute-to-Data example](https://github.com/oceanprotocol/ocean.py/blob/main/READMEs/c2d-flow.md) + + ## Motivation The most basic scenario for a Publisher is to provide access to the datasets they own or manage. However, a Publisher may offer a service to execute some computation on top of their data. This has some benefits: @@ -17,13 +22,13 @@ The most basic scenario for a Publisher is to provide access to the datasets the ## Datasets & Algorithms -With Compute-to-Data, datasets are not allowed to leave the premises of the data holder, only algorithms can be permitted to run on them under certain conditions within an isolated and secure environment. Algorithms are an asset type just like datasets. They they too can have a pool or a fixed price to determine their price whenever they are used. +With Compute-to-Data, datasets are not allowed to leave the premises of the data holder, only algorithms can be permitted to run on them under certain conditions within an isolated and secure environment. Algorithms are an asset type just like datasets. They too can have a pool or a fixed price to determine their price whenever they are used. -Algorithms can be public or private by setting `"attributes.main.type"` value as follows: +Algorithms can be public or private by setting `"attributes.main.type"` value in DDO as follows: - `"access"` - public. The algorithm can be downloaded, given appropriate datatoken. -- `"compute"` - private. The algorithm is only available to use as part of a compute job without any way to download it. The dataset must be published on the same Ocean Provider as the dataset it's targeted to run on. +- `"compute"` - private. The algorithm is only available to use as part of a compute job without any way to download it. The Algorithm must be published on the same Ocean Provider as the dataset it's targeted to run on. For each dataset, publishers can choose to allow various permission levels for algorithms to run: @@ -33,85 +38,6 @@ For each dataset, publishers can choose to allow various permission levels for a All implementations should set permissions to private by default: upon publishing a compute dataset, no algorithms should be allowed to run on it. This is to prevent data escape by a rogue algorithm being written in a way to extract all data from a dataset. -## Architecture Overview - -Here's the sequence diagram for starting a new compute job. - -![Sequence Diagram for computing services](images/Starting New Compute Job.png) - -The Consumer calls the Provider with `start(did, algorithm, additionalDIDs)`. It returns job id `XXXX`. 
The Provider oversees the rest of the work. At any point, the Consumer can query the Provider for the job status via `getJobDetails(XXXX)`. - -Here's how Provider works. First, it ensures that the Consumer has sent the appropriate datatokens to get access. Then, it calls asks the Operator-Service (a microservice) to start the job, which passes on the request to Operator-Engine (the actual compute system). Operator-Engine runs Kubernetes compute jobs etc as needed. Operator-Engine reports when to Operator-Service when the job has finished. - -Here's the actors/components: - -- Consumers - The end users who need to use some computing services offered by the same Publisher as the data Publisher. -- Operator-Service - Micro-service that is handling the compute requests. -- Operator-Engine - The computing systems where the compute will be executed. -- Kubernetes - a K8 cluster - -Before the flow can begin, these pre-conditions must be met: - -- The Asset DDO has a `compute` service. -- The Asset DDO compute service must permit algorithms to run on it. -- The Asset DDO must specify an Ocean Provider endpoint exposed by the Publisher. - -## Access Control using Ocean Provider - -As [with the `access` service](/concepts/architecture/#datatokens--access-control-tools), the `compute` service requires the **Ocean Provider** as a component handled by Publishers. Ocean Provider is in charge of interacting with users and managing the basics of a Publisher's infrastructure to integrate this infrastructure into Ocean Protocol. The direct interaction with the infrastructure where the data resides happens through this component only. - -Ocean Provider includes the credentials to interact with the infrastructure (initially in cloud providers, but it could be on-premise). - - - -## Compute-to-Data Environment - -### Operator Service - -The **Operator Service** is a micro-service in charge of managing the workflow executing requests. - -The main responsibilities are: - -- Expose an HTTP API allowing for the execution of data access and compute endpoints. -- Interact with the infrastructure (cloud/on-premise) using the Publisher's credentials. -- Start/stop/execute computing instances with the algorithms provided by users. -- Retrieve the logs generated during executions. - -Typically the Operator Service is integrated from Ocean Provider, but can be called independently of it. - -The Operator Service is in charge of establishing the communication with the K8s cluster, allowing it to: - -- Register new compute jobs -- List the current compute jobs -- Get a detailed result for a given job -- Stop a running job - -The Operator Service doesn't provide any storage capability, all the state is stored directly in the K8s cluster. - - - -### Operator Engine - -The **Operator Engine** is in charge of orchestrating the compute infrastructure using Kubernetes as backend where each compute job runs in an isolated [Kubernetes Pod](https://kubernetes.io/docs/concepts/workloads/pods/). Typically the Operator Engine retrieves the workflows created by the Operator Service in Kubernetes, and manage the infrastructure necessary to complete the execution of the compute workflows. 
- -The Operator Engine is in charge of retrieving all the workflows registered in a K8s cluster, allowing to: - -- Orchestrate the flow of the execution -- Start the configuration pod in charge of download the workflow dependencies (datasets and algorithms) -- Start the pod including the algorithm to execute -- Start the publishing pod that publish the new assets created in the Ocean Protocol network. -- The Operator Engine doesn't provide any storage capability, all the state is stored directly in the K8s cluster. - - - -### Pod: Configuration - - - -### Pod: Publishing - - - ## Further Reading - [Tutorial: Writing Algorithms](/tutorials/compute-to-data-algorithms/) diff --git a/content/tutorials/compute-to-data-overview.md b/content/tutorials/compute-to-data-overview.md new file mode 100644 index 00000000..5e46a9ae --- /dev/null +++ b/content/tutorials/compute-to-data-overview.md @@ -0,0 +1,83 @@ +--- +title: Compute-to-Data +description: Architecture overview +--- + +## Architecture Overview + +Here's the sequence diagram for starting a new compute job. + +![Sequence Diagram for computing services](images/Starting New Compute Job.png) + +The Consumer calls the Provider with `start(did, algorithm, additionalDIDs)`. It returns job id `XXXX`. The Provider oversees the rest of the work. At any point, the Consumer can query the Provider for the job status via `getJobDetails(XXXX)`. + +Here's how Provider works. First, it ensures that the Consumer has sent the appropriate datatokens to get access. Then, it calls asks the Operator-Service (a microservice) to start the job, which passes on the request to Operator-Engine (the actual compute system). Operator-Engine runs Kubernetes compute jobs etc as needed. Operator-Engine reports when to Operator-Service when the job has finished. + +Here's the actors/components: + +- Consumers - The end users who need to use some computing services offered by the same Publisher as the data Publisher. +- Operator-Service - Micro-service that is handling the compute requests. +- Operator-Engine - The computing systems where the compute will be executed. +- Kubernetes - a K8 cluster + +Before the flow can begin, these pre-conditions must be met: + +- The Asset DDO has a `compute` service. +- The Asset DDO compute service must permit algorithms to run on it. +- The Asset DDO must specify an Ocean Provider endpoint exposed by the Publisher. + +## Access Control using Ocean Provider + +As [with the `access` service](/concepts/architecture/#datatokens--access-control-tools), the `compute` service requires the **Ocean Provider** as a component handled by Publishers. Ocean Provider is in charge of interacting with users and managing the basics of a Publisher's infrastructure to integrate this infrastructure into Ocean Protocol. The direct interaction with the infrastructure where the data resides happens through this component only. + +Ocean Provider includes the credentials to interact with the infrastructure (initially in cloud providers, but it could be on-premise). + + + +## Compute-to-Data Environment + +### Operator Service + +The **Operator Service** is a micro-service in charge of managing the workflow executing requests. + +The main responsibilities are: + +- Expose an HTTP API allowing for the execution of data access and compute endpoints. +- Interact with the infrastructure (cloud/on-premise) using the Publisher's credentials. +- Start/stop/execute computing instances with the algorithms provided by users. +- Retrieve the logs generated during executions. 
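+
+For illustration only, a job-status query against this HTTP API could look like the sketch below. The host, port, route and query parameter are assumptions made for the example; the exact interface depends on the Operator Service version you deploy, so check its API documentation before relying on it.
+
+```bash
+# Hypothetical status check for a compute job with id XXXX against an
+# Operator Service assumed to be exposed at example.com:8050.
+curl -X GET \
+  "http://example.com:8050/api/v1/operator/compute?jobId=XXXX" \
+  -H "accept: application/json"
+```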
+ +Typically the Operator Service is integrated from Ocean Provider, but can be called independently of it. + +The Operator Service is in charge of establishing the communication with the K8s cluster, allowing it to: + +- Register new compute jobs +- List the current compute jobs +- Get a detailed result for a given job +- Stop a running job + +The Operator Service doesn't provide any storage capability, all the state is stored directly in the K8s cluster. + + + +### Operator Engine + +The **Operator Engine** is in charge of orchestrating the compute infrastructure using Kubernetes as backend where each compute job runs in an isolated [Kubernetes Pod](https://kubernetes.io/docs/concepts/workloads/pods/). Typically the Operator Engine retrieves the workflows created by the Operator Service in Kubernetes, and manage the infrastructure necessary to complete the execution of the compute workflows. + +The Operator Engine is in charge of retrieving all the workflows registered in a K8s cluster, allowing to: + +- Orchestrate the flow of the execution +- Start the configuration pod in charge of download the workflow dependencies (datasets and algorithms) +- Start the pod including the algorithm to execute +- Start the publishing pod that publish the new assets created in the Ocean Protocol network. +- The Operator Engine doesn't provide any storage capability, all the state is stored directly in the K8s cluster. + + + +### Pod: Configuration + + + +### Pod: Publishing + + diff --git a/content/concepts/images/Starting New Compute Job.png b/content/tutorials/images/Starting New Compute Job.png similarity index 100% rename from content/concepts/images/Starting New Compute Job.png rename to content/tutorials/images/Starting New Compute Job.png diff --git a/data/sidebars/concepts.yml b/data/sidebars/concepts.yml index 0bb7fecc..18037d4e 100644 --- a/data/sidebars/concepts.yml +++ b/data/sidebars/concepts.yml @@ -15,7 +15,7 @@ - group: Compute-to-Data items: - - title: Compute-to-Data Overview + - title: Introduction link: /concepts/compute-to-data/ - group: Specifying Assets diff --git a/data/sidebars/tutorials.yml b/data/sidebars/tutorials.yml index 1179db0a..01d4db01 100644 --- a/data/sidebars/tutorials.yml +++ b/data/sidebars/tutorials.yml @@ -37,6 +37,8 @@ - group: Compute-to-Data items: + - title: Architecture Overview + link: /tutorials/compute-to-data-overview/ - title: Writing Algorithms link: /tutorials/compute-to-data-algorithms/ - title: Run a Compute-to-Data Environment From 5a1c268448f7a919e1399c0ec721cf1e13a635e6 Mon Sep 17 00:00:00 2001 From: Akshay Date: Sun, 7 Nov 2021 19:01:38 +0100 Subject: [PATCH 02/25] Issue-#808: Improve C2D docs --- content/concepts/compute-to-data.md | 1 + ...pute-to-data-overview.md => compute-to-data-architecture.md} | 0 data/sidebars/tutorials.yml | 2 +- 3 files changed, 2 insertions(+), 1 deletion(-) rename content/tutorials/{compute-to-data-overview.md => compute-to-data-architecture.md} (100%) diff --git a/content/concepts/compute-to-data.md b/content/concepts/compute-to-data.md index bb524088..7a78a353 100644 --- a/content/concepts/compute-to-data.md +++ b/content/concepts/compute-to-data.md @@ -40,6 +40,7 @@ All implementations should set permissions to private by default: upon publishin ## Further Reading +- [Compute-to-Data architecture](/tutorials/compute-to-data-architecture/) - [Tutorial: Writing Algorithms](/tutorials/compute-to-data-algorithms/) - [Tutorial: Set Up a Compute-to-Data Environment](/tutorials/compute-to-data/) - [Compute-to-Data in 
Ocean Market](https://blog.oceanprotocol.com) diff --git a/content/tutorials/compute-to-data-overview.md b/content/tutorials/compute-to-data-architecture.md similarity index 100% rename from content/tutorials/compute-to-data-overview.md rename to content/tutorials/compute-to-data-architecture.md diff --git a/data/sidebars/tutorials.yml b/data/sidebars/tutorials.yml index 01d4db01..9c1d99d1 100644 --- a/data/sidebars/tutorials.yml +++ b/data/sidebars/tutorials.yml @@ -38,7 +38,7 @@ - group: Compute-to-Data items: - title: Architecture Overview - link: /tutorials/compute-to-data-overview/ + link: /tutorials/compute-to-data-architecture/ - title: Writing Algorithms link: /tutorials/compute-to-data-algorithms/ - title: Run a Compute-to-Data Environment From 21b192040f95e4e611c210cce30e5482ac415010 Mon Sep 17 00:00:00 2001 From: Akshay Date: Tue, 9 Nov 2021 12:00:38 +0100 Subject: [PATCH 03/25] Issue-#808: C2D docs --- content/tutorials/compute-to-data-minikube.md | 62 ++++++++++++------- 1 file changed, 38 insertions(+), 24 deletions(-) diff --git a/content/tutorials/compute-to-data-minikube.md b/content/tutorials/compute-to-data-minikube.md index e5fc7445..919340ff 100644 --- a/content/tutorials/compute-to-data-minikube.md +++ b/content/tutorials/compute-to-data-minikube.md @@ -24,8 +24,46 @@ wget -q --show-progress https://github.com/kubernetes/minikube/releases/download sudo dpkg -i minikube_1.22.0-0_amd64.deb ``` +## Start Minikube + +First command is imporant, and solves a [PersistentVolumeClaims problem](https://github.com/kubernetes/minikube/issues/7828). + +```bash +minikube config set kubernetes-version v1.16.0 +minikube start --cni=calico --driver=docker --container-runtime=docker +``` + +## Install kubectl + +```bash +curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl" +curl -LO "https://dl.k8s.io/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl.sha256" +echo "$(> /etc/hosts' + +``` + ## Download and Configure Operator Service +Open new terminal and run the command below. ```bash git clone https://github.com/oceanprotocol/operator-service.git ``` @@ -68,30 +106,6 @@ Check the [README](https://github.com/oceanprotocol/operator-engine#customize-yo At a minimum you should add your IPFS URLs or AWS settings, and add (or remove) notification URLs. -## Install kubectl - -```bash -curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl" -curl -LO "https://dl.k8s.io/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl.sha256" -echo "$( Date: Mon, 22 Nov 2021 18:51:50 +0100 Subject: [PATCH 04/25] Issue-#808: Add Section datasets and algorithms --- content/concepts/compute-to-data.md | 18 --------------- .../compute-to-data-datasets-algorithms.md | 22 +++++++++++++++++++ data/sidebars/tutorials.yml | 2 ++ 3 files changed, 24 insertions(+), 18 deletions(-) create mode 100644 content/tutorials/compute-to-data-datasets-algorithms.md diff --git a/content/concepts/compute-to-data.md b/content/concepts/compute-to-data.md index 7a78a353..78acdb2e 100644 --- a/content/concepts/compute-to-data.md +++ b/content/concepts/compute-to-data.md @@ -20,24 +20,6 @@ The most basic scenario for a Publisher is to provide access to the datasets the [This page](https://oceanprotocol.com/technology/compute-to-data) elaborates on the benefits. 
-## Datasets & Algorithms - -With Compute-to-Data, datasets are not allowed to leave the premises of the data holder, only algorithms can be permitted to run on them under certain conditions within an isolated and secure environment. Algorithms are an asset type just like datasets. They too can have a pool or a fixed price to determine their price whenever they are used. - - -Algorithms can be public or private by setting `"attributes.main.type"` value in DDO as follows: - -- `"access"` - public. The algorithm can be downloaded, given appropriate datatoken. -- `"compute"` - private. The algorithm is only available to use as part of a compute job without any way to download it. The Algorithm must be published on the same Ocean Provider as the dataset it's targeted to run on. - -For each dataset, publishers can choose to allow various permission levels for algorithms to run: - -- allow selected algorithms, referenced by their DID -- allow all algorithms published within a network or marketplace -- allow raw algorithms, for advanced use cases circumventing algorithm as an asset type, but most prone to data escape - -All implementations should set permissions to private by default: upon publishing a compute dataset, no algorithms should be allowed to run on it. This is to prevent data escape by a rogue algorithm being written in a way to extract all data from a dataset. - ## Further Reading - [Compute-to-Data architecture](/tutorials/compute-to-data-architecture/) diff --git a/content/tutorials/compute-to-data-datasets-algorithms.md b/content/tutorials/compute-to-data-datasets-algorithms.md new file mode 100644 index 00000000..e926d795 --- /dev/null +++ b/content/tutorials/compute-to-data-datasets-algorithms.md @@ -0,0 +1,22 @@ +--- +title: Compute-to-Data +description: Datasets and Algorithms +--- + +## Datasets & Algorithms + +With Compute-to-Data, datasets are not allowed to leave the premises of the data holder, only algorithms can be permitted to run on them under certain conditions within an isolated and secure environment. Algorithms are an asset type just like datasets. They too can have a pool or a fixed price to determine their price whenever they are used. + + +Algorithms can be public or private by setting `"attributes.main.type"` value in DDO as follows: + +- `"access"` - public. The algorithm can be downloaded, given appropriate datatoken. +- `"compute"` - private. The algorithm is only available to use as part of a compute job without any way to download it. The Algorithm must be published on the same Ocean Provider as the dataset it's targeted to run on. + +For each dataset, publishers can choose to allow various permission levels for algorithms to run: + +- allow selected algorithms, referenced by their DID +- allow all algorithms published within a network or marketplace +- allow raw algorithms, for advanced use cases circumventing algorithm as an asset type, but most prone to data escape + +All implementations should set permissions to private by default: upon publishing a compute dataset, no algorithms should be allowed to run on it. This is to prevent data escape by a rogue algorithm being written in a way to extract all data from a dataset. 
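+
+As a minimal sketch of the setting described above, the fragment below marks an algorithm as private (`"compute"`) rather than publicly downloadable (`"access"`). Only the `"attributes.main.type"` field is taken from the text above; the file name and the surrounding structure are illustrative and do not form a complete, publishable DDO.
+
+```bash
+# Write an illustrative metadata fragment for a private algorithm.
+# Swap "compute" for "access" to make the algorithm publicly downloadable.
+cat <<'EOF' > algo-metadata-fragment.json
+{
+  "attributes": {
+    "main": {
+      "type": "compute"
+    }
+  }
+}
+EOF
+```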
diff --git a/data/sidebars/tutorials.yml b/data/sidebars/tutorials.yml index 9c1d99d1..74dca187 100644 --- a/data/sidebars/tutorials.yml +++ b/data/sidebars/tutorials.yml @@ -41,6 +41,8 @@ link: /tutorials/compute-to-data-architecture/ - title: Writing Algorithms link: /tutorials/compute-to-data-algorithms/ + - title: Datasets and algorithms + link: /tutorials/compute-to-data-datasets-algorithms/ - title: Run a Compute-to-Data Environment link: /tutorials/compute-to-data/ - title: Minikube Compute-to-Data Environment From 6571b61e7df0801564f54eca4b8817217499ff99 Mon Sep 17 00:00:00 2001 From: Akshay Date: Mon, 22 Nov 2021 18:56:28 +0100 Subject: [PATCH 05/25] Issue-#808: Remove duplicate content --- content/tutorials/compute-to-data-minikube.md | 31 ++++ content/tutorials/compute-to-data.md | 132 ------------------ 2 files changed, 31 insertions(+), 132 deletions(-) delete mode 100644 content/tutorials/compute-to-data.md diff --git a/content/tutorials/compute-to-data-minikube.md b/content/tutorials/compute-to-data-minikube.md index 919340ff..389531d2 100644 --- a/content/tutorials/compute-to-data-minikube.md +++ b/content/tutorials/compute-to-data-minikube.md @@ -61,6 +61,37 @@ sudo /bin/sh -c 'echo "127.0.0.1 youripfsserver" >> /etc/hosts' ``` +## Storage class (Optional) + +For minikube, you can use the default 'standard' class. + +For AWS, please make sure that your class allocates volumes in the same region and zone in which you are running your pods. + +We created our own 'standard' class in AWS: + +```bash +kubectl get storageclass standard -o yaml +``` + +```yaml +allowedTopologies: +- matchLabelExpressions: + - key: failure-domain.beta.kubernetes.io/zone + values: + - us-east-1a +apiVersion: storage.k8s.io/v1 +kind: StorageClass +parameters: + fsType: ext4 + type: gp2 +provisioner: kubernetes.io/aws-ebs +reclaimPolicy: Delete +volumeBindingMode: Immediate +``` + +For more information, please visit https://kubernetes.io/docs/concepts/storage/storage-classes/ + + ## Download and Configure Operator Service Open new terminal and run the command below. diff --git a/content/tutorials/compute-to-data.md b/content/tutorials/compute-to-data.md deleted file mode 100644 index 06db06fe..00000000 --- a/content/tutorials/compute-to-data.md +++ /dev/null @@ -1,132 +0,0 @@ ---- -title: Set Up a Compute-to-Data Environment -description: ---- - -## Requirements - -First, create a folder with the following structure: - -```text -ocean/ - barge/ - operator-service/ - operator-engine/ -``` - -Then you need the following parts: - -- working [Barge](https://github.com/oceanprotocol/barge). 
For this setup, we will asume the Barge is installed in /ocean/barge/ -- a working Kubernetes (K8s) cluster ([Minikube](../compute-to-data-minikube/) is a good start) -- a working `kubectl` connected to the K8s cluster -- one folder (/ocean/operator-service/), in which we will download the following: - - [postgres-configmap.yaml](https://raw.githubusercontent.com/oceanprotocol/operator-service/main/kubernetes/postgres-configmap.yaml) - - [postgres-storage.yaml](https://raw.githubusercontent.com/oceanprotocol/operator-service/main/kubernetes/postgres-storage.yaml) - - [postgres-deployment.yaml](https://raw.githubusercontent.com/oceanprotocol/operator-service/main/kubernetes/postgres-deployment.yaml) - - [postgres-service.yaml](https://raw.githubusercontent.com/oceanprotocol/operator-service/main/kubernetes/postgresql-service.yaml) - - [deployment.yaml](https://raw.githubusercontent.com/oceanprotocol/operator-service/main/kubernetes/deployment.yaml) -- one folder (/ocean/operator-engine/), in which we will download the following: - - [sa.yaml](https://raw.githubusercontent.com/oceanprotocol/operator-engine/main/kubernetes/sa.yml) - - [binding.yaml](https://raw.githubusercontent.com/oceanprotocol/operator-engine/main/kubernetes/binding.yml) - - [operator.yaml](https://raw.githubusercontent.com/oceanprotocol/operator-engine/main/kubernetes/operator.yml) - -## Customize your Operator Service deployment - -The following resources need attention: - -| Resource | Variable | Description | -| ------------------------- | ------------------ | ------------------------------------------------------------------------------------------------------ | -| `postgres-configmap.yaml` | | Contains secrets for the PostgreSQL deployment. | -| `deployment.yaml` | `ALGO_POD_TIMEOUT` | Allowed time for a algorithm to run. If it exceeded this value (in minutes), it's going to get killed. | - -## Customize your Operator Engine deployment - -Check the [README](https://github.com/oceanprotocol/operator-engine#customize-your-operator-engine-deployment) section of operator engine to customize your deployment - -## Storage class - -For minikube, you can use the default 'standard' class. - -For AWS, please make sure that your class allocates volumes in the same region and zone in which you are running your pods. 
- -We created our own 'standard' class in AWS: - -```bash -kubectl get storageclass standard -o yaml -``` - -```yaml -allowedTopologies: -- matchLabelExpressions: - - key: failure-domain.beta.kubernetes.io/zone - values: - - us-east-1a -apiVersion: storage.k8s.io/v1 -kind: StorageClass -parameters: - fsType: ext4 - type: gp2 -provisioner: kubernetes.io/aws-ebs -reclaimPolicy: Delete -volumeBindingMode: Immediate -``` - -For more information, please visit https://kubernetes.io/docs/concepts/storage/storage-classes/ - -## Create namespaces - -```bash -kubectl create ns ocean-operator -kubectl create ns ocean-compute -``` - -## Deploy Operator Service - -```bash -kubectl config set-context --current --namespace ocean-operator -kubectl create -f /ocean/operator-service/postgres-configmap.yaml -kubectl create -f /ocean/operator-service/postgres-storage.yaml -kubectl create -f /ocean/operator-service/postgres-deployment.yaml -kubectl create -f /ocean/operator-service/postgresql-service.yaml -kubectl apply -f /ocean/operator-service/deployment.yaml -``` - -## Deploy Operator Engine - -```bash -kubectl config set-context --current --namespace ocean-compute -kubectl apply -f /ocean/operator-engine/sa.yml -kubectl apply -f /ocean/operator-engine/binding.yml -kubectl apply -f /ocean/operator-engine/operator.yml -kubectl create -f /ocean/operator-service/postgres-configmap.yaml -``` - -## Expose Operator Service - -```bash -kubectl expose deployment operator-api --namespace=ocean-operator --port=8050 -``` - -Run a port forward or create your ingress service (not covered here): - -```bash -kubectl -n ocean-operator port-forward svc/operator-api 8050 -``` - -## Initialize database - -If your cluster is running on example.com: - -```bash -curl -X POST "http://example.com:8050/api/v1/operator/pgsqlinit" -H "accept: application/json" -``` - -## Update Barge for local testing - -Update Barge's Provider by adding or updating the `OPERATOR_SERVICE_URL` env in `/ocean/barge/compose-files/provider.yaml` - -```yaml -OPERATOR_SERVICE_URL: http://example.com:8050/ -``` - -Restart Barge with updated provider configuration From 7c81912b3219d2d889705aa79af47da42f23bbb5 Mon Sep 17 00:00:00 2001 From: Akshay Date: Mon, 22 Nov 2021 18:58:27 +0100 Subject: [PATCH 06/25] Issue-#808: Remove link --- data/sidebars/tutorials.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/data/sidebars/tutorials.yml b/data/sidebars/tutorials.yml index 74dca187..34c174ee 100644 --- a/data/sidebars/tutorials.yml +++ b/data/sidebars/tutorials.yml @@ -44,8 +44,6 @@ - title: Datasets and algorithms link: /tutorials/compute-to-data-datasets-algorithms/ - title: Run a Compute-to-Data Environment - link: /tutorials/compute-to-data/ - - title: Minikube Compute-to-Data Environment link: /tutorials/compute-to-data-minikube/ - group: Storage Setup From a5b40eb5d540abec19d59f624c5298b703d08baf Mon Sep 17 00:00:00 2001 From: Akshay Date: Mon, 22 Nov 2021 18:59:22 +0100 Subject: [PATCH 07/25] Issue-#808: Reorganize C2D content --- data/sidebars/tutorials.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/data/sidebars/tutorials.yml b/data/sidebars/tutorials.yml index 34c174ee..5a66b028 100644 --- a/data/sidebars/tutorials.yml +++ b/data/sidebars/tutorials.yml @@ -39,12 +39,12 @@ items: - title: Architecture Overview link: /tutorials/compute-to-data-architecture/ - - title: Writing Algorithms - link: /tutorials/compute-to-data-algorithms/ - - title: Datasets and algorithms - link: 
/tutorials/compute-to-data-datasets-algorithms/ - title: Run a Compute-to-Data Environment link: /tutorials/compute-to-data-minikube/ + - title: Datasets and algorithms + link: /tutorials/compute-to-data-datasets-algorithms/ + - title: Writing Algorithms + link: /tutorials/compute-to-data-algorithms/ - group: Storage Setup items: From b7258da91a1339d8f3e6d5122d2305ab0f2fbf57 Mon Sep 17 00:00:00 2001 From: Akshay Date: Mon, 22 Nov 2021 19:44:04 +0100 Subject: [PATCH 08/25] Issue-#808: Change links --- content/concepts/compute-to-data.md | 2 +- content/tutorials/compute-to-data-datasets-algorithms.md | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/content/concepts/compute-to-data.md b/content/concepts/compute-to-data.md index 78acdb2e..5303f441 100644 --- a/content/concepts/compute-to-data.md +++ b/content/concepts/compute-to-data.md @@ -24,6 +24,6 @@ The most basic scenario for a Publisher is to provide access to the datasets the - [Compute-to-Data architecture](/tutorials/compute-to-data-architecture/) - [Tutorial: Writing Algorithms](/tutorials/compute-to-data-algorithms/) -- [Tutorial: Set Up a Compute-to-Data Environment](/tutorials/compute-to-data/) +- [Tutorial: Set Up a Compute-to-Data Environment](/tutorials/compute-to-data-minikube/) - [Compute-to-Data in Ocean Market](https://blog.oceanprotocol.com) - [(Old) Compute-to-Data specs](https://github.com/oceanprotocol-archive/OEPs/tree/master/12) (OEP12) diff --git a/content/tutorials/compute-to-data-datasets-algorithms.md b/content/tutorials/compute-to-data-datasets-algorithms.md index e926d795..3b5572a0 100644 --- a/content/tutorials/compute-to-data-datasets-algorithms.md +++ b/content/tutorials/compute-to-data-datasets-algorithms.md @@ -20,3 +20,8 @@ For each dataset, publishers can choose to allow various permission levels for a - allow raw algorithms, for advanced use cases circumventing algorithm as an asset type, but most prone to data escape All implementations should set permissions to private by default: upon publishing a compute dataset, no algorithms should be allowed to run on it. This is to prevent data escape by a rogue algorithm being written in a way to extract all data from a dataset. + +## DDO Links + +- [Algorithm DDO](/concepts/ddo-metadata/#fields-when-attributesmaintype--algorithm) +- [Compute DDO](/concepts/ddo-metadata/#fields-when-attributesmaintype--compute) \ No newline at end of file From 46b2e76a4f831597a5b32a829101b9ed1a7a9104 Mon Sep 17 00:00:00 2001 From: Corrie Sloot <35015915+corrie-sloot@users.noreply.github.com> Date: Mon, 22 Nov 2021 16:32:52 -0500 Subject: [PATCH 09/25] Update external link to Matic's Guide for Mumbai (#828) --- content/concepts/networks.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/content/concepts/networks.md b/content/concepts/networks.md index c007709e..326517a6 100644 --- a/content/concepts/networks.md +++ b/content/concepts/networks.md @@ -232,7 +232,7 @@ In MetaMask and other ERC20 wallets, click on the network name dropdown, then se Mumbai is a test network tuned for Matic / Polygon. -If you don't find Mumbai as a predefined network in your wallet, you can connect to it manually via [Matic's guide](https://docs.matic.network/docs/develop/metamask/config-matic/). +If you don't find Mumbai as a predefined network in your wallet, you can connect to it manually via [Matic's guide](https://docs.polygon.technology/docs/develop/metamask/config-polygon-on-metamask/). 
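+
+As a quick sanity check before adding the network manually, you can ask a Mumbai RPC node for its chain ID (Mumbai uses chain ID 80001, returned as `0x13881`). The RPC URL below is an assumption; substitute whichever public Mumbai endpoint you use.
+
+```bash
+# Ask an assumed public Mumbai RPC endpoint for its chain ID.
+curl -s -X POST https://rpc-mumbai.maticvigil.com \
+  -H "Content-Type: application/json" \
+  -d '{"jsonrpc":"2.0","method":"eth_chainId","params":[],"id":1}'
+```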
**Tokens** From 92536d4fd77b4b5d013992953f91188d9c6dabe3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Nov 2021 21:33:11 +0000 Subject: [PATCH 10/25] Bump markdownlint-cli from 0.29.0 to 0.30.0 (#827) Bumps [markdownlint-cli](https://github.com/igorshubovych/markdownlint-cli) from 0.29.0 to 0.30.0. - [Release notes](https://github.com/igorshubovych/markdownlint-cli/releases) - [Commits](https://github.com/igorshubovych/markdownlint-cli/compare/v0.29.0...v0.30.0) --- updated-dependencies: - dependency-name: markdownlint-cli dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 22 +++++++++++----------- package.json | 2 +- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/package-lock.json b/package-lock.json index cab806a6..cbcfbf59 100644 --- a/package-lock.json +++ b/package-lock.json @@ -17459,16 +17459,16 @@ } }, "markdownlint-cli": { - "version": "0.29.0", - "resolved": "https://registry.npmjs.org/markdownlint-cli/-/markdownlint-cli-0.29.0.tgz", - "integrity": "sha512-SEXRUT1ri9sXV8xQK88vjGAgmz2X9rxEG2tXdDZMljzW8e++LNTO9zzBBStx3JQWrTDoGTPHNrcurbuiyF97gw==", + "version": "0.30.0", + "resolved": "https://registry.npmjs.org/markdownlint-cli/-/markdownlint-cli-0.30.0.tgz", + "integrity": "sha512-NiG8iERjwsRZtJAIyLMDdYL2O3bJVn3fUxzDl+6Iv61/YYz9H9Nzgke/v0/cW9HfGvgZHhbfI19LFMp6gbKdyw==", "dev": true, "requires": { - "commander": "~8.2.0", + "commander": "~8.3.0", "deep-extend": "~0.6.0", "get-stdin": "~8.0.0", "glob": "~7.2.0", - "ignore": "~5.1.8", + "ignore": "~5.1.9", "js-yaml": "^4.1.0", "jsonc-parser": "~3.0.0", "lodash.differencewith": "~4.5.0", @@ -17487,9 +17487,9 @@ "dev": true }, "commander": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-8.2.0.tgz", - "integrity": "sha512-LLKxDvHeL91/8MIyTAD5BFMNtoIwztGPMiM/7Bl8rIPmHCZXRxmSWr91h57dpOpnQ6jIUqEWdXE/uBYMfiVZDA==", + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", "dev": true }, "get-stdin": { @@ -17513,9 +17513,9 @@ } }, "ignore": { - "version": "5.1.8", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.8.tgz", - "integrity": "sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw==", + "version": "5.1.9", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.9.tgz", + "integrity": "sha512-2zeMQpbKz5dhZ9IwL0gbxSW5w0NK/MSAMtNuhgIHEPmaU3vPdKPL0UdvUCXs5SS4JAwsBxysK5sFMW8ocFiVjQ==", "dev": true }, "js-yaml": { diff --git a/package.json b/package.json index 7a670bc6..85b1bc77 100644 --- a/package.json +++ b/package.json @@ -73,7 +73,7 @@ "eslint-plugin-prettier": "^4.0.0", "git-format-staged": "^2.1.2", "husky": "^7.0.4", - "markdownlint-cli": "^0.29.0", + "markdownlint-cli": "^0.30.0", "node-sass": "^5.0.0", "npm-run-all": "^4.1.5", "prettier": "^2.4.1" From 5569f6305be330c3e1ec5cdeb8b36f7152d058bc Mon Sep 17 00:00:00 2001 From: Akshay Date: Tue, 23 Nov 2021 11:58:02 +0100 Subject: [PATCH 11/25] Issue-#808: Fix lint issues --- content/tutorials/compute-to-data-minikube.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/content/tutorials/compute-to-data-minikube.md b/content/tutorials/compute-to-data-minikube.md index 
389531d2..befcd39c 100644 --- a/content/tutorials/compute-to-data-minikube.md +++ b/content/tutorials/compute-to-data-minikube.md @@ -51,6 +51,7 @@ watch kubectl get pods --all-namespaces ``` ### Run IPFS host + ```bash export ipfs_staging=~/ipfs_staging export ipfs_data=~/ipfs_data @@ -91,10 +92,10 @@ volumeBindingMode: Immediate For more information, please visit https://kubernetes.io/docs/concepts/storage/storage-classes/ - ## Download and Configure Operator Service Open new terminal and run the command below. + ```bash git clone https://github.com/oceanprotocol/operator-service.git ``` From 87d7673c1c061a21b7af44a7a32d47975536bb9c Mon Sep 17 00:00:00 2001 From: Akshay Date: Tue, 23 Nov 2021 12:15:49 +0100 Subject: [PATCH 12/25] Issue-#808: Fix lint issue --- content/tutorials/compute-to-data-datasets-algorithms.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/content/tutorials/compute-to-data-datasets-algorithms.md b/content/tutorials/compute-to-data-datasets-algorithms.md index 3b5572a0..19c7f1a8 100644 --- a/content/tutorials/compute-to-data-datasets-algorithms.md +++ b/content/tutorials/compute-to-data-datasets-algorithms.md @@ -24,4 +24,4 @@ All implementations should set permissions to private by default: upon publishin ## DDO Links - [Algorithm DDO](/concepts/ddo-metadata/#fields-when-attributesmaintype--algorithm) -- [Compute DDO](/concepts/ddo-metadata/#fields-when-attributesmaintype--compute) \ No newline at end of file +- [Compute DDO](/concepts/ddo-metadata/#fields-when-attributesmaintype--compute) From c6297d9ce586088210ac28db242b2212b09cfd5d Mon Sep 17 00:00:00 2001 From: Trent McConaghy <5305452+trentmc@users.noreply.github.com> Date: Sat, 27 Nov 2021 08:37:52 +0100 Subject: [PATCH 13/25] Improve C2D links Fixes #830 --- content/concepts/compute-to-data.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/content/concepts/compute-to-data.md b/content/concepts/compute-to-data.md index a19d9a90..5de65068 100644 --- a/content/concepts/compute-to-data.md +++ b/content/concepts/compute-to-data.md @@ -116,5 +116,7 @@ The Operator Engine is in charge of retrieving all the workflows registered in a - [Tutorial: Writing Algorithms](/tutorials/compute-to-data-algorithms/) - [Tutorial: Set Up a Compute-to-Data Environment](/tutorials/compute-to-data/) -- [Compute-to-Data in Ocean Market](https://blog.oceanprotocol.com) +- [Use Compute-to-Data in Ocean Market](https://blog.oceanprotocol.com/compute-to-data-is-now-available-in-ocean-market-58868be52ef7) +- [Build ML models via Ocean Market or Python](https://medium.com/ravenprotocol/machine-learning-series-using-logistic-regression-for-classification-in-oceans-compute-to-data-18df49b6b165) +- [Compute-to-Data Python Quickstart](https://github.com/oceanprotocol/ocean.py/blob/main/READMEs/c2d-flow.md) - [(Old) Compute-to-Data specs](https://github.com/oceanprotocol-archive/OEPs/tree/master/12) (OEP12) From 04f63028b82167292fcca7c6d75d31fc541607fe Mon Sep 17 00:00:00 2001 From: Corrie Sloot <35015915+corrie-sloot@users.noreply.github.com> Date: Mon, 29 Nov 2021 13:08:20 -0500 Subject: [PATCH 14/25] Update EWT link (#829) * Update external link to Matic's Guide for Mumbai * Fix external 404 link to Energy Web Chain --- content/concepts/networks.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/content/concepts/networks.md b/content/concepts/networks.md index 326517a6..a7e17303 100644 --- a/content/concepts/networks.md +++ b/content/concepts/networks.md @@ -108,7 +108,7 @@ Use 
[Anyswap](https://anyswap.exchange/#/bridge) to bridge between ETH Mainnet a ## Energy Web Chain -Ocean is deployed to [Energy Web Chain](https://www.energyweb.org/technology/energy-web-chain/), another production network. Energy Web's native token is EWT. +Ocean is deployed to [Energy Web Chain](https://energy-web-foundation.gitbook.io/energy-web/technology/the-stack/trust-layer-energy-web-chain), another production network. Energy Web's native token is EWT. If you don't find Energy Web Chain as a predefined network in your wallet, you can connect to it manually via [Ocean's guide](/tutorials/metamask-setup/#set-up-custom-network) and the parameters below. From bee3c4b13561d33574d5d221b92b18a1c602c35c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Nov 2021 18:08:42 +0000 Subject: [PATCH 15/25] Bump slugify from 1.6.2 to 1.6.3 (#832) Bumps [slugify](https://github.com/simov/slugify) from 1.6.2 to 1.6.3. - [Release notes](https://github.com/simov/slugify/releases) - [Commits](https://github.com/simov/slugify/compare/v1.6.2...v1.6.3) --- updated-dependencies: - dependency-name: slugify dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 6 +++--- package.json | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/package-lock.json b/package-lock.json index cbcfbf59..32c49230 100644 --- a/package-lock.json +++ b/package-lock.json @@ -23073,9 +23073,9 @@ } }, "slugify": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.2.tgz", - "integrity": "sha512-XMtI8qD84LwCpthLMBHlIhcrj10cgA+U/Ot8G6FD6uFuWZtMfKK75JO7l81nzpFJsPlsW6LT+VKqWQJW3+6New==" + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.3.tgz", + "integrity": "sha512-1MPyqnIhgiq+/0iDJyqSJHENdnH5MMIlgJIBxmkRMzTNKlS/QsN5dXsB+MdDq4E6w0g9jFA4XOTRkVDjDae/2w==" }, "smoothscroll-polyfill": { "version": "0.4.4", diff --git a/package.json b/package.json index 85b1bc77..4c1c556e 100644 --- a/package.json +++ b/package.json @@ -60,7 +60,7 @@ "remark-github-plugin": "^1.4.0", "remark-react": "^8.0.0", "shortid": "^2.2.16", - "slugify": "^1.6.2", + "slugify": "^1.6.3", "smoothscroll-polyfill": "^0.4.4", "swagger-client": "^3.17.0" }, From 829e4e13a8753c934f3345755c5bc77e4794d2de Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Nov 2021 18:08:56 +0000 Subject: [PATCH 16/25] Bump prettier from 2.4.1 to 2.5.0 (#833) Bumps [prettier](https://github.com/prettier/prettier) from 2.4.1 to 2.5.0. - [Release notes](https://github.com/prettier/prettier/releases) - [Changelog](https://github.com/prettier/prettier/blob/main/CHANGELOG.md) - [Commits](https://github.com/prettier/prettier/compare/2.4.1...2.5.0) --- updated-dependencies: - dependency-name: prettier dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 6 +++--- package.json | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/package-lock.json b/package-lock.json index 32c49230..ede6ccaa 100644 --- a/package-lock.json +++ b/package-lock.json @@ -20691,9 +20691,9 @@ "integrity": "sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=" }, "prettier": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.4.1.tgz", - "integrity": "sha512-9fbDAXSBcc6Bs1mZrDYb3XKzDLm4EXXL9sC1LqKP5rZkT6KRr/rf9amVUcODVXgguK/isJz0d0hP72WeaKWsvA==" + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.5.0.tgz", + "integrity": "sha512-FM/zAKgWTxj40rH03VxzIPdXmj39SwSjwG0heUcNFwI+EMZJnY93yAiKXM3dObIKAM5TA88werc8T/EwhB45eg==" }, "prettier-linter-helpers": { "version": "1.0.0", diff --git a/package.json b/package.json index 4c1c556e..e84753b9 100644 --- a/package.json +++ b/package.json @@ -76,7 +76,7 @@ "markdownlint-cli": "^0.30.0", "node-sass": "^5.0.0", "npm-run-all": "^4.1.5", - "prettier": "^2.4.1" + "prettier": "^2.5.0" }, "repository": { "type": "git", From 86085a341d5b0019b809909c76d8c9d5c751be9e Mon Sep 17 00:00:00 2001 From: Akshay Date: Wed, 1 Dec 2021 13:30:51 +0100 Subject: [PATCH 17/25] Issue-#835: Trigger rebuild From a9f9a5a648ffb33b312c2d4407a2265c0970755d Mon Sep 17 00:00:00 2001 From: David Hunt-Mateo Date: Mon, 27 Dec 2021 11:12:00 -0500 Subject: [PATCH 18/25] Update provider docs 0.4.19 (#846) From 240e62a26f488598ae7a935f1a14cfdff252acb6 Mon Sep 17 00:00:00 2001 From: Akshay Date: Fri, 7 Jan 2022 12:50:39 +0100 Subject: [PATCH 19/25] Issue-#808: Add documentation for running own docker registry --- .../compute-to-data-docker-registry.md | 82 +++++++++++++++++++ 1 file changed, 82 insertions(+) create mode 100644 content/tutorials/compute-to-data-docker-registry.md diff --git a/content/tutorials/compute-to-data-docker-registry.md b/content/tutorials/compute-to-data-docker-registry.md new file mode 100644 index 00000000..a258a9f9 --- /dev/null +++ b/content/tutorials/compute-to-data-docker-registry.md @@ -0,0 +1,82 @@ +--- +title: Setting up private docker registry for Compute-to-Data environment +description: Learn how to setup own docker registry and push images for running algorithms in C2D environment. +--- + +## Prerequisites +1. Running docker environment on the server. +2. Domain name is mapped to the server IP address. +3. SSL certificate + +## Step 1: Generate password file + +Replace content in `<>` with appropriate content. + +```bash +docker run \ + --entrypoint htpasswd \ + httpd:2 -Bbn > /auth/htpasswd +``` + +## Docker compose template file for registry + +Copy the below yml content to `docker-compose.yml` file and replace content in `<>`. 
+ +```yml +version: '3' + +services: + registry: + restart: always + container_name: my-docker-registry + image: registry:2 + ports: + - 5050:5000 + environment: + REGISTRY_HTTP_TLS_CERTIFICATE: /certs/domain.crt + REGISTRY_HTTP_TLS_KEY: /certs/domain.key + REGISTRY_AUTH: htpasswd + REGISTRY_AUTH_HTPASSWD_PATH: /auth/htpasswd + REGISTRY_AUTH_HTPASSWD_REALM: Registry Realm + REGISTRY_HTTP_SECRET: + volumes: + - /data:/var/lib/registry + - /auth:/auth + - /certs:/certs + +``` + +## Start the registry + +```bash +docker-compose -f docker-compose.yml up +``` + +## List images in the registry + +```bash +curl -X GET -u : https://example.com/v2/_catalog +``` + +## Other useful commands + + +## Login to registry + +```bash +docker login example.com -u -p +``` + +## Build and push image to registry + +Use the commands below to build an image from a `Dockerfile` and push to your own private registry. + +```bash +docker build . -t example.com/my-algo:latest + +docker image tag example.com/my-algo:latest +``` + +## Next step + +You can publish an algorithm asset with the metadata containing registry url, image, and tag information to enable users to run C2D jobs. \ No newline at end of file From 620f30c2aae99993c4cf16ab42a679fdf009d4d6 Mon Sep 17 00:00:00 2001 From: Akshay Date: Fri, 7 Jan 2022 12:56:11 +0100 Subject: [PATCH 20/25] Issue-#808: Add documentation for running own docker registry --- content/tutorials/compute-to-data-docker-registry.md | 5 +++-- data/sidebars/tutorials.yml | 2 ++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/content/tutorials/compute-to-data-docker-registry.md b/content/tutorials/compute-to-data-docker-registry.md index a258a9f9..03cd864d 100644 --- a/content/tutorials/compute-to-data-docker-registry.md +++ b/content/tutorials/compute-to-data-docker-registry.md @@ -4,11 +4,12 @@ description: Learn how to setup own docker registry and push images for running --- ## Prerequisites + 1. Running docker environment on the server. 2. Domain name is mapped to the server IP address. 3. SSL certificate -## Step 1: Generate password file +## Generate password file Replace content in `<>` with appropriate content. @@ -79,4 +80,4 @@ docker image tag example.com/my-algo:latest ## Next step -You can publish an algorithm asset with the metadata containing registry url, image, and tag information to enable users to run C2D jobs. \ No newline at end of file +You can publish an algorithm asset with the metadata containing registry url, image, and tag information to enable users to run C2D jobs. diff --git a/data/sidebars/tutorials.yml b/data/sidebars/tutorials.yml index 5a66b028..75280355 100644 --- a/data/sidebars/tutorials.yml +++ b/data/sidebars/tutorials.yml @@ -45,6 +45,8 @@ link: /tutorials/compute-to-data-datasets-algorithms/ - title: Writing Algorithms link: /tutorials/compute-to-data-algorithms/ + - title: Setting up docker registry + link: /tutorials/compute-to-data-docker-registry/ - group: Storage Setup items: From 8d3b4c7c787da9fc4dcbd9c225e6efcb7d4c5bc0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Jan 2022 11:39:04 +0000 Subject: [PATCH 21/25] Bump prettier from 2.5.0 to 2.5.1 (#839) Bumps [prettier](https://github.com/prettier/prettier) from 2.5.0 to 2.5.1. 
- [Release notes](https://github.com/prettier/prettier/releases) - [Changelog](https://github.com/prettier/prettier/blob/main/CHANGELOG.md) - [Commits](https://github.com/prettier/prettier/compare/2.5.0...2.5.1) --- updated-dependencies: - dependency-name: prettier dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 6 +++--- package.json | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/package-lock.json b/package-lock.json index ede6ccaa..7eac47dc 100644 --- a/package-lock.json +++ b/package-lock.json @@ -20691,9 +20691,9 @@ "integrity": "sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=" }, "prettier": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.5.0.tgz", - "integrity": "sha512-FM/zAKgWTxj40rH03VxzIPdXmj39SwSjwG0heUcNFwI+EMZJnY93yAiKXM3dObIKAM5TA88werc8T/EwhB45eg==" + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.5.1.tgz", + "integrity": "sha512-vBZcPRUR5MZJwoyi3ZoyQlc1rXeEck8KgeC9AwwOn+exuxLxq5toTRDTSaVrXHxelDMHy9zlicw8u66yxoSUFg==" }, "prettier-linter-helpers": { "version": "1.0.0", diff --git a/package.json b/package.json index e84753b9..3f59239d 100644 --- a/package.json +++ b/package.json @@ -76,7 +76,7 @@ "markdownlint-cli": "^0.30.0", "node-sass": "^5.0.0", "npm-run-all": "^4.1.5", - "prettier": "^2.5.0" + "prettier": "^2.5.1" }, "repository": { "type": "git", From 2cfdb471a9006c755ea7ab9ee39b2aee31a1a5cf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Jan 2022 11:39:18 +0000 Subject: [PATCH 22/25] Bump slugify from 1.6.3 to 1.6.5 (#851) Bumps [slugify](https://github.com/simov/slugify) from 1.6.3 to 1.6.5. - [Release notes](https://github.com/simov/slugify/releases) - [Commits](https://github.com/simov/slugify/compare/v1.6.3...v1.6.5) --- updated-dependencies: - dependency-name: slugify dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 6 +++--- package.json | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/package-lock.json b/package-lock.json index 7eac47dc..38c2dc8d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -23073,9 +23073,9 @@ } }, "slugify": { - "version": "1.6.3", - "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.3.tgz", - "integrity": "sha512-1MPyqnIhgiq+/0iDJyqSJHENdnH5MMIlgJIBxmkRMzTNKlS/QsN5dXsB+MdDq4E6w0g9jFA4XOTRkVDjDae/2w==" + "version": "1.6.5", + "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.5.tgz", + "integrity": "sha512-8mo9bslnBO3tr5PEVFzMPIWwWnipGS0xVbYf65zxDqfNwmzYn1LpiKNrR6DlClusuvo+hDHd1zKpmfAe83NQSQ==" }, "smoothscroll-polyfill": { "version": "0.4.4", diff --git a/package.json b/package.json index 3f59239d..11aaed27 100644 --- a/package.json +++ b/package.json @@ -60,7 +60,7 @@ "remark-github-plugin": "^1.4.0", "remark-react": "^8.0.0", "shortid": "^2.2.16", - "slugify": "^1.6.3", + "slugify": "^1.6.5", "smoothscroll-polyfill": "^0.4.4", "swagger-client": "^3.17.0" }, From 3c691eaed0568f683c3e44ad235763c16ba77d01 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 13 Jan 2022 14:18:01 +0000 Subject: [PATCH 23/25] Bump swagger-client from 3.17.0 to 3.18.0 (#854) Bumps [swagger-client](https://github.com/swagger-api/swagger-js) from 3.17.0 to 3.18.0. - [Release notes](https://github.com/swagger-api/swagger-js/releases) - [Changelog](https://github.com/swagger-api/swagger-js/blob/master/.releaserc) - [Commits](https://github.com/swagger-api/swagger-js/compare/v3.17.0...v3.18.0) --- updated-dependencies: - dependency-name: swagger-client dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 52 ++++++++++++++++++++++++++--------------------- package.json | 2 +- 2 files changed, 30 insertions(+), 24 deletions(-) diff --git a/package-lock.json b/package-lock.json index 38c2dc8d..05ff3911 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11433,14 +11433,14 @@ } }, "form-data-encoder": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-1.6.0.tgz", - "integrity": "sha512-P97AVaOB8hZaniiKK3f46zxQcchQXI8EgBnX+2+719gLv5ZbDSf3J1XtIuAQ8xbGLU4vZYhy7xwhFtK8U5u9Nw==" + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-1.7.1.tgz", + "integrity": "sha512-EFRDrsMm/kyqbTQocNvRXMLjc7Es2Vk+IQFx/YW7hkUH1eBl4J1fqiP34l74Yt0pFLCNpc06fkbVk00008mzjg==" }, "formdata-node": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-4.3.0.tgz", - "integrity": "sha512-TwqhWUZd2jB5l0kUhhcy1XYNsXq46NH6k60zmiu7xsxMztul+cCMuPSAQrSDV62zznhBKJdA9O+zeWj5i5Pbfg==", + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-4.3.2.tgz", + "integrity": "sha512-k7lYJyzDOSL6h917favP8j1L0/wNyylzU+x+1w4p5haGVHNlP58dbpdJhiCUsDbWsa9HwEtLp89obQgXl2e0qg==", "requires": { "node-domexception": "1.0.0", "web-streams-polyfill": "4.0.0-beta.1" @@ -24025,9 +24025,9 @@ } }, "swagger-client": { - "version": "3.17.0", - "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.17.0.tgz", - "integrity": "sha512-d8DOEME49wTXm+uT+lBAjJ5D6IDjEHdbkqa7MbcslR2c+oHIhi13ObwleVWGfr89MPkWgBl6RBq9VUHmrBJRbg==", + "version": "3.18.0", + "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.18.0.tgz", + "integrity": "sha512-lNfwTXHim0QiCNuZ4BKgWle7N7+9WlFLtcP02n0xSchFtdzsKJb2kWsOlwplRU3appVFjnHRy+1eVabRc3ZhbA==", "requires": { "@babel/runtime-corejs3": "^7.11.2", "btoa": "^1.2.1", @@ -24037,19 +24037,20 @@ "fast-json-patch": "^3.0.0-1", "form-data-encoder": "^1.4.3", "formdata-node": "^4.0.0", + "is-plain-object": "^5.0.0", "js-yaml": "^4.1.0", "lodash": "^4.17.21", - "qs": "^6.9.4", + "qs": "^6.10.2", "traverse": "~0.6.6", "url": "~0.11.0" }, "dependencies": { "@babel/runtime-corejs3": { - "version": "7.15.4", - "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.15.4.tgz", - "integrity": "sha512-lWcAqKeB624/twtTc3w6w/2o9RqJPaNBhPGK6DKLSiwuVWC7WFkypWyNg+CpZoyJH0jVzv1uMtXZ/5/lQOLtCg==", + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.16.7.tgz", + "integrity": "sha512-MiYR1yk8+TW/CpOD0CyX7ve9ffWTKqLk/L6pk8TPl0R8pNi+1pFY8fH9yET55KlvukQ4PAWfXsGr2YHVjcI4Pw==", "requires": { - "core-js-pure": "^3.16.0", + "core-js-pure": "^3.19.0", "regenerator-runtime": "^0.13.4" } }, @@ -24064,9 +24065,9 @@ "integrity": "sha512-ZwrFkGJxUR3EIoXtO+yVE69Eb7KlixbaeAWfBQB9vVsNn/o+Yw69gBWSSDK825hQNdN+wF8zELf3dFNl/kxkUA==" }, "core-js-pure": { - "version": "3.18.3", - "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.18.3.tgz", - "integrity": "sha512-qfskyO/KjtbYn09bn1IPkuhHl5PlJ6IzJ9s9sraJ1EqcuGyLGKzhSM1cY0zgyL9hx42eulQLZ6WaeK5ycJCkqw==" + "version": "3.20.2", + "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.20.2.tgz", + "integrity": "sha512-CmWHvSKn2vNL6p6StNp1EmMIfVY/pqn3JLAjfZQ8WZGPOlGoO92EkX9/Mk81i6GxvoPXjUqEQnpM3rJ5QxxIOg==" }, "cross-fetch": { "version": "3.1.4", @@ -24076,6 
+24077,11 @@ "node-fetch": "2.6.1" } }, + "is-plain-object": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", + "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==" + }, "js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", @@ -24085,14 +24091,14 @@ } }, "object-inspect": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", - "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==" + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", + "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==" }, "qs": { - "version": "6.10.1", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.1.tgz", - "integrity": "sha512-M528Hph6wsSVOBiYUnGf+K/7w0hNshs/duGsNXPUCLH5XAqjEtiPGwNONLV0tBH8NoGb0mvD5JubnUTrujKDTg==", + "version": "6.10.2", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.2.tgz", + "integrity": "sha512-mSIdjzqznWgfd4pMii7sHtaYF8rx8861hBO80SraY5GT0XQibWZWJSid0avzHGkDIZLImux2S5mXO0Hfct2QCw==", "requires": { "side-channel": "^1.0.4" } diff --git a/package.json b/package.json index 11aaed27..704b74a8 100644 --- a/package.json +++ b/package.json @@ -62,7 +62,7 @@ "shortid": "^2.2.16", "slugify": "^1.6.5", "smoothscroll-polyfill": "^0.4.4", - "swagger-client": "^3.17.0" + "swagger-client": "^3.18.0" }, "devDependencies": { "@svgr/webpack": "^5.5.0", From 198a431c61cb54e18b3d92530c7309716777b45d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 13 Jan 2022 14:18:17 +0000 Subject: [PATCH 24/25] Bump git-format-staged from 2.1.2 to 2.1.3 (#856) Bumps [git-format-staged](https://github.com/hallettj/git-format-staged) from 2.1.2 to 2.1.3. - [Release notes](https://github.com/hallettj/git-format-staged/releases) - [Commits](https://github.com/hallettj/git-format-staged/compare/v2.1.2...v2.1.3) --- updated-dependencies: - dependency-name: git-format-staged dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 package-lock.json | 6 +++---
 package.json      | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index 05ff3911..129a149c 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -14617,9 +14617,9 @@
       }
     },
     "git-format-staged": {
-      "version": "2.1.2",
-      "resolved": "https://registry.npmjs.org/git-format-staged/-/git-format-staged-2.1.2.tgz",
-      "integrity": "sha512-ieP6iEyMJQ9xPKJGFSmK4HELcDdYwUO84dG4NBKdjaSTOdsZgrW9paLaEau2D4daPQjLwSsgwdqtYjqoVxz3Lw==",
+      "version": "2.1.3",
+      "resolved": "https://registry.npmjs.org/git-format-staged/-/git-format-staged-2.1.3.tgz",
+      "integrity": "sha512-M9q3W4CCQShYPHUiINhYUtHPJ3E1/aa3Ajbk8q2OAaCgqEmqZ6gBI6P1fnwD54/Fs9SA2MaOvDxpYRNa1OVGIA==",
       "dev": true
     },
     "git-up": {
diff --git a/package.json b/package.json
index 704b74a8..d7142ed6 100644
--- a/package.json
+++ b/package.json
@@ -71,7 +71,7 @@
     "eslint-config-oceanprotocol": "^1.5.0",
     "eslint-config-prettier": "^8.3.0",
     "eslint-plugin-prettier": "^4.0.0",
-    "git-format-staged": "^2.1.2",
+    "git-format-staged": "^2.1.3",
     "husky": "^7.0.4",
     "markdownlint-cli": "^0.30.0",
     "node-sass": "^5.0.0",

From 97316134f87a3a14441adbf1e5907561e78c0c46 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 13 Jan 2022 14:19:11 +0000
Subject: [PATCH 25/25] Bump rehype-react from 7.0.3 to 7.0.4 (#857)

Bumps [rehype-react](https://github.com/rehypejs/rehype-react) from 7.0.3 to 7.0.4.
- [Release notes](https://github.com/rehypejs/rehype-react/releases)
- [Commits](https://github.com/rehypejs/rehype-react/compare/7.0.3...7.0.4)

---
updated-dependencies:
- dependency-name: rehype-react
  dependency-type: direct:production
  update-type: version-update:semver-patch
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 package-lock.json | 42 +++++++++++++++++++++---------------------
 package.json      |  2 +-
 2 files changed, 22 insertions(+), 22 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index 129a149c..9a7cbc23 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -21681,9 +21681,9 @@
       }
     },
     "rehype-react": {
-      "version": "7.0.3",
-      "resolved": "https://registry.npmjs.org/rehype-react/-/rehype-react-7.0.3.tgz",
-      "integrity": "sha512-nrn2fAYAPv/XD3mFe9Z2cfra1UY0a9TutNYdb5dAHsfz4HAzSVxf1LbyGins/1UtvKBzvNS/0FQJknjp/d+iEg==",
+      "version": "7.0.4",
+      "resolved": "https://registry.npmjs.org/rehype-react/-/rehype-react-7.0.4.tgz",
+      "integrity": "sha512-mC3gT/EVmxB8mgwz6XkupjF/UAhA2NOai/bYvTQYC+AW0jvomXB+LGpC4UcX3vsY327nM29BttEDG4lLrtqu/g==",
       "requires": {
         "@mapbox/hast-util-table-cell-style": "^0.2.0",
         "@types/hast": "^2.0.0",
@@ -21702,9 +21702,9 @@
           }
         },
         "bail": {
-          "version": "2.0.1",
-          "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.1.tgz",
-          "integrity": "sha512-d5FoTAr2S5DSUPKl85WNm2yUwsINN8eidIdIwsOge2t33DaOfOdSmmsI11jMN3GmALCXaw+Y6HMVHDzePshFAA=="
+          "version": "2.0.2",
+          "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz",
+          "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw=="
         },
         "comma-separated-tokens": {
           "version": "2.0.2",
@@ -21741,9 +21741,9 @@
           "integrity": "sha512-NXRbBtUdBioI73y/HmOhogw/U5msYPC9DAtGkJXeFcFWSFZw0mCUsPxk/snTuJHzNKA8kLBK4rH97RMB1BfCXw=="
         },
         "property-information": {
-          "version": "6.0.1",
-          "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.0.1.tgz",
-          "integrity": "sha512-F4WUUAF7fMeF4/JUFHNBWDaKDXi2jbvqBW/y6o5wsf3j19wTZ7S60TmtB5HoBhtgw7NKQRMWuz5vk2PR0CygUg=="
+          "version": "6.1.1",
+          "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.1.1.tgz",
+          "integrity": "sha512-hrzC564QIl0r0vy4l6MvRLhafmUowhO/O3KgVSoXIbbA2Sz4j8HGpJc6T2cubRVwMwpdiG/vKGfhT4IixmKN9w=="
         },
         "space-separated-tokens": {
           "version": "2.0.1",
@@ -21756,9 +21756,9 @@
           "integrity": "sha512-FnHq5sTMxC0sk957wHDzRnemFnNBvt/gSY99HzK8F7UP5WAbvP70yX5bd7CjEQkN+TjdxwI7g7lJ6podqrG2/w=="
         },
         "unified": {
-          "version": "10.1.0",
-          "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.0.tgz",
-          "integrity": "sha512-4U3ru/BRXYYhKbwXV6lU6bufLikoAavTwev89H5UxY8enDFaAT2VXmIXYNm6hb5oHPng/EXr77PVyDFcptbk5g==",
+          "version": "10.1.1",
+          "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.1.tgz",
+          "integrity": "sha512-v4ky1+6BN9X3pQrOdkFIPWAaeDsHPE1svRDxq7YpTc2plkIqFMwukfqM+l0ewpP9EfwARlt9pPFAeWYhHm8X9w==",
           "requires": {
             "@types/unist": "^2.0.0",
             "bail": "^2.0.0",
@@ -21783,9 +21783,9 @@
           }
         },
         "vfile": {
-          "version": "5.2.0",
-          "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.2.0.tgz",
-          "integrity": "sha512-ftCpb6pU8Jrzcqku8zE6N3Gi4/RkDhRwEXSWudzZzA2eEOn/cBpsfk9aulCUR+j1raRSAykYQap9u6j6rhUaCA==",
+          "version": "5.2.1",
+          "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.2.1.tgz",
+          "integrity": "sha512-vXW5XKbELM6mLj88kmkJ+gjFGZ/2gTmpdqPDjs3y+qbvI5i7md7rba/+pbYEawa7t22W7ynywPV6lUUAS1WiYg==",
           "requires": {
             "@types/unist": "^2.0.0",
             "is-buffer": "^2.0.0",
@@ -21794,18 +21794,18 @@
           }
         },
         "vfile-message": {
-          "version": "3.0.2",
-          "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.0.2.tgz",
-          "integrity": "sha512-UUjZYIOg9lDRwwiBAuezLIsu9KlXntdxwG+nXnjuQAHvBpcX3x0eN8h+I7TkY5nkCXj+cWVp4ZqebtGBvok8ww==",
+          "version": "3.1.0",
+          "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.0.tgz",
+          "integrity": "sha512-4QJbBk+DkPEhBXq3f260xSaWtjE4gPKOfulzfMFF8ZNwaPZieWsg3iVlcmF04+eebzpcpeXOOFMfrYzJHVYg+g==",
           "requires": {
             "@types/unist": "^2.0.0",
             "unist-util-stringify-position": "^3.0.0"
           }
         },
         "web-namespaces": {
-          "version": "2.0.0",
-          "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.0.tgz",
-          "integrity": "sha512-dE7ELZRVWh0ceQsRgkjLgsAvwTuv3kcjSY/hLjqL0llleUlQBDjE9JkB9FCBY5F2mnFEwiyJoowl8+NVGHe8dw=="
+          "version": "2.0.1",
+          "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz",
+          "integrity": "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ=="
        }
      }
    },
diff --git a/package.json b/package.json
index d7142ed6..e9729ccb 100644
--- a/package.json
+++ b/package.json
@@ -55,7 +55,7 @@
     "react-helmet": "^6.1.0",
     "react-json-view": "^1.21.3",
     "react-scrollspy": "^3.4.3",
-    "rehype-react": "^7.0.3",
+    "rehype-react": "^7.0.4",
     "remark": "^13.0.0",
     "remark-github-plugin": "^1.4.0",
     "remark-react": "^8.0.0",