mirror of https://github.com/oceanprotocol/ocean.js.git synced 2024-11-26 20:39:05 +01:00

Merge pull request #1870 from oceanprotocol/issue-1867-c2d-v2

add datasets on ComputeAsset, new start compute fn
paulo@oceanprotocol 2024-11-18 09:59:08 +00:00 committed by GitHub
commit 94a6619c0f
GPG Key ID: B5690EEEBB952194
11 changed files with 263 additions and 24 deletions
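
In short, the C2D V1 entry points are kept (renamed `initializeComputeV1` and `computeStartV1`), while the new V2 methods take the full datasets array and the consumer `Signer`. A minimal before/after sketch, using the variable names from the examples further down in this diff:

```typescript
// Before (C2D V1, now kept as computeStartV1 for backwards compatibility): a single dataset
await ProviderInstance.computeStartV1(providerUrl, consumerAccount, computeEnv.id, assets[0], algo)

// After (C2D V2): the whole datasets array is passed through
await ProviderInstance.computeStart(providerUrl, consumerAccount, computeEnv.id, assets, algo)
```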

View File

@@ -4,8 +4,18 @@ All notable changes to this project will be documented in this file. Dates are d
Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog).
#### [v4.0.0-next.1](https://github.com/oceanprotocol/ocean.js/compare/v4.0.0-next.0...v4.0.0-next.1)
- add datasets on ComputeAsset, new start compute fn [`8563429`](https://github.com/oceanprotocol/ocean.js/commit/85634293306fca9aaa3ab5ae06a114149ccc8911)
- support for consumer signature on initialize compute [`71c5923`](https://github.com/oceanprotocol/ocean.js/commit/71c59230db99a08a6c28df66c26e32ca61c71089)
- add file object types [`cd936c2`](https://github.com/oceanprotocol/ocean.js/commit/cd936c24a989633d3d8e71b908a375802d2e2970)
#### [v4.0.0-next.0](https://github.com/oceanprotocol/ocean.js/compare/v3.4.3...v4.0.0-next.0)
> 12 November 2024
- Release 4.0.0-next.0 [`658cfe8`](https://github.com/oceanprotocol/ocean.js/commit/658cfe83ff855a83eb97e3cbf1276bed6022a1a6)
#### [v3.4.3](https://github.com/oceanprotocol/ocean.js/compare/v3.4.2...v3.4.3)
> 5 November 2024

View File

@@ -627,7 +627,7 @@ Let's have 5 minutes of compute access
computeEnv.id,
computeValidUntil,
providerUrl,
await consumerAccount.getAddress()
consumerAccount
)
```
<!--
@@ -655,7 +655,7 @@ Let's have 5 minutes of compute access
providerUrl,
consumerAccount,
computeEnv.id,
assets[0],
assets,
algo
)
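
Put together, the updated example flow now reads roughly as follows (a sketch assembled from the hunks above and the test changes further down; the earlier setup of `assets`, `algo`, `computeEnv`, `computeValidUntil`, `providerUrl` and `consumerAccount` is assumed to be unchanged):

```typescript
// initializeCompute now receives the consumer Signer instead of its address
const providerInitializeComputeResults = await ProviderInstance.initializeCompute(
  assets,
  algo,
  computeEnv.id,
  computeValidUntil,
  providerUrl,
  consumerAccount
)

// computeStart now receives the full datasets array instead of just assets[0]
const computeJobs = await ProviderInstance.computeStart(
  providerUrl,
  consumerAccount,
  computeEnv.id,
  assets,
  algo
)
```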

View File

@@ -485,7 +485,7 @@ Initializes the provider for a compute request.
| `computeEnv` | `string` | The compute environment. |
| `validUntil` | `number` | The job expiration date. |
| `providerUri` | `string` | The provider URI. |
| `accountId` | `string` | caller address |
| `signer` | `Signer` | caller account |
| `signal?` | `AbortSignal` | abort signal |
#### Returns
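
By way of illustration (an editor's sketch, not part of the generated reference), a call with the new `signer` parameter could look like the following, assuming `assets` and `algorithm` are already prepared and `ProviderInstance` is imported from the library:

```typescript
const initializeResults = await ProviderInstance.initializeCompute(
  assets,      // ComputeAsset[]
  algorithm,   // ComputeAlgorithm
  computeEnv,  // string: the compute environment id
  validUntil,  // number: the job expiration timestamp
  providerUri, // string
  signer       // Signer (previously an accountId string)
)
// resolves to ProviderComputeInitializeResults
```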

package-lock.json (generated)
View File

@@ -1,12 +1,12 @@
{
"name": "@oceanprotocol/lib",
"version": "4.0.0-next.0",
"version": "4.0.0-next.1",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@oceanprotocol/lib",
"version": "4.0.0-next.0",
"version": "4.0.0-next.1",
"license": "Apache-2.0",
"dependencies": {
"@oasisprotocol/sapphire-paratime": "^1.3.2",

View File

@@ -1,7 +1,7 @@
{
"name": "@oceanprotocol/lib",
"source": "./src/index.ts",
"version": "4.0.0-next.0",
"version": "4.0.0-next.1",
"description": "JavaScript client library for Ocean Protocol",
"main": "./dist/lib.js",
"umd:main": "dist/lib.umd.js",

View File

@@ -22,6 +22,7 @@ export interface ComputeEnvironment {
storageExpiry: number
maxJobDuration: number
lastSeen: number
free: boolean
}
export interface ComputeResult {
@@ -59,7 +60,42 @@ export interface ComputeOutput {
whitelist?: string[]
}
export enum FileObjectType {
URL = 'url',
IPFS = 'ipfs',
ARWEAVE = 'arweave'
}
export enum EncryptMethod {
AES = 'AES',
ECIES = 'ECIES'
}
export interface HeadersObject {
[key: string]: string
}
export interface BaseFileObject {
type: string
encryptedBy?: string
encryptMethod?: EncryptMethod
}
export interface UrlFileObject extends BaseFileObject {
url: string
method: string
headers?: [HeadersObject]
}
export interface IpfsFileObject extends BaseFileObject {
hash: string
}
export interface ArweaveFileObject extends BaseFileObject {
transactionId: string
}
export interface ComputeAsset {
fileObject?: BaseFileObject // C2D v2
documentId: string
serviceId: string
transferTxId?: string
@@ -67,6 +103,7 @@ export interface ComputeAsset {
}
export interface ComputeAlgorithm {
fileObject?: BaseFileObject // C2D v2
documentId?: string
serviceId?: string
meta?: MetadataAlgorithm
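
To illustrate the new types (an editor's sketch; the URL, CID and DID values are placeholders, and the types are assumed to be re-exported from the package index like the existing ones):

```typescript
import {
  ComputeAsset,
  ComputeAlgorithm,
  FileObjectType,
  IpfsFileObject,
  UrlFileObject
} from '@oceanprotocol/lib'

// A dataset file served over HTTP(S)
const datasetFile: UrlFileObject = {
  type: FileObjectType.URL,
  url: 'https://example.com/dataset.csv', // placeholder
  method: 'GET'
}

// An algorithm stored on IPFS
const algoFile: IpfsFileObject = {
  type: FileObjectType.IPFS,
  hash: 'Qm...' // placeholder CID
}

// C2D v2: the file objects can travel with the compute request
const dataset: ComputeAsset = {
  documentId: 'did:op:...', // placeholder DID
  serviceId: 'serviceId',
  fileObject: datasetFile
}

const algorithm: ComputeAlgorithm = {
  documentId: 'did:op:...', // placeholder DID
  serviceId: 'serviceId',
  fileObject: algoFile
}
```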

View File

@@ -418,7 +418,7 @@ export class Provider {
* @param {AbortSignal} signal abort signal
* @return {Promise<ProviderComputeInitialize>} ProviderComputeInitialize data
*/
public async initializeCompute(
public async initializeComputeV1(
assets: ComputeAsset[],
algorithm: ComputeAlgorithm,
computeEnv: string,
@@ -470,6 +470,90 @@
throw new Error(JSON.stringify(resolvedResponse))
}
/** Initializes the provider for a compute request.
* @param {ComputeAsset[]} assets The datasets array for the compute request.
* @param {ComputeAlgorithm} algorithm The algorithm to use.
* @param {string} computeEnv The compute environment.
* @param {number} validUntil The job expiration date.
* @param {string} providerUri The provider URI.
* @param {Signer} signer caller account
* @param {AbortSignal} signal abort signal
* @return {Promise<ProviderComputeInitialize>} ProviderComputeInitialize data
*/
public async initializeCompute(
assets: ComputeAsset[],
algorithm: ComputeAlgorithm,
computeEnv: string,
validUntil: number,
providerUri: string,
signer: Signer,
signal?: AbortSignal
): Promise<ProviderComputeInitializeResults> {
const providerEndpoints = await this.getEndpoints(providerUri)
const serviceEndpoints = await this.getServiceEndpoints(
providerUri,
providerEndpoints
)
// Difference from V1: we might need a signature to get the files object, especially when dealing with confidential EVM and template 4;
// otherwise it can be ignored
const consumerAddress = await signer.getAddress()
const nonce = (
(await this.getNonce(
providerUri,
consumerAddress,
signal,
providerEndpoints,
serviceEndpoints
)) + 1
).toString()
// same signed message as for start compute (consumer address + did[0] + nonce)
let signatureMessage = consumerAddress
signatureMessage += assets[0].documentId
signatureMessage += nonce
const signature = await this.signProviderRequest(signer, signatureMessage)
const providerData = {
datasets: assets,
algorithm,
compute: { env: computeEnv, validUntil },
consumerAddress,
signature
}
const initializeUrl = this.getEndpointURL(serviceEndpoints, 'initializeCompute')
? this.getEndpointURL(serviceEndpoints, 'initializeCompute').urlPath
: null
if (!initializeUrl) return null
let response
try {
response = await fetch(initializeUrl, {
method: 'POST',
body: JSON.stringify(providerData),
headers: { 'Content-Type': 'application/json' },
signal
})
} catch (e) {
LoggerInstance.error('Initialize compute failed: ')
LoggerInstance.error(e)
throw new Error('ComputeJob cannot be initialized')
}
if (response?.ok) {
const params = await response.json()
return params
}
const resolvedResponse = await response.json()
LoggerInstance.error(
'Initialize compute failed: ',
response.status,
response.statusText,
resolvedResponse
)
LoggerInstance.error('Payload was:', providerData)
throw new Error(JSON.stringify(resolvedResponse))
}
/**
* Gets the download URL.
* @param {string} did - The DID.
@@ -524,7 +608,7 @@ export class Provider {
return consumeUrl
}
/** Instruct the provider to start a compute job
/** Instruct the provider to start a compute job (old C2D V1). Kept for now for backwards compatibility.
* @param {string} providerUri The provider URI.
* @param {Signer} signer The consumer signer object.
* @param {string} computeEnv The compute environment.
@@ -535,7 +619,7 @@
* @param {ComputeOutput} output The compute job output settings.
* @return {Promise<ComputeJob | ComputeJob[]>} The compute job or jobs.
*/
public async computeStart(
public async computeStartV1(
providerUri: string,
consumer: Signer,
computeEnv: string,
@@ -607,6 +691,103 @@
return null
}
/** Instruct the provider to start a compute job (new C2D V2)
* @param {string} providerUri The provider URI.
* @param {Signer} signer The consumer signer object.
* @param {string} computeEnv The compute environment.
* @param {ComputeAsset[]} datasets The datasets to start compute on (the first entry is the main dataset; any further entries are additional datasets).
* @param {ComputeAlgorithm} algorithm The algorithm to start compute with.
* @param {AbortSignal} signal abort signal
* @param {ComputeOutput} output The compute job output settings.
* @param {boolean} freeEnvironment whether this is a free environment (uses a different Provider route)
* @return {Promise<ComputeJob | ComputeJob[]>} The compute job or jobs.
*/
public async computeStart(
providerUri: string,
consumer: Signer,
computeEnv: string,
datasets: ComputeAsset[],
algorithm: ComputeAlgorithm,
signal?: AbortSignal,
output?: ComputeOutput,
freeEnvironment?: boolean
): Promise<ComputeJob | ComputeJob[]> {
console.log('called new compute start method...')
console.log('datasets: ', datasets)
console.log('algorithm: ', algorithm)
const providerEndpoints = await this.getEndpoints(providerUri)
const serviceEndpoints = await this.getServiceEndpoints(
providerUri,
providerEndpoints
)
let computeStartUrl = null
if (freeEnvironment) {
computeStartUrl = this.getEndpointURL(serviceEndpoints, 'freeCompute')
? this.getEndpointURL(serviceEndpoints, 'freeCompute').urlPath
: null
} else {
computeStartUrl = this.getEndpointURL(serviceEndpoints, 'computeStart')
? this.getEndpointURL(serviceEndpoints, 'computeStart').urlPath
: null
}
const consumerAddress = await consumer.getAddress()
const nonce = (
(await this.getNonce(
providerUri,
consumerAddress,
signal,
providerEndpoints,
serviceEndpoints
)) + 1
).toString()
let signatureMessage = consumerAddress
signatureMessage += datasets[0].documentId
signatureMessage += nonce
const signature = await this.signProviderRequest(consumer, signatureMessage)
const payload = Object()
payload.consumerAddress = consumerAddress
payload.signature = signature
payload.nonce = nonce
payload.environment = computeEnv
// kept for backwards compatibility (tests running against existing provider)
payload.dataset = datasets[0]
// new field for C2D v2
payload.datasets = datasets
payload.algorithm = algorithm
// if (additionalDatasets) payload.additionalDatasets = additionalDatasets
if (output) payload.output = output
if (!computeStartUrl) return null
let response
try {
response = await fetch(computeStartUrl, {
method: 'POST',
body: JSON.stringify(payload),
headers: { 'Content-Type': 'application/json' },
signal
})
} catch (e) {
LoggerInstance.error('Compute start failed:')
LoggerInstance.error(e)
LoggerInstance.error('Payload was:', payload)
throw new Error('HTTP request failed calling Provider')
}
if (response?.ok) {
const params = await response.json()
return params
}
LoggerInstance.error(
'Compute start failed: ',
response.status,
response.statusText,
await response.json()
)
LoggerInstance.error('Payload was:', payload)
return null
}
/** Instruct the provider to stop the execution of a compute job.
* @param {string} did the asset did
* @param {string} consumerAddress The consumer address.
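
As a usage sketch for the new `computeStart` (an illustration, not part of the diff): the `freeEnvironment` flag makes the Provider use its `freeCompute` route instead of `computeStart`, and the payload carries both `dataset` (the first entry, kept for backwards compatibility) and the full `datasets` array. Variable names follow the tests; whether an environment is free can be read from the new `free` field on `ComputeEnvironment`:

```typescript
const computeJobs = await ProviderInstance.computeStart(
  providerUrl,
  consumerAccount, // Signer; signs consumerAddress + datasets[0].documentId + nonce
  computeEnv.id,
  assets,          // ComputeAsset[]; assets[0] is also sent as `dataset` for older providers
  algo,
  undefined,       // signal?: AbortSignal
  undefined,       // output?: ComputeOutput
  computeEnv.free  // freeEnvironment: POSTs to the freeCompute endpoint when true
)
```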

View File

@ -21,6 +21,7 @@ import { ProviderInstance } from '../services/Provider'
import ERC20Template from '@oceanprotocol/contracts/artifacts/contracts/interfaces/IERC20Template.sol/IERC20Template.json'
import AccessListFactory from '@oceanprotocol/contracts/artifacts/contracts/accesslists/AccessListFactory.sol/AccessListFactory.json'
import ERC20Template4 from '@oceanprotocol/contracts/artifacts/contracts/templates/ERC20Template4.sol/ERC20Template4.json'
import { FileObjectType } from '../@types'
// import * as hre from 'hardhat'
@@ -202,6 +203,15 @@ export async function createAsset(
mpFeeAddress: ZERO_ADDRESS
}
if (
!assetUrl.type ||
![FileObjectType.ARWEAVE, FileObjectType.IPFS, FileObjectType.URL].includes(
assetUrl.type.toLowerCase()
)
) {
console.log('Missing or invalid files object type, defaulting to "url"')
assetUrl.type = FileObjectType.URL
}
// include fileObject in the DT constructor
if (config.sdk === 'oasis') {
datatokenParams.filesObject = assetUrl

View File

@@ -627,7 +627,7 @@ describe('Compute-to-data example tests', async () => {
computeEnv.id,
computeValidUntil,
providerUrl,
await consumerAccount.getAddress()
consumerAccount
)
/// ```
/// <!--
@@ -655,7 +655,7 @@ describe('Compute-to-data example tests', async () => {
providerUrl,
consumerAccount,
computeEnv.id,
assets[0],
assets,
algo
)

View File

@@ -430,7 +430,7 @@ describe('Compute flow tests', async () => {
computeEnv.id,
computeValidUntil,
providerUrl,
await consumerAccount.getAddress()
consumerAccount
)
assert(
!('error' in providerInitializeComputeResults.algorithm),
@@ -461,7 +461,7 @@ describe('Compute flow tests', async () => {
providerUrl,
consumerAccount,
computeEnv.id,
assets[0],
assets,
algo
)
freeEnvDatasetTxId = assets[0].transferTxId
@@ -501,7 +501,7 @@ describe('Compute flow tests', async () => {
computeEnv.id,
computeValidUntil,
providerUrl,
await consumerAccount.getAddress()
consumerAccount
)
assert(
providerInitializeComputeResults.algorithm.validOrder,
@@ -530,7 +530,7 @@ describe('Compute flow tests', async () => {
providerUrl,
consumerAccount,
computeEnv.id,
assets[0],
assets,
algo
)
assert(computeJobs, 'Cannot start compute job')
@@ -563,7 +563,7 @@ describe('Compute flow tests', async () => {
computeEnv.id,
computeValidUntil,
providerUrl,
await consumerAccount.getAddress()
consumerAccount
)
assert(
!('error' in providerInitializeComputeResults.algorithm),
@@ -594,7 +594,7 @@ describe('Compute flow tests', async () => {
providerUrl,
consumerAccount,
computeEnv.id,
assets[0],
assets,
algo
)
paidEnvDatasetTxId = assets[0].transferTxId
@@ -641,7 +641,7 @@ describe('Compute flow tests', async () => {
computeEnv.id,
computeValidUntil,
providerUrl,
await consumerAccount.getAddress()
consumerAccount
)
assert(
providerInitializeComputeResults.algorithm.validOrder,
@@ -670,7 +670,7 @@ describe('Compute flow tests', async () => {
providerUrl,
consumerAccount,
computeEnv.id,
assets[0],
assets,
algo
)
assert(computeJobs, 'Cannot start compute job')
@@ -712,7 +712,7 @@ describe('Compute flow tests', async () => {
computeEnv.id,
computeValidUntil,
providerUrl,
await consumerAccount.getAddress()
consumerAccount
)
assert(
providerInitializeComputeResults.algorithm.validOrder,
@@ -762,7 +762,7 @@ describe('Compute flow tests', async () => {
providerUrl,
consumerAccount,
computeEnv.id,
assets[0],
assets,
algo
)
// freeEnvDatasetTxId = assets[0].transferTxId
@@ -797,7 +797,7 @@ describe('Compute flow tests', async () => {
computeEnv.id,
computeValidUntil,
providerUrl,
await consumerAccount.getAddress()
consumerAccount
)
assert(
providerInitializeComputeResults.algorithm.validOrder,
@@ -846,7 +846,7 @@ describe('Compute flow tests', async () => {
providerUrl,
consumerAccount,
computeEnv.id,
assets[0],
assets,
algo
)
// freeEnvDatasetTxId = assets[0].transferTxId

View File

@@ -133,7 +133,8 @@ export async function handleComputeOrder(
- have validOrder and providerFees -> then order is valid but providerFees are not valid, we need to call reuseOrder and pay only providerFees
- no validOrder -> we need to call startOrder, to pay 1 DT & providerFees
*/
if (order.providerFee && order.providerFee.providerFeeAmount) {
const hasProviderFees = order.providerFee && order.providerFee.providerFeeAmount
if (hasProviderFees && Number(order.providerFee.providerFeeAmount) > 0) {
await approveWei(
payerAccount,
config,