Mirror of https://github.com/oceanprotocol/ocean.js.git (synced 2024-11-26 20:39:05 +01:00)

update to new schema & compute fixes (#1510)

* update to new schema

* fix codeExamples

* fix computeflows
Alex Coseru 2022-06-15 19:03:56 +03:00 committed by GitHub
parent 04735dd824
commit be06f05365
12 changed files with 591 additions and 89 deletions


@@ -52,7 +52,6 @@ jobs:
       with:
         repository: 'oceanprotocol/barge'
         path: 'barge'
-        ref: v4
     - name: Run Ganache with Barge
       working-directory: ${{ github.workspace }}/barge
@@ -101,7 +100,6 @@ jobs:
       with:
         repository: 'oceanprotocol/barge'
        path: 'barge'
-        ref: v4
     - name: Login to Docker Hub
       if: ${{ env.DOCKERHUB_PASSWORD && env.DOCKERHUB_USERNAME }}
@@ -115,6 +113,7 @@ jobs:
       working-directory: ${{ github.workspace }}/barge
       run: |
         bash -x start_ocean.sh --with-provider2 --no-dashboard --with-c2d 2>&1 > start_ocean.log &
     - run: npm ci
     - run: npm run build:metadata


@@ -98,6 +98,7 @@ import {
   DispenserCreationParams,
   downloadFile,
   Erc20CreateParams,
+  Files,
   FixedRateExchange,
   FreCreationParams,
   getHash,
@@ -153,13 +154,17 @@ We also define some constants that we will use:
 We will need a file to publish, so here we define the file that we intend to publish.
 ```Typescript
-const ASSET_URL = [
-  {
-    type: 'url',
-    url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
-    method: 'GET'
-  }
-]
+const ASSET_URL: Files = {
+  datatokenAddress: '0x0',
+  nftAddress: '0x0',
+  files: [
+    {
+      type: 'url',
+      url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
+      method: 'GET'
+    }
+  ]
+}
 ```
 Next, we define the metadata that will describe our data asset. This is what we call the DDO
@@ -167,7 +172,7 @@ Next, we define the metadata that will describe our data asset. This is what we
 const DDO = {
   '@context': ['https://w3id.org/did/v1'],
   id: '',
-  version: '4.0.0',
+  version: '4.1.0',
   chainId: 4,
   nftAddress: '0x0',
   metadata: {
@@ -324,6 +329,8 @@ Now we update the ddo and set the right did
 ```
 Next we encrypt the file or files using Ocean Provider. The provider is an off chain proxy built specifically for this task
 ```Typescript
+ASSET_URL.datatokenAddress = poolDatatokenAddress
+ASSET_URL.nftAddress = poolNftAddress
 const encryptedFiles = await ProviderInstance.encrypt(ASSET_URL, providerUrl)
 DDO.services[0].files = await encryptedFiles
 DDO.services[0].datatokenAddress = poolDatatokenAddress
@@ -587,6 +594,8 @@ Now we are going to update the ddo and set the did
 ```
 Next, let's encrypt the file(s) using provider
 ```Typescript
+ASSET_URL.datatokenAddress = freDatatokenAddress
+ASSET_URL.nftAddress = freNftAddress
 const encryptedFiles = await ProviderInstance.encrypt(ASSET_URL, providerUrl)
 DDO.services[0].files = await encryptedFiles
 DDO.services[0].datatokenAddress = freDatatokenAddress
@@ -801,6 +810,8 @@ Lets start by updating the ddo and setting the did
 ```
 Now we need to encrypt file(s) using provider
 ```Typescript
+ASSET_URL.datatokenAddress = dispenserDatatokenAddress
+ASSET_URL.nftAddress = dispenserNftAddress
 const encryptedFiles = await ProviderInstance.encrypt(ASSET_URL, providerUrl)
 DDO.services[0].files = await encryptedFiles
 DDO.services[0].datatokenAddress = dispenserDatatokenAddress
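
The practical upshot of these documentation changes: `ASSET_URL` is no longer a bare array of URL objects but a `Files` object that must carry the NFT and datatoken addresses before it is handed to the provider for encryption. A minimal sketch of that ordering (import paths follow the in-repo test layout; `encryptFiles` is a hypothetical helper, and the return type of `ProviderInstance.encrypt` is assumed to be the encrypted string):
```Typescript
import { ProviderInstance } from '../../src'
import { Files } from '../../src/@types'

// Placeholder addresses: the real values only exist after the NFT and
// datatoken contracts have been deployed.
const ASSET_URL: Files = {
  datatokenAddress: '0x0',
  nftAddress: '0x0',
  files: [
    {
      type: 'url',
      url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
      method: 'GET'
    }
  ]
}

// Hypothetical helper: fill in the addresses, then let the provider encrypt
// the file description so it can be stored in DDO.services[0].files.
async function encryptFiles(
  nftAddress: string,
  datatokenAddress: string,
  providerUrl: string
): Promise<string> {
  ASSET_URL.nftAddress = nftAddress
  ASSET_URL.datatokenAddress = datatokenAddress
  return await ProviderInstance.encrypt(ASSET_URL, providerUrl)
}
```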

src/@types/File.ts (new file)

@@ -0,0 +1,27 @@
export interface UrlFile {
  type: 'url'

  /**
   * File index.
   * @type {number}
   */
  index?: number

  /**
   * File URL.
   * @type {string}
   */
  url: string

  /**
   * HTTP method used
   * @type {string}
   */
  method: string
}

export interface Files {
  nftAddress: string
  datatokenAddress: string
  files: UrlFile[]
}
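
A small illustrative helper (hypothetical, not part of this commit) showing how a plain URL can be wrapped into the new structure; the `'0x0'` placeholders mirror the tests below, which overwrite them once the NFT and datatoken exist:
```Typescript
import { Files, UrlFile } from '../../src/@types'

// Wrap a single URL into the Files shape the provider now expects.
// nftAddress/datatokenAddress default to the '0x0' placeholders and are
// meant to be overwritten after the tokens are deployed.
export function toFiles(
  url: string,
  nftAddress = '0x0',
  datatokenAddress = '0x0'
): Files {
  const file: UrlFile = { type: 'url', url, method: 'GET' }
  return { nftAddress, datatokenAddress, files: [file] }
}
```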


@@ -1,4 +1,4 @@
-export interface FileMetadata {
+export interface FileInfo {
   /**
    * File URL.
    * @type {string}


@@ -3,7 +3,8 @@ export * from './Asset'
 export * from './DDO/Service'
 export * from './DDO/Credentials'
 export * from './DDO/Metadata'
-export * from './FileMetadata'
+export * from './File'
+export * from './FileInfo'
 export * from './Compute'
 export * from './Provider'
 export * from './FixedPrice'


@@ -1,7 +1,7 @@
 import Web3 from 'web3'
 import { LoggerInstance, getData } from '../utils'
 import {
-  FileMetadata,
+  FileInfo,
   ComputeJob,
   ComputeOutput,
   ComputeAlgorithm,
@@ -164,21 +164,21 @@ export class Provider {
    * @param {number} serviceId the id of the service for which to check the files
    * @param {string} providerUri uri of the provider that will be used to check the file
    * @param {AbortSignal} signal abort signal
-   * @return {Promise<FileMetadata[]>} urlDetails
+   * @return {Promise<FileInfo[]>} urlDetails
    */
   public async checkDidFiles(
     did: string,
     serviceId: number,
     providerUri: string,
     signal?: AbortSignal
-  ): Promise<FileMetadata[]> {
+  ): Promise<FileInfo[]> {
     const providerEndpoints = await this.getEndpoints(providerUri)
     const serviceEndpoints = await this.getServiceEndpoints(
       providerUri,
       providerEndpoints
     )
     const args = { did: did, serviceId: serviceId }
-    const files: FileMetadata[] = []
+    const files: FileInfo[] = []
     const path = this.getEndpointURL(serviceEndpoints, 'fileinfo')
       ? this.getEndpointURL(serviceEndpoints, 'fileinfo').urlPath
       : null
@@ -192,7 +192,7 @@ export class Provider {
       },
       signal: signal
     })
-    const results: FileMetadata[] = await response.json()
+    const results: FileInfo[] = await response.json()
     for (const result of results) {
       files.push(result)
     }
@@ -206,20 +206,20 @@ export class Provider {
    * @param {string} url or did
    * @param {string} providerUri uri of the provider that will be used to check the file
    * @param {AbortSignal} signal abort signal
-   * @return {Promise<FileMetadata[]>} urlDetails
+   * @return {Promise<FileInfo[]>} urlDetails
    */
   public async checkFileUrl(
     url: string,
     providerUri: string,
     signal?: AbortSignal
-  ): Promise<FileMetadata[]> {
+  ): Promise<FileInfo[]> {
     const providerEndpoints = await this.getEndpoints(providerUri)
     const serviceEndpoints = await this.getServiceEndpoints(
       providerUri,
       providerEndpoints
     )
     const args = { url: url, type: 'url' }
-    const files: FileMetadata[] = []
+    const files: FileInfo[] = []
     const path = this.getEndpointURL(serviceEndpoints, 'fileinfo')
       ? this.getEndpointURL(serviceEndpoints, 'fileinfo').urlPath
       : null
@@ -233,7 +233,7 @@ export class Provider {
       },
       signal: signal
     })
-    const results: FileMetadata[] = await response.json()
+    const results: FileInfo[] = await response.json()
     for (const result of results) {
       files.push(result)
     }
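
Only the type name changes here, so behaviour is unchanged. For reference, a hedged usage sketch of the two checkers (assuming `ProviderInstance` is the exported singleton of this `Provider` class, as it is used elsewhere in the diff):
```Typescript
import { ProviderInstance } from '../../src'
import { FileInfo } from '../../src/@types'

// Ask the provider what it knows about a raw URL and about the files behind
// a published asset's first service (serviceId 0 here is just an example).
async function inspectFiles(did: string, providerUrl: string): Promise<void> {
  const urlInfo: FileInfo[] = await ProviderInstance.checkFileUrl(
    'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
    providerUrl
  )
  const didInfo: FileInfo[] = await ProviderInstance.checkDidFiles(did, 0, providerUrl)
  console.log(urlInfo, didInfo)
}
```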


@@ -10,6 +10,7 @@ import { minAbi } from './minAbi'
 import LoggerInstance from './Logger'
 import { TransactionReceipt } from 'web3-core'
 import Web3 from 'web3'
+import BigNumber from 'bignumber.js'

 /**
  * Estimate gas cost for approval function
@@ -82,6 +83,50 @@ export async function approve(
   return result
 }

+/**
+ * Approve spender to spent amount tokens
+ * @param {String} account
+ * @param {String} tokenAddress
+ * @param {String} spender
+ * @param {String} amount amount of ERC20 tokens (always expressed as wei)
+ * @param {boolean} force if true, will overwrite any previous allowence. Else, will check if allowence is enough and will not send a transaction if it's not needed
+ */
+export async function approveWei(
+  web3: Web3,
+  account: string,
+  tokenAddress: string,
+  spender: string,
+  amount: string,
+  force = false
+): Promise<TransactionReceipt | string> {
+  const tokenContract = new web3.eth.Contract(minAbi, tokenAddress)
+  if (!force) {
+    const currentAllowence = await allowanceWei(web3, tokenAddress, account, spender)
+    if (new BigNumber(currentAllowence).gt(new BigNumber(amount))) {
+      return currentAllowence
+    }
+  }
+  let result = null
+  const estGas = await estimateGas(
+    account,
+    tokenContract.methods.approve,
+    spender,
+    amount
+  )
+  try {
+    result = await tokenContract.methods.approve(spender, amount).send({
+      from: account,
+      gas: estGas + 1,
+      gasPrice: await getFairGasPrice(web3, null)
+    })
+  } catch (e) {
+    LoggerInstance.error(
+      `ERROR: Failed to approve spender to spend tokens : ${e.message}`
+    )
+  }
+  return result
+}

 /**
  * Estimate gas cost for transfer function
  * @param {String} account
@@ -164,6 +209,24 @@ export async function allowance(
   return await unitsToAmount(web3, tokenAddress, trxReceipt, tokenDecimals)
 }

+/**
+ * Get Allowance for any erc20
+ * @param {Web3} web3
+ * @param {String } tokenAdress
+ * @param {String} account
+ * @param {String} spender
+ */
+export async function allowanceWei(
+  web3: Web3,
+  tokenAddress: string,
+  account: string,
+  spender: string,
+  tokenDecimals?: number
+): Promise<string> {
+  const tokenContract = new web3.eth.Contract(minAbi, tokenAddress)
+  return await tokenContract.methods.allowance(account, spender).call()
+}

 /**
  * Get balance for any erc20
  * @param {Web3} web3
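
Both new helpers operate on raw wei strings, which is what the provider fee quotes use. A usage sketch (assuming `approveWei` and `allowanceWei` are re-exported from the package root, as the compute test below already does for `approveWei`):
```Typescript
import Web3 from 'web3'
import { approveWei, allowanceWei } from '../../src'

// Make sure `spender` may pull `feeAmountWei` of `feeToken` from `payer`.
// approveWei() itself checks the current allowance first and skips the
// transaction when it is already sufficient (unless force=true).
async function ensureAllowance(
  web3: Web3,
  payer: string,
  feeToken: string,
  spender: string,
  feeAmountWei: string
): Promise<void> {
  const current = await allowanceWei(web3, feeToken, payer, spender)
  console.log(`current allowance: ${current} wei`)
  await approveWei(web3, payer, feeToken, spender, feeAmountWei)
}
```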


@@ -98,6 +98,7 @@ import {
   DispenserCreationParams,
   downloadFile,
   Erc20CreateParams,
+  Files,
   FixedRateExchange,
   FreCreationParams,
   getHash,
@@ -153,13 +154,17 @@ describe('Marketplace flow tests', async () => {
   /// We will need a file to publish, so here we define the file that we intend to publish.
   /// ```Typescript
-  const ASSET_URL = [
-    {
-      type: 'url',
-      url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
-      method: 'GET'
-    }
-  ]
+  const ASSET_URL: Files = {
+    datatokenAddress: '0x0',
+    nftAddress: '0x0',
+    files: [
+      {
+        type: 'url',
+        url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
+        method: 'GET'
+      }
+    ]
+  }
   /// ```
   /// Next, we define the metadata that will describe our data asset. This is what we call the DDO
@@ -167,7 +172,7 @@ describe('Marketplace flow tests', async () => {
   const DDO = {
     '@context': ['https://w3id.org/did/v1'],
     id: '',
-    version: '4.0.0',
+    version: '4.1.0',
     chainId: 4,
     nftAddress: '0x0',
     metadata: {
@@ -324,6 +329,8 @@ describe('Marketplace flow tests', async () => {
   /// ```
   /// Next we encrypt the file or files using Ocean Provider. The provider is an off chain proxy built specifically for this task
   /// ```Typescript
+    ASSET_URL.datatokenAddress = poolDatatokenAddress
+    ASSET_URL.nftAddress = poolNftAddress
     const encryptedFiles = await ProviderInstance.encrypt(ASSET_URL, providerUrl)
     DDO.services[0].files = await encryptedFiles
     DDO.services[0].datatokenAddress = poolDatatokenAddress
@@ -587,6 +594,8 @@ describe('Marketplace flow tests', async () => {
   /// ```
   /// Next, let's encrypt the file(s) using provider
   /// ```Typescript
+    ASSET_URL.datatokenAddress = freDatatokenAddress
+    ASSET_URL.nftAddress = freNftAddress
     const encryptedFiles = await ProviderInstance.encrypt(ASSET_URL, providerUrl)
     DDO.services[0].files = await encryptedFiles
     DDO.services[0].datatokenAddress = freDatatokenAddress
@@ -801,6 +810,8 @@ describe('Marketplace flow tests', async () => {
   /// ```
   /// Now we need to encrypt file(s) using provider
   /// ```Typescript
+    ASSET_URL.datatokenAddress = dispenserDatatokenAddress
+    ASSET_URL.nftAddress = dispenserNftAddress
     const encryptedFiles = await ProviderInstance.encrypt(ASSET_URL, providerUrl)
     DDO.services[0].files = await encryptedFiles
     DDO.services[0].datatokenAddress = dispenserDatatokenAddress


@@ -10,7 +10,8 @@ import {
   Datatoken,
   Nft,
   sleep,
-  ZERO_ADDRESS
+  ZERO_ADDRESS,
+  approveWei
 } from '../../src'
 import {
   Erc20CreateParams,
@@ -18,7 +19,8 @@ import {
   ComputeAsset,
   ComputeAlgorithm,
   ProviderComputeInitialize,
-  ConsumeMarketFee
+  ConsumeMarketFee,
+  Files
 } from '../../src/@types'

 let config: Config
@@ -42,17 +44,32 @@ let resolvedDdoWithNoTimeout
 let resolvedAlgoDdoWith1mTimeout
 let resolvedAlgoDdoWithNoTimeout

-const assetUrl = [
-  {
-    type: 'url',
-    url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
-    method: 'GET'
-  }
-]
+let freeEnvDatasetTxId
+let freeEnvAlgoTxId
+let paidEnvDatasetTxId
+let paidEnvAlgoTxId
+
+// let's have 2 minutes of compute access
+const mytime = new Date()
+const computeMinutes = 1
+mytime.setMinutes(mytime.getMinutes() + computeMinutes)
+let computeValidUntil = Math.floor(mytime.getTime() / 1000)
+
+const assetUrl: Files = {
+  datatokenAddress: '0x0',
+  nftAddress: '0x0',
+  files: [
+    {
+      type: 'url',
+      url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
+      method: 'GET'
+    }
+  ]
+}

 const ddoWithNoTimeout = {
   '@context': ['https://w3id.org/did/v1'],
   id: 'did:op:efba17455c127a885ec7830d687a8f6e64f5ba559f8506f8723c1f10f05c049c',
-  version: '4.0.0',
+  version: '4.1.0',
   chainId: 4,
   nftAddress: '0x0',
   metadata: {
@@ -89,7 +106,7 @@ const ddoWithNoTimeout = {
 const ddoWith1mTimeout = {
   '@context': ['https://w3id.org/did/v1'],
   id: 'did:op:efba17455c127a885ec7830d687a8f6e64f5ba559f8506f8723c1f10f05c049c',
-  version: '4.0.0',
+  version: '4.1.0',
   chainId: 4,
   nftAddress: '0x0',
   metadata: {
@@ -122,17 +139,21 @@ const ddoWith1mTimeout = {
     }
   ]
 }

-const algoAssetUrl = [
-  {
-    type: 'url',
-    url: 'https://raw.githubusercontent.com/oceanprotocol/test-algorithm/master/javascript/algo.js',
-    method: 'GET'
-  }
-]
+const algoAssetUrl: Files = {
+  datatokenAddress: '0x0',
+  nftAddress: '0x0',
+  files: [
+    {
+      type: 'url',
+      url: 'https://raw.githubusercontent.com/oceanprotocol/test-algorithm/master/javascript/algo.js',
+      method: 'GET'
+    }
+  ]
+}

 const algoDdoWithNoTimeout = {
   '@context': ['https://w3id.org/did/v1'],
   id: 'did:op:efba17455c127a885ec7830d687a8f6e64f5ba559f8506f8723c1f10f05c049c',
-  version: '4.0.0',
+  version: '4.1.0',
   chainId: 4,
   nftAddress: '0x0',
   metadata: {
@@ -173,7 +194,7 @@ const algoDdoWithNoTimeout = {
 const algoDdoWith1mTimeout = {
   '@context': ['https://w3id.org/did/v1'],
   id: 'did:op:efba17455c127a885ec7830d687a8f6e64f5ba559f8506f8723c1f10f05c049c',
-  version: '4.0.0',
+  version: '4.1.0',
   chainId: 4,
   nftAddress: '0x0',
   metadata: {
@@ -246,13 +267,15 @@ async function createAsset(
   const erc721AddressAsset = result.events.NFTCreated.returnValues[0]
   const datatokenAddressAsset = result.events.TokenCreated.returnValues[0]
+  ddo.nftAddress = web3.utils.toChecksumAddress(erc721AddressAsset)
   // create the files encrypted string
+  assetUrl.datatokenAddress = datatokenAddressAsset
+  assetUrl.nftAddress = ddo.nftAddress
   let providerResponse = await ProviderInstance.encrypt(assetUrl, providerUrl)
   ddo.services[0].files = await providerResponse
   ddo.services[0].datatokenAddress = datatokenAddressAsset
   ddo.services[0].serviceEndpoint = providerUrl
   // update ddo and set the right did
-  ddo.nftAddress = web3.utils.toChecksumAddress(erc721AddressAsset)
   ddo.id =
     'did:op:' +
     SHA256(web3.utils.toChecksumAddress(erc721AddressAsset) + chain.toString(10))
@@ -287,11 +310,12 @@ async function handleOrder(
   - no validOrder -> we need to call startOrder, to pay 1 DT & providerFees
   */
   if (order.providerFee && order.providerFee.providerFeeAmount) {
-    await datatoken.approve(
+    await approveWei(
+      web3,
+      payerAccount,
       order.providerFee.providerFeeToken,
       datatokenAddress,
-      order.providerFee.providerFeeAmount,
-      payerAccount
+      order.providerFee.providerFeeAmount
     )
   }
   if (order.validOrder) {
@@ -408,14 +432,11 @@ describe('Simple compute tests', async () => {
     assert(computeEnvs, 'No Compute environments found')
   })

-  it('should start a computeJob', async () => {
-    // we choose the first env
-    const computeEnv = computeEnvs[0].id
-    const computeConsumerAddress = computeEnvs[0].consumerAddress
-    // let's have 10 minutesof compute access
-    const mytime = new Date()
-    mytime.setMinutes(mytime.getMinutes() + 19)
-    const computeValidUntil = Math.floor(mytime.getTime() / 1000)
+  it('should start a computeJob using the free environment', async () => {
+    // we choose the free env
+    const computeEnv = computeEnvs.find((ce) => ce.priceMin === 0)
+    assert(computeEnv, 'Cannot find the free compute env')
     const assets: ComputeAsset[] = [
       {
         documentId: resolvedDdoWith1mTimeout.id,
@@ -431,7 +452,7 @@ describe('Simple compute tests', async () => {
     providerInitializeComputeResults = await ProviderInstance.initializeCompute(
       assets,
       algo,
-      computeEnv,
+      computeEnv.id,
       computeValidUntil,
       providerUrl,
       consumerAccount
@@ -444,7 +465,7 @@ describe('Simple compute tests', async () => {
       providerInitializeComputeResults.algorithm,
       resolvedAlgoDdoWith1mTimeout.services[0].datatokenAddress,
       consumerAccount,
-      computeConsumerAddress,
+      computeEnv.consumerAddress,
       0
     )
     for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) {
@@ -452,7 +473,7 @@ describe('Simple compute tests', async () => {
         providerInitializeComputeResults.datasets[i],
         dtAddressArray[i],
         consumerAccount,
-        computeConsumerAddress,
+        computeEnv.consumerAddress,
         0
       )
     }
@@ -460,10 +481,365 @@ describe('Simple compute tests', async () => {
       providerUrl,
       web3,
       consumerAccount,
-      computeEnv,
+      computeEnv.id,
       assets[0],
       algo
     )
freeEnvDatasetTxId = assets[0].transferTxId
freeEnvAlgoTxId = algo.transferTxId
assert(computeJobs, 'Cannot start compute job')
computeJobId = computeJobs[0].jobId
})
it('should restart a computeJob without paying anything, because order is valid and providerFees are still valid', async () => {
// we choose the free env
const computeEnv = computeEnvs.find((ce) => ce.priceMin === 0)
assert(computeEnv, 'Cannot find the free compute env')
const assets: ComputeAsset[] = [
{
documentId: resolvedDdoWith1mTimeout.id,
serviceId: resolvedDdoWith1mTimeout.services[0].id,
transferTxId: freeEnvDatasetTxId
}
]
const algo: ComputeAlgorithm = {
documentId: resolvedAlgoDdoWith1mTimeout.id,
serviceId: resolvedAlgoDdoWith1mTimeout.services[0].id,
transferTxId: freeEnvAlgoTxId
}
providerInitializeComputeResults = await ProviderInstance.initializeCompute(
assets,
algo,
computeEnv.id,
computeValidUntil,
providerUrl,
consumerAccount
)
assert(
providerInitializeComputeResults.algorithm.validOrder,
'We should have a valid order for algorithm'
)
assert(
!providerInitializeComputeResults.algorithm.providerFee,
'We should not pay providerFees again for algorithm'
)
assert(
providerInitializeComputeResults.datasets[0].validOrder,
'We should have a valid order for dataset'
)
assert(
!providerInitializeComputeResults.datasets[0].providerFee,
'We should not pay providerFees again for dataset'
)
algo.transferTxId = providerInitializeComputeResults.algorithm.validOrder
assets[0].transferTxId = providerInitializeComputeResults.datasets[0].validOrder
assert(
algo.transferTxId === freeEnvAlgoTxId &&
assets[0].transferTxId === freeEnvDatasetTxId,
'We should use the same orders, because no fess must be paid'
)
const computeJobs = await ProviderInstance.computeStart(
providerUrl,
web3,
consumerAccount,
computeEnv.id,
assets[0],
algo
)
assert(computeJobs, 'Cannot start compute job')
computeJobId = computeJobs[0].jobId
})
// moving to paid environments
it('should start a computeJob on a paid environment', async () => {
// we choose the paid env
const computeEnv = computeEnvs.find((ce) => ce.priceMin !== 0)
assert(computeEnv, 'Cannot find the paid compute env')
const assets: ComputeAsset[] = [
{
documentId: resolvedDdoWith1mTimeout.id,
serviceId: resolvedDdoWith1mTimeout.services[0].id
}
]
const dtAddressArray = [resolvedDdoWith1mTimeout.services[0].datatokenAddress]
const algo: ComputeAlgorithm = {
documentId: resolvedAlgoDdoWith1mTimeout.id,
serviceId: resolvedAlgoDdoWith1mTimeout.services[0].id
}
providerInitializeComputeResults = await ProviderInstance.initializeCompute(
assets,
algo,
computeEnv.id,
computeValidUntil,
providerUrl,
consumerAccount
)
assert(
!('error' in providerInitializeComputeResults.algorithm),
'Cannot order algorithm'
)
algo.transferTxId = await handleOrder(
providerInitializeComputeResults.algorithm,
resolvedAlgoDdoWith1mTimeout.services[0].datatokenAddress,
consumerAccount,
computeEnv.consumerAddress,
0
)
for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) {
assets[i].transferTxId = await handleOrder(
providerInitializeComputeResults.datasets[i],
dtAddressArray[i],
consumerAccount,
computeEnv.consumerAddress,
0
)
}
const computeJobs = await ProviderInstance.computeStart(
providerUrl,
web3,
consumerAccount,
computeEnv.id,
assets[0],
algo
)
paidEnvDatasetTxId = assets[0].transferTxId
paidEnvAlgoTxId = algo.transferTxId
assert(computeJobs, 'Cannot start compute job')
computeJobId = computeJobs[0].jobId
})
it('should restart a computeJob on paid environment, without paying anything, because order is valid and providerFees are still valid', async () => {
// we choose the paid env
const computeEnv = computeEnvs.find((ce) => ce.priceMin !== 0)
assert(computeEnv, 'Cannot find the free compute env')
const assets: ComputeAsset[] = [
{
documentId: resolvedDdoWith1mTimeout.id,
serviceId: resolvedDdoWith1mTimeout.services[0].id,
transferTxId: paidEnvDatasetTxId
}
]
const algo: ComputeAlgorithm = {
documentId: resolvedAlgoDdoWith1mTimeout.id,
serviceId: resolvedAlgoDdoWith1mTimeout.services[0].id,
transferTxId: paidEnvAlgoTxId
}
providerInitializeComputeResults = await ProviderInstance.initializeCompute(
assets,
algo,
computeEnv.id,
computeValidUntil,
providerUrl,
consumerAccount
)
assert(
providerInitializeComputeResults.algorithm.validOrder,
'We should have a valid order for algorithm'
)
assert(
!providerInitializeComputeResults.algorithm.providerFee,
'We should not pay providerFees again for algorithm'
)
assert(
providerInitializeComputeResults.datasets[0].validOrder,
'We should have a valid order for dataset'
)
assert(
!providerInitializeComputeResults.datasets[0].providerFee,
'We should not pay providerFees again for dataset'
)
algo.transferTxId = providerInitializeComputeResults.algorithm.validOrder
assets[0].transferTxId = providerInitializeComputeResults.datasets[0].validOrder
assert(
algo.transferTxId === paidEnvAlgoTxId &&
assets[0].transferTxId === paidEnvDatasetTxId,
'We should use the same orders, because no fess must be paid'
)
const computeJobs = await ProviderInstance.computeStart(
providerUrl,
web3,
consumerAccount,
computeEnv.id,
assets[0],
algo
)
assert(computeJobs, 'Cannot start compute job')
computeJobId = computeJobs[0].jobId
})
// move to reuse Orders
it('Should fast forward time and set a new computeValidUntil', async () => {
const mytime = new Date()
const computeMinutes = 5
mytime.setMinutes(mytime.getMinutes() + computeMinutes)
computeValidUntil = Math.floor(mytime.getTime() / 1000)
})
it('should start a computeJob using the free environment, by paying only providerFee (reuseOrder)', async () => {
// we choose the free env
const computeEnv = computeEnvs.find((ce) => ce.priceMin === 0)
assert(computeEnv, 'Cannot find the free compute env')
const assets: ComputeAsset[] = [
{
documentId: resolvedDdoWith1mTimeout.id,
serviceId: resolvedDdoWith1mTimeout.services[0].id,
transferTxId: freeEnvDatasetTxId
}
]
const dtAddressArray = [resolvedDdoWith1mTimeout.services[0].datatokenAddress]
const algo: ComputeAlgorithm = {
documentId: resolvedAlgoDdoWith1mTimeout.id,
serviceId: resolvedAlgoDdoWith1mTimeout.services[0].id,
transferTxId: freeEnvAlgoTxId
}
providerInitializeComputeResults = await ProviderInstance.initializeCompute(
assets,
algo,
computeEnv.id,
computeValidUntil,
providerUrl,
consumerAccount
)
assert(
providerInitializeComputeResults.algorithm.validOrder,
'We should have a valid order for algorithm'
)
assert(
providerInitializeComputeResults.datasets[0].validOrder,
'We should have a valid order for dataset'
)
assert(
providerInitializeComputeResults.algorithm.providerFee ||
providerInitializeComputeResults.datasets[0].providerFee,
'We should pay providerFees again for algorithm or dataset. Cannot have empty for both'
)
assert(
!('error' in providerInitializeComputeResults.algorithm),
'Cannot order algorithm'
)
algo.transferTxId = await handleOrder(
providerInitializeComputeResults.algorithm,
resolvedAlgoDdoWith1mTimeout.services[0].datatokenAddress,
consumerAccount,
computeEnv.consumerAddress,
0
)
for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) {
assets[i].transferTxId = await handleOrder(
providerInitializeComputeResults.datasets[i],
dtAddressArray[i],
consumerAccount,
computeEnv.consumerAddress,
0
)
}
assert(
algo.transferTxId !== freeEnvAlgoTxId ||
assets[0].transferTxId !== freeEnvDatasetTxId,
'We should not use the same orders, because providerFee must be paid'
)
const computeJobs = await ProviderInstance.computeStart(
providerUrl,
web3,
consumerAccount,
computeEnv.id,
assets[0],
algo
)
freeEnvDatasetTxId = assets[0].transferTxId
freeEnvAlgoTxId = algo.transferTxId
assert(computeJobs, 'Cannot start compute job')
computeJobId = computeJobs[0].jobId
})
it('should start a computeJob using the paid environment, by paying only providerFee (reuseOrder)', async () => {
// we choose the paid env
const computeEnv = computeEnvs.find((ce) => ce.priceMin !== 0)
assert(computeEnv, 'Cannot find the free compute env')
const assets: ComputeAsset[] = [
{
documentId: resolvedDdoWith1mTimeout.id,
serviceId: resolvedDdoWith1mTimeout.services[0].id,
transferTxId: paidEnvDatasetTxId
}
]
const dtAddressArray = [resolvedDdoWith1mTimeout.services[0].datatokenAddress]
const algo: ComputeAlgorithm = {
documentId: resolvedAlgoDdoWith1mTimeout.id,
serviceId: resolvedAlgoDdoWith1mTimeout.services[0].id,
transferTxId: paidEnvAlgoTxId
}
providerInitializeComputeResults = await ProviderInstance.initializeCompute(
assets,
algo,
computeEnv.id,
computeValidUntil,
providerUrl,
consumerAccount
)
assert(
providerInitializeComputeResults.algorithm.validOrder,
'We should have a valid order for algorithm'
)
assert(
providerInitializeComputeResults.datasets[0].validOrder,
'We should have a valid order for dataset'
)
assert(
providerInitializeComputeResults.algorithm.providerFee ||
providerInitializeComputeResults.datasets[0].providerFee,
'We should pay providerFees again for algorithm or dataset. Cannot have empty for both'
)
assert(
!('error' in providerInitializeComputeResults.algorithm),
'Cannot order algorithm'
)
algo.transferTxId = await handleOrder(
providerInitializeComputeResults.algorithm,
resolvedAlgoDdoWith1mTimeout.services[0].datatokenAddress,
consumerAccount,
computeEnv.consumerAddress,
0
)
for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) {
assets[i].transferTxId = await handleOrder(
providerInitializeComputeResults.datasets[i],
dtAddressArray[i],
consumerAccount,
computeEnv.consumerAddress,
0
)
}
assert(
algo.transferTxId !== paidEnvAlgoTxId ||
assets[0].transferTxId !== paidEnvDatasetTxId,
'We should not use the same orders, because providerFee must be paid'
)
const computeJobs = await ProviderInstance.computeStart(
providerUrl,
web3,
consumerAccount,
computeEnv.id,
assets[0],
algo
)
freeEnvDatasetTxId = assets[0].transferTxId
freeEnvAlgoTxId = algo.transferTxId
 assert(computeJobs, 'Cannot start compute job')
 computeJobId = computeJobs[0].jobId
 })
@@ -479,7 +855,7 @@ describe('Simple compute tests', async () => {
 })
 it('Get download compute results url', async () => {
-  sleep(10000)
+  await sleep(10000)
 const downloadURL = await ProviderInstance.getComputeResultUrl(
 providerUrl,
 web3,
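
To summarise the compute-flow rework: the tests no longer take `computeEnvs[0]` blindly, they pick an environment by price and reuse a shared `computeValidUntil` window. A sketch of that selection (assuming the environments are fetched with `ProviderInstance.getComputeEnvironments`, which is not shown in this diff, and that each entry exposes `id`, `priceMin` and `consumerAddress` as the tests use them):
```Typescript
import { ProviderInstance } from '../../src'

async function pickComputeEnvs(providerUrl: string) {
  // Assumed fetch: returns the list the tests store in `computeEnvs`.
  const computeEnvs = await ProviderInstance.getComputeEnvironments(providerUrl)

  // Free C2D environments advertise priceMin === 0, paid ones a non-zero price.
  const freeEnv = computeEnvs.find((ce) => ce.priceMin === 0)
  const paidEnv = computeEnvs.find((ce) => ce.priceMin !== 0)

  // Provider fees are only valid for a limited window; the tests request a short one.
  const validity = new Date()
  validity.setMinutes(validity.getMinutes() + 1)
  const computeValidUntil = Math.floor(validity.getTime() / 1000) // unix seconds

  return { freeEnv, paidEnv, computeValidUntil }
}
```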


@@ -1,7 +1,7 @@
 import { assert } from 'chai'
 import { web3, getTestConfig } from '../config'
 import { Config, Provider } from '../../src'
-import { FileMetadata } from '../../src/@types'
+import { FileInfo } from '../../src/@types'

 describe('Provider tests', async () => {
   let config: Config
@@ -26,7 +26,7 @@ describe('Provider tests', async () => {
   })

   it('Alice checks fileinfo', async () => {
-    const fileinfo: FileMetadata[] = await providerInstance.checkFileUrl(
+    const fileinfo: FileInfo[] = await providerInstance.checkFileUrl(
       'https://dumps.wikimedia.org/enwiki/latest/enwiki-latest-abstract.xml.gz-rss.xml',
       config.providerUri
     )


@@ -18,7 +18,8 @@ import {
   Erc20CreateParams,
   PoolCreationParams,
   FreCreationParams,
-  DispenserCreationParams
+  DispenserCreationParams,
+  Files
 } from '../../src/@types'

 describe('Publish tests', async () => {
@@ -30,18 +31,22 @@ describe('Publish tests', async () => {
   let factory: NftFactory
   let publisherAccount: string

-  const assetUrl = [
-    {
-      type: 'url',
-      url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
-      method: 'GET'
-    }
-  ]
+  const assetUrl: Files = {
+    datatokenAddress: '0x0',
+    nftAddress: '0x0',
+    files: [
+      {
+        type: 'url',
+        url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
+        method: 'GET'
+      }
+    ]
+  }

   const genericAsset: DDO = {
     '@context': ['https://w3id.org/did/v1'],
     id: '',
-    version: '4.0.0',
+    version: '4.1.0',
     chainId: 4,
     nftAddress: '0x0',
     metadata: {
@@ -142,7 +147,8 @@ describe('Publish tests', async () => {
     const nftAddress = bundleNFT.events.NFTCreated.returnValues[0]
     const datatokenAddress = bundleNFT.events.TokenCreated.returnValues[0]
+    assetUrl.datatokenAddress = datatokenAddress
+    assetUrl.nftAddress = nftAddress
     const encryptedFiles = await ProviderInstance.encrypt(assetUrl, providerUrl)
     poolDdo.metadata.name = 'test-dataset-pool'
@@ -223,7 +229,8 @@ describe('Publish tests', async () => {
     const nftAddress = bundleNFT.events.NFTCreated.returnValues[0]
     const datatokenAddress = bundleNFT.events.TokenCreated.returnValues[0]
+    assetUrl.datatokenAddress = datatokenAddress
+    assetUrl.nftAddress = nftAddress
     const encryptedFiles = await ProviderInstance.encrypt(assetUrl, providerUrl)
     fixedPriceDdo.metadata.name = 'test-dataset-fixedPrice'
@@ -297,7 +304,8 @@ describe('Publish tests', async () => {
     const nftAddress = bundleNFT.events.NFTCreated.returnValues[0]
     const datatokenAddress = bundleNFT.events.TokenCreated.returnValues[0]
+    assetUrl.datatokenAddress = datatokenAddress
+    assetUrl.nftAddress = nftAddress
     const encryptedFiles = await ProviderInstance.encrypt(assetUrl, providerUrl)
     dispenserDdo.metadata.name = 'test-dataset-dispenser'
     dispenserDdo.services[0].files = await encryptedFiles


@@ -13,7 +13,7 @@ import {
   downloadFile,
   ZERO_ADDRESS
 } from '../../src'
-import { ProviderFees, Erc20CreateParams, DDO } from '../../src/@types'
+import { ProviderFees, Erc20CreateParams, DDO, Files } from '../../src/@types'

 describe('Simple Publish & consume test', async () => {
   let config: Config
@@ -23,18 +23,22 @@ describe('Simple Publish & consume test', async () => {
   let publisherAccount: string
   let consumerAccount: string

-  const assetUrl = [
-    {
-      type: 'url',
-      url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
-      method: 'GET'
-    }
-  ]
+  const assetUrl: Files = {
+    datatokenAddress: '0x0',
+    nftAddress: '0x0',
+    files: [
+      {
+        type: 'url',
+        url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
+        method: 'GET'
+      }
+    ]
+  }

   const ddo: DDO = {
     '@context': ['https://w3id.org/did/v1'],
     id: '',
-    version: '4.0.0',
+    version: '4.1.0',
     chainId: 4,
     nftAddress: '0x0',
     metadata: {
@@ -101,6 +105,8 @@ describe('Simple Publish & consume test', async () => {
     const datatokenAddress = tx.events.TokenCreated.returnValues[0]

     // create the files encrypted string
+    assetUrl.datatokenAddress = datatokenAddress
+    assetUrl.nftAddress = erc721Address
     let providerResponse = await ProviderInstance.encrypt(assetUrl, providerUrl)
     ddo.services[0].files = await providerResponse
     ddo.services[0].datatokenAddress = datatokenAddress