Mirror of https://github.com/oceanprotocol/ocean.js.git (synced 2024-11-26 20:39:05 +01:00)
Cover all storage type support (#1685)
* adds a new test file covering all supported storage types (publish / edit / consume)
* adds a helper module for the integration tests
* refactors the other test files to use the helpers
This commit is contained in:
parent a2e9901697, commit b98f8a4e14
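For orientation before the raw hunks: the sketch below summarizes how the new test/integration/helpers.ts exports are meant to be called for the publish / order / edit flow. It is assembled from the signatures and call sites in this diff rather than taken verbatim from the commit; the arguments (accounts, DDO template, files object, provider URL, factory address) are placeholders assumed to be set up the same way the tests do.

// Hedged usage sketch of the helpers added by this commit (see helpers.ts below).
import { Aquarius, Datatoken } from '../../src'
import { createAsset, orderAsset, updateAssetMetadata } from './helpers'

async function publishOrderEdit(
  publisherAccount: string,
  consumerAccount: string,
  assetDdo: any, // a DDO template, e.g. the `assetDdo` object in PublishEditConsume.test.ts
  urlFile: any, // a Files object describing one of the supported storage types
  providerUrl: string,
  erc721FactoryAddress: string, // addresses.ERC721Factory in the tests
  aquarius: Aquarius,
  datatoken: Datatoken
) {
  // publish: deploy NFT + datatoken, encrypt the files object, set metadata, return the DID
  const did = await createAsset(
    'UrlDatatoken',
    'URLDT',
    publisherAccount,
    urlFile,
    assetDdo,
    providerUrl,
    erc721FactoryAddress,
    aquarius
  )

  // consume: wait for Aquarius to index, then initialize provider fees and start an order
  const resolvedDdo = await aquarius.waitForAqua(did)
  const orderTx = await orderAsset(
    resolvedDdo.id,
    resolvedDdo.services[0].datatokenAddress,
    consumerAccount,
    resolvedDdo.services[0].id,
    0,
    datatoken,
    providerUrl
  )

  // edit: change the metadata, re-encrypt the DDO, and push it on-chain via setMetadata
  resolvedDdo.metadata.name = 'updated asset name'
  await updateAssetMetadata(publisherAccount, resolvedDdo, providerUrl, aquarius)

  return orderTx
}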
@@ -29,7 +29,7 @@
     "mocha": "TS_NODE_PROJECT='./test/tsconfig.json' mocha --config=test/.mocharc.json --node-env=test --exit",
     "test": "npm run lint && npm run test:unit:cover && npm run test:integration:cover",
     "test:unit": "npm run mocha -- 'test/unit/**/*.test.ts'",
-    "test:unit:cover": "nyc --report-dir coverage/unit --exclude 'src/@types/**/*' npm run test:unit",
+    "test:unit:cover": "nyc --report-dir coverage/unit npm run test:unit",
     "test:integration": "npm run mocha -- 'test/integration/**/*.test.ts'",
     "test:integration:cover": "nyc --report-dir coverage/integration --no-clean npm run test:integration",
     "create:guide": "./scripts/createCodeExamples.sh test/integration/CodeExamples.test.ts",
@@ -96,6 +96,10 @@
     "include": [
       "src/**/*.ts"
     ],
+    "exclude": [
+      "src/@types/**/*",
+      "test/**/*"
+    ],
     "extension": [
       ".ts"
     ],
@@ -1,29 +1,16 @@
 import { assert } from 'chai'
-import { SHA256 } from 'crypto-js'
 import { AbiItem } from 'web3-utils'
 import { web3, getTestConfig, getAddresses } from '../config'
 import {
   Config,
   ProviderInstance,
   Aquarius,
-  NftFactory,
-  NftCreateData,
   Datatoken,
-  Nft,
-  ZERO_ADDRESS,
-  approveWei,
   calculateEstimatedGas,
   sendTx
 } from '../../src'
-import {
-  DatatokenCreateParams,
-  ComputeJob,
-  ComputeAsset,
-  ComputeAlgorithm,
-  ProviderComputeInitialize,
-  ConsumeMarketFee,
-  Files
-} from '../../src/@types'
+import { ComputeJob, ComputeAsset, ComputeAlgorithm, Files } from '../../src/@types'
+import { createAsset, handleComputeOrder } from './helpers'

 let config: Config
@@ -233,118 +220,6 @@ const algoDdoWith5mTimeout = {
   ]
 }

-async function createAsset(
-  name: string,
-  symbol: string,
-  owner: string,
-  assetUrl: any,
-  ddo: any,
-  providerUrl: string
-) {
-  const nft = new Nft(web3)
-  const Factory = new NftFactory(addresses.ERC721Factory, web3)
-
-  const chain = await web3.eth.getChainId()
-  ddo.chainId = parseInt(chain.toString(10))
-  const nftParamsAsset: NftCreateData = {
-    name,
-    symbol,
-    templateIndex: 1,
-    tokenURI: 'aaa',
-    transferable: true,
-    owner
-  }
-  const datatokenParams: DatatokenCreateParams = {
-    templateIndex: 1,
-    cap: '100000',
-    feeAmount: '0',
-    paymentCollector: ZERO_ADDRESS,
-    feeToken: ZERO_ADDRESS,
-    minter: owner,
-    mpFeeAddress: ZERO_ADDRESS
-  }
-
-  const result = await Factory.createNftWithDatatoken(
-    owner,
-    nftParamsAsset,
-    datatokenParams
-  )
-
-  const nftAddress = result.events.NFTCreated.returnValues[0]
-  const datatokenAddressAsset = result.events.TokenCreated.returnValues[0]
-  ddo.nftAddress = web3.utils.toChecksumAddress(nftAddress)
-  // create the files encrypted string
-  assetUrl.datatokenAddress = datatokenAddressAsset
-  assetUrl.nftAddress = ddo.nftAddress
-  let providerResponse = await ProviderInstance.encrypt(assetUrl, chain, providerUrl)
-  ddo.services[0].files = await providerResponse
-  ddo.services[0].datatokenAddress = datatokenAddressAsset
-  ddo.services[0].serviceEndpoint = providerUrl
-  // update ddo and set the right did
-  ddo.nftAddress = web3.utils.toChecksumAddress(nftAddress)
-  ddo.id =
-    'did:op:' + SHA256(web3.utils.toChecksumAddress(nftAddress) + chain.toString(10))
-  providerResponse = await ProviderInstance.encrypt(ddo, chain, providerUrl)
-  const encryptedResponse = await providerResponse
-  const validateResult = await aquarius.validate(ddo)
-  assert(validateResult.valid, 'Could not validate metadata')
-  await nft.setMetadata(
-    nftAddress,
-    owner,
-    0,
-    providerUrl,
-    '',
-    '0x2',
-    encryptedResponse,
-    validateResult.hash
-  )
-  return ddo.id
-}
-
-async function handleOrder(
-  order: ProviderComputeInitialize,
-  datatokenAddress: string,
-  payerAccount: string,
-  consumerAccount: string,
-  serviceIndex: number,
-  consumeMarkerFee?: ConsumeMarketFee
-) {
-  /* We do have 3 possible situations:
-     - have validOrder and no providerFees -> then order is valid, providerFees are valid, just use it in startCompute
-     - have validOrder and providerFees -> then order is valid but providerFees are not valid, we need to call reuseOrder and pay only providerFees
-     - no validOrder -> we need to call startOrder, to pay 1 DT & providerFees
-  */
-  if (order.providerFee && order.providerFee.providerFeeAmount) {
-    await approveWei(
-      web3,
-      config,
-      payerAccount,
-      order.providerFee.providerFeeToken,
-      datatokenAddress,
-      order.providerFee.providerFeeAmount
-    )
-  }
-  if (order.validOrder) {
-    if (!order.providerFee) return order.validOrder
-    const tx = await datatoken.reuseOrder(
-      datatokenAddress,
-      payerAccount,
-      order.validOrder,
-      order.providerFee
-    )
-    return tx.transactionHash
-  }
-  const tx = await datatoken.startOrder(
-    datatokenAddress,
-    payerAccount,
-    consumerAccount,
-    serviceIndex,
-    order.providerFee,
-    consumeMarkerFee
-  )
-  return tx.transactionHash
-}
-
 function delay(interval: number) {
   return it('should delay', (done) => {
     setTimeout(() => done(), interval)
@@ -404,7 +279,9 @@ describe('Simple compute tests', async () => {
       publisherAccount,
       assetUrl,
       ddoWith5mTimeout,
-      providerUrl
+      providerUrl,
+      addresses.ERC721Factory,
+      aquarius
     )
     ddoWithNoTimeoutId = await createAsset(
       'D1Min',
@@ -412,7 +289,9 @@ describe('Simple compute tests', async () => {
       publisherAccount,
       assetUrl,
      ddoWithNoTimeout,
-      providerUrl
+      providerUrl,
+      addresses.ERC721Factory,
+      aquarius
     )
     algoDdoWith5mTimeoutId = await createAsset(
       'A1Min',
@@ -420,7 +299,9 @@ describe('Simple compute tests', async () => {
       publisherAccount,
       algoAssetUrl,
       algoDdoWith5mTimeout,
-      providerUrl
+      providerUrl,
+      addresses.ERC721Factory,
+      aquarius
     )

     algoDdoWithNoTimeoutId = await createAsset(
@@ -429,7 +310,9 @@ describe('Simple compute tests', async () => {
       publisherAccount,
       algoAssetUrl,
       algoDdoWithNoTimeout,
-      providerUrl
+      providerUrl,
+      addresses.ERC721Factory,
+      aquarius
     )
   })

@@ -519,20 +402,24 @@ describe('Simple compute tests', async () => {
       !('error' in providerInitializeComputeResults.algorithm),
       'Cannot order algorithm'
     )
-    algo.transferTxId = await handleOrder(
+    algo.transferTxId = await handleComputeOrder(
       providerInitializeComputeResults.algorithm,
       resolvedAlgoDdoWith5mTimeout.services[0].datatokenAddress,
       consumerAccount,
       computeEnv.consumerAddress,
-      0
+      0,
+      datatoken,
+      config
     )
     for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) {
-      assets[i].transferTxId = await handleOrder(
+      assets[i].transferTxId = await handleComputeOrder(
         providerInitializeComputeResults.datasets[i],
         dtAddressArray[i],
         consumerAccount,
         computeEnv.consumerAddress,
-        0
+        0,
+        datatoken,
+        config
       )
     }
     const computeJobs = await ProviderInstance.computeStart(
@@ -656,20 +543,24 @@ describe('Simple compute tests', async () => {
       !('error' in providerInitializeComputeResults.algorithm),
       'Cannot order algorithm'
     )
-    algo.transferTxId = await handleOrder(
+    algo.transferTxId = await handleComputeOrder(
       providerInitializeComputeResults.algorithm,
       resolvedAlgoDdoWith5mTimeout.services[0].datatokenAddress,
       consumerAccount,
       computeEnv.consumerAddress,
-      0
+      0,
+      datatoken,
+      config
     )
     for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) {
-      assets[i].transferTxId = await handleOrder(
+      assets[i].transferTxId = await handleComputeOrder(
         providerInitializeComputeResults.datasets[i],
         dtAddressArray[i],
         consumerAccount,
         computeEnv.consumerAddress,
-        0
+        0,
+        datatoken,
+        config
       )
     }

@@ -818,20 +709,24 @@ describe('Simple compute tests', async () => {
       !('error' in providerInitializeComputeResults.algorithm),
       'Cannot order algorithm'
     )
-    algo.transferTxId = await handleOrder(
+    algo.transferTxId = await handleComputeOrder(
       providerInitializeComputeResults.algorithm,
       resolvedAlgoDdoWith5mTimeout.services[0].datatokenAddress,
       consumerAccount,
       computeEnv.consumerAddress,
-      0
+      0,
+      datatoken,
+      config
     )
     for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) {
-      assets[i].transferTxId = await handleOrder(
+      assets[i].transferTxId = await handleComputeOrder(
         providerInitializeComputeResults.datasets[i],
         dtAddressArray[i],
         consumerAccount,
         computeEnv.consumerAddress,
-        0
+        0,
+        datatoken,
+        config
       )
     }
     assert(
@@ -899,20 +794,24 @@ describe('Simple compute tests', async () => {
       !('error' in providerInitializeComputeResults.algorithm),
       'Cannot order algorithm'
     )
-    algo.transferTxId = await handleOrder(
+    algo.transferTxId = await handleComputeOrder(
       providerInitializeComputeResults.algorithm,
       resolvedAlgoDdoWith5mTimeout.services[0].datatokenAddress,
       consumerAccount,
       computeEnv.consumerAddress,
-      0
+      0,
+      datatoken,
+      config
     )
     for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) {
-      assets[i].transferTxId = await handleOrder(
+      assets[i].transferTxId = await handleComputeOrder(
         providerInitializeComputeResults.datasets[i],
         dtAddressArray[i],
         consumerAccount,
         computeEnv.consumerAddress,
-        0
+        0,
+        datatoken,
+        config
       )
     }
     assert(
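The same change is applied at every compute-order call site above. As a consolidated view, here is a sketch assembled from those hunks (not a standalone test; the surrounding test variables are passed in as parameters rather than taken from module scope): the ordering step now routes through the shared helper, which receives the Datatoken instance and Config explicitly.

import { Config, Datatoken, ProviderComputeInitialize } from '../../src'
import { ComputeAsset, ComputeAlgorithm } from '../../src/@types'
import { handleComputeOrder } from './helpers'

// Sketch of the refactored ordering step; `0` is the service index used by the tests.
async function orderComputeAssets(
  initializedAlgo: ProviderComputeInitialize,
  initializedDatasets: ProviderComputeInitialize[],
  algo: ComputeAlgorithm,
  assets: ComputeAsset[],
  algoDatatokenAddress: string,
  dtAddressArray: string[],
  consumerAccount: string,
  computeEnvConsumerAddress: string,
  datatoken: Datatoken,
  config: Config
) {
  algo.transferTxId = await handleComputeOrder(
    initializedAlgo,
    algoDatatokenAddress,
    consumerAccount,
    computeEnvConsumerAddress,
    0,
    datatoken,
    config
  )
  for (let i = 0; i < initializedDatasets.length; i++) {
    assets[i].transferTxId = await handleComputeOrder(
      initializedDatasets[i],
      dtAddressArray[i],
      consumerAccount,
      computeEnvConsumerAddress,
      0,
      datatoken,
      config
    )
  }
}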
test/integration/PublishEditConsume.test.ts (new file, 543 lines added)
@@ -0,0 +1,543 @@
import { assert } from 'chai'
import { AbiItem } from 'web3-utils'
import { web3, getTestConfig, getAddresses } from '../config'
import {
  Config,
  ProviderInstance,
  Aquarius,
  Datatoken,
  downloadFile,
  calculateEstimatedGas,
  sendTx,
  transfer,
  SmartContract
} from '../../src'
import { Files, Smartcontract } from '../../src/@types'
import { createAsset, orderAsset, updateAssetMetadata } from './helpers'

let config: Config

let aquarius: Aquarius
let datatoken: Datatoken
let providerUrl: string
let consumerAccount: string
let publisherAccount: string
let addresses: any

let urlAssetId
let resolvedUrlAssetDdo
let resolvedUrlAssetDdoAfterUpdate

let arweaveAssetId
let resolvedArweaveAssetDdo
let resolvedArweaveAssetDdoAfterUpdate

let ipfsAssetId
let resolvedIpfsAssetDdo
let resolvedIpfsAssetDdoAfterUpdate

let onchainAssetId
let resolvedOnchainAssetDdo
let resolvedOnchainAssetDdoAfterUpdate

let grapqlAssetId
let resolvedGraphqlAssetDdo
let resolvedGraphqlAssetDdoAfterUpdate

let urlOrderTx
let arwaveOrderTx
let ipfsOrderTx
let onchainOrderTx
let grapqlOrderTx

const urlFile: Files = {
  datatokenAddress: '0x0',
  nftAddress: '0x0',
  files: [
    {
      type: 'url',
      url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
      method: 'GET'
    }
  ]
}

const arweaveFile: Files = {
  datatokenAddress: '0x0',
  nftAddress: '0x0',
  files: [
    {
      type: 'arweave',
      transactionId: 'USuWnUl3gLPhm4TPbmL6E2a2e2SWMCVo9yWCaapD-98'
    }
  ]
}

const ifpsFile: Files = {
  datatokenAddress: '0x0',
  nftAddress: '0x0',
  files: [
    {
      type: 'ipfs',
      hash: 'QmdMBw956S3i2H2ioS9cERrtxoLJuSsfjzCvkqoDgUa2xm'
    }
  ]
}

const onchainFile: Files = {
  datatokenAddress: '0x0',
  nftAddress: '0x0',
  files: []
}

const grapqlFile: Files = {
  datatokenAddress: '0x0',
  nftAddress: '0x0',
  files: [
    {
      type: 'graphql',
      url: 'https://v4.subgraph.goerli.oceanprotocol.com/subgraphs/name/oceanprotocol/ocean-subgraph',
      query: `"
        query{
          nfts(orderBy: createdTimestamp,orderDirection:desc){
            id
            symbol
            createdTimestamp
          }
        }
        "`
    }
  ]
}

const assetDdo = {
  '@context': ['https://w3id.org/did/v1'],
  id: 'did:op:efba17455c127a885ec7830d687a8f6e64f5ba559f8506f8723c1f10f05c049c',
  version: '4.1.0',
  chainId: 4,
  nftAddress: '0x0',
  metadata: {
    created: '2021-12-20T14:35:20Z',
    updated: '2021-12-20T14:35:20Z',
    type: 'dataset',
    name: 'Test asset',
    description: 'desc for the storage type assets',
    tags: [''],
    author: 'ocean-protocol',
    license: 'https://market.oceanprotocol.com/terms',
    additionalInformation: {
      termsAndConditions: true
    }
  },
  services: [
    {
      id: 'testFakeId',
      type: 'access',
      files: '',
      datatokenAddress: '0x0',
      serviceEndpoint: 'http://172.15.0.4:8030',
      timeout: 0
    }
  ]
}

function delay(interval: number) {
  return it('should delay', (done) => {
    setTimeout(() => done(), interval)
  }).timeout(interval + 100)
}

describe('Publish consume test', async () => {
  before(async () => {
    config = await getTestConfig(web3)
    addresses = getAddresses()
    aquarius = new Aquarius(config?.metadataCacheUri)
    providerUrl = config?.providerUri
    datatoken = new Datatoken(web3)
  })

  it('Initialize accounts', async () => {
    const accounts = await web3.eth.getAccounts()
    publisherAccount = accounts[0]
    consumerAccount = accounts[1]
  })

  it('Mint OCEAN to publisher account', async () => {
    const minAbi = [
      {
        constant: false,
        inputs: [
          { name: 'to', type: 'address' },
          { name: 'value', type: 'uint256' }
        ],
        name: 'mint',
        outputs: [{ name: '', type: 'bool' }],
        payable: false,
        stateMutability: 'nonpayable',
        type: 'function'
      }
    ] as AbiItem[]
    const tokenContract = new web3.eth.Contract(minAbi, addresses.Ocean)
    const estGas = await calculateEstimatedGas(
      publisherAccount,
      tokenContract.methods.mint,
      publisherAccount,
      web3.utils.toWei('1000')
    )
    await sendTx(
      publisherAccount,
      estGas,
      web3,
      1,
      tokenContract.methods.mint,
      publisherAccount,
      web3.utils.toWei('1000')
    )
  })

  it('Send some OCEAN to consumer account', async () => {
    transfer(web3, config, publisherAccount, addresses.Ocean, consumerAccount, '100')
  })

  it('Should publish the assets', async () => {
    urlAssetId = await createAsset(
      'UrlDatatoken',
      'URLDT',
      publisherAccount,
      urlFile,
      assetDdo,
      providerUrl,
      addresses.ERC721Factory,
      aquarius
    )
    assert(urlAssetId, 'Failed to publish url DDO')

    arweaveAssetId = await createAsset(
      'ArwaveDatatoken',
      'ARWAVEDT',
      publisherAccount,
      arweaveFile,
      assetDdo,
      providerUrl,
      addresses.ERC721Factory,
      aquarius
    )
    assert(arweaveAssetId, 'Failed to publish arwave DDO')

    ipfsAssetId = await createAsset(
      'IpfsDatatoken',
      'IPFSDT',
      publisherAccount,
      ifpsFile,
      assetDdo,
      providerUrl,
      addresses.ERC721Factory,
      aquarius
    )
    assert(ipfsAssetId, 'Failed to publish ipfs DDO')

    const chainFile: Smartcontract = {
      type: 'smartcontract',
      address: addresses.Router,
      abi: {
        inputs: [],
        name: 'swapOceanFee',
        outputs: [{ internalType: 'uint256', name: '', type: 'uint256' }],
        stateMutability: 'view',
        type: 'function'
      },
      chainId: 8996
    }

    onchainFile.files[0] = chainFile
    onchainAssetId = await createAsset(
      'IpfsDatatoken',
      'IPFSDT',
      publisherAccount,
      onchainFile,
      assetDdo,
      providerUrl,
      addresses.ERC721Factory,
      aquarius
    )
    assert(onchainAssetId, 'Failed to publish onchain DDO')

    grapqlAssetId = await createAsset(
      'IpfsDatatoken',
      'IPFSDT',
      publisherAccount,
      grapqlFile,
      assetDdo,
      providerUrl,
      addresses.ERC721Factory,
      aquarius
    )
    assert(grapqlAssetId, 'Failed to publish graphql DDO')
  })

  delay(10000) // let's wait for aquarius to index the assets

  it('Resolve published assets', async () => {
    resolvedUrlAssetDdo = await aquarius.waitForAqua(urlAssetId)
    assert(resolvedUrlAssetDdo, 'Cannot fetch url DDO from Aquarius')

    resolvedArweaveAssetDdo = await aquarius.waitForAqua(arweaveAssetId)
    assert(resolvedArweaveAssetDdo, 'Cannot fetch arwave DDO from Aquarius')

    resolvedIpfsAssetDdo = await aquarius.waitForAqua(ipfsAssetId)
    assert(resolvedIpfsAssetDdo, 'Cannot fetch ipfs DDO from Aquarius')

    resolvedOnchainAssetDdo = await aquarius.waitForAqua(onchainAssetId)
    assert(resolvedOnchainAssetDdo, 'Cannot fetch onchain DDO from Aquarius')

    resolvedGraphqlAssetDdo = await aquarius.waitForAqua(grapqlAssetId)
    assert(resolvedGraphqlAssetDdo, 'Cannot fetch graphql DDO from Aquarius')
  })

  it('Mint datasets datatokens to consumer', async () => {
    const urlMintTx = await datatoken.mint(
      resolvedUrlAssetDdo.services[0].datatokenAddress,
      publisherAccount,
      '10',
      consumerAccount
    )
    assert(urlMintTx, 'Failed minting url datatoken to consumer.')

    const arwaveMintTx = await datatoken.mint(
      resolvedArweaveAssetDdo.services[0].datatokenAddress,
      publisherAccount,
      '10',
      consumerAccount
    )
    assert(arwaveMintTx, 'Failed minting arwave datatoken to consumer.')

    const ipfsMintTx = await datatoken.mint(
      resolvedIpfsAssetDdo.services[0].datatokenAddress,
      publisherAccount,
      '10',
      consumerAccount
    )
    assert(ipfsMintTx, 'Failed minting ipfs datatoken to consumer.')

    const onchainMintTx = await datatoken.mint(
      resolvedOnchainAssetDdo.services[0].datatokenAddress,
      publisherAccount,
      '10',
      consumerAccount
    )
    assert(onchainMintTx, 'Failed minting onchain datatoken to consumer.')

    const graphqlMintTx = await datatoken.mint(
      resolvedGraphqlAssetDdo.services[0].datatokenAddress,
      publisherAccount,
      '10',
      consumerAccount
    )
    assert(graphqlMintTx, 'Failed minting graphql datatoken to consumer.')
  })

  it('Should order the datasets', async () => {
    urlOrderTx = await orderAsset(
      resolvedUrlAssetDdo.id,
      resolvedUrlAssetDdo.services[0].datatokenAddress,
      consumerAccount,
      resolvedUrlAssetDdo.services[0].id,
      0,
      datatoken,
      providerUrl
    )
    assert(urlOrderTx, 'Ordering url dataset failed.')

    arwaveOrderTx = await orderAsset(
      resolvedArweaveAssetDdo.id,
      resolvedArweaveAssetDdo.services[0].datatokenAddress,
      consumerAccount,
      resolvedArweaveAssetDdo.services[0].id,
      0,
      datatoken,
      providerUrl
    )
    assert(arwaveOrderTx, 'Ordering arwave dataset failed.')

    onchainOrderTx = await orderAsset(
      resolvedOnchainAssetDdo.id,
      resolvedOnchainAssetDdo.services[0].datatokenAddress,
      consumerAccount,
      resolvedOnchainAssetDdo.services[0].id,
      0,
      datatoken,
      providerUrl
    )
    assert(onchainOrderTx, 'Ordering onchain dataset failed.')

    ipfsOrderTx = await orderAsset(
      resolvedIpfsAssetDdo.id,
      resolvedIpfsAssetDdo.services[0].datatokenAddress,
      consumerAccount,
      resolvedIpfsAssetDdo.services[0].id,
      0,
      datatoken,
      providerUrl
    )
    assert(ipfsOrderTx, 'Ordering ipfs dataset failed.')

    grapqlOrderTx = await orderAsset(
      resolvedGraphqlAssetDdo.id,
      resolvedGraphqlAssetDdo.services[0].datatokenAddress,
      consumerAccount,
      resolvedGraphqlAssetDdo.services[0].id,
      0,
      datatoken,
      providerUrl
    )
    assert(grapqlOrderTx, 'Ordering graphql dataset failed.')
  })

  it('Should download the datasets files', async () => {
    const urlDownloadUrl = await ProviderInstance.getDownloadUrl(
      resolvedUrlAssetDdo.id,
      consumerAccount,
      resolvedUrlAssetDdo.services[0].id,
      0,
      urlOrderTx.transactionHash,
      providerUrl,
      web3
    )
    assert(urlDownloadUrl, 'Provider getDownloadUrl failed for url dataset')
    try {
      await downloadFile(urlDownloadUrl)
    } catch (e) {
      assert.fail(`Download url dataset failed: ${e}`)
    }

    const arwaveDownloadURL = await ProviderInstance.getDownloadUrl(
      resolvedArweaveAssetDdo.id,
      consumerAccount,
      resolvedArweaveAssetDdo.services[0].id,
      0,
      arwaveOrderTx.transactionHash,
      providerUrl,
      web3
    )
    assert(arwaveDownloadURL, 'Provider getDownloadUrl failed for arwave dataset')
    try {
      await downloadFile(arwaveDownloadURL)
    } catch (e) {
      assert.fail(`Download arwave dataset failed: ${e}`)
    }

    const ipfsDownloadURL = await ProviderInstance.getDownloadUrl(
      resolvedIpfsAssetDdo.id,
      consumerAccount,
      resolvedIpfsAssetDdo.services[0].id,
      0,
      ipfsOrderTx.transactionHash,
      providerUrl,
      web3
    )
    assert(ipfsDownloadURL, 'Provider getDownloadUrl failed for ipfs dataset')
    try {
      await downloadFile(ipfsDownloadURL)
    } catch (e) {
      assert.fail(`Download ipfs dataset failed: ${e}`)
    }

    const onchainDownloadURL = await ProviderInstance.getDownloadUrl(
      resolvedOnchainAssetDdo.id,
      consumerAccount,
      resolvedOnchainAssetDdo.services[0].id,
      0,
      onchainOrderTx.transactionHash,
      providerUrl,
      web3
    )
    assert(onchainDownloadURL, 'Provider getDownloadUrl failed for onchain dataset')
    try {
      await downloadFile(onchainDownloadURL)
    } catch (e) {
      assert.fail(`Download onchain dataset failed: ${e}`)
    }

    const graphqlDownloadURL = await ProviderInstance.getDownloadUrl(
      resolvedGraphqlAssetDdo.id,
      consumerAccount,
      resolvedGraphqlAssetDdo.services[0].id,
      0,
      grapqlOrderTx.transactionHash,
      providerUrl,
      web3
    )
    assert(graphqlDownloadURL, 'Provider getDownloadUrl failed for graphql dataset')
    try {
      await downloadFile(graphqlDownloadURL)
    } catch (e) {
      assert.fail(`Download graphql dataset failed: ${e}`)
    }
  })

  it('Should update datasets metadata', async () => {
    resolvedUrlAssetDdo.metadata.name = 'updated url asset name'
    const updateUrlTx = await updateAssetMetadata(
      publisherAccount,
      resolvedUrlAssetDdo,
      providerUrl,
      aquarius
    )
    assert(updateUrlTx, 'Failed to update url asset metadata')

    resolvedArweaveAssetDdo.metadata.name = 'updated arwave asset name'
    const updateArwaveTx = await updateAssetMetadata(
      publisherAccount,
      resolvedArweaveAssetDdo,
      providerUrl,
      aquarius
    )
    assert(updateArwaveTx, 'Failed to update arwave asset metadata')

    resolvedIpfsAssetDdo.metadata.name = 'updated ipfs asset name'
    const updateIpfsTx = await updateAssetMetadata(
      publisherAccount,
      resolvedIpfsAssetDdo,
      providerUrl,
      aquarius
    )
    assert(updateIpfsTx, 'Failed to update ipfs asset metadata')

    resolvedOnchainAssetDdo.metadata.name = 'updated onchain asset name'
    const updateOnchainTx = await updateAssetMetadata(
      publisherAccount,
      resolvedOnchainAssetDdo,
      providerUrl,
      aquarius
    )
    assert(updateOnchainTx, 'Failed to update onchain asset metadata')

    resolvedGraphqlAssetDdo.metadata.name = 'updated graphql asset name'
    const updateGraphqlTx = await updateAssetMetadata(
      publisherAccount,
      resolvedGraphqlAssetDdo,
      providerUrl,
      aquarius
    )
    assert(updateGraphqlTx, 'Failed to update graphql asset metadata')
  })

  delay(10000) // let's wait for aquarius to index the updated ddo's

  it('Should resolve updated datasets', async () => {
    resolvedUrlAssetDdoAfterUpdate = await aquarius.waitForAqua(urlAssetId)
    assert(resolvedUrlAssetDdoAfterUpdate, 'Cannot fetch url DDO from Aquarius')

    resolvedArweaveAssetDdoAfterUpdate = await aquarius.waitForAqua(arweaveAssetId)
    assert(resolvedArweaveAssetDdoAfterUpdate, 'Cannot fetch arwave DDO from Aquarius')

    resolvedIpfsAssetDdoAfterUpdate = await aquarius.waitForAqua(ipfsAssetId)
    assert(resolvedIpfsAssetDdoAfterUpdate, 'Cannot fetch ipfs DDO from Aquarius')

    resolvedOnchainAssetDdoAfterUpdate = await aquarius.waitForAqua(onchainAssetId)
    assert(resolvedOnchainAssetDdoAfterUpdate, 'Cannot fetch onchain DDO from Aquarius')

    resolvedGraphqlAssetDdoAfterUpdate = await aquarius.waitForAqua(grapqlAssetId)
    assert(resolvedGraphqlAssetDdoAfterUpdate, 'Cannot fetch graphql DDO from Aquarius')
  })
})

@@ -1,227 +0,0 @@ (entire file removed)
import { assert } from 'chai'
import { SHA256 } from 'crypto-js'
import { AbiItem } from 'web3-utils'
import { web3, getTestConfig, getAddresses } from '../config'
import {
  Config,
  ProviderInstance,
  Aquarius,
  NftFactory,
  NftCreateData,
  Datatoken,
  getHash,
  Nft,
  downloadFile,
  ZERO_ADDRESS,
  calculateEstimatedGas,
  sendTx
} from '../../src'
import { ProviderFees, DatatokenCreateParams, DDO, Files } from '../../src/@types'

describe('Simple Publish & consume test', async () => {
  let config: Config
  let addresses: any
  let aquarius: Aquarius
  let providerUrl: any
  let publisherAccount: string
  let consumerAccount: string

  const assetUrl: Files = {
    datatokenAddress: '0x0',
    nftAddress: '0x0',
    files: [
      {
        type: 'url',
        url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
        method: 'GET'
      }
    ]
  }

  const ddo: DDO = {
    '@context': ['https://w3id.org/did/v1'],
    id: '',
    version: '4.1.0',
    chainId: 4,
    nftAddress: '0x0',
    metadata: {
      created: '2021-12-20T14:35:20Z',
      updated: '2021-12-20T14:35:20Z',
      type: 'dataset',
      name: 'dataset-name',
      description: 'Ocean protocol test dataset description',
      author: 'oceanprotocol-team',
      license: 'MIT'
    },
    services: [
      {
        id: 'testFakeId',
        type: 'access',
        files: '',
        datatokenAddress: '0x0',
        serviceEndpoint: 'https://v4.provider.goerli.oceanprotocol.com',
        timeout: 0
      }
    ]
  }

  before(async () => {
    config = await getTestConfig(web3)
    aquarius = new Aquarius(config.metadataCacheUri)
    providerUrl = config.providerUri

    addresses = getAddresses()
  })

  it('Initialize accounts', async () => {
    const accounts = await web3.eth.getAccounts()
    publisherAccount = accounts[0]
    consumerAccount = accounts[1]

    // mint Ocean tokens
    /// <!--
    // mint ocean to publisherAccount
    const minAbi = [
      {
        constant: false,
        inputs: [
          { name: 'to', type: 'address' },
          { name: 'value', type: 'uint256' }
        ],
        name: 'mint',
        outputs: [{ name: '', type: 'bool' }],
        payable: false,
        stateMutability: 'nonpayable',
        type: 'function'
      }
    ] as AbiItem[]
    const tokenContract = new web3.eth.Contract(minAbi, addresses.Ocean)
    const estGas = await calculateEstimatedGas(
      publisherAccount,
      tokenContract.methods.mint,
      publisherAccount,
      web3.utils.toWei('1000')
    )
    await sendTx(
      publisherAccount,
      estGas,
      web3,
      1,
      tokenContract.methods.mint,
      publisherAccount,
      web3.utils.toWei('1000')
    )
  })

  it('should publish a dataset (create NFT + Datatoken)', async () => {
    const nft = new Nft(web3)
    const datatoken = new Datatoken(web3)
    const Factory = new NftFactory(addresses.ERC721Factory, web3)

    const nftParams: NftCreateData = {
      name: '72120Bundle',
      symbol: '72Bundle',
      templateIndex: 1,
      tokenURI: 'https://oceanprotocol.com/nft/',
      transferable: true,
      owner: publisherAccount
    }

    const datatokenParams: DatatokenCreateParams = {
      templateIndex: 1,
      cap: '100000',
      feeAmount: '0',
      paymentCollector: ZERO_ADDRESS,
      feeToken: ZERO_ADDRESS,
      minter: publisherAccount,
      mpFeeAddress: ZERO_ADDRESS
    }

    const tx = await Factory.createNftWithDatatoken(
      publisherAccount,
      nftParams,
      datatokenParams
    )
    const nftAddress = tx.events.NFTCreated.returnValues[0]
    const datatokenAddress = tx.events.TokenCreated.returnValues[0]

    // create the files encrypted string
    assetUrl.datatokenAddress = datatokenAddress
    assetUrl.nftAddress = nftAddress
    const chain = await web3.eth.getChainId()
    let providerResponse = await ProviderInstance.encrypt(assetUrl, chain, providerUrl)
    ddo.services[0].files = await providerResponse
    ddo.services[0].datatokenAddress = datatokenAddress
    // update ddo and set the right did
    ddo.nftAddress = nftAddress
    ddo.id =
      'did:op:' + SHA256(web3.utils.toChecksumAddress(nftAddress) + chain.toString(10))

    providerResponse = await ProviderInstance.encrypt(ddo, chain, providerUrl)
    const encryptedResponse = await providerResponse
    const metadataHash = getHash(JSON.stringify(ddo))
    await nft.setMetadata(
      nftAddress,
      publisherAccount,
      0,
      providerUrl,
      '',
      '0x2',
      encryptedResponse,
      '0x' + metadataHash
    )

    const resolvedDDO = await aquarius.waitForAqua(ddo.id)
    assert(resolvedDDO, 'Cannot fetch DDO from Aquarius')

    // mint 1 Datatoken and send it to the consumer
    await datatoken.mint(datatokenAddress, publisherAccount, '1', consumerAccount)

    // initialize provider
    const initializeData = await ProviderInstance.initialize(
      resolvedDDO.id,
      resolvedDDO.services[0].id,
      0,
      consumerAccount,
      providerUrl
    )

    const providerFees: ProviderFees = {
      providerFeeAddress: initializeData.providerFee.providerFeeAddress,
      providerFeeToken: initializeData.providerFee.providerFeeToken,
      providerFeeAmount: initializeData.providerFee.providerFeeAmount,
      v: initializeData.providerFee.v,
      r: initializeData.providerFee.r,
      s: initializeData.providerFee.s,
      providerData: initializeData.providerFee.providerData,
      validUntil: initializeData.providerFee.validUntil
    }

    // make the payment
    const txid = await datatoken.startOrder(
      datatokenAddress,
      consumerAccount,
      consumerAccount,
      0,
      providerFees
    )

    // get the url
    const downloadURL = await ProviderInstance.getDownloadUrl(
      ddo.id,
      consumerAccount,
      ddo.services[0].id,
      0,
      txid.transactionHash,
      providerUrl,
      web3
    )

    assert(downloadURL, 'Provider getDownloadUrl failed')
    try {
      await downloadFile(downloadURL)
    } catch (e) {
      assert.fail('Download failed')
    }
  })
})

test/integration/helpers.ts (new file, 201 lines added)
@@ -0,0 +1,201 @@
import { SHA256 } from 'crypto-js'
import {
  Aquarius,
  DatatokenCreateParams,
  Nft,
  NftCreateData,
  NftFactory,
  ProviderInstance,
  ZERO_ADDRESS,
  approveWei,
  ProviderComputeInitialize,
  ConsumeMarketFee,
  Datatoken,
  Config,
  DDO,
  ProviderFees
} from '../../src'
import { web3 } from '../config'

export async function createAsset(
  name: string,
  symbol: string,
  owner: string,
  assetUrl: any,
  ddo: any,
  providerUrl: string,
  nftContractAddress: string, // addresses.ERC721Factory
  aquariusInstance: Aquarius
) {
  const nft = new Nft(web3)
  const Factory = new NftFactory(nftContractAddress, web3)

  const chain = await web3.eth.getChainId()
  ddo.chainId = parseInt(chain.toString(10))
  const nftParamsAsset: NftCreateData = {
    name,
    symbol,
    templateIndex: 1,
    tokenURI: 'aaa',
    transferable: true,
    owner
  }
  const datatokenParams: DatatokenCreateParams = {
    templateIndex: 1,
    cap: '100000',
    feeAmount: '0',
    paymentCollector: ZERO_ADDRESS,
    feeToken: ZERO_ADDRESS,
    minter: owner,
    mpFeeAddress: ZERO_ADDRESS
  }

  const result = await Factory.createNftWithDatatoken(
    owner,
    nftParamsAsset,
    datatokenParams
  )

  const nftAddress = result.events.NFTCreated.returnValues[0]
  const datatokenAddressAsset = result.events.TokenCreated.returnValues[0]
  ddo.nftAddress = web3.utils.toChecksumAddress(nftAddress)
  // create the files encrypted string
  assetUrl.datatokenAddress = datatokenAddressAsset
  assetUrl.nftAddress = ddo.nftAddress
  let providerResponse = await ProviderInstance.encrypt(assetUrl, chain, providerUrl)
  ddo.services[0].files = await providerResponse
  ddo.services[0].datatokenAddress = datatokenAddressAsset
  ddo.services[0].serviceEndpoint = providerUrl
  // update ddo and set the right did
  ddo.nftAddress = web3.utils.toChecksumAddress(nftAddress)
  ddo.id =
    'did:op:' + SHA256(web3.utils.toChecksumAddress(nftAddress) + chain.toString(10))
  providerResponse = await ProviderInstance.encrypt(ddo, chain, providerUrl)
  const encryptedResponse = await providerResponse
  const validateResult = await aquariusInstance.validate(ddo)
  await nft.setMetadata(
    nftAddress,
    owner,
    0,
    providerUrl,
    '',
    '0x2',
    encryptedResponse,
    validateResult.hash
  )
  return ddo.id
}

export async function updateAssetMetadata(
  owner: string,
  updatedDdo: DDO,
  providerUrl: string,
  aquariusInstance: Aquarius
) {
  const nft = new Nft(web3)
  const providerResponse = await ProviderInstance.encrypt(
    updatedDdo,
    updatedDdo.chainId,
    providerUrl
  )
  const encryptedResponse = await providerResponse
  const validateResult = await aquariusInstance.validate(updatedDdo)
  const updateDdoTX = await nft.setMetadata(
    updatedDdo.nftAddress,
    owner,
    0,
    providerUrl,
    '',
    '0x2',
    encryptedResponse,
    validateResult.hash
  )
  return updateDdoTX
}

export async function handleComputeOrder(
  order: ProviderComputeInitialize,
  datatokenAddress: string,
  payerAccount: string,
  consumerAccount: string,
  serviceIndex: number,
  datatoken: Datatoken,
  config: Config,
  consumeMarkerFee?: ConsumeMarketFee
) {
  /* We do have 3 possible situations:
     - have validOrder and no providerFees -> then order is valid, providerFees are valid, just use it in startCompute
     - have validOrder and providerFees -> then order is valid but providerFees are not valid, we need to call reuseOrder and pay only providerFees
     - no validOrder -> we need to call startOrder, to pay 1 DT & providerFees
  */
  if (order.providerFee && order.providerFee.providerFeeAmount) {
    await approveWei(
      web3,
      config,
      payerAccount,
      order.providerFee.providerFeeToken,
      datatokenAddress,
      order.providerFee.providerFeeAmount
    )
  }
  if (order.validOrder) {
    if (!order.providerFee) return order.validOrder
    const tx = await datatoken.reuseOrder(
      datatokenAddress,
      payerAccount,
      order.validOrder,
      order.providerFee
    )
    return tx.transactionHash
  }
  const tx = await datatoken.startOrder(
    datatokenAddress,
    payerAccount,
    consumerAccount,
    serviceIndex,
    order.providerFee,
    consumeMarkerFee
  )
  return tx.transactionHash
}

export async function orderAsset(
  did: string,
  datatokenAddress: string,
  consumerAccount: string,
  serviceId: string,
  serviceIndex: number,
  datatoken: Datatoken,
  providerUrl: string
) {
  const initializeData = await ProviderInstance.initialize(
    did, // resolvedDdoAfterUpdate.id
    serviceId, // resolvedDdoAfterUpdate.services[0].id
    serviceIndex,
    consumerAccount,
    providerUrl
  )

  console.log(`initializeData fees for did:${did} == ${initializeData.providerFee}`)

  const providerFees: ProviderFees = {
    providerFeeAddress: initializeData.providerFee.providerFeeAddress,
    providerFeeToken: initializeData.providerFee.providerFeeToken,
    providerFeeAmount: initializeData.providerFee.providerFeeAmount,
    v: initializeData.providerFee.v,
    r: initializeData.providerFee.r,
    s: initializeData.providerFee.s,
    providerData: initializeData.providerFee.providerData,
    validUntil: initializeData.providerFee.validUntil
  }

  // make the payment
  const orderTx = await datatoken.startOrder(
    datatokenAddress, // resolvedDdoAfterUpdate.services[0].datatokenAddress
    consumerAccount,
    consumerAccount,
    0,
    providerFees
  )
  return orderTx
}