mirror of https://github.com/oceanprotocol/ocean.js.git (synced 2024-11-26 20:39:05 +01:00)

update to new schema & compute fixes (#1510)

* update to new schema
* fix codeExamples
* fix computeflows

This commit is contained in: parent 04735dd824, commit be06f05365
.github/workflows/ci.yml (vendored, 3 changed lines)
@@ -52,7 +52,6 @@ jobs:
        with:
          repository: 'oceanprotocol/barge'
          path: 'barge'
          ref: v4

      - name: Run Ganache with Barge
        working-directory: ${{ github.workspace }}/barge
@@ -101,7 +100,6 @@ jobs:
        with:
          repository: 'oceanprotocol/barge'
          path: 'barge'
          ref: v4

      - name: Login to Docker Hub
        if: ${{ env.DOCKERHUB_PASSWORD && env.DOCKERHUB_USERNAME }}
@@ -115,6 +113,7 @@ jobs:
        working-directory: ${{ github.workspace }}/barge
        run: |
          bash -x start_ocean.sh --with-provider2 --no-dashboard --with-c2d 2>&1 > start_ocean.log &

      - run: npm ci
      - run: npm run build:metadata
@@ -98,6 +98,7 @@ import {
  DispenserCreationParams,
  downloadFile,
  Erc20CreateParams,
+  Files,
  FixedRateExchange,
  FreCreationParams,
  getHash,
@@ -153,13 +154,17 @@ We also define some constants that we will use:

We will need a file to publish, so here we define the file that we intend to publish.
```Typescript
-const ASSET_URL = [
-  {
-    type: 'url',
-    url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
-    method: 'GET'
-  }
-]
+const ASSET_URL: Files = {
+  datatokenAddress: '0x0',
+  nftAddress: '0x0',
+  files: [
+    {
+      type: 'url',
+      url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
+      method: 'GET'
+    }
+  ]
+}
```

Next, we define the metadata that will describe our data asset. This is what we call the DDO
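To make the shape change concrete, here is a small sketch of the old bare URL array being wrapped into the new `Files` object. The `wrapAsFiles` helper is hypothetical (not part of ocean.js), and the import path assumes the types are re-exported from the package root, as the `src/@types/index.ts` change further down suggests.

```Typescript
import { Files, UrlFile } from '@oceanprotocol/lib'

// Hypothetical helper: wrap a bare UrlFile list into the v4.1 Files shape.
function wrapAsFiles(urls: UrlFile[]): Files {
  return {
    // '0x0' placeholders, as above: the real addresses are only known
    // after the data NFT and datatoken have been deployed.
    nftAddress: '0x0',
    datatokenAddress: '0x0',
    files: urls
  }
}

const ASSET_URL: Files = wrapAsFiles([
  {
    type: 'url',
    url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
    method: 'GET'
  }
])
```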
@@ -167,7 +172,7 @@ Next, we define the metadata that will describe our data asset. This is what we call the DDO
const DDO = {
  '@context': ['https://w3id.org/did/v1'],
  id: '',
-  version: '4.0.0',
+  version: '4.1.0',
  chainId: 4,
  nftAddress: '0x0',
  metadata: {
@@ -324,6 +329,8 @@ Now we update the ddo and set the right did
```
Next we encrypt the file or files using Ocean Provider. The Provider is an off-chain proxy built specifically for this task.
```Typescript
+ASSET_URL.datatokenAddress = poolDatatokenAddress
+ASSET_URL.nftAddress = poolNftAddress
const encryptedFiles = await ProviderInstance.encrypt(ASSET_URL, providerUrl)
DDO.services[0].files = await encryptedFiles
DDO.services[0].datatokenAddress = poolDatatokenAddress
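The two added lines are the point of this hunk: since the `Files` object now names its NFT and datatoken, those addresses must be filled in before the Provider encrypts it. A minimal sketch of that step as a reusable function (hypothetical helper; it assumes `ProviderInstance.encrypt` resolves to the encrypted files string, as the assignment to `DDO.services[0].files` above implies):

```Typescript
import { Files, ProviderInstance } from '@oceanprotocol/lib'

// Hypothetical wrapper around the pattern shown above.
async function encryptFilesFor(
  assetUrl: Files,
  nftAddress: string,
  datatokenAddress: string,
  providerUrl: string
): Promise<string> {
  // The v4.1 Files object must carry the deployed addresses before encryption.
  assetUrl.nftAddress = nftAddress
  assetUrl.datatokenAddress = datatokenAddress
  // The Provider returns the encrypted string stored in DDO.services[0].files.
  return await ProviderInstance.encrypt(assetUrl, providerUrl)
}
```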
@@ -587,6 +594,8 @@ Now we are going to update the ddo and set the did
```
Next, let's encrypt the file(s) using the Provider
```Typescript
+ASSET_URL.datatokenAddress = freDatatokenAddress
+ASSET_URL.nftAddress = freNftAddress
const encryptedFiles = await ProviderInstance.encrypt(ASSET_URL, providerUrl)
DDO.services[0].files = await encryptedFiles
DDO.services[0].datatokenAddress = freDatatokenAddress
@@ -801,6 +810,8 @@ Let's start by updating the ddo and setting the did
```
Now we need to encrypt the file(s) using the Provider
```Typescript
+ASSET_URL.datatokenAddress = dispenserDatatokenAddress
+ASSET_URL.nftAddress = dispenserNftAddress
const encryptedFiles = await ProviderInstance.encrypt(ASSET_URL, providerUrl)
DDO.services[0].files = await encryptedFiles
DDO.services[0].datatokenAddress = dispenserDatatokenAddress
src/@types/File.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
export interface UrlFile {
  type: 'url'

  /**
   * File index.
   * @type {number}
   */
  index?: number

  /**
   * File URL.
   * @type {string}
   */
  url: string

  /**
   * HTTP method used.
   * @type {string}
   */
  method: string
}

export interface Files {
  nftAddress: string
  datatokenAddress: string
  files: UrlFile[]
}
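A short sketch exercising the new interfaces (the import path is relative to this repository; the URL is an illustrative placeholder):

```Typescript
import { Files, UrlFile } from './src/@types/File'

const file: UrlFile = {
  type: 'url',
  url: 'https://example.com/dataset.txt', // placeholder URL
  method: 'GET'
}

// Addresses start out as '0x0' placeholders, as in the publish flows above,
// and are filled in once the data NFT and datatoken have been deployed.
const files: Files = {
  nftAddress: '0x0',
  datatokenAddress: '0x0',
  files: [file]
}
```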
@@ -1,4 +1,4 @@
-export interface FileMetadata {
+export interface FileInfo {
  /**
   * File URL.
   * @type {string}
@@ -3,7 +3,8 @@ export * from './Asset'
export * from './DDO/Service'
export * from './DDO/Credentials'
export * from './DDO/Metadata'
-export * from './FileMetadata'
+export * from './File'
+export * from './FileInfo'
export * from './Compute'
export * from './Provider'
export * from './FixedPrice'
@@ -1,7 +1,7 @@
import Web3 from 'web3'
import { LoggerInstance, getData } from '../utils'
import {
-  FileMetadata,
+  FileInfo,
  ComputeJob,
  ComputeOutput,
  ComputeAlgorithm,
@@ -164,21 +164,21 @@ export class Provider {
   * @param {number} serviceId the id of the service for which to check the files
   * @param {string} providerUri uri of the provider that will be used to check the file
   * @param {AbortSignal} signal abort signal
-   * @return {Promise<FileMetadata[]>} urlDetails
+   * @return {Promise<FileInfo[]>} urlDetails
   */
  public async checkDidFiles(
    did: string,
    serviceId: number,
    providerUri: string,
    signal?: AbortSignal
-  ): Promise<FileMetadata[]> {
+  ): Promise<FileInfo[]> {
    const providerEndpoints = await this.getEndpoints(providerUri)
    const serviceEndpoints = await this.getServiceEndpoints(
      providerUri,
      providerEndpoints
    )
    const args = { did: did, serviceId: serviceId }
-    const files: FileMetadata[] = []
+    const files: FileInfo[] = []
    const path = this.getEndpointURL(serviceEndpoints, 'fileinfo')
      ? this.getEndpointURL(serviceEndpoints, 'fileinfo').urlPath
      : null
@@ -192,7 +192,7 @@ export class Provider {
      },
      signal: signal
    })
-    const results: FileMetadata[] = await response.json()
+    const results: FileInfo[] = await response.json()
    for (const result of results) {
      files.push(result)
    }
@@ -206,20 +206,20 @@ export class Provider {
   * @param {string} url or did
   * @param {string} providerUri uri of the provider that will be used to check the file
   * @param {AbortSignal} signal abort signal
-   * @return {Promise<FileMetadata[]>} urlDetails
+   * @return {Promise<FileInfo[]>} urlDetails
   */
  public async checkFileUrl(
    url: string,
    providerUri: string,
    signal?: AbortSignal
-  ): Promise<FileMetadata[]> {
+  ): Promise<FileInfo[]> {
    const providerEndpoints = await this.getEndpoints(providerUri)
    const serviceEndpoints = await this.getServiceEndpoints(
      providerUri,
      providerEndpoints
    )
    const args = { url: url, type: 'url' }
-    const files: FileMetadata[] = []
+    const files: FileInfo[] = []
    const path = this.getEndpointURL(serviceEndpoints, 'fileinfo')
      ? this.getEndpointURL(serviceEndpoints, 'fileinfo').urlPath
      : null
@@ -233,7 +233,7 @@ export class Provider {
      },
      signal: signal
    })
-    const results: FileMetadata[] = await response.json()
+    const results: FileInfo[] = await response.json()
    for (const result of results) {
      files.push(result)
    }
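A minimal usage sketch of the renamed return type, mirroring the Provider test further down (the provider URL is a placeholder supplied by the caller):

```Typescript
import { Provider } from './src/services/Provider'
import { FileInfo } from './src/@types'

async function inspectUrl(providerUri: string): Promise<FileInfo[]> {
  const provider = new Provider()
  // Asks the Provider's fileinfo endpoint for details about a URL.
  const fileinfo: FileInfo[] = await provider.checkFileUrl(
    'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
    providerUri
  )
  return fileinfo
}
```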

@@ -10,6 +10,7 @@ import { minAbi } from './minAbi'
import LoggerInstance from './Logger'
import { TransactionReceipt } from 'web3-core'
import Web3 from 'web3'
+import BigNumber from 'bignumber.js'

/**
 * Estimate gas cost for approval function
@@ -82,6 +83,50 @@ export async function approve(
  return result
}

/**
 * Approve a spender to spend `amount` tokens
 * @param {String} account
 * @param {String} tokenAddress
 * @param {String} spender
 * @param {String} amount amount of ERC20 tokens (always expressed as wei)
 * @param {boolean} force if true, will overwrite any previous allowance. Otherwise it will check whether the current allowance is already sufficient and skip the transaction if it is
 */
export async function approveWei(
  web3: Web3,
  account: string,
  tokenAddress: string,
  spender: string,
  amount: string,
  force = false
): Promise<TransactionReceipt | string> {
  const tokenContract = new web3.eth.Contract(minAbi, tokenAddress)
  if (!force) {
    const currentAllowence = await allowanceWei(web3, tokenAddress, account, spender)
    if (new BigNumber(currentAllowence).gt(new BigNumber(amount))) {
      return currentAllowence
    }
  }
  let result = null
  const estGas = await estimateGas(
    account,
    tokenContract.methods.approve,
    spender,
    amount
  )

  try {
    result = await tokenContract.methods.approve(spender, amount).send({
      from: account,
      gas: estGas + 1,
      gasPrice: await getFairGasPrice(web3, null)
    })
  } catch (e) {
    LoggerInstance.error(
      `ERROR: Failed to approve spender to spend tokens : ${e.message}`
    )
  }
  return result
}
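A usage sketch for the new helper (the `web3` instance and all addresses are placeholders supplied by the caller). Note the design choice visible in the `gt()` check: with `force = false` the on-chain `approve` is skipped only when the existing allowance strictly exceeds the requested amount, and in that case the current allowance string is returned instead of a `TransactionReceipt`.

```Typescript
import Web3 from 'web3'
import { approveWei } from './src/utils/TokenUtils'

async function ensureFeeAllowance(
  web3: Web3,
  payer: string,
  feeToken: string,
  spender: string,
  feeAmountWei: string // provider fees are always expressed in wei
): Promise<void> {
  // Either a TransactionReceipt (approve sent) or the already-sufficient
  // allowance as a wei string (approve skipped).
  const result = await approveWei(web3, payer, feeToken, spender, feeAmountWei)
  console.log('approveWei result:', result)
}
```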
/**
 * Estimate gas cost for transfer function
 * @param {String} account
@@ -164,6 +209,24 @@ export async function allowance(
  return await unitsToAmount(web3, tokenAddress, trxReceipt, tokenDecimals)
}

/**
 * Get allowance for any ERC20, expressed in wei
 * @param {Web3} web3
 * @param {String} tokenAddress
 * @param {String} account
 * @param {String} spender
 */
export async function allowanceWei(
  web3: Web3,
  tokenAddress: string,
  account: string,
  spender: string,
  tokenDecimals?: number
): Promise<string> {
  const tokenContract = new web3.eth.Contract(minAbi, tokenAddress)
  return await tokenContract.methods.allowance(account, spender).call()
}

/**
 * Get balance for any ERC20
 * @param {Web3} web3
@@ -98,6 +98,7 @@ import {
  DispenserCreationParams,
  downloadFile,
  Erc20CreateParams,
+  Files,
  FixedRateExchange,
  FreCreationParams,
  getHash,
@@ -153,13 +154,17 @@ describe('Marketplace flow tests', async () => {

  /// We will need a file to publish, so here we define the file that we intend to publish.
  /// ```Typescript
-  const ASSET_URL = [
-    {
-      type: 'url',
-      url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
-      method: 'GET'
-    }
-  ]
+  const ASSET_URL: Files = {
+    datatokenAddress: '0x0',
+    nftAddress: '0x0',
+    files: [
+      {
+        type: 'url',
+        url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
+        method: 'GET'
+      }
+    ]
+  }
  /// ```

  /// Next, we define the metadata that will describe our data asset. This is what we call the DDO
@@ -167,7 +172,7 @@ describe('Marketplace flow tests', async () => {
  const DDO = {
    '@context': ['https://w3id.org/did/v1'],
    id: '',
-    version: '4.0.0',
+    version: '4.1.0',
    chainId: 4,
    nftAddress: '0x0',
    metadata: {
@@ -324,6 +329,8 @@ describe('Marketplace flow tests', async () => {
  /// ```
  /// Next we encrypt the file or files using Ocean Provider. The Provider is an off-chain proxy built specifically for this task.
  /// ```Typescript
+  ASSET_URL.datatokenAddress = poolDatatokenAddress
+  ASSET_URL.nftAddress = poolNftAddress
  const encryptedFiles = await ProviderInstance.encrypt(ASSET_URL, providerUrl)
  DDO.services[0].files = await encryptedFiles
  DDO.services[0].datatokenAddress = poolDatatokenAddress
@@ -587,6 +594,8 @@ describe('Marketplace flow tests', async () => {
  /// ```
  /// Next, let's encrypt the file(s) using the Provider
  /// ```Typescript
+  ASSET_URL.datatokenAddress = freDatatokenAddress
+  ASSET_URL.nftAddress = freNftAddress
  const encryptedFiles = await ProviderInstance.encrypt(ASSET_URL, providerUrl)
  DDO.services[0].files = await encryptedFiles
  DDO.services[0].datatokenAddress = freDatatokenAddress
@@ -801,6 +810,8 @@ describe('Marketplace flow tests', async () => {
  /// ```
  /// Now we need to encrypt the file(s) using the Provider
  /// ```Typescript
+  ASSET_URL.datatokenAddress = dispenserDatatokenAddress
+  ASSET_URL.nftAddress = dispenserNftAddress
  const encryptedFiles = await ProviderInstance.encrypt(ASSET_URL, providerUrl)
  DDO.services[0].files = await encryptedFiles
  DDO.services[0].datatokenAddress = dispenserDatatokenAddress
@@ -10,7 +10,8 @@ import {
  Datatoken,
  Nft,
  sleep,
-  ZERO_ADDRESS
+  ZERO_ADDRESS,
+  approveWei
} from '../../src'
import {
  Erc20CreateParams,
@@ -18,7 +19,8 @@ import {
  ComputeAsset,
  ComputeAlgorithm,
  ProviderComputeInitialize,
-  ConsumeMarketFee
+  ConsumeMarketFee,
+  Files
} from '../../src/@types'

let config: Config
@@ -42,17 +44,32 @@ let resolvedDdoWithNoTimeout
let resolvedAlgoDdoWith1mTimeout
let resolvedAlgoDdoWithNoTimeout

-const assetUrl = [
-  {
-    type: 'url',
-    url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
-    method: 'GET'
-  }
-]
+let freeEnvDatasetTxId
+let freeEnvAlgoTxId
+let paidEnvDatasetTxId
+let paidEnvAlgoTxId
+
+// let's have 1 minute of compute access
+const mytime = new Date()
+const computeMinutes = 1
+mytime.setMinutes(mytime.getMinutes() + computeMinutes)
+let computeValidUntil = Math.floor(mytime.getTime() / 1000)
+
+const assetUrl: Files = {
+  datatokenAddress: '0x0',
+  nftAddress: '0x0',
+  files: [
+    {
+      type: 'url',
+      url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
+      method: 'GET'
+    }
+  ]
+}
const ddoWithNoTimeout = {
  '@context': ['https://w3id.org/did/v1'],
  id: 'did:op:efba17455c127a885ec7830d687a8f6e64f5ba559f8506f8723c1f10f05c049c',
-  version: '4.0.0',
+  version: '4.1.0',
  chainId: 4,
  nftAddress: '0x0',
  metadata: {
@@ -89,7 +106,7 @@ const ddoWithNoTimeout = {
const ddoWith1mTimeout = {
  '@context': ['https://w3id.org/did/v1'],
  id: 'did:op:efba17455c127a885ec7830d687a8f6e64f5ba559f8506f8723c1f10f05c049c',
-  version: '4.0.0',
+  version: '4.1.0',
  chainId: 4,
  nftAddress: '0x0',
  metadata: {
@@ -122,17 +139,21 @@ const ddoWith1mTimeout = {
    }
  ]
}
-const algoAssetUrl = [
-  {
-    type: 'url',
-    url: 'https://raw.githubusercontent.com/oceanprotocol/test-algorithm/master/javascript/algo.js',
-    method: 'GET'
-  }
-]
+const algoAssetUrl: Files = {
+  datatokenAddress: '0x0',
+  nftAddress: '0x0',
+  files: [
+    {
+      type: 'url',
+      url: 'https://raw.githubusercontent.com/oceanprotocol/test-algorithm/master/javascript/algo.js',
+      method: 'GET'
+    }
+  ]
+}
const algoDdoWithNoTimeout = {
  '@context': ['https://w3id.org/did/v1'],
  id: 'did:op:efba17455c127a885ec7830d687a8f6e64f5ba559f8506f8723c1f10f05c049c',
-  version: '4.0.0',
+  version: '4.1.0',
  chainId: 4,
  nftAddress: '0x0',
  metadata: {
@@ -173,7 +194,7 @@ const algoDdoWithNoTimeout = {
const algoDdoWith1mTimeout = {
  '@context': ['https://w3id.org/did/v1'],
  id: 'did:op:efba17455c127a885ec7830d687a8f6e64f5ba559f8506f8723c1f10f05c049c',
-  version: '4.0.0',
+  version: '4.1.0',
  chainId: 4,
  nftAddress: '0x0',
  metadata: {
@@ -246,13 +267,15 @@ async function createAsset(

  const erc721AddressAsset = result.events.NFTCreated.returnValues[0]
  const datatokenAddressAsset = result.events.TokenCreated.returnValues[0]
+  ddo.nftAddress = web3.utils.toChecksumAddress(erc721AddressAsset)
  // create the files encrypted string
+  assetUrl.datatokenAddress = datatokenAddressAsset
+  assetUrl.nftAddress = ddo.nftAddress
  let providerResponse = await ProviderInstance.encrypt(assetUrl, providerUrl)
  ddo.services[0].files = await providerResponse
  ddo.services[0].datatokenAddress = datatokenAddressAsset
  ddo.services[0].serviceEndpoint = providerUrl
  // update ddo and set the right did
-  ddo.nftAddress = web3.utils.toChecksumAddress(erc721AddressAsset)
  ddo.id =
    'did:op:' +
    SHA256(web3.utils.toChecksumAddress(erc721AddressAsset) + chain.toString(10))
@@ -287,11 +310,12 @@ async function handleOrder(
  - no validOrder -> we need to call startOrder, to pay 1 DT & providerFees
  */
  if (order.providerFee && order.providerFee.providerFeeAmount) {
-    await datatoken.approve(
+    await approveWei(
+      web3,
+      payerAccount,
      order.providerFee.providerFeeToken,
      datatokenAddress,
-      order.providerFee.providerFeeAmount,
-      payerAccount
+      order.providerFee.providerFeeAmount
    )
  }
  if (order.validOrder) {
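This swap matters because `order.providerFee.providerFeeAmount` is already denominated in wei (see the `approveWei` JSDoc above), so the raw-wei helper avoids any unit conversion before the ERC20 approve call. For orientation, a hedged sketch of the branching that `handleOrder` implements; only the "no validOrder" comment is visible in this hunk, and the other two branches are inferred from the reuse-order tests below, so this is an assumption, not a quote of the real helper:

```Typescript
import { ProviderComputeInitialize } from './src/@types'

// Sketch only: the real helper also moves tokens; the callbacks stand in
// for the actual startOrder/reuseOrder calls.
async function handleOrderSketch(
  order: ProviderComputeInitialize,
  startOrder: () => Promise<string>, // pays 1 DT + providerFees
  reuseOrder: (txId: string) => Promise<string> // pays providerFees only
): Promise<string> {
  if (order.validOrder && !order.providerFee) {
    // order and provider fees still valid: reuse the previous order tx
    return order.validOrder
  }
  if (order.validOrder && order.providerFee) {
    // order still valid but fees expired: pay the fees only
    return reuseOrder(order.validOrder)
  }
  // no validOrder -> we need to call startOrder, to pay 1 DT & providerFees
  return startOrder()
}
```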
@@ -408,14 +432,11 @@ describe('Simple compute tests', async () => {
    assert(computeEnvs, 'No Compute environments found')
  })

-  it('should start a computeJob', async () => {
-    // we choose the first env
-    const computeEnv = computeEnvs[0].id
-    const computeConsumerAddress = computeEnvs[0].consumerAddress
-    // let's have 10 minutes of compute access
-    const mytime = new Date()
-    mytime.setMinutes(mytime.getMinutes() + 19)
-    const computeValidUntil = Math.floor(mytime.getTime() / 1000)
+  it('should start a computeJob using the free environment', async () => {
+    // we choose the free env
+    const computeEnv = computeEnvs.find((ce) => ce.priceMin === 0)
+    assert(computeEnv, 'Cannot find the free compute env')
+
    const assets: ComputeAsset[] = [
      {
        documentId: resolvedDdoWith1mTimeout.id,
@@ -431,7 +452,7 @@ describe('Simple compute tests', async () => {
    providerInitializeComputeResults = await ProviderInstance.initializeCompute(
      assets,
      algo,
-      computeEnv,
+      computeEnv.id,
      computeValidUntil,
      providerUrl,
      consumerAccount
@@ -444,7 +465,7 @@ describe('Simple compute tests', async () => {
      providerInitializeComputeResults.algorithm,
      resolvedAlgoDdoWith1mTimeout.services[0].datatokenAddress,
      consumerAccount,
-      computeConsumerAddress,
+      computeEnv.consumerAddress,
      0
    )
    for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) {
@@ -452,7 +473,7 @@ describe('Simple compute tests', async () => {
        providerInitializeComputeResults.datasets[i],
        dtAddressArray[i],
        consumerAccount,
-        computeConsumerAddress,
+        computeEnv.consumerAddress,
        0
      )
    }
@@ -460,10 +481,365 @@ describe('Simple compute tests', async () => {
      providerUrl,
      web3,
      consumerAccount,
-      computeEnv,
+      computeEnv.id,
      assets[0],
      algo
    )
+    freeEnvDatasetTxId = assets[0].transferTxId
+    freeEnvAlgoTxId = algo.transferTxId
    assert(computeJobs, 'Cannot start compute job')
    computeJobId = computeJobs[0].jobId
  })

  it('should restart a computeJob without paying anything, because order is valid and providerFees are still valid', async () => {
    // we choose the free env
    const computeEnv = computeEnvs.find((ce) => ce.priceMin === 0)
    assert(computeEnv, 'Cannot find the free compute env')

    const assets: ComputeAsset[] = [
      {
        documentId: resolvedDdoWith1mTimeout.id,
        serviceId: resolvedDdoWith1mTimeout.services[0].id,
        transferTxId: freeEnvDatasetTxId
      }
    ]
    const algo: ComputeAlgorithm = {
      documentId: resolvedAlgoDdoWith1mTimeout.id,
      serviceId: resolvedAlgoDdoWith1mTimeout.services[0].id,
      transferTxId: freeEnvAlgoTxId
    }

    providerInitializeComputeResults = await ProviderInstance.initializeCompute(
      assets,
      algo,
      computeEnv.id,
      computeValidUntil,
      providerUrl,
      consumerAccount
    )
    assert(
      providerInitializeComputeResults.algorithm.validOrder,
      'We should have a valid order for algorithm'
    )
    assert(
      !providerInitializeComputeResults.algorithm.providerFee,
      'We should not pay providerFees again for algorithm'
    )
    assert(
      providerInitializeComputeResults.datasets[0].validOrder,
      'We should have a valid order for dataset'
    )
    assert(
      !providerInitializeComputeResults.datasets[0].providerFee,
      'We should not pay providerFees again for dataset'
    )
    algo.transferTxId = providerInitializeComputeResults.algorithm.validOrder
    assets[0].transferTxId = providerInitializeComputeResults.datasets[0].validOrder
    assert(
      algo.transferTxId === freeEnvAlgoTxId &&
        assets[0].transferTxId === freeEnvDatasetTxId,
      'We should use the same orders, because no fees must be paid'
    )
    const computeJobs = await ProviderInstance.computeStart(
      providerUrl,
      web3,
      consumerAccount,
      computeEnv.id,
      assets[0],
      algo
    )
    assert(computeJobs, 'Cannot start compute job')
    computeJobId = computeJobs[0].jobId
  })

  // moving to paid environments

  it('should start a computeJob on a paid environment', async () => {
    // we choose the paid env
    const computeEnv = computeEnvs.find((ce) => ce.priceMin !== 0)
    assert(computeEnv, 'Cannot find the paid compute env')

    const assets: ComputeAsset[] = [
      {
        documentId: resolvedDdoWith1mTimeout.id,
        serviceId: resolvedDdoWith1mTimeout.services[0].id
      }
    ]
    const dtAddressArray = [resolvedDdoWith1mTimeout.services[0].datatokenAddress]
    const algo: ComputeAlgorithm = {
      documentId: resolvedAlgoDdoWith1mTimeout.id,
      serviceId: resolvedAlgoDdoWith1mTimeout.services[0].id
    }

    providerInitializeComputeResults = await ProviderInstance.initializeCompute(
      assets,
      algo,
      computeEnv.id,
      computeValidUntil,
      providerUrl,
      consumerAccount
    )
    assert(
      !('error' in providerInitializeComputeResults.algorithm),
      'Cannot order algorithm'
    )
    algo.transferTxId = await handleOrder(
      providerInitializeComputeResults.algorithm,
      resolvedAlgoDdoWith1mTimeout.services[0].datatokenAddress,
      consumerAccount,
      computeEnv.consumerAddress,
      0
    )
    for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) {
      assets[i].transferTxId = await handleOrder(
        providerInitializeComputeResults.datasets[i],
        dtAddressArray[i],
        consumerAccount,
        computeEnv.consumerAddress,
        0
      )
    }

    const computeJobs = await ProviderInstance.computeStart(
      providerUrl,
      web3,
      consumerAccount,
      computeEnv.id,
      assets[0],
      algo
    )
    paidEnvDatasetTxId = assets[0].transferTxId
    paidEnvAlgoTxId = algo.transferTxId
    assert(computeJobs, 'Cannot start compute job')
    computeJobId = computeJobs[0].jobId
  })

  it('should restart a computeJob on paid environment, without paying anything, because order is valid and providerFees are still valid', async () => {
    // we choose the paid env
    const computeEnv = computeEnvs.find((ce) => ce.priceMin !== 0)
    assert(computeEnv, 'Cannot find the paid compute env')

    const assets: ComputeAsset[] = [
      {
        documentId: resolvedDdoWith1mTimeout.id,
        serviceId: resolvedDdoWith1mTimeout.services[0].id,
        transferTxId: paidEnvDatasetTxId
      }
    ]
    const algo: ComputeAlgorithm = {
      documentId: resolvedAlgoDdoWith1mTimeout.id,
      serviceId: resolvedAlgoDdoWith1mTimeout.services[0].id,
      transferTxId: paidEnvAlgoTxId
    }

    providerInitializeComputeResults = await ProviderInstance.initializeCompute(
      assets,
      algo,
      computeEnv.id,
      computeValidUntil,
      providerUrl,
      consumerAccount
    )
    assert(
      providerInitializeComputeResults.algorithm.validOrder,
      'We should have a valid order for algorithm'
    )
    assert(
      !providerInitializeComputeResults.algorithm.providerFee,
      'We should not pay providerFees again for algorithm'
    )
    assert(
      providerInitializeComputeResults.datasets[0].validOrder,
      'We should have a valid order for dataset'
    )
    assert(
      !providerInitializeComputeResults.datasets[0].providerFee,
      'We should not pay providerFees again for dataset'
    )
    algo.transferTxId = providerInitializeComputeResults.algorithm.validOrder
    assets[0].transferTxId = providerInitializeComputeResults.datasets[0].validOrder
    assert(
      algo.transferTxId === paidEnvAlgoTxId &&
        assets[0].transferTxId === paidEnvDatasetTxId,
      'We should use the same orders, because no fees must be paid'
    )
    const computeJobs = await ProviderInstance.computeStart(
      providerUrl,
      web3,
      consumerAccount,
      computeEnv.id,
      assets[0],
      algo
    )
    assert(computeJobs, 'Cannot start compute job')
    computeJobId = computeJobs[0].jobId
  })

  // move to reuse Orders
  it('Should fast forward time and set a new computeValidUntil', async () => {
    const mytime = new Date()
    const computeMinutes = 5
    mytime.setMinutes(mytime.getMinutes() + computeMinutes)
    computeValidUntil = Math.floor(mytime.getTime() / 1000)
  })
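The "fast forward" above simply recomputes the requested validity window, using the same epoch-seconds pattern as at the top of the file, so that the provider fees from the earlier orders are no longer valid. For reference, that repeated computation factored into a helper (hypothetical, not in the diff):

```Typescript
// Returns an epoch-seconds timestamp `computeMinutes` in the future,
// e.g. validUntil(5) requests five more minutes of compute access.
function validUntil(computeMinutes: number): number {
  const t = new Date()
  t.setMinutes(t.getMinutes() + computeMinutes)
  return Math.floor(t.getTime() / 1000)
}
```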

  it('should start a computeJob using the free environment, by paying only providerFee (reuseOrder)', async () => {
    // we choose the free env
    const computeEnv = computeEnvs.find((ce) => ce.priceMin === 0)
    assert(computeEnv, 'Cannot find the free compute env')

    const assets: ComputeAsset[] = [
      {
        documentId: resolvedDdoWith1mTimeout.id,
        serviceId: resolvedDdoWith1mTimeout.services[0].id,
        transferTxId: freeEnvDatasetTxId
      }
    ]
    const dtAddressArray = [resolvedDdoWith1mTimeout.services[0].datatokenAddress]
    const algo: ComputeAlgorithm = {
      documentId: resolvedAlgoDdoWith1mTimeout.id,
      serviceId: resolvedAlgoDdoWith1mTimeout.services[0].id,
      transferTxId: freeEnvAlgoTxId
    }

    providerInitializeComputeResults = await ProviderInstance.initializeCompute(
      assets,
      algo,
      computeEnv.id,
      computeValidUntil,
      providerUrl,
      consumerAccount
    )
    assert(
      providerInitializeComputeResults.algorithm.validOrder,
      'We should have a valid order for algorithm'
    )
    assert(
      providerInitializeComputeResults.datasets[0].validOrder,
      'We should have a valid order for dataset'
    )

    assert(
      providerInitializeComputeResults.algorithm.providerFee ||
        providerInitializeComputeResults.datasets[0].providerFee,
      'We should pay providerFees again for algorithm or dataset. Cannot have empty for both'
    )

    assert(
      !('error' in providerInitializeComputeResults.algorithm),
      'Cannot order algorithm'
    )
    algo.transferTxId = await handleOrder(
      providerInitializeComputeResults.algorithm,
      resolvedAlgoDdoWith1mTimeout.services[0].datatokenAddress,
      consumerAccount,
      computeEnv.consumerAddress,
      0
    )
    for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) {
      assets[i].transferTxId = await handleOrder(
        providerInitializeComputeResults.datasets[i],
        dtAddressArray[i],
        consumerAccount,
        computeEnv.consumerAddress,
        0
      )
    }
    assert(
      algo.transferTxId !== freeEnvAlgoTxId ||
        assets[0].transferTxId !== freeEnvDatasetTxId,
      'We should not use the same orders, because providerFee must be paid'
    )
    const computeJobs = await ProviderInstance.computeStart(
      providerUrl,
      web3,
      consumerAccount,
      computeEnv.id,
      assets[0],
      algo
    )
    freeEnvDatasetTxId = assets[0].transferTxId
    freeEnvAlgoTxId = algo.transferTxId
    assert(computeJobs, 'Cannot start compute job')
    computeJobId = computeJobs[0].jobId
  })

  it('should start a computeJob using the paid environment, by paying only providerFee (reuseOrder)', async () => {
    // we choose the paid env
    const computeEnv = computeEnvs.find((ce) => ce.priceMin !== 0)
    assert(computeEnv, 'Cannot find the paid compute env')

    const assets: ComputeAsset[] = [
      {
        documentId: resolvedDdoWith1mTimeout.id,
        serviceId: resolvedDdoWith1mTimeout.services[0].id,
        transferTxId: paidEnvDatasetTxId
      }
    ]
    const dtAddressArray = [resolvedDdoWith1mTimeout.services[0].datatokenAddress]
    const algo: ComputeAlgorithm = {
      documentId: resolvedAlgoDdoWith1mTimeout.id,
      serviceId: resolvedAlgoDdoWith1mTimeout.services[0].id,
      transferTxId: paidEnvAlgoTxId
    }

    providerInitializeComputeResults = await ProviderInstance.initializeCompute(
      assets,
      algo,
      computeEnv.id,
      computeValidUntil,
      providerUrl,
      consumerAccount
    )
    assert(
      providerInitializeComputeResults.algorithm.validOrder,
      'We should have a valid order for algorithm'
    )
    assert(
      providerInitializeComputeResults.datasets[0].validOrder,
      'We should have a valid order for dataset'
    )
    assert(
      providerInitializeComputeResults.algorithm.providerFee ||
        providerInitializeComputeResults.datasets[0].providerFee,
      'We should pay providerFees again for algorithm or dataset. Cannot have empty for both'
    )

    assert(
      !('error' in providerInitializeComputeResults.algorithm),
      'Cannot order algorithm'
    )
    algo.transferTxId = await handleOrder(
      providerInitializeComputeResults.algorithm,
      resolvedAlgoDdoWith1mTimeout.services[0].datatokenAddress,
      consumerAccount,
      computeEnv.consumerAddress,
      0
    )
    for (let i = 0; i < providerInitializeComputeResults.datasets.length; i++) {
      assets[i].transferTxId = await handleOrder(
        providerInitializeComputeResults.datasets[i],
        dtAddressArray[i],
        consumerAccount,
        computeEnv.consumerAddress,
        0
      )
    }
    assert(
      algo.transferTxId !== paidEnvAlgoTxId ||
        assets[0].transferTxId !== paidEnvDatasetTxId,
      'We should not use the same orders, because providerFee must be paid'
    )
    const computeJobs = await ProviderInstance.computeStart(
      providerUrl,
      web3,
      consumerAccount,
      computeEnv.id,
      assets[0],
      algo
    )
    freeEnvDatasetTxId = assets[0].transferTxId
    freeEnvAlgoTxId = algo.transferTxId
    assert(computeJobs, 'Cannot start compute job')
    computeJobId = computeJobs[0].jobId
  })
@@ -479,7 +855,7 @@ describe('Simple compute tests', async () => {
  })

  it('Get download compute results url', async () => {
-    sleep(10000)
+    await sleep(10000)
    const downloadURL = await ProviderInstance.getComputeResultUrl(
      providerUrl,
      web3,
@@ -1,7 +1,7 @@
import { assert } from 'chai'
import { web3, getTestConfig } from '../config'
import { Config, Provider } from '../../src'
-import { FileMetadata } from '../../src/@types'
+import { FileInfo } from '../../src/@types'

describe('Provider tests', async () => {
  let config: Config
@@ -26,7 +26,7 @@ describe('Provider tests', async () => {
  })

  it('Alice checks fileinfo', async () => {
-    const fileinfo: FileMetadata[] = await providerInstance.checkFileUrl(
+    const fileinfo: FileInfo[] = await providerInstance.checkFileUrl(
      'https://dumps.wikimedia.org/enwiki/latest/enwiki-latest-abstract.xml.gz-rss.xml',
      config.providerUri
    )
@@ -18,7 +18,8 @@ import {
  Erc20CreateParams,
  PoolCreationParams,
  FreCreationParams,
-  DispenserCreationParams
+  DispenserCreationParams,
+  Files
} from '../../src/@types'

describe('Publish tests', async () => {
@@ -30,18 +31,22 @@ describe('Publish tests', async () => {
  let factory: NftFactory
  let publisherAccount: string

-  const assetUrl = [
-    {
-      type: 'url',
-      url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
-      method: 'GET'
-    }
-  ]
+  const assetUrl: Files = {
+    datatokenAddress: '0x0',
+    nftAddress: '0x0',
+    files: [
+      {
+        type: 'url',
+        url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
+        method: 'GET'
+      }
+    ]
+  }

  const genericAsset: DDO = {
    '@context': ['https://w3id.org/did/v1'],
    id: '',
-    version: '4.0.0',
+    version: '4.1.0',
    chainId: 4,
    nftAddress: '0x0',
    metadata: {
@@ -142,7 +147,8 @@ describe('Publish tests', async () => {

    const nftAddress = bundleNFT.events.NFTCreated.returnValues[0]
    const datatokenAddress = bundleNFT.events.TokenCreated.returnValues[0]

+    assetUrl.datatokenAddress = datatokenAddress
+    assetUrl.nftAddress = nftAddress
    const encryptedFiles = await ProviderInstance.encrypt(assetUrl, providerUrl)

    poolDdo.metadata.name = 'test-dataset-pool'
@@ -223,7 +229,8 @@ describe('Publish tests', async () => {

    const nftAddress = bundleNFT.events.NFTCreated.returnValues[0]
    const datatokenAddress = bundleNFT.events.TokenCreated.returnValues[0]

+    assetUrl.datatokenAddress = datatokenAddress
+    assetUrl.nftAddress = nftAddress
    const encryptedFiles = await ProviderInstance.encrypt(assetUrl, providerUrl)

    fixedPriceDdo.metadata.name = 'test-dataset-fixedPrice'
@@ -297,7 +304,8 @@ describe('Publish tests', async () => {

    const nftAddress = bundleNFT.events.NFTCreated.returnValues[0]
    const datatokenAddress = bundleNFT.events.TokenCreated.returnValues[0]

+    assetUrl.datatokenAddress = datatokenAddress
+    assetUrl.nftAddress = nftAddress
    const encryptedFiles = await ProviderInstance.encrypt(assetUrl, providerUrl)
    dispenserDdo.metadata.name = 'test-dataset-dispenser'
    dispenserDdo.services[0].files = await encryptedFiles
@@ -13,7 +13,7 @@ import {
  downloadFile,
  ZERO_ADDRESS
} from '../../src'
-import { ProviderFees, Erc20CreateParams, DDO } from '../../src/@types'
+import { ProviderFees, Erc20CreateParams, DDO, Files } from '../../src/@types'

describe('Simple Publish & consume test', async () => {
  let config: Config
@@ -23,18 +23,22 @@ describe('Simple Publish & consume test', async () => {
  let publisherAccount: string
  let consumerAccount: string

-  const assetUrl = [
-    {
-      type: 'url',
-      url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
-      method: 'GET'
-    }
-  ]
+  const assetUrl: Files = {
+    datatokenAddress: '0x0',
+    nftAddress: '0x0',
+    files: [
+      {
+        type: 'url',
+        url: 'https://raw.githubusercontent.com/oceanprotocol/testdatasets/main/shs_dataset_test.txt',
+        method: 'GET'
+      }
+    ]
+  }

  const ddo: DDO = {
    '@context': ['https://w3id.org/did/v1'],
    id: '',
-    version: '4.0.0',
+    version: '4.1.0',
    chainId: 4,
    nftAddress: '0x0',
    metadata: {
@@ -101,6 +105,8 @@ describe('Simple Publish & consume test', async () => {
    const datatokenAddress = tx.events.TokenCreated.returnValues[0]

    // create the files encrypted string
+    assetUrl.datatokenAddress = datatokenAddress
+    assetUrl.nftAddress = erc721Address
    let providerResponse = await ProviderInstance.encrypt(assetUrl, providerUrl)
    ddo.services[0].files = await providerResponse
    ddo.services[0].datatokenAddress = datatokenAddress