mirror of https://github.com/oceanprotocol/market.git synced 2024-12-02 05:57:29 +01:00

fix barge connection (#1729)

* adding network 8996 (barge)

* Update ocean.ts

* development network with env variable

* temp patch for provider  url check

* removed logs

* fix typing error

* set local provider url to asset metadata

* clean development config

* wip make use of barge addresses

* update env vars from script

* more fixes

* cleanup

* update readme

* more readme updates

* cleanup fixes

* more fixes

* script readme updates

* update readme

* update readme

* bump oceanjs

* fix tests after oceanjs upgrade

* adding custom provider for mac barge

* fix test app.config path

* remove log

* added NEXT_PUBLIC_PROVIDER_URL to load dev env

* added env variable for mac on load dev env

* fre fixes

* review suggestions

* Update README.md

Co-authored-by: Jamie Hewitt <jamie@oceanprotocol.com>

* add private key example

* bump oceanlib

* fix build

* fix provider uri for mac

* add custom rpc env var example

* fix build

* update barge env vars script

* remove barge from supported and default chainIds by default

* remove log

---------

Co-authored-by: Bogdan Fazakas <bogdan.fazakas@gmail.com>
Co-authored-by: Jamie Hewitt <jamie@oceanprotocol.com>
EnzoVezzaro 2023-07-17 16:42:32 -04:00 committed by GitHub
parent 2e69739778
commit 99090ee058
18 changed files with 222 additions and 64 deletions

View File

@@ -16,3 +16,15 @@
 # Privacy Preference Center
 #NEXT_PUBLIC_PRIVACY_PREFERENCE_CENTER="true"
+
+# Development Preference Center
+#NEXT_PUBLIC_NFT_FACTORY_ADDRESS='0xxx'
+#NEXT_PUBLIC_OPF_COMMUNITY_FEE_COLECTOR='0xxx'
+#NEXT_PUBLIC_FIXED_RATE_EXCHANGE_ADDRESS='0xxx'
+#NEXT_PUBLIC_DISPENSER_ADDRESS='0xxx'
+#NEXT_PUBLIC_OCEAN_TOKEN_ADDRESS='0xxx'
+#NEXT_PUBLIC_MARKET_DEVELOPMENT='true'
+#NEXT_PUBLIC_PROVIDER_URL="http://xxx:xxx"
+#NEXT_PUBLIC_SUBGRAPH_URI="http://xxx:xxx"
+#NEXT_PUBLIC_METADATACACHE_URI="http://xxx:xxx"
+#NEXT_PUBLIC_RPC_URI="http://xxx:xxx"

View File

@@ -157,9 +157,9 @@ export const assets: AssetExtended[] = [
       allocated: 45554.69921875,
       orders: 1,
       price: {
-        value: 3231343254,
-        tokenAddress: '0xCfDdA22C9837aE76E0faA845354f33C62E03653a',
-        tokenSymbol: 'OCEAN'
+        tokenAddress: '0x282d8efCe846A88B159800bd4130ad77443Fa1A1',
+        tokenSymbol: 'mOCEAN',
+        value: 100
       }
     },
     version: '4.1.0',
@@ -1113,9 +1113,7 @@ export const assets: AssetExtended[] = [
       allocated: 11159.279296875,
       orders: 1,
       price: {
-        value: 3231343254,
-        tokenAddress: '0xCfDdA22C9837aE76E0faA845354f33C62E03653a',
-        tokenSymbol: 'OCEAN'
+        value: 0
       }
     },
     version: '4.1.0',
@@ -1198,9 +1196,7 @@ export const assets: AssetExtended[] = [
     stats: {
       orders: 0,
       price: {
-        value: 3231343254,
-        tokenAddress: '0xCfDdA22C9837aE76E0faA845354f33C62E03653a',
-        tokenSymbol: 'OCEAN'
+        value: 0
       }
     },
     version: '4.1.0',
@@ -1448,9 +1444,7 @@ export const assets: AssetExtended[] = [
     stats: {
       orders: 0,
       price: {
-        value: 3231343254,
-        tokenAddress: '0xCfDdA22C9837aE76E0faA845354f33C62E03653a',
-        tokenSymbol: 'OCEAN'
+        value: 0
       }
     },
     version: '4.1.0',

View File

@@ -65,34 +65,30 @@ This will start the development server under

 ### Local components with Barge

-If you prefer to connect to locally running components instead of remote connections, you can spin up [`barge`](https://github.com/oceanprotocol/barge) and use a local Ganache network in another terminal before running `npm start`:
+Using the `ocean-market` with `barge` components is recommended for advanced users; if you are new, we advise you to use the `ocean-market` with remote networks first. If you prefer to connect to locally running components instead of remote connections, you can spin up [`barge`](https://github.com/oceanprotocol/barge) and use a local Ganache network in another terminal before running `npm start`. To fully test all [The Graph](https://thegraph.com) integrations, you have to start barge with the local Graph node:

 ```bash
 git clone git@github.com:oceanprotocol/barge.git
 cd barge

-# startup with local Ganache node
-./start_ocean.sh
+# startup with local Ganache and Graph nodes
+./start_ocean.sh --with-thegraph
 ```

-Barge will deploy contracts to the local Ganache node which will take some time. At the end the compiled artifacts need to be copied over to this project into `node_modules/@oceanprotocol/contracts/artifacts`. This script will do that for you:
+Barge will deploy contracts to the local Ganache node, which will take some time. At the end, the compiled artifacts need to be imported into this project as environment variables. The `set-barge-env` script will do that for you and set the env variables to use this local connection in `.env` in the app. You also need to append barge's Ganache chainId (`8996`) to the `chainIdsSupported` array in the `app.config.js` file.
+
+If you are using the `macOS` operating system, you should also make some changes to the provider URL, since the default barge IP cannot be accessed due to network constraints on `macOS`. Use `127.0.0.1:8030` (if you have changed the provider port, please use that here as well) for each direct call from the market to the provider, but keep the internal barge URL `http://172.15.0.4:8030/` (this is the default ip:port for the provider in barge; if changed, please use the corresponding URL). So inside `src/@utils/provider.ts`, if on `macOS`, you can set the env variable `NEXT_PUBLIC_PROVIDER_URL` or hardcode `127.0.0.1:8030` as `providerUrl` in all the methods that call `ProviderInstance` methods (eg: `getEncryptedFiles`, `getFileDidInfo`, `downloadFile` etc). You should use the same provider URL in `src/@utils/nft.ts` inside `setNFTMetadataAndTokenURI` and `setNftMetadata`, and in `src/components/Publish/index.tsx` inside the `encrypt` method (if you set the env variable there's no need to do this). You also need to use local IPs for the subgraph (`127.0.0.1` instead of `172.15.0.15`) and the metadatacache (`127.0.0.1` instead of `172.15.0.5`).
+
+Once you want to switch back to using the market against remote networks, you need to comment out or remove the env vars that are set by the `set-barge-env` script.

 ```bash
-./scripts/copy-contracts.sh
-```
-
-Finally, set environment variables to use this local connection in `.env` in the app:
-
-```bash
-# modify env variables
-cp .env.example .env
+cd market
+npm run set-barge-env
 npm start
 ```

-To use the app together with MetaMask, importing one of the accounts auto-generated by the Ganache container is the easiest way to have test ETH available. All of them have 100 ETH by default. Upon start, the `ocean_ganache_1` container will print out the private keys of multiple accounts in its logs. Pick one of them and import into MetaMask.
-
-To fully test all [The Graph](https://thegraph.com) integrations, you have to run your own local Graph node with our [`ocean-subgraph`](https://github.com/oceanprotocol/ocean-subgraph) deployed to it. Barge does not include a local subgraph so by default, the `subgraphUri` is hardcoded to the Goerli subgraph in our [`getDevelopmentConfig` function](https://github.com/oceanprotocol/market/blob/d0b1534d105e5dcb3790c65d4bb04ff1d2dbc575/src/utils/ocean.ts#L31).
+To use the app together with MetaMask, importing one of the accounts auto-generated by the Ganache container is the easiest way to have test ETH available. All of them have 100 ETH by default. Upon start, the `ocean_ganache_1` container will print out the private keys of multiple accounts in its logs. Pick one of them and import into MetaMask. Barge private key example: `0xc594c6e5def4bab63ac29eed19a134c130388f74f019bc74b8f4389df2837a58`

 > Cleaning all Docker images so they are fetched freshly is often a good idea to make sure no issues are caused by old or stale images: `docker system prune --all --volumes`
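
For reference, a minimal `.env` sketch for the barge setup described in this README hunk might look like the following. This is illustrative only: the contract addresses are placeholders that the `set-barge-env` script fills in from barge's deployed artifacts, and the localhost URLs assume barge's default ports (only needed on `macOS`).

```bash
# .env — illustrative values; set-barge-env writes the real addresses
NEXT_PUBLIC_NFT_FACTORY_ADDRESS='0x...'
NEXT_PUBLIC_OPF_COMMUNITY_FEE_COLECTOR='0x...'
NEXT_PUBLIC_FIXED_RATE_EXCHANGE_ADDRESS='0x...'
NEXT_PUBLIC_DISPENSER_ADDRESS='0x...'
NEXT_PUBLIC_OCEAN_TOKEN_ADDRESS='0x...'
NEXT_PUBLIC_MARKET_DEVELOPMENT='true'

# macOS only: point direct provider/subgraph/metadata cache calls at localhost
NEXT_PUBLIC_PROVIDER_URL="http://127.0.0.1:8030"
NEXT_PUBLIC_SUBGRAPH_URI="http://127.0.0.1:9000"
NEXT_PUBLIC_METADATACACHE_URI="http://127.0.0.1:5000"
```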

View File

@@ -16,6 +16,10 @@ module.exports = {
   // List of all supported chainIds. Used to populate the Chains user preferences list.
   chainIdsSupported: [1, 137, 5, 80001],

+  customProviderUrl: process.env.NEXT_PUBLIC_PROVIDER_URL,
+
+  infuraProjectId: process.env.NEXT_PUBLIC_INFURA_PROJECT_ID || 'xxx',
+
   defaultDatatokenTemplateIndex: 2,

   // The ETH address the marketplace fee will be sent to.
   marketFeeAddress:
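
To tie this to the README instructions above, here is a rough sketch of what the relevant part of `app.config.js` could look like when testing against barge; the `8996` entry and the env-driven values are the only changes, other keys are abbreviated, and the values shown are the defaults from this diff:

```js
// app.config.js (excerpt) — illustrative sketch, not the full file
module.exports = {
  // append barge's local Ganache chainId (8996) when developing against barge
  chainIdsSupported: [1, 137, 5, 80001, 8996],

  // when set, overrides the per-asset provider endpoint (used for local barge development)
  customProviderUrl: process.env.NEXT_PUBLIC_PROVIDER_URL,

  infuraProjectId: process.env.NEXT_PUBLIC_INFURA_PROJECT_ID || 'xxx',
  defaultDatatokenTemplateIndex: 2
  // ...remaining config keys unchanged
}
```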

View File

@@ -10,6 +10,7 @@
     "build:static": "npm run build && next export",
     "serve": "serve -s public/",
     "pregenerate": "bash scripts/pregenerate.sh",
+    "set-barge-env": "bash scripts/barge-env.sh",
     "test": "npm run pregenerate && npm run lint && npm run type-check && npm run jest",
     "jest": "jest -c .jest/jest.config.js",
     "jest:watch": "jest -c .jest/jest.config.js --watch",

scripts/barge-env.sh (new file)
View File

@@ -0,0 +1,2 @@
+# Set
+node ./scripts/load-development-addresses.js
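
A hedged usage sketch: assuming a `.env` file already exists (e.g. copied from `.env.example`) and barge has finished deploying contracts, the new npm script updates `.env` in place and prints one confirmation per variable (the messages come from `load-development-addresses.js` below):

```bash
npm run set-barge-env
# Successfully updated the NEXT_PUBLIC_NFT_FACTORY_ADDRESS environment variable.
# Successfully added the NEXT_PUBLIC_MARKET_DEVELOPMENT environment variable.
# ...one line per variable, "updated" or "added" depending on whether the key was already present
```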

View File

@@ -0,0 +1,76 @@
+const fs = require('fs')
+const os = require('os')
+
+function getLocalAddresses() {
+  const data = JSON.parse(
+    // eslint-disable-next-line security/detect-non-literal-fs-filename
+    fs.readFileSync(
+      `${os.homedir()}/.ocean/ocean-contracts/artifacts/address.json`,
+      'utf8'
+    )
+  )
+  return data.development
+}
+
+function updateEnvVariable(key, value) {
+  fs.readFile('.env', 'utf8', (err, data) => {
+    if (err) {
+      console.error(err)
+      return
+    }
+
+    const lines = data.split('\n')
+
+    let keyExists = false
+    for (let i = 0; i < lines.length; i++) {
+      const line = lines[i]
+      if (line.startsWith(key + '=')) {
+        lines[i] = `${key}=${value}`
+        keyExists = true
+        break
+      }
+    }
+
+    if (!keyExists) {
+      lines.push(`${key}=${value}`)
+    }
+
+    const updatedContent = lines.join('\n')
+
+    fs.writeFile('.env', updatedContent, 'utf8', (err) => {
+      if (err) {
+        console.error(err)
+        return
+      }
+      console.log(
+        `Successfully ${
+          keyExists ? 'updated' : 'added'
+        } the ${key} environment variable.`
+      )
+    })
+  })
+}
+
+const addresses = getLocalAddresses()
+
+updateEnvVariable('NEXT_PUBLIC_NFT_FACTORY_ADDRESS', addresses.ERC721Factory)
+updateEnvVariable(
+  'NEXT_PUBLIC_OPF_COMMUNITY_FEE_COLECTOR',
+  addresses.OPFCommunityFeeCollector
+)
+updateEnvVariable(
+  'NEXT_PUBLIC_FIXED_RATE_EXCHANGE_ADDRESS',
+  addresses.FixedPrice
+)
+updateEnvVariable('NEXT_PUBLIC_DISPENSER_ADDRESS', addresses.Dispenser)
+updateEnvVariable('NEXT_PUBLIC_OCEAN_TOKEN_ADDRESS', addresses.Ocean)
+updateEnvVariable('NEXT_PUBLIC_MARKET_DEVELOPMENT', true)
+updateEnvVariable(
+  '#NEXT_PUBLIC_PROVIDER_URL',
+  '"http://127.0.0.1:8030" # only for mac'
+)
+updateEnvVariable(
+  '#NEXT_PUBLIC_SUBGRAPH_URI',
+  '"http://127.0.0.1:9000" # only for mac'
+)
+updateEnvVariable(
+  '#NEXT_PUBLIC_METADATACACHE_URI',
+  '"http://127.0.0.1:5000" # only for mac'
+)
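
The script above assumes barge has written its deployed contract addresses to `~/.ocean/ocean-contracts/artifacts/address.json` and reads only the `development` entry. A hypothetical shape of that entry, limited to the keys the script reads and with placeholder addresses, is:

```json
{
  "development": {
    "ERC721Factory": "0x...",
    "OPFCommunityFeeCollector": "0x...",
    "FixedPrice": "0x...",
    "Dispenser": "0x...",
    "Ocean": "0x..."
  }
}
```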

View File

@@ -1,11 +1,33 @@
 #!/usr/bin/env node
 'use strict'

+const bargeNetwork = {
+  name: 'Ethereum Barge',
+  chain: 'ETH',
+  icon: 'ethereum',
+  rpc: ['http://127.0.0.1:8545'],
+  faucets: [],
+  nativeCurrency: {
+    name: 'Ether',
+    symbol: 'ETH',
+    decimals: 18
+  },
+  infoURL: 'https://ethereum.org',
+  shortName: 'eth',
+  chainId: 8996,
+  networkId: 8996,
+  slip44: 60,
+  ens: {},
+  explorers: []
+}
+
 const axios = require('axios')

 // https://github.com/ethereum-lists/chains
 const chainDataUrl = 'https://chainid.network/chains.json'

 axios(chainDataUrl).then((response) => {
+  response.data.push(bargeNetwork)
+  // const networks = { ...response.data, ...bargeNetwork }
   process.stdout.write(JSON.stringify(response.data, null, ' '))
 })
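
The `bargeNetwork` entry above only feeds the market's network metadata; to use the local chain from the browser, it can also be added to MetaMask manually. This is an illustrative sketch using MetaMask's standard `wallet_addEthereumChain` call with the values from the entry above (`0x2324` is 8996 in hex):

```ts
// Illustrative only — adds barge's local Ganache chain to MetaMask from the browser console
await (window as any).ethereum.request({
  method: 'wallet_addEthereumChain',
  params: [
    {
      chainId: '0x2324', // 8996
      chainName: 'Ethereum Barge',
      nativeCurrency: { name: 'Ether', symbol: 'ETH', decimals: 18 },
      rpcUrls: ['http://127.0.0.1:8545']
    }
  ]
})
```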

View File

@@ -11,7 +11,7 @@ import { Config, LoggerInstance, Purgatory } from '@oceanprotocol/lib'
 import { CancelToken } from 'axios'
 import { getAsset } from '@utils/aquarius'
 import { useCancelToken } from '@hooks/useCancelToken'
-import { getOceanConfig, getDevelopmentConfig } from '@utils/ocean'
+import { getOceanConfig, sanitizeDevelopmentConfig } from '@utils/ocean'
 import { getAccessDetails } from '@utils/accessDetailsAndPricing'
 import { useIsMounted } from '@hooks/useIsMounted'
 import { useMarketMetadata } from './MarketMetadata'
@@ -180,13 +180,13 @@ function AssetProvider({
   // -----------------------------------
   useEffect(() => {
     if (!asset?.chainId) return

+    const config = getOceanConfig(asset?.chainId)
     const oceanConfig = {
-      ...getOceanConfig(asset?.chainId),
+      ...config,
       // add local dev values
       ...(asset?.chainId === 8996 && {
-        ...getDevelopmentConfig()
+        ...sanitizeDevelopmentConfig(config)
       })
     }
     setOceanConfig(oceanConfig)

View File

@@ -43,14 +43,13 @@ function MarketMetadataProvider({
         null,
         getQueryContext(appConfig.chainIdsSupported[i])
       )
-
       opcData.push({
         chainId: appConfig.chainIdsSupported[i],
-        approvedTokens: response.data?.opc.approvedTokens.map(
+        approvedTokens: response.data?.opc?.approvedTokens?.map(
           (token) => token.address
         ),
-        swapApprovedFee: response.data?.opc.swapOceanFee,
-        swapNotApprovedFee: response.data?.opc.swapNonOceanFee
+        swapApprovedFee: response.data?.opc?.swapOceanFee,
+        swapNotApprovedFee: response.data?.opc?.swapNonOceanFee
       } as OpcFee)
     }
     LoggerInstance.log('[MarketMetadata] Got new data.', {

View File

@@ -14,7 +14,8 @@ import { getFixedBuyPrice } from './ocean/fixedRateExchange'
 import Decimal from 'decimal.js'
 import {
   consumeMarketOrderFee,
-  publisherMarketOrderFee
+  publisherMarketOrderFee,
+  customProviderUrl
 } from '../../app.config'
 import { Signer } from 'ethers'
 import { toast } from 'react-toastify'
@@ -186,7 +187,7 @@ export async function getOrderPriceAndFees(
       asset?.services[0].id,
       0,
       accountId,
-      asset?.services[0].serviceEndpoint
+      customProviderUrl || asset?.services[0].serviceEndpoint
     ))
   } catch (error) {
     const message = getErrorMessage(JSON.parse(error.message))

View File

@@ -10,6 +10,7 @@ import {
   getErrorMessage
 } from '@oceanprotocol/lib'
 import { SvgWaves } from './SvgWaves'
+import { customProviderUrl } from '../../app.config'
 import { Signer, ethers } from 'ethers'
 import { toast } from 'react-toastify'
@@ -108,7 +109,7 @@ export async function setNftMetadata(
     encryptedDdo = await ProviderInstance.encrypt(
       asset,
       asset.chainId,
-      asset.services[0].serviceEndpoint,
+      customProviderUrl || asset.services[0].serviceEndpoint,
       signal
     )
   } catch (err) {
@@ -150,7 +151,7 @@ export async function setNFTMetadataAndTokenURI(
     encryptedDdo = await ProviderInstance.encrypt(
       asset,
       asset.chainId,
-      asset.services[0].serviceEndpoint,
+      customProviderUrl || asset.services[0].serviceEndpoint,
       signal
     )
   } catch (err) {

View File

@@ -1,10 +1,31 @@
 import { ConfigHelper, Config } from '@oceanprotocol/lib'
 import { ethers } from 'ethers'
 import abiDatatoken from '@oceanprotocol/contracts/artifacts/contracts/templates/ERC20TemplateEnterprise.sol/ERC20TemplateEnterprise.json'

+/**
+  This function takes a Config object as an input and returns a new sanitized Config object
+  The new Config object has the same properties as the input object, but with some values replaced by environment variables if they exist
+  Also adds missing contract addresses deployed when running barge locally
+  @param {Config} config - The input Config object
+  @returns {Config} A new Config object
+*/
+export function sanitizeDevelopmentConfig(config: Config): Config {
+  return {
+    subgraphUri: process.env.NEXT_PUBLIC_SUBGRAPH_URI || config.subgraphUri,
+    metadataCacheUri:
+      process.env.NEXT_PUBLIC_METADATACACHE_URI || config.metadataCacheUri,
+    providerUri: process.env.NEXT_PUBLIC_PROVIDER_URL || config.providerUri,
+    nodeUri: process.env.NEXT_PUBLIC_RPC_URL || config.nodeUri,
+    fixedRateExchangeAddress:
+      process.env.NEXT_PUBLIC_FIXED_RATE_EXCHANGE_ADDRESS,
+    dispenserAddress: process.env.NEXT_PUBLIC_DISPENSER_ADDRESS,
+    oceanTokenAddress: process.env.NEXT_PUBLIC_OCEAN_TOKEN_ADDRESS,
+    nftFactoryAddress: process.env.NEXT_PUBLIC_NFT_FACTORY_ADDRESS
+  } as Config
+}
+
 export function getOceanConfig(network: string | number): Config {
-  const config = new ConfigHelper().getConfig(
+  let config = new ConfigHelper().getConfig(
     network,
     network === 'polygon' ||
       network === 'moonbeamalpha' ||
@@ -12,10 +33,14 @@ export function getOceanConfig(network: string | number): Config {
       network === 'bsc' ||
       network === 56 ||
       network === 'gaiaxtestnet' ||
-      network === 2021000
+      network === 2021000 ||
+      network === 8996
       ? undefined
       : process.env.NEXT_PUBLIC_INFURA_PROJECT_ID
   ) as Config
+
+  if (network === 8996) {
+    config = { ...config, ...sanitizeDevelopmentConfig(config) }
+  }
   return config as Config
 }
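
A small usage sketch of the updated flow, assuming the barge env variables from `.env` are set; for chainId `8996`, `getOceanConfig` now merges the env-driven overrides from `sanitizeDevelopmentConfig` over the `ConfigHelper` defaults:

```ts
import { getOceanConfig } from '@utils/ocean'

// barge's local Ganache chain: getOceanConfig applies the env overrides itself
const bargeConfig = getOceanConfig(8996)

// e.g. "http://127.0.0.1:8030" when NEXT_PUBLIC_PROVIDER_URL is set, otherwise the ConfigHelper default
console.log(bargeConfig.providerUri)
```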

View File

@@ -19,7 +19,8 @@ import { getOceanConfig } from './ocean'
 import {
   marketFeeAddress,
   consumeMarketOrderFee,
-  consumeMarketFixedSwapFee
+  consumeMarketFixedSwapFee,
+  customProviderUrl
 } from '../../app.config'
 import { toast } from 'react-toastify'
@@ -35,7 +36,7 @@ async function initializeProvider(
       asset.services[0].id,
       0,
       accountId,
-      asset.services[0].serviceEndpoint
+      customProviderUrl || asset.services[0].serviceEndpoint
     )
     return provider
   } catch (error) {
@@ -136,7 +137,7 @@ export async function order(
       orderParams._consumeMarketFee
     )
   }
-  if (asset.accessDetails.templateId === 2) {
+  if (asset.accessDetails?.templateId === 2) {
     const tx: any = await approve(
       signer,
       config,
@@ -175,7 +176,7 @@ export async function order(
       orderParams._consumeMarketFee
     )
   }
-  if (asset.accessDetails.templateId === 2) {
+  if (asset.accessDetails?.templateId === 2) {
     return await datatoken.buyFromDispenserAndOrder(
       asset.services[0].datatokenAddress,
       orderParams,

View File

@@ -15,6 +15,8 @@ import {
   AbiItem,
   getErrorMessage
 } from '@oceanprotocol/lib'
+// if customProviderUrl is set, we need to call provider using this custom endpoint
+import { customProviderUrl } from '../../app.config'
 import { QueryHeader } from '@shared/FormInput/InputElement/Headers'
 import { Signer } from 'ethers'
 import { getValidUntilTime } from './compute'
@@ -50,7 +52,7 @@ export async function initializeProviderForCompute(
       computeAlgo,
       computeEnv?.id,
       validUntil,
-      dataset.services[0].serviceEndpoint,
+      customProviderUrl || dataset.services[0].serviceEndpoint,
       accountId
     )
   } catch (error) {
@@ -69,7 +71,11 @@ export async function getEncryptedFiles(
 ): Promise<string> {
   try {
     // https://github.com/oceanprotocol/provider/blob/v4main/API.md#encrypt-endpoint
-    const response = await ProviderInstance.encrypt(files, chainId, providerUrl)
+    const response = await ProviderInstance.encrypt(
+      files,
+      chainId,
+      customProviderUrl || providerUrl
+    )
     return response
   } catch (error) {
     const message = getErrorMessage(JSON.parse(error.message))
@@ -88,7 +94,7 @@ export async function getFileDidInfo(
     const response = await ProviderInstance.checkDidFiles(
       did,
       serviceId,
-      providerUrl,
+      customProviderUrl || providerUrl,
       withChecksum
     )
     return response
@@ -125,7 +131,10 @@ export async function getFileInfo(
         hash: file
       }
       try {
-        response = await ProviderInstance.getFileInfo(fileIPFS, providerUrl)
+        response = await ProviderInstance.getFileInfo(
+          fileIPFS,
+          customProviderUrl || providerUrl
+        )
       } catch (error) {
         const message = getErrorMessage(JSON.parse(error.message))
         LoggerInstance.error('[Provider Get File info] Error:', message)
@@ -139,7 +148,10 @@ export async function getFileInfo(
         transactionId: file
       }
       try {
-        response = await ProviderInstance.getFileInfo(fileArweave, providerUrl)
+        response = await ProviderInstance.getFileInfo(
+          fileArweave,
+          customProviderUrl || providerUrl
+        )
       } catch (error) {
         const message = getErrorMessage(JSON.parse(error.message))
         LoggerInstance.error('[Provider Get File info] Error:', message)
@@ -155,7 +167,10 @@ export async function getFileInfo(
         query
       }
       try {
-        response = await ProviderInstance.getFileInfo(fileGraphql, providerUrl)
+        response = await ProviderInstance.getFileInfo(
+          fileGraphql,
+          customProviderUrl || providerUrl
+        )
       } catch (error) {
         const message = getErrorMessage(JSON.parse(error.message))
         LoggerInstance.error('[Provider Get File info] Error:', message)
@@ -174,7 +189,7 @@ export async function getFileInfo(
       try {
         response = await ProviderInstance.getFileInfo(
           fileSmartContract,
-          providerUrl
+          customProviderUrl || providerUrl
         )
       } catch (error) {
         const message = getErrorMessage(JSON.parse(error.message))
@@ -192,7 +207,10 @@ export async function getFileInfo(
         method
       }
       try {
-        response = await ProviderInstance.getFileInfo(fileUrl, providerUrl)
+        response = await ProviderInstance.getFileInfo(
+          fileUrl,
+          customProviderUrl || providerUrl
+        )
       } catch (error) {
         const message = getErrorMessage(JSON.parse(error.message))
         LoggerInstance.error('[Provider Get File info] Error:', message)
@@ -217,7 +235,7 @@ export async function downloadFile(
       asset.services[0].id,
       0,
       validOrderTx || asset.accessDetails.validOrderTx,
-      asset.services[0].serviceEndpoint,
+      customProviderUrl || asset.services[0].serviceEndpoint,
       signer
     )
   } catch (error) {

View File

@@ -38,7 +38,7 @@ export default function Tags({
   max = max || items.length
   const remainder = items.length - max
   // filter out empty array items, and restrict to `max`
-  const tags = items.filter((tag) => tag !== '').slice(0, max)
+  const tags = items?.filter((tag) => tag !== '').slice(0, max)
   const shouldShowMore = showMore && remainder > 0
   const classes = className ? `${styles.tags} ${className}` : styles.tags

View File

@@ -24,7 +24,8 @@ import {
   marketFeeAddress,
   publisherMarketOrderFee,
   publisherMarketFixedSwapFee,
-  defaultDatatokenTemplateIndex
+  defaultDatatokenTemplateIndex,
+  customProviderUrl
 } from '../../../app.config'
 import { sanitizeUrl } from '@utils/url'
 import { getContainerChecksum } from '@utils/docker'
@@ -202,14 +203,15 @@ export async function createTokensAndPricing(
     values.metadata.transferable
   )
   LoggerInstance.log('[publish] Creating NFT with metadata', nftCreateData)
-
   // TODO: cap is hardcoded for now to 1000, this needs to be discussed at some point
   const ercParams: DatatokenCreateParams = {
     templateIndex: defaultDatatokenTemplateIndex,
     minter: accountId,
     paymentCollector: accountId,
     mpFeeAddress: marketFeeAddress,
-    feeToken: values.pricing.baseToken.address,
+    feeToken:
+      process.env.NEXT_PUBLIC_OCEAN_TOKEN_ADDRESS ||
+      values.pricing.baseToken.address,
     feeAmount: publisherMarketOrderFee,
     // max number
     cap: '115792089237316195423570985008687907853269984665640564039457',
@@ -225,10 +227,14 @@ export async function createTokensAndPricing(
     case 'fixed': {
       const freParams: FreCreationParams = {
         fixedRateAddress: config.fixedRateExchangeAddress,
-        baseTokenAddress: values.pricing.baseToken.address,
+        baseTokenAddress: process.env.NEXT_PUBLIC_OCEAN_TOKEN_ADDRESS
+          ? process.env.NEXT_PUBLIC_OCEAN_TOKEN_ADDRESS
+          : values.pricing.baseToken.address,
         owner: accountId,
         marketFeeCollector: marketFeeAddress,
-        baseTokenDecimals: values.pricing.baseToken.decimals,
+        baseTokenDecimals: process.env.NEXT_PUBLIC_OCEAN_TOKEN_ADDRESS
+          ? 18
+          : values.pricing.baseToken.decimals,
         datatokenDecimals: 18,
         fixedRate: values.pricing.price.toString(),
         marketFee: publisherMarketFixedSwapFee,

View File

@@ -23,6 +23,7 @@ import { getOceanConfig } from '@utils/ocean'
 import { validationSchema } from './_validation'
 import { useAbortController } from '@hooks/useAbortController'
 import { setNFTMetadataAndTokenURI } from '@utils/nft'
+import { customProviderUrl } from '../../../app.config'
 import { useAccount, useNetwork, useSigner } from 'wagmi'

 export default function PublishPage({
@@ -144,7 +145,7 @@ export default function PublishPage({
       ddoEncrypted = await ProviderInstance.encrypt(
         ddo,
         ddo.chainId,
-        values.services[0].providerUrl.url,
+        customProviderUrl || values.services[0].providerUrl.url,
         newAbortController()
       )
     } catch (error) {
@@ -165,7 +166,6 @@ export default function PublishPage({
         status: 'success'
       }
     }))
-
     return { ddo, ddoEncrypted }
   } catch (error) {
     LoggerInstance.error('[publish] error', error.message)