[Ethereum] How to access Swarm content from ethereum smart contract using oraclize

dapp-developmentoraclesswarm

I am trying to access the content in a file that I uploaded to Swarm using oraclize. I found a smart contract in https://github.com/oraclize/ethereum-examples/blob/master/solidity/Swarm.sol

I was just trying to print 'hello world' (the content of the Swarm hash the author specified in the contract). So I simply deployed the contract, then used the contract details in my JavaScript code as follows (using meteor.js):

//variables

// Address of the deployed Swarm example contract on-chain.
// NOTE(review): the original assigned these without any declaration,
// creating implicit globals (a ReferenceError in strict mode). Declared
// with `const` here — confirm no other file relies on them being
// properties of the global object.
const contractAddressSwarm = "0x87b620f0731e15cb1655e216a87c986154ba4248";

// ABI of the Oraclize Swarm example contract: the two __callback
// overloads Oraclize invokes, the `swarmContent` constant getter, the
// payable `update` trigger, the constructor, and two events.
const ABIArraySwarm = [{"constant":false,"inputs":[{"name":"myid","type":"bytes32"},{"name":"result","type":"string"}],"name":"__callback","outputs":[],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"myid","type":"bytes32"},{"name":"result","type":"string"},{"name":"proof","type":"bytes"}],"name":"__callback","outputs":[],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"swarmContent","outputs":[{"name":"","type":"string"}],"payable":false,"type":"function"},{"constant":false,"inputs":[],"name":"update","outputs":[],"payable":true,"type":"function"},{"inputs":[],"payable":false,"type":"constructor"},{"anonymous":false,"inputs":[{"indexed":false,"name":"description","type":"string"}],"name":"newOraclizeQuery","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"name":"swarmContent","type":"string"}],"name":"newSwarmContent","type":"event"}];

//Print swarm content in the console when a button is clicked

'click #buttonSwarm'(event, instance) {
var template= Template.instance();

myContract = web3.eth.contract(ABIArraySwarm).at(contractAddressSwarm);

console.log("Swarm Contract execution");

myContract.swarmContent(function(err, res){
TemplateVar.set(template, "counter", res);
console.log(TemplateVar.get(template,"counter"));
});
},

...

But every time it prints 'null'. What am I doing wrong in the callback?

Best Answer

Marco from Oraclize here. Support for Swarm is experimental, so there may be occasional disruptions of the datasource service, as you have experienced. It is now up again and working correctly. You can test it here.

Related Topic