In my previous post Old meets new, the 1-Wire Weather Station on the SPARK Core (part 3), the weather station transformed more and more into an IoT device. But we are still not satisfied with this local webserver approach, while all kinds of fancy dashboards and data visualization systems are currently available. Spark.io promised us a data visualization tool for Q4 2014, but unfortunately it has been delayed to Q2 2015 (https://www.spark.io/features#dataviz). Therefore I searched for other possibilities, and the first one is the very popular https://plot.ly/ tool. There is even a plotly library available for the SPARK Core (https://github.com/bigmacd/spark-plotly).
// This #include statement was automatically added by the Spark IDE. #include "spark-plotly/spark-plotly.h" #include "OneWire/OneWire.h" #include "OWSensor.h" #define NUM_SENSORS 10 #define DATA_POINTS 2 char* data_point_tokens[DATA_POINTS] = {"token1", "token2"}; plotly graph = plotly("my_plotly_username", "my_plotly_api_key", data_point_tokens, "streamname", DATA_POINTS); OneWire one = OneWire(D3); uint8_t resp[9]; char myIpAddress[24]; char tempfStr[16]; double temperature = 0; int test = 0; //unsigned int lastTime = 0; OWSensor sensors[NUM_SENSORS]; int checkIndex = 0; void getTempBytes(uint8_t *rom) { // Get the temp one.reset(); one.write(0x55); one.write_bytes(rom,8); one.write(0x44); delay(10); //ask for the temperature from one.reset(); one.write(0x55); one.write_bytes(rom, 8); one.write(0xBE); one.read_bytes(resp, 9); } float getTemp(char unit) { byte MSB = resp[1]; byte LSB = resp[0]; float tempRead = ((MSB << 8) | LSB); //using two's compliment if (unit == 'F') { float TemperatureSum = tempRead / 16; //Multiply by 9, then divide by 5, then add 32 float fahrenheit = ((TemperatureSum * 9) / 5) + 32; if (fahrenheit > 7000) { fahrenheit = 7404 - fahrenheit; } return fahrenheit; } else { float celcius = tempRead * 0.0625; return celcius; } } void findDevices() { uint8_t addr[12]; int found = 0; while(one.search(addr)) { Serial.print("Found device: "); char *owID = new char[24]; sprintf(owID, "%02x%02x%02x%02x%02x%02x%02x%02x%02x", addr[0], addr[1], addr[2] , addr[3] , addr[4] , addr[5], addr[6], addr[7] , addr[8] ); sensors[found].id = owID; for(int i=0;i<9;i++) { sensors[found].rom[i] = addr[i]; } sensors[found].updated = 0; Serial.print(owID); if (addr[0] == 0x22) { //ds1822 temp sensor getTempBytes(sensors[found].rom); temperature = getTemp('C'); sensors[found].value = temperature; Serial.print(" Temperature: " + String(temperature)); sensors[found].updated = millis(); } Serial.println(""); found++; } } void setup() { Serial.begin(9600); findDevices(); Spark.variable("temperature", &temperature, DOUBLE); Spark.variable("ipAddress", myIpAddress, STRING); IPAddress myIp = WiFi.localIP(); sprintf(myIpAddress, "%d.%d.%d.%d", myIp[0], myIp[1], myIp[2], myIp[3]); graph.init(); graph.fileopt = "extend"; graph.openStream(); } void loop() { Serial.println("waiting 3 seconds..."); delay(3000); findDevices(); unsigned int now = millis(); // just for test graph.plot(now, 1, data_point_tokens[0]); graph.plot(now, 2, data_point_tokens[1]); }
Unfortunately I did not succeed in getting this to work. I followed the Arduino instructions at https://github.com/plotly/arduino-api, but got stuck at point 7 of the quick start.
Furthermore, the Spark Core won't allow a reflash after I load this code, and I had to switch to the DFU loader to get my Core up and running again. Other users on the Spark.io forum encounter similar problems (see the forum topic "Spark core won't allow a reflash after I load this code"). Maybe the total size of the program is an issue.
Many more commercial and free dashboards are available. A very special one is Atomiot, which is dedicated specifically to the Spark Core platform. I decided to use Atomiot for now and wait for Spark.io's own visualization tool in Q2 2015. A big advantage of Atomiot is that no extra library is needed. By just putting
Spark.variable("name", &variable, TYPE);
in your code, you create a link between the Core and the outside world.
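To make this concrete, here is a minimal sketch of the pattern, assuming a hypothetical variable called counter; the name, the update interval and the URL in the comment are illustrative only and not part of the weather station code:

// Minimal sketch: expose a single value through the Spark Cloud.
// "counter" is only an example name; once registered it can be read by any
// client that knows the device id and access token, e.g. with a GET request to
//   https://api.spark.io/v1/devices/<device-id>/counter?access_token=<token>
double counter = 0;

void setup()
{
    // Register the global once; the cloud always serves its latest value
    Spark.variable("counter", &counter, DOUBLE);
}

void loop()
{
    counter += 1;   // update the value locally
    delay(1000);    // example interval
}

Polling registered variables through the cloud API like this is presumably also how Atomiot picks up its data.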
So just adding a few lines to the original program makes it a real IoT device:
// This #include statement was automatically added by the Spark IDE. #include "OneWire/OneWire.h" #include "OWSensor.h" #define NUM_SENSORS 10 OneWire one = OneWire(D3); uint8_t resp[9]; char myIpAddress[24]; char tempfStr[16]; double temperature1 = 0; double temperature2 = 0; int test = 0; OWSensor sensors[NUM_SENSORS]; int checkIndex = 0; void getTempBytes(uint8_t *rom) { // Get the temp one.reset(); one.write(0x55); one.write_bytes(rom,8); one.write(0x44); delay(10); //ask for the temperature from one.reset(); one.write(0x55); one.write_bytes(rom, 8); one.write(0xBE); one.read_bytes(resp, 9); } float getTemp(char unit) { byte MSB = resp[1]; byte LSB = resp[0]; float tempRead = ((MSB << 8) | LSB); //using two's compliment if (unit == 'F') { float TemperatureSum = tempRead / 16; //Multiply by 9, then divide by 5, then add 32 float fahrenheit = ((TemperatureSum * 9) / 5) + 32; if (fahrenheit > 7000) { fahrenheit = 7404 - fahrenheit; } return fahrenheit; } else { float celcius = tempRead * 0.0625; return celcius; } } void findDevices() { uint8_t addr[12]; int found = 0; while(one.search(addr)) { char *owID = new char[24]; sprintf(owID, "%02x%02x%02x%02x%02x%02x%02x%02x", addr[0], addr[1], addr[2] , addr[3] , addr[4] , addr[5], addr[6], addr[7] ); sensors[found].id = owID; for(int i=0;i<8;i++) { sensors[found].rom[i] = addr[i]; } sensors[found].updated = 0; if (addr[0] == 0x22) { //ds1822 temp sensor getTempBytes(sensors[found].rom); sensors[found].value = getTemp('C'); sensors[found].updated = millis(); } found++; } } void getValues() { for(int i=0;i<NUM_SENSORS;i++) { if (sensors[i].id != NULL) { String strid(sensors[i].id); if (strid == "22e8300300000011") { temperature1 = sensors[i].value; } if (strid == "224a3f030000004c") { temperature2 = sensors[i].value; } } } } void setup() { findDevices(); Spark.variable("temperature1", &temperature1, DOUBLE); Spark.variable("temperature2", &temperature2, DOUBLE); Spark.variable("ipAddress", myIpAddress, STRING); IPAddress myIp = WiFi.localIP(); sprintf(myIpAddress, "%d.%d.%d.%d", myIp[0], myIp[1], myIp[2], myIp[3]); } void loop() { delay(100000); findDevices(); getValues(); }
After creating an account on Atomiot and filling in your Spark.io access token and device id, you are able to read the Spark.variables defined on the Core, in this case temperature1, temperature2 and ipAddress. The latter doesn't make much sense anymore now that we no longer use the local webserver.
From here you can create a schedule, and from the schedule a nice graph.
That's it for now; I'm open to other ideas for dashboards and visualization tools.
Next steps will be the implementation of all the weather sensors and powering the device with a solar panel and battery.
Stay tuned!