Please help me convert the JSON output of a REST query against InfluxDB into a table in Power BI.
In order to help you, I need at least the JSON structure…
Also, what exactly is the problem on the Power BI/InfluxDB side?
My JSON structure is as follows:
{
“results”: [
{
“statement_id”: 0,
“series”: [
{
“name”: “Electrical”,
“tags”: {
“resource_name”: “MainActivePowerTotal”
},
“columns”: [
“time”,
“mean”
],
“values”: [
[
“2020-02-17T06:10:00Z”,
null
],
[
“2020-02-17T06:20:00Z”,
587.67744140625
],
[
“2020-02-17T06:30:00Z”,
596.9282315882241
],
[
“2020-02-17T06:40:00Z”,
549.469442267167
],
[
“2020-02-17T06:50:00Z”,
541.917407989502
],
[
“2020-02-17T07:00:00Z”,
561.8912205051731
],
[
“2020-02-17T07:10:00Z”,
545.8261212604801
],
[
“2020-02-17T07:20:00Z”,
568.2466936809261
],
[
“2020-02-17T07:30:00Z”,
569.2568552901105
],
[
“2020-02-17T07:40:00Z”,
575.8338697479992
],
[
“2020-02-17T07:50:00Z”,
576.8921612995426
],
[
“2020-02-17T08:00:00Z”,
573.4999496459961
],
[
“2020-02-17T08:10:00Z”,
560.0957954220655
],
[
“2020-02-17T08:20:00Z”,
562.0249328613281
]
]
},
{
“name”: “Electrical”,
“tags”: {
“resource_name”: “MainCurrentAvg”
},
“columns”: [
“time”,
“mean”
],
“values”: [
[
“2020-02-17T06:10:00Z”,
null
],
[
“2020-02-17T06:20:00Z”,
907.6358032226562
],
[
“2020-02-17T06:30:00Z”,
915.0737813313802
],
[
“2020-02-17T06:40:00Z”,
850.6507209329044
],
[
“2020-02-17T06:50:00Z”,
815.8712986537388
],
[
“2020-02-17T07:00:00Z”,
865.1920640733507
],
[
“2020-02-17T07:10:00Z”,
842.1758460998535
],
[
“2020-02-17T07:20:00Z”,
873.9825903320312
],
[
“2020-02-17T07:30:00Z”,
872.6841081891741
],
[
“2020-02-17T07:40:00Z”,
884.9004669189453
],
[
“2020-02-17T07:50:00Z”,
894.0495697021485
],
[
“2020-02-17T08:00:00Z”,
877.8777683803013
],
[
“2020-02-17T08:10:00Z”,
859.5932713558799
],
[
“2020-02-17T08:20:00Z”,
875.2303161621094
]
]
},
{
“name”: “Electrical”,
“tags”: {
“resource_name”: “MainEnergy”
},
“columns”: [
“time”,
“mean”
],
“values”: [
[
“2020-02-17T06:10:00Z”,
null
],
[
“2020-02-17T06:20:00Z”,
5102723.4
],
[
“2020-02-17T06:30:00Z”,
5102773.659090909
],
[
“2020-02-17T06:40:00Z”,
5102874.0625
],
[
“2020-02-17T06:50:00Z”,
5102967.76
],
[
“2020-02-17T07:00:00Z”,
5103057.42
],
[
“2020-02-17T07:10:00Z”,
5103156.75
],
[
“2020-02-17T07:20:00Z”,
5103249.166666667
],
[
“2020-02-17T07:30:00Z”,
5103336.5
],
[
“2020-02-17T07:40:00Z”,
5103431.604166667
],
[
“2020-02-17T07:50:00Z”,
5103533.652173913
],
[
“2020-02-17T08:00:00Z”,
5103625.95
],
[
“2020-02-17T08:10:00Z”,
5103731.1
],
[
“2020-02-17T08:20:00Z”,
5103774.625
]
]
},
{
“name”: “Electrical”,
“tags”: {
“resource_name”: “MainPowerFactor”
},
“columns”: [
“time”,
“mean”
],
“values”: [
[
“2020-02-17T06:10:00Z”,
null
],
[
“2020-02-17T06:20:00Z”,
0.9097714920838674
],
[
“2020-02-17T06:30:00Z”,
0.9139599757535117
],
[
“2020-02-17T06:40:00Z”,
0.9151813518710252
],
[
“2020-02-17T06:50:00Z”,
0.9137259721755981
],
[
“2020-02-17T07:00:00Z”,
0.9180003716832116
],
[
“2020-02-17T07:10:00Z”,
0.9136283340908232
],
[
“2020-02-17T07:20:00Z”,
0.9169310607561251
],
[
“2020-02-17T07:30:00Z”,
0.9202882022392459
],
[
“2020-02-17T07:40:00Z”,
0.9164717424483526
],
[
“2020-02-17T07:50:00Z”,
0.9177247940040216
],
[
“2020-02-17T08:00:00Z”,
0.924390922232372
],
[
“2020-02-17T08:10:00Z”,
0.9215794191474006
],
[
“2020-02-17T08:20:00Z”,
0.9185435771942139
]
]
},
{
“name”: “Electrical”,
“tags”: {
“resource_name”: “VoltageA”
},
“columns”: [
“time”,
“mean”
],
“values”: [
[
“2020-02-17T06:10:00Z”,
null
],
[
“2020-02-17T06:20:00Z”,
237.6288833618164
],
[
“2020-02-17T06:30:00Z”,
237.8891835530599
],
[
“2020-02-17T06:40:00Z”,
238.23394147087546
],
[
“2020-02-17T06:50:00Z”,
238.68151047650505
],
[
“2020-02-17T07:00:00Z”,
236.02180252075195
],
[
“2020-02-17T07:10:00Z”,
235.0208740234375
],
[
“2020-02-17T07:20:00Z”,
235.1172063918341
],
[
“2020-02-17T07:30:00Z”,
235.41241794162326
],
[
“2020-02-17T07:40:00Z”,
235.31271240234375
],
[
“2020-02-17T07:50:00Z”,
235.31833618164063
],
[
“2020-02-17T08:00:00Z”,
235.36876317074424
],
[
“2020-02-17T08:10:00Z”,
235.18864922774466
],
[
“2020-02-17T08:20:00Z”,
234.9145263671875
]
]
},
{
“name”: “Electrical”,
“tags”: {
“resource_name”: “VoltageB”
},
“columns”: [
“time”,
“mean”
],
“values”: [
[
“2020-02-17T06:10:00Z”,
null
],
[
“2020-02-17T06:20:00Z”,
237.04432678222656
],
[
“2020-02-17T06:30:00Z”,
237.54657406277127
],
[
“2020-02-17T06:40:00Z”,
238.14813714278372
],
[
“2020-02-17T06:50:00Z”,
238.4341817220052
],
[
“2020-02-17T07:00:00Z”,
236.1190538406372
],
[
“2020-02-17T07:10:00Z”,
234.7789579119001
],
[
“2020-02-17T07:20:00Z”,
234.93265702989368
],
[
“2020-02-17T07:30:00Z”,
235.25931895862925
],
[
“2020-02-17T07:40:00Z”,
235.1957550048828
],
[
“2020-02-17T07:50:00Z”,
235.32901000976562
],
[
“2020-02-17T08:00:00Z”,
235.26353615208677
],
[
“2020-02-17T08:10:00Z”,
235.0978963751542
],
[
“2020-02-17T08:20:00Z”,
null
]
]
},
{
“name”: “Electrical”,
“tags”: {
“resource_name”: “VoltageC”
},
“columns”: [
“time”,
“mean”
],
“values”: [
[
“2020-02-17T06:10:00Z”,
null
],
[
“2020-02-17T06:20:00Z”,
238.33627700805664
],
[
“2020-02-17T06:30:00Z”,
238.7968485514323
],
[
“2020-02-17T06:40:00Z”,
239.19335005018445
],
[
“2020-02-17T06:50:00Z”,
239.64923213078424
],
[
“2020-02-17T07:00:00Z”,
237.22103881835938
],
[
“2020-02-17T07:10:00Z”,
235.9083513532366
],
[
“2020-02-17T07:20:00Z”,
235.97623879568917
],
[
“2020-02-17T07:30:00Z”,
236.32627563476564
],
[
“2020-02-17T07:40:00Z”,
236.2939652663011
],
[
“2020-02-17T07:50:00Z”,
236.29410298665366
],
[
“2020-02-17T08:00:00Z”,
236.39215850830078
],
[
“2020-02-17T08:10:00Z”,
236.1125602722168
],
[
“2020-02-17T08:20:00Z”,
235.96351623535156
]
]
}
]
}
]
}
I need the resource names as columns and their corresponding mean values as rows; the time is common to all resource names.
Further, is there a function to dynamically convert any multi-array JSON into a table?
Hello @LochanaMendis,
Welcome! Perhaps this thread or community user can provide you with additional help:
I’ve found a way to read that data in a decent shape, without writing the M code by hand.
Please note that the JSON you pasted is not valid JSON (at least for me), since it does not use standard double quotes ("). Anyway, I saved it to a file and loaded it from there; the code is below. (If you would rather query InfluxDB directly instead of going through a file, see the Web.Contents sketch after this first query.)
let
Source = Json.Document(File.Contents("C:\Users\gluisotto\Desktop\TestDataInflux.json")),
results = Source[results],
results1 = results{0},
series = results1[series],
#"Converted to Table" = Table.FromList(series, Splitter.SplitByNothing(), null, null, ExtraValues.Error),
#"Expanded Column1" = Table.ExpandRecordColumn(#"Converted to Table", "Column1", {"name", "tags", "columns", "values"}, {"Column1.name", "Column1.tags", "Column1.columns", "Column1.values"}),
#"Expanded Column1.tags" = Table.ExpandRecordColumn(#"Expanded Column1", "Column1.tags", {"resource_name"}, {"Column1.tags.resource_name"}),
#"Expanded Column1.values" = Table.ExpandListColumn(#"Expanded Column1.tags", "Column1.values"),
#"Extracted Values" = Table.TransformColumns(#"Expanded Column1.values", {"Column1.values", each Text.Combine(List.Transform(_, Text.From), ","), type text}),
#"Split Column by Delimiter" = Table.SplitColumn(#"Extracted Values", "Column1.values", Splitter.SplitTextByDelimiter(",", QuoteStyle.Csv), {"Column1.values.1", "Column1.values.2"}),
#"Changed Type" = Table.TransformColumnTypes(#"Split Column by Delimiter",{{"Column1.values.1", type datetime}, {"Column1.values.2", type number}})
in
#"Changed Type"
As you will see, the result of that first query is not bad, but it is still a bit raw:
- the columns are not renamed dynamically; I looked around the web a bit but did not find a quick way to do so (since the query itself is static, you can also set the names manually; if you change the query, you will also change the field names). A possible dynamic rename is sketched after this list.
- the "columns" column, of type list, still contains the names of the value columns; you may want to remove it from the final result.
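For the first point, one possible approach (only a sketch, and it assumes every series reports the same "columns" list, e.g. {"time", "mean"}) is to read the names from the first row and rename the split value columns with them, by appending two steps to the first query:
// Extra steps appended after #"Changed Type" in the first query above;
// change the final "in" to return #"Renamed Dynamically".
NewNames = Record.Field(#"Changed Type"{0}, "Column1.columns"),
OldNames = {"Column1.values.1", "Column1.values.2"},
#"Renamed Dynamically" = Table.RenameColumns(#"Changed Type", List.Zip({OldNames, NewNames}))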
A nicer result can be obtained with the code below, which also pivots the table, turning your table structure from a list of "attribute | value" rows into one column per "attribute" (the tag "resource_name" in this case):
let
Source = Json.Document(File.Contents("C:\Users\gluisotto\Desktop\TestDataInflux.json")),
results = Source[results],
results1 = results{0},
series = results1[series],
#"Converted to Table" = Table.FromList(series, Splitter.SplitByNothing(), null, null, ExtraValues.Error),
#"Expanded Column1" = Table.ExpandRecordColumn(#"Converted to Table", "Column1", {"name", "tags", "columns", "values"}, {"Column1.name", "Column1.tags", "Column1.columns", "Column1.values"}),
#"Expanded Column1.tags" = Table.ExpandRecordColumn(#"Expanded Column1", "Column1.tags", {"resource_name"}, {"Column1.tags.resource_name"}),
#"Expanded Column1.values" = Table.ExpandListColumn(#"Expanded Column1.tags", "Column1.values"),
#"Extracted Values" = Table.TransformColumns(#"Expanded Column1.values", {"Column1.values", each Text.Combine(List.Transform(_, Text.From), ","), type text}),
#"Split Column by Delimiter" = Table.SplitColumn(#"Extracted Values", "Column1.values", Splitter.SplitTextByDelimiter(",", QuoteStyle.Csv), {"Column1.values.1", "Column1.values.2"}),
#"Changed Type" = Table.TransformColumnTypes(#"Split Column by Delimiter",{{"Column1.values.1", type datetime}, {"Column1.values.2", type number}}),
#"Removed Columns" = Table.RemoveColumns(#"Changed Type",{"Column1.columns"}),
#"Renamed Columns" = Table.RenameColumns(#"Removed Columns",{{"Column1.values.1", "Time"}}),
#"Reordered Columns" = Table.ReorderColumns(#"Renamed Columns",{"Column1.name", "Time", "Column1.tags.resource_name", "Column1.values.2"}),
#"Pivoted Column" = Table.Pivot(#"Reordered Columns", List.Distinct(#"Reordered Columns"[Column1.tags.resource_name]), "Column1.tags.resource_name", "Column1.values.2", List.Sum)
in
#"Pivoted Column"