I am trying to add support for InfluxDB 2.0 to my project using the InfluxDB Python client library.

I am replicating the existing InfluxDB 1.8 support in my Django website: I am keeping the 1.8 support and adding 2.0 support alongside it.

# InfluxQL (InfluxDB 1.8) query templates, keyed by chart type.
# Each template is rendered with str.format(); common placeholders:
#   {field_name} - metric field to aggregate
#   {key}        - measurement name
#   {time}       - start of the time window
#   {end_date}   - optional extra time clause (may render empty)
#   {content_type}, {object_id} - filters binding the series to one object
# Chart-specific placeholders: {ifname}, {organization_id},
# {location_id}, {floorplan_id}.
# NOTE(review): the iperf3-style charts (bandwidth, transfer,
# retransmits, jitter, datagram, datagram_loss) omit {end_date}
# while every other chart includes it — confirm whether that
# omission is intentional before reusing these templates.
chart_query = {
    'uptime': {
        'influxdb': (
            "SELECT MEAN({field_name})*100 AS uptime FROM {key} WHERE "
            "time >= '{time}' {end_date} AND content_type = '{content_type}' AND "
            "object_id = '{object_id}' GROUP BY time(1d)"
        )
    },
    'packet_loss': {
        'influxdb': (
            "SELECT MEAN(loss) AS packet_loss FROM {key} WHERE "
            "time >= '{time}' {end_date} AND content_type = '{content_type}' AND "
            "object_id = '{object_id}' GROUP BY time(1d)"
        )
    },
    'rtt': {
        'influxdb': (
            "SELECT MEAN(rtt_avg) AS RTT_average, MEAN(rtt_max) AS "
            "RTT_max, MEAN(rtt_min) AS RTT_min FROM {key} WHERE "
            "time >= '{time}' {end_date} AND content_type = '{content_type}' AND "
            "object_id = '{object_id}' GROUP BY time(1d)"
        )
    },
    # Per-interface client count; requires {ifname}.
    'wifi_clients': {
        'influxdb': (
            "SELECT COUNT(DISTINCT({field_name})) AS wifi_clients FROM {key} "
            "WHERE time >= '{time}' {end_date} AND content_type = '{content_type}' "
            "AND object_id = '{object_id}' AND ifname = '{ifname}' "
            "GROUP BY time(1d)"
        )
    },
    # Aggregate variant: filters by org/location/floorplan clauses
    # instead of a single object.
    'general_wifi_clients': {
        'influxdb': (
            "SELECT COUNT(DISTINCT({field_name})) AS wifi_clients FROM {key} "
            "WHERE time >= '{time}' {end_date} {organization_id} {location_id} {floorplan_id} "
            "GROUP BY time(1d)"
        )
    },
    # Byte counters scaled to gigabytes (/ 1000000000).
    'traffic': {
        'influxdb': (
            "SELECT SUM(tx_bytes) / 1000000000 AS upload, "
            "SUM(rx_bytes) / 1000000000 AS download FROM {key} "
            "WHERE time >= '{time}' {end_date} AND content_type = '{content_type}' "
            "AND object_id = '{object_id}' AND ifname = '{ifname}' "
            "GROUP BY time(1d)"
        )
    },
    'general_traffic': {
        'influxdb': (
            "SELECT SUM(tx_bytes) / 1000000000 AS upload, "
            "SUM(rx_bytes) / 1000000000 AS download FROM {key} "
            "WHERE time >= '{time}' {end_date} {organization_id} {location_id} "
            "{floorplan_id} {ifname} "
            "GROUP BY time(1d)"
        )
    },
    'memory': {
        'influxdb': (
            "SELECT MEAN(percent_used) AS memory_usage "
            "FROM {key} WHERE time >= '{time}' {end_date} AND content_type = '{content_type}' "
            "AND object_id = '{object_id}' GROUP BY time(1d)"
        )
    },
    'cpu': {
        'influxdb': (
            "SELECT MEAN(cpu_usage) AS CPU_load FROM {key} WHERE "
            "time >= '{time}' {end_date} AND content_type = '{content_type}' AND "
            "object_id = '{object_id}' GROUP BY time(1d)"
        )
    },
    'disk': {
        'influxdb': (
            "SELECT MEAN(used_disk) AS disk_usage FROM {key} WHERE "
            "time >= '{time}' {end_date} AND content_type = '{content_type}' AND "
            "object_id = '{object_id}' GROUP BY time(1d)"
        )
    },
    'signal_strength': {
        'influxdb': (
            "SELECT ROUND(MEAN(signal_strength)) AS signal_strength, "
            "ROUND(MEAN(signal_power)) AS signal_power FROM {key} WHERE "
            "time >= '{time}' {end_date} AND content_type = '{content_type}' AND "
            "object_id = '{object_id}' GROUP BY time(1d)"
        )
    },
    'signal_quality': {
        'influxdb': (
            "SELECT ROUND(MEAN(signal_quality)) AS signal_quality, "
            "ROUND(MEAN(snr)) AS signal_to_noise_ratio FROM {key} WHERE "
            "time >= '{time}' {end_date} AND content_type = '{content_type}' AND "
            "object_id = '{object_id}' GROUP BY time(1d)"
        )
    },
    'access_tech': {
        'influxdb': (
            "SELECT MODE(access_tech) AS access_tech FROM {key} WHERE "
            "time >= '{time}' {end_date} AND content_type = '{content_type}' AND "
            "object_id = '{object_id}' GROUP BY time(1d)"
        )
    },
    # iperf3-style charts below: note the missing {end_date} (see NOTE above).
    'bandwidth': {
        'influxdb': (
            "SELECT MEAN(sent_bps_tcp) / 1000000000 AS TCP, "
            "MEAN(sent_bps_udp) / 1000000000 AS UDP FROM {key} WHERE "
            "time >= '{time}' AND content_type = '{content_type}' AND "
            "object_id = '{object_id}' GROUP BY time(1d)"
        )
    },
    'transfer': {
        'influxdb': (
            "SELECT SUM(sent_bytes_tcp) / 1000000000 AS TCP,"
            "SUM(sent_bytes_udp) / 1000000000 AS UDP FROM {key} WHERE "
            "time >= '{time}' AND content_type = '{content_type}' AND "
            "object_id = '{object_id}' GROUP BY time(1d)"
        )
    },
    'retransmits': {
        'influxdb': (
            "SELECT MEAN(retransmits) AS retransmits FROM {key} "
            "WHERE time >= '{time}' AND content_type = '{content_type}' "
            "AND object_id = '{object_id}' GROUP BY time(1d)"
        )
    },
    'jitter': {
        'influxdb': (
            "SELECT MEAN(jitter) AS jitter FROM {key} "
            "WHERE time >= '{time}' AND content_type = '{content_type}' "
            "AND object_id = '{object_id}' GROUP BY time(1d)"
        )
    },
    'datagram': {
        'influxdb': (
            "SELECT MEAN(lost_packets) AS lost_datagram,"
            "MEAN(total_packets) AS total_datagram FROM {key} WHERE "
            "time >= '{time}' AND content_type = '{content_type}' "
            "AND object_id = '{object_id}' GROUP BY time(1d)"
        )
    },
    'datagram_loss': {
        'influxdb': (
            "SELECT MEAN(lost_percent) AS datagram_loss FROM {key} "
            "WHERE time >= '{time}' AND content_type = '{content_type}' "
            "AND object_id = '{object_id}' GROUP BY time(1d)"
        )
    },
}

# Fallback InfluxQL template used when a chart type has no entry in
# chart_query. Two parts: element 0 is the base SELECT with the time
# window; element 1 (leading " AND ...") is presumably appended only
# for charts bound to a specific object — verify against the caller.
default_chart_query = [
    "SELECT {field_name} FROM {key} WHERE time >= '{time}' {end_date}",
    " AND content_type = '{content_type}' AND object_id = '{object_id}'",
]

# Positional-format InfluxQL template fetching the most recent ``data``
# point: {0}.{1} form the fully qualified measurement path and {2} is
# the device primary key.
device_data_query = (
    "SELECT data FROM {0}.{1} WHERE pk = '{2}' ORDER BY time DESC LIMIT 1"
)

This is the existing query file.

I have been able to write some example data successfully, but I do not understand how to convert all of these queries and fit them into `flux_query` in the file below.
Can you share an example? I have searched a lot for this but have not found one:

the query_data.py file is:

import influxdb_client

# Configuration for your InfluxDB client
# NOTE(review): credentials are hard-coded here — move them to
# environment variables / settings before committing real values.
bucket = "myname"
org = "myname"
token = "something=="
url = "http://localhost:9086"  # NOTE(review): InfluxDB's usual port is 8086 — confirm 9086 is intended

# InfluxDB 2.x client; created at import time as a module-level singleton.
client = influxdb_client.InfluxDBClient(
    url=url,
    token=token,
    org=org
)

# Query API handle used to submit Flux queries against this client.
query_api = client.query_api()

def execute_query(flux_query):
    """Run *flux_query* against the configured org and return a list of
    ``(field, value)`` tuples — one per record across all result tables."""
    print("Executing Query:", flux_query)
    tables = query_api.query(org=org, query=flux_query)
    return [
        (record.get_field(), record.get_value())
        for table in tables
        for record in table.records
    ]

# Simple uptime query
# Minimal Flux pipeline: read everything from the bucket over the last
# 24 hours. Additional filters/aggregations would be added as further
# |> pipeline stages.
flux_query = f'''
from(bucket: "{bucket}")
    |> range(start: -1d)
'''

# Execute and print results
results = execute_query(flux_query)
print("Results for simple uptime query:", results)

I want all of the above queries to be written under "flux_query" here:

flux_query = f'''
from(bucket: "{bucket}")
    |> range(start: -1d)
'''

How can I achieve this, so that I am able to write all the queries after converting them from influxql to flux?

Sharing my write_data.py too:

import influxdb_client
from influxdb_client.client.write_api import SYNCHRONOUS

# Connection settings.
# NOTE(review): bucket/org here ("mybucket"/"myorg") differ from the
# ones used in query_data.py ("myname") — confirm both scripts target
# the same database.
bucket = "mybucket"
org = "myorg"
token = "something=="
url = "http://localhost:9086"  # NOTE(review): InfluxDB's usual port is 8086 — confirm

client = influxdb_client.InfluxDBClient(
    url=url,
    token=token,
    org=org
)

# SYNCHRONOUS mode: per the influxdb-client docs, write() blocks until
# the server acknowledges the point, so no explicit flush is required.
write_api = client.write_api(write_options=SYNCHRONOUS)

# Example point: measurement "my_measurement" with one tag and one field.
p = influxdb_client.Point("my_measurement").tag("location", "Prague").field("temperature", 25.3)

try:
    write_api.write(bucket=bucket, org=org, record=p)
    print("Data written successfully.")
except Exception as e:
    print(f"Failed to write data: {e}")

client.close()