Files
Ai/scripts/export_location_data_seed.py
2026-05-13 16:45:54 +03:30

56 lines
1.7 KiB
Python

import json
from pathlib import Path
import MySQLdb
from dotenv import dotenv_values
# Tables whose full contents are exported as seed data.
# NOTE(review): order looks parent-before-child (location -> runs -> results
# -> options) — confirm before relying on it for FK-safe re-import.
TABLES = [
"location_data_soillocation",
"location_data_blocksubdivision",
"location_data_remotesensingrun",
"location_data_remotesensingsubdivisionresult",
"location_data_remotesensingclusterblock",
"location_data_remotesensingclusterassignment",
"location_data_analysisgridcell",
"location_data_analysisgridobservation",
"location_data_remotesensingsubdivisionoption",
"location_data_remotesensingsubdivisionoptionblock",
"location_data_remotesensingsubdivisionoptionassignment",
"dashboard_data_ndviobservation",
]
def main() -> None:
    """Dump every row of each table in TABLES to stdout as one JSON object.

    Connection settings are read from the ``.env`` file one directory above
    this script.  The output maps table name -> list of row dicts; byte
    columns are decoded as UTF-8 and anything JSON can't serialize natively
    (dates, Decimals, ...) is stringified via ``default=str``.
    """
    env = dotenv_values(Path(__file__).resolve().parent.parent / ".env")
    # Use "or" fallbacks instead of dict defaults: dotenv_values yields
    # ""/None for keys that are present in .env but left empty, which would
    # otherwise crash int() or produce an empty host.
    conn = MySQLdb.connect(
        host=env.get("DB_HOST") or "127.0.0.1",
        port=int(env.get("DB_PORT") or 3306),
        user=env.get("DB_USER") or "",
        passwd=env.get("DB_PASSWORD") or "",
        db=env.get("DB_NAME") or "",
        charset="utf8mb4",
    )
    out: dict[str, list[dict]] = {}
    try:
        # mysqlclient >= 1.4 removed the old "with conn as cursor" protocol
        # (Connection.__enter__ no longer returns a cursor), so create and
        # close the cursor explicitly.
        cursor = conn.cursor()
        try:
            for table in TABLES:
                # `table` comes only from the hard-coded TABLES whitelist,
                # so the f-string interpolation is not an injection risk.
                cursor.execute(f"SELECT * FROM {table}")
                columns = [col[0] for col in cursor.description]
                rows = []
                for raw_row in cursor.fetchall():
                    row = {}
                    for key, value in zip(columns, raw_row):
                        # BLOB/VARBINARY values arrive as bytes; assume the
                        # stored data is UTF-8 text.
                        if isinstance(value, (bytes, bytearray)):
                            value = value.decode("utf-8")
                        row[key] = value
                    rows.append(row)
                out[table] = rows
        finally:
            cursor.close()
    finally:
        conn.close()
    print(json.dumps(out, ensure_ascii=False, indent=2, default=str))
# Script entry point: run the export when executed directly.
if __name__ == "__main__":
    main()