diff --git a/app/db.server.ts b/app/db.server.ts index 4eb9ae51..a032ea7d 100644 --- a/app/db.server.ts +++ b/app/db.server.ts @@ -1,45 +1,45 @@ -import { drizzle, type PostgresJsDatabase } from "drizzle-orm/postgres-js"; -import postgres from "postgres"; -import invariant from "tiny-invariant"; -import * as schema from "./schema"; - -let drizzleClient: PostgresJsDatabase; +import { drizzle, type PostgresJsDatabase } from 'drizzle-orm/postgres-js' +import postgres, { type Sql } from 'postgres' +import invariant from 'tiny-invariant' +import * as schema from './schema' +let drizzleClient: PostgresJsDatabase +let pg: Sql declare global { - var __db__: PostgresJsDatabase; + var __db__: + | { + drizzle: PostgresJsDatabase + pg: Sql + } + | undefined } -// this is needed because in development we don't want to restart -// the server with every change, but we want to make sure we don't -// create a new connection to the DB with every change either. -// in production we'll have a single connection to the DB. -if (process.env.NODE_ENV === "production") { - drizzleClient = getClient(); +if (process.env.NODE_ENV === 'production') { + const { drizzle, pg: rawPg } = initClient() + drizzleClient = drizzle + pg = rawPg } else { - if (!global.__db__) { - global.__db__ = getClient(); - } - drizzleClient = global.__db__; + if (!global.__db__) { + global.__db__ = initClient() + } + drizzleClient = global.__db__.drizzle + pg = global.__db__.pg } -function getClient() { - const { DATABASE_URL } = process.env; - invariant(typeof DATABASE_URL === "string", "DATABASE_URL env var not set"); +function initClient() { + const { DATABASE_URL } = process.env + invariant(typeof DATABASE_URL === 'string', 'DATABASE_URL env var not set') - const databaseUrl = new URL(DATABASE_URL); + const databaseUrl = new URL(DATABASE_URL) + console.log(`🔌 setting up drizzle client to ${databaseUrl.host}`) - console.log(`🔌 setting up drizzle client to ${databaseUrl.host}`); + const rawPg = postgres(DATABASE_URL, { + ssl: process.env.PG_CLIENT_SSL === 'true' ? true : false, + }) - // NOTE: during development if you change anything in this function, remember - // that this only runs once per server restart and won't automatically be - // re-run per request like everything else is. So if you need to change - // something in this file, you'll need to manually restart the server. - const queryClient = postgres(DATABASE_URL, { - ssl: process.env.PG_CLIENT_SSL === "true" ? 
true : false, - }); - const client = drizzle(queryClient, { schema }); + const drizzleDb = drizzle(rawPg, { schema }) - return client; + return { drizzle: drizzleDb, pg: rawPg } } -export { drizzleClient }; +export { drizzleClient, pg } diff --git a/app/lib/api-schemas/boxes-data-query-schema.ts b/app/lib/api-schemas/boxes-data-query-schema.ts new file mode 100644 index 00000000..d946b5c2 --- /dev/null +++ b/app/lib/api-schemas/boxes-data-query-schema.ts @@ -0,0 +1,176 @@ +import { z } from 'zod' +import { type DeviceExposureType } from '~/schema' +import { StandardResponse } from '~/utils/response-utils' + +export type BoxesDataColumn = + | 'createdAt' + | 'value' + | 'lat' + | 'lon' + | 'height' + | 'boxid' + | 'boxName' + | 'exposure' + | 'sensorId' + | 'phenomenon' + | 'unit' + | 'sensorType' + +const BoxesDataQuerySchemaBase = z + .object({ + phenomenon: z.string().optional(), + + boxid: z + .union([ + z.string().transform((s) => s.split(',').map((x) => x.trim())), + z + .array(z.string()) + .transform((arr) => arr.map((s) => String(s).trim())), + ]) + .optional(), + bbox: z + .union([ + z.string().transform((s) => s.split(',').map((x) => Number(x.trim()))), + z + .array(z.union([z.string(), z.number()])) + .transform((arr) => arr.map((x) => Number(x))), + ]) + .refine((arr) => arr.length === 4 && arr.every((n) => !isNaN(n)), { + message: 'bbox must contain exactly 4 numeric coordinates', + }) + .optional(), + + exposure: z + .union([ + z + .string() + .transform((s) => + s.split(',').map((x) => x.trim() as DeviceExposureType), + ), + z + .array(z.string()) + .transform((arr) => + arr.map((s) => String(s).trim() as DeviceExposureType), + ), + ]) + .optional(), + + grouptag: z.string().optional(), + + fromDate: z + .string() + .transform((s) => new Date(s)) + .refine((d) => !isNaN(d.getTime()), { + message: 'from-date is invalid', + }) + .optional() + .default(() => + new Date(Date.now() - 2 * 24 * 60 * 60 * 1000).toISOString(), + ), + toDate: z + .string() + .transform((s) => new Date(s)) + .refine((d) => !isNaN(d.getTime()), { + message: 'to-date is invalid', + }) + .optional() + .default(() => new Date().toISOString()), + + format: z + .enum(['csv', 'json'], { + errorMap: () => ({ message: "Format must be either 'csv' or 'json'" }), + }) + .default('csv'), + + // Columns to include + columns: z + .union([ + z + .string() + .transform((s) => + s.split(',').map((x) => x.trim() as BoxesDataColumn), + ), + z + .array(z.string()) + .transform((arr) => + arr.map((s) => String(s).trim() as BoxesDataColumn), + ), + ]) + .default([ + 'sensorId', + 'createdAt', + 'value', + 'lat', + 'lon', + ] as BoxesDataColumn[]), + + download: z + .union([z.string(), z.boolean()]) + .transform((v) => { + if (typeof v === 'boolean') return v + return v !== 'false' && v !== '0' + }) + .default(true), + + delimiter: z.enum(['comma', 'semicolon']).default('comma'), + }) + // Validate: must have boxid or bbox, but not both + .superRefine((data, ctx) => { + if (!data.boxid && !data.bbox && !data.grouptag) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: 'please specify either boxid, bbox or grouptag', + path: ['boxid'], + }) + } + + if (!data.phenomenon && !data.grouptag) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: + 'phenomenon parameter is required when grouptag is not provided', + path: ['phenomenon'], + }) + } + }) + +export type BoxesDataQueryParams = z.infer + +/** + * Parse and validate query parameters from request. + * Supports both GET query params and POST JSON body. 
+ */ +export async function parseBoxesDataQuery( + request: Request, +): Promise { + const url = new URL(request.url) + let params: Record + if (request.method !== 'GET') { + const contentType = request.headers.get('content-type') || '' + if (contentType.includes('application/json')) { + try { + params = await request.json() + } catch { + params = Object.fromEntries(url.searchParams) + } + } else { + params = Object.fromEntries(url.searchParams) + } + } else { + params = Object.fromEntries(url.searchParams) + } + + const parseResult = BoxesDataQuerySchemaBase.safeParse(params) + + if (!parseResult.success) { + const firstError = parseResult.error.errors[0] + const message = firstError.message || 'Invalid query parameters' + + if (firstError.path.includes('bbox')) { + throw StandardResponse.unprocessableContent(message) + } + throw StandardResponse.badRequest(message) + } + + return parseResult.data +} diff --git a/app/lib/measurement-service.server.ts b/app/lib/measurement-service.server.ts index d46af144..0bd4d964 100644 --- a/app/lib/measurement-service.server.ts +++ b/app/lib/measurement-service.server.ts @@ -1,19 +1,34 @@ -import { decodeMeasurements, hasDecoder } from "~/lib/decoding-service.server"; -import { type DeviceWithoutSensors, getDeviceWithoutSensors, getDevice, findAccessToken } from "~/models/device.server"; -import { saveMeasurements } from "~/models/measurement.server"; -import { getSensorsWithLastMeasurement, getSensorWithLastMeasurement } from "~/models/sensor.server"; -import { type SensorWithLatestMeasurement } from "~/schema"; +import { type BoxesDataColumn } from './api-schemas/boxes-data-query-schema' +import { decodeMeasurements, hasDecoder } from '~/lib/decoding-service.server' +import { + type DeviceWithoutSensors, + getDeviceWithoutSensors, + getDevice, + findAccessToken, +} from '~/models/device.server' +import { saveMeasurements } from '~/models/measurement.server' +import { + getSensorsWithLastMeasurement, + getSensorWithLastMeasurement, +} from '~/models/sensor.server' +import { type SensorWithLatestMeasurement } from '~/schema' -export type DeviceWithSensors = DeviceWithoutSensors & {sensors: SensorWithLatestMeasurement[]} - -export async function getLatestMeasurementsForSensor(boxId: string, sensorId: string, count?: number): - Promise { +export type DeviceWithSensors = DeviceWithoutSensors & { + sensors: SensorWithLatestMeasurement[] +} - const device: DeviceWithoutSensors = await getDeviceWithoutSensors({ id: boxId }); - if (!device) return null; +export async function getLatestMeasurementsForSensor( + boxId: string, + sensorId: string, + count?: number, +): Promise { + const device: DeviceWithoutSensors = await getDeviceWithoutSensors({ + id: boxId, + }) + if (!device) return null - // single sensor, no need for having info about device - return await getSensorWithLastMeasurement(device.id, sensorId, count); + // single sensor, no need for having info about device + return await getSensorWithLastMeasurement(device.id, sensorId, count) } /** @@ -22,184 +37,271 @@ export async function getLatestMeasurementsForSensor(boxId: string, sensorId: st * @param sensorId * @param count */ -export async function getLatestMeasurements ( - boxId: string, - count?: number, +export async function getLatestMeasurements( + boxId: string, + count?: number, ): Promise { - const device: DeviceWithoutSensors = await getDeviceWithoutSensors({ id: boxId }); - if (!device) return null; + const device: DeviceWithoutSensors = await getDeviceWithoutSensors({ + id: boxId, + }) + 
if (!device) return null - const sensorsWithMeasurements = await getSensorsWithLastMeasurement( - device.id, count); + const sensorsWithMeasurements = await getSensorsWithLastMeasurement( + device.id, + count, + ) - const deviceWithSensors: DeviceWithSensors = device as DeviceWithSensors - deviceWithSensors.sensors = sensorsWithMeasurements; - return deviceWithSensors; -}; + const deviceWithSensors: DeviceWithSensors = device as DeviceWithSensors + deviceWithSensors.sensors = sensorsWithMeasurements + return deviceWithSensors +} interface PostMeasurementsOptions { - contentType: string; - luftdaten: boolean; - hackair: boolean; - authorization?: string | null; + contentType: string + luftdaten: boolean + hackair: boolean + authorization?: string | null } interface SingleMeasurementBody { - value: number; - createdAt?: string; - location?: [number, number, number] | { lat: number; lng: number; height?: number }; + value: number + createdAt?: string + location?: + | [number, number, number] + | { lat: number; lng: number; height?: number } } interface LocationData { - lng: number; - lat: number; - height?: number; + lng: number + lat: number + height?: number } -const normalizeLocation = (location: SingleMeasurementBody['location']): LocationData | null => { - if (!location) return null; - - if (Array.isArray(location)) { - if (location.length < 2) return null; - return { - lng: location[0], - lat: location[1], - height: location[2], - }; - } - - if (typeof location === 'object' && 'lat' in location && 'lng' in location) { - return { - lng: location.lng, - lat: location.lat, - height: location.height, - }; - } - - return null; -}; +const normalizeLocation = ( + location: SingleMeasurementBody['location'], +): LocationData | null => { + if (!location) return null + + if (Array.isArray(location)) { + if (location.length < 2) return null + return { + lng: location[0], + lat: location[1], + height: location[2], + } + } + + if (typeof location === 'object' && 'lat' in location && 'lng' in location) { + return { + lng: location.lng, + lat: location.lat, + height: location.height, + } + } + + return null +} const validateLocationCoordinates = (loc: LocationData): boolean => { - return loc.lng >= -180 && loc.lng <= 180 && - loc.lat >= -90 && loc.lat <= 90; -}; + return loc.lng >= -180 && loc.lng <= 180 && loc.lat >= -90 && loc.lat <= 90 +} export const postNewMeasurements = async ( - deviceId: string, - body: any, - options: PostMeasurementsOptions, + deviceId: string, + body: any, + options: PostMeasurementsOptions, ): Promise => { - const { luftdaten, hackair, authorization } = options; - let { contentType } = options; - - if (hackair) { - contentType = "hackair"; - } else if (luftdaten) { - contentType = "luftdaten"; - } - - if (!hasDecoder(contentType)) { - throw new Error("UnsupportedMediaTypeError: Unsupported content-type."); - } - - const device = await getDevice({id: deviceId}); - if (!device) { - throw new Error("NotFoundError: Device not found"); - } - - if (device.useAuth) { - const deviceAccessToken = await findAccessToken(deviceId); - - if (deviceAccessToken?.token && deviceAccessToken.token !== authorization) { - const error = new Error("Device access token not valid!"); - error.name = "UnauthorizedError"; - throw error; - } - } - - const measurements = await decodeMeasurements(body, { - contentType, - sensors: device.sensors, - }); - - await saveMeasurements(device, measurements); -}; + const { luftdaten, hackair, authorization } = options + let { contentType } = options + + if 
(hackair) { + contentType = 'hackair' + } else if (luftdaten) { + contentType = 'luftdaten' + } + + if (!hasDecoder(contentType)) { + throw new Error('UnsupportedMediaTypeError: Unsupported content-type.') + } + + const device = await getDevice({ id: deviceId }) + if (!device) { + throw new Error('NotFoundError: Device not found') + } + + if (device.useAuth) { + const deviceAccessToken = await findAccessToken(deviceId) + + if (deviceAccessToken?.token && deviceAccessToken.token !== authorization) { + const error = new Error('Device access token not valid!') + error.name = 'UnauthorizedError' + throw error + } + } + + const measurements = await decodeMeasurements(body, { + contentType, + sensors: device.sensors, + }) + + await saveMeasurements(device, measurements) +} export const postSingleMeasurement = async ( - deviceId: string, - sensorId: string, - body: SingleMeasurementBody, - authorization?: string | null, + deviceId: string, + sensorId: string, + body: SingleMeasurementBody, + authorization?: string | null, ): Promise => { - try { - if (typeof body.value !== 'number' || isNaN(body.value)) { - const error = new Error("Invalid measurement value"); - error.name = "UnprocessableEntityError"; - throw error; - } - - const device = await getDevice({ id: deviceId }); - - if (!device) { - const error = new Error("Device not found"); - error.name = "NotFoundError"; - throw error; - } - - const sensor = device.sensors?.find((s: any) => s.id === sensorId); - if (!sensor) { - const error = new Error("Sensor not found on device"); - error.name = "NotFoundError"; - throw error; - } - - if (device.useAuth) { - const deviceAccessToken = await findAccessToken(deviceId); - - if (deviceAccessToken?.token && deviceAccessToken.token !== authorization) { - const error = new Error("Device access token not valid!"); - error.name = "UnauthorizedError"; - throw error; - } - } - - let timestamp: Date | undefined; - if (body.createdAt) { - timestamp = new Date(body.createdAt); - - if (isNaN(timestamp.getTime())) { - const error = new Error("Invalid timestamp format"); - error.name = "UnprocessableEntityError"; - throw error; - } - } - - let locationData: LocationData | null = null; - if (body.location) { - locationData = normalizeLocation(body.location); - - if (locationData && !validateLocationCoordinates(locationData)) { - const error = new Error("Invalid location coordinates"); - error.name = "UnprocessableEntityError"; - throw error; - } - } - - const measurements = [{ - sensor_id: sensorId, - value: body.value, - createdAt: timestamp, - location: locationData, - }]; - - await saveMeasurements(device, measurements); - } catch (error) { - if (error instanceof Error && - ['UnauthorizedError', 'NotFoundError', 'UnprocessableEntityError'].includes(error.name)) { - throw error; - } - - console.error('Error in postSingleMeasurement:', error); - throw error; - } -}; \ No newline at end of file + try { + if (typeof body.value !== 'number' || isNaN(body.value)) { + const error = new Error('Invalid measurement value') + error.name = 'UnprocessableEntityError' + throw error + } + + const device = await getDevice({ id: deviceId }) + + if (!device) { + const error = new Error('Device not found') + error.name = 'NotFoundError' + throw error + } + + const sensor = device.sensors?.find((s: any) => s.id === sensorId) + if (!sensor) { + const error = new Error('Sensor not found on device') + error.name = 'NotFoundError' + throw error + } + + if (device.useAuth) { + const deviceAccessToken = await findAccessToken(deviceId) + 
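+      // Only reject the request when the device has an access token configured and it does not match the supplied Authorization header.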
+ if ( + deviceAccessToken?.token && + deviceAccessToken.token !== authorization + ) { + const error = new Error('Device access token not valid!') + error.name = 'UnauthorizedError' + throw error + } + } + + let timestamp: Date | undefined + if (body.createdAt) { + timestamp = new Date(body.createdAt) + + if (isNaN(timestamp.getTime())) { + const error = new Error('Invalid timestamp format') + error.name = 'UnprocessableEntityError' + throw error + } + } + + let locationData: LocationData | null = null + if (body.location) { + locationData = normalizeLocation(body.location) + + if (locationData && !validateLocationCoordinates(locationData)) { + const error = new Error('Invalid location coordinates') + error.name = 'UnprocessableEntityError' + throw error + } + } + + const measurements = [ + { + sensor_id: sensorId, + value: body.value, + createdAt: timestamp, + location: locationData, + }, + ] + + await saveMeasurements(device, measurements) + } catch (error) { + if ( + error instanceof Error && + [ + 'UnauthorizedError', + 'NotFoundError', + 'UnprocessableEntityError', + ].includes(error.name) + ) { + throw error + } + + console.error('Error in postSingleMeasurement:', error) + throw error + } +} + +/** + * Transform a measurement row into an object with requested columns. + * - prefer measurement location if present + * - otherwise fall back to sensor/device location (sensorsMap) + */ +export function transformMeasurement( + m: { + sensorId: string + createdAt: Date | null + value: number | null + locationId: bigint | null + }, + sensorsMap: Record, + locationsMap: Record, + columns: BoxesDataColumn[], +) { + const sensor = sensorsMap[m.sensorId] + const measurementLocation = m.locationId + ? locationsMap[m.locationId.toString()] + : null + + const result: Record = {} + + for (const col of columns) { + switch (col) { + case 'createdAt': + result.createdAt = m.createdAt ? m.createdAt.toISOString() : null + break + case 'value': + result.value = m.value + break + case 'lat': + result.lat = measurementLocation?.lat ?? sensor?.lat + break + case 'lon': + result.lon = measurementLocation?.lon ?? sensor?.lon + break + case 'height': + result.height = measurementLocation?.height ?? sensor?.height ?? null + break + case 'boxid': + result.boxid = sensor?.boxid + break + case 'boxName': + result.boxName = sensor?.boxName + break + case 'exposure': + result.exposure = sensor?.exposure + break + case 'sensorId': + result.sensorId = sensor?.sensorId + break + case 'phenomenon': + result.phenomenon = sensor?.phenomenon + break + case 'unit': + result.unit = sensor?.unit + break + case 'sensorType': + result.sensorType = sensor?.sensorType + break + default: + break + } + } + + return result +} diff --git a/app/models/measurement.query.server.ts b/app/models/measurement.query.server.ts new file mode 100644 index 00000000..d91f194b --- /dev/null +++ b/app/models/measurement.query.server.ts @@ -0,0 +1,165 @@ +import { and, sql, eq, desc, gte, lte } from 'drizzle-orm' +import { drizzleClient } from '~/db.server' +import { + measurement, + location, + measurements10minView, + measurements1dayView, + measurements1hourView, + measurements1monthView, + measurements1yearView, +} from '~/schema' + +// This function retrieves measurements from the database based on the provided parameters. +export function getMeasurement( + sensorId: string, + aggregation: string, + startDate?: Date, + endDate?: Date, +) { + // If both start date and end date are provided, filter measurements within the specified time range. 
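+  // Supported aggregation windows: '10m', '1h', '1d', '1m' and '1y'; any other value falls back to the raw measurement table.
+  // Example call (hypothetical ids/dates): getMeasurement('sensor-id', '1h', new Date('2024-01-01'), new Date('2024-01-02'))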
+ if (startDate && endDate) { + // Check the aggregation level for measurements and fetch accordingly. + if (aggregation === '10m') { + return drizzleClient + .select() + .from(measurements10minView) + .where( + and( + eq(measurements10minView.sensorId, sensorId), + gte(measurements10minView.time, startDate), + lte(measurements10minView.time, endDate), + ), + ) + .orderBy(desc(measurements10minView.time)) + } else if (aggregation === '1h') { + return drizzleClient + .select() + .from(measurements1hourView) + .where( + and( + eq(measurements1hourView.sensorId, sensorId), + gte(measurements1hourView.time, startDate), + lte(measurements1hourView.time, endDate), + ), + ) + .orderBy(desc(measurements1hourView.time)) + } else if (aggregation === '1d') { + return drizzleClient + .select() + .from(measurements1dayView) + .where( + and( + eq(measurements1dayView.sensorId, sensorId), + gte(measurements1dayView.time, startDate), + lte(measurements1dayView.time, endDate), + ), + ) + .orderBy(desc(measurements1dayView.time)) + } else if (aggregation === '1m') { + return drizzleClient + .select() + .from(measurements1monthView) + .where( + and( + eq(measurements1monthView.sensorId, sensorId), + gte(measurements1monthView.time, startDate), + lte(measurements1monthView.time, endDate), + ), + ) + .orderBy(desc(measurements1monthView.time)) + } else if (aggregation === '1y') { + return drizzleClient + .select() + .from(measurements1yearView) + .where( + and( + eq(measurements1yearView.sensorId, sensorId), + gte(measurements1yearView.time, startDate), + lte(measurements1yearView.time, endDate), + ), + ) + .orderBy(desc(measurements1yearView.time)) + } + // If aggregation is not specified or different from "15m" and "1d", fetch default measurements. + return drizzleClient.query.measurement.findMany({ + where: (measurement, { eq, gte, lte }) => + and( + eq(measurement.sensorId, sensorId), + gte(measurement.time, startDate), + lte(measurement.time, endDate), + ), + orderBy: [desc(measurement.time)], + with: { + location: { + // https://github.com/drizzle-team/drizzle-orm/pull/2778 + // with: { + // geometry: true + // }, + columns: { + id: true, + }, + extras: { + x: sql`ST_X(${location.location})`.as('x'), + y: sql`ST_Y(${location.location})`.as('y'), + }, + }, + }, + }) + } + + // If only aggregation is provided, fetch measurements without considering time range. + if (aggregation === '10m') { + return drizzleClient + .select() + .from(measurements10minView) + .where(eq(measurements10minView.sensorId, sensorId)) + .orderBy(desc(measurements10minView.time)) + } else if (aggregation === '1h') { + return drizzleClient + .select() + .from(measurements1hourView) + .where(eq(measurements1hourView.sensorId, sensorId)) + .orderBy(desc(measurements1hourView.time)) + } else if (aggregation === '1d') { + return drizzleClient + .select() + .from(measurements1dayView) + .where(eq(measurements1dayView.sensorId, sensorId)) + .orderBy(desc(measurements1dayView.time)) + } else if (aggregation === '1m') { + return drizzleClient + .select() + .from(measurements1monthView) + .where(eq(measurements1monthView.sensorId, sensorId)) + .orderBy(desc(measurements1monthView.time)) + } else if (aggregation === '1y') { + return drizzleClient + .select() + .from(measurements1yearView) + .where(eq(measurements1yearView.sensorId, sensorId)) + .orderBy(desc(measurements1yearView.time)) + } + + // If neither start date nor aggregation are specified, fetch default measurements with a limit of 20000. 
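+  // Note: the cap actually applied below is 3600 rows (about 2.5 days at one measurement per minute).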
+ return drizzleClient.query.measurement.findMany({ + where: (measurement, { eq }) => eq(measurement.sensorId, sensorId), + orderBy: [desc(measurement.time)], + with: { + location: { + // https://github.com/drizzle-team/drizzle-orm/pull/2778 + // with: { + // geometry: true + // }, + columns: { + id: true, + }, + extras: { + x: sql`ST_X(${location.location})`.as('x'), + y: sql`ST_Y(${location.location})`.as('y'), + }, + }, + }, + limit: 3600, // 60 measurements per hour * 24 hours * 2.5 days + }) +} diff --git a/app/models/measurement.server.ts b/app/models/measurement.server.ts index 5aa9ac9e..3cca9bc0 100644 --- a/app/models/measurement.server.ts +++ b/app/models/measurement.server.ts @@ -1,15 +1,15 @@ -import { and, desc, eq, gte, lte, sql } from "drizzle-orm"; -import { drizzleClient } from "~/db.server"; +import { and, desc, eq, gte, lte, sql } from 'drizzle-orm' +import { drizzleClient } from '~/db.server' import { - type LastMeasurement, - location, - measurement, - measurements10minView, - measurements1dayView, - measurements1hourView, - measurements1monthView, - measurements1yearView, -} from "~/schema"; + type LastMeasurement, + location, + measurement, + measurements10minView, + measurements1dayView, + measurements1hourView, + measurements1monthView, + measurements1yearView, +} from '~/schema' import { type MinimalDevice, type MeasurementWithLocation, @@ -22,240 +22,242 @@ import { // This function retrieves measurements from the database based on the provided parameters. export function getMeasurement( - sensorId: string, - aggregation: string, - startDate?: Date, - endDate?: Date, + sensorId: string, + aggregation: string, + startDate?: Date, + endDate?: Date, ) { - // If both start date and end date are provided, filter measurements within the specified time range. - if (startDate && endDate) { - // Check the aggregation level for measurements and fetch accordingly. 
- if (aggregation === "10m") { - return drizzleClient - .select() - .from(measurements10minView) - .where( - and( - eq(measurements10minView.sensorId, sensorId), - gte(measurements10minView.time, startDate), - lte(measurements10minView.time, endDate), - ), - ) - .orderBy(desc(measurements10minView.time)); - } else if (aggregation === "1h") { - return drizzleClient - .select() - .from(measurements1hourView) - .where( - and( - eq(measurements1hourView.sensorId, sensorId), - gte(measurements1hourView.time, startDate), - lte(measurements1hourView.time, endDate), - ), - ) - .orderBy(desc(measurements1hourView.time)); - } else if (aggregation === "1d") { - return drizzleClient - .select() - .from(measurements1dayView) - .where( - and( - eq(measurements1dayView.sensorId, sensorId), - gte(measurements1dayView.time, startDate), - lte(measurements1dayView.time, endDate), - ), - ) - .orderBy(desc(measurements1dayView.time)); - } else if (aggregation === "1m") { - return drizzleClient - .select() - .from(measurements1monthView) - .where( - and( - eq(measurements1monthView.sensorId, sensorId), - gte(measurements1monthView.time, startDate), - lte(measurements1monthView.time, endDate), - ), - ) - .orderBy(desc(measurements1monthView.time)); - } else if (aggregation === "1y") { - return drizzleClient - .select() - .from(measurements1yearView) - .where( - and( - eq(measurements1yearView.sensorId, sensorId), - gte(measurements1yearView.time, startDate), - lte(measurements1yearView.time, endDate), - ), - ) - .orderBy(desc(measurements1yearView.time)); - } - // If aggregation is not specified or different from "15m" and "1d", fetch default measurements. - return drizzleClient.query.measurement.findMany({ - where: (measurement, { eq, gte, lte }) => - and( - eq(measurement.sensorId, sensorId), - gte(measurement.time, startDate), - lte(measurement.time, endDate), - ), - orderBy: [desc(measurement.time)], - with: { - location: { - // https://github.com/drizzle-team/drizzle-orm/pull/2778 - // with: { - // geometry: true - // }, - columns: { - id: true, - }, - extras: { - x: sql`ST_X(${location.location})`.as("x"), - y: sql`ST_Y(${location.location})`.as("y"), - }, - }, - }, - }); - } + // If both start date and end date are provided, filter measurements within the specified time range. + if (startDate && endDate) { + // Check the aggregation level for measurements and fetch accordingly. 
+ if (aggregation === '10m') { + return drizzleClient + .select() + .from(measurements10minView) + .where( + and( + eq(measurements10minView.sensorId, sensorId), + gte(measurements10minView.time, startDate), + lte(measurements10minView.time, endDate), + ), + ) + .orderBy(desc(measurements10minView.time)) + } else if (aggregation === '1h') { + return drizzleClient + .select() + .from(measurements1hourView) + .where( + and( + eq(measurements1hourView.sensorId, sensorId), + gte(measurements1hourView.time, startDate), + lte(measurements1hourView.time, endDate), + ), + ) + .orderBy(desc(measurements1hourView.time)) + } else if (aggregation === '1d') { + return drizzleClient + .select() + .from(measurements1dayView) + .where( + and( + eq(measurements1dayView.sensorId, sensorId), + gte(measurements1dayView.time, startDate), + lte(measurements1dayView.time, endDate), + ), + ) + .orderBy(desc(measurements1dayView.time)) + } else if (aggregation === '1m') { + return drizzleClient + .select() + .from(measurements1monthView) + .where( + and( + eq(measurements1monthView.sensorId, sensorId), + gte(measurements1monthView.time, startDate), + lte(measurements1monthView.time, endDate), + ), + ) + .orderBy(desc(measurements1monthView.time)) + } else if (aggregation === '1y') { + return drizzleClient + .select() + .from(measurements1yearView) + .where( + and( + eq(measurements1yearView.sensorId, sensorId), + gte(measurements1yearView.time, startDate), + lte(measurements1yearView.time, endDate), + ), + ) + .orderBy(desc(measurements1yearView.time)) + } + // If aggregation is not specified or different from "15m" and "1d", fetch default measurements. + return drizzleClient.query.measurement.findMany({ + where: (measurement, { eq, gte, lte }) => + and( + eq(measurement.sensorId, sensorId), + gte(measurement.time, startDate), + lte(measurement.time, endDate), + ), + orderBy: [desc(measurement.time)], + with: { + location: { + // https://github.com/drizzle-team/drizzle-orm/pull/2778 + // with: { + // geometry: true + // }, + columns: { + id: true, + }, + extras: { + x: sql`ST_X(${location.location})`.as('x'), + y: sql`ST_Y(${location.location})`.as('y'), + }, + }, + }, + }) + } - // If only aggregation is provided, fetch measurements without considering time range. - if (aggregation === "10m") { - return drizzleClient - .select() - .from(measurements10minView) - .where(eq(measurements10minView.sensorId, sensorId)) - .orderBy(desc(measurements10minView.time)); - } else if (aggregation === "1h") { - return drizzleClient - .select() - .from(measurements1hourView) - .where(eq(measurements1hourView.sensorId, sensorId)) - .orderBy(desc(measurements1hourView.time)); - } else if (aggregation === "1d") { - return drizzleClient - .select() - .from(measurements1dayView) - .where(eq(measurements1dayView.sensorId, sensorId)) - .orderBy(desc(measurements1dayView.time)); - } else if (aggregation === "1m") { - return drizzleClient - .select() - .from(measurements1monthView) - .where(eq(measurements1monthView.sensorId, sensorId)) - .orderBy(desc(measurements1monthView.time)); - } else if (aggregation === "1y") { - return drizzleClient - .select() - .from(measurements1yearView) - .where(eq(measurements1yearView.sensorId, sensorId)) - .orderBy(desc(measurements1yearView.time)); - } + // If only aggregation is provided, fetch measurements without considering time range. 
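+  // Unrecognised aggregation values fall through to the raw measurement query below, which is capped at 3600 rows.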
+ if (aggregation === '10m') { + return drizzleClient + .select() + .from(measurements10minView) + .where(eq(measurements10minView.sensorId, sensorId)) + .orderBy(desc(measurements10minView.time)) + } else if (aggregation === '1h') { + return drizzleClient + .select() + .from(measurements1hourView) + .where(eq(measurements1hourView.sensorId, sensorId)) + .orderBy(desc(measurements1hourView.time)) + } else if (aggregation === '1d') { + return drizzleClient + .select() + .from(measurements1dayView) + .where(eq(measurements1dayView.sensorId, sensorId)) + .orderBy(desc(measurements1dayView.time)) + } else if (aggregation === '1m') { + return drizzleClient + .select() + .from(measurements1monthView) + .where(eq(measurements1monthView.sensorId, sensorId)) + .orderBy(desc(measurements1monthView.time)) + } else if (aggregation === '1y') { + return drizzleClient + .select() + .from(measurements1yearView) + .where(eq(measurements1yearView.sensorId, sensorId)) + .orderBy(desc(measurements1yearView.time)) + } - // If neither start date nor aggregation are specified, fetch default measurements with a limit of 20000. - return drizzleClient.query.measurement.findMany({ - where: (measurement, { eq }) => eq(measurement.sensorId, sensorId), - orderBy: [desc(measurement.time)], - with: { - location: { - // https://github.com/drizzle-team/drizzle-orm/pull/2778 - // with: { - // geometry: true - // }, - columns: { - id: true, - }, - extras: { - x: sql`ST_X(${location.location})`.as("x"), - y: sql`ST_Y(${location.location})`.as("y"), - }, - }, - }, - limit: 3600, // 60 measurements per hour * 24 hours * 2.5 days - }); + // If neither start date nor aggregation are specified, fetch default measurements with a limit of 20000. + return drizzleClient.query.measurement.findMany({ + where: (measurement, { eq }) => eq(measurement.sensorId, sensorId), + orderBy: [desc(measurement.time)], + with: { + location: { + // https://github.com/drizzle-team/drizzle-orm/pull/2778 + // with: { + // geometry: true + // }, + columns: { + id: true, + }, + extras: { + x: sql`ST_X(${location.location})`.as('x'), + y: sql`ST_Y(${location.location})`.as('y'), + }, + }, + }, + limit: 3600, // 60 measurements per hour * 24 hours * 2.5 days + }) } - export async function saveMeasurements( - device: MinimalDevice, - measurements: MeasurementWithLocation[] + device: MinimalDevice, + measurements: MeasurementWithLocation[], ): Promise { - if (!device) - throw new Error("No device given!") - if (!Array.isArray(measurements)) throw new Error("Array expected"); + if (!device) throw new Error('No device given!') + if (!Array.isArray(measurements)) throw new Error('Array expected') - const sensorIds = device.sensors.map((s: any) => s.id); - const lastMeasurements: Record> = {}; + const sensorIds = device.sensors.map((s: any) => s.id) + const lastMeasurements: Record> = {} - // Validate and prepare measurements - for (let i = measurements.length - 1; i >= 0; i--) { - const m = measurements[i]; + // Validate and prepare measurements + for (let i = measurements.length - 1; i >= 0; i--) { + const m = measurements[i] - if (!sensorIds.includes(m.sensor_id)) { - const error = new Error( - `Measurement for sensor with id ${m.sensor_id} does not belong to box` - ); - error.name = "ModelError"; - throw error; - } + if (!sensorIds.includes(m.sensor_id)) { + const error = new Error( + `Measurement for sensor with id ${m.sensor_id} does not belong to box`, + ) + error.name = 'ModelError' + throw error + } - const now = new Date(); - const maxFutureTime = 30 * 
1000; // 30 seconds + const now = new Date() + const maxFutureTime = 30 * 1000 // 30 seconds - const measurementTime = new Date(m.createdAt || Date.now()); - if (measurementTime.getTime() > now.getTime() + maxFutureTime) { - const error = new Error( - `Measurement timestamp is too far in the future: ${measurementTime.toISOString()}` - ); - error.name = "ModelError"; - (error as any).type = "UnprocessableEntityError"; - throw error; - } + const measurementTime = new Date(m.createdAt || Date.now()) + if (measurementTime.getTime() > now.getTime() + maxFutureTime) { + const error = new Error( + `Measurement timestamp is too far in the future: ${measurementTime.toISOString()}`, + ) + error.name = 'ModelError' + ;(error as any).type = 'UnprocessableEntityError' + throw error + } - if (!lastMeasurements[m.sensor_id] || - lastMeasurements[m.sensor_id].createdAt < measurementTime.toISOString()) { - lastMeasurements[m.sensor_id] = { - value: m.value, - createdAt: measurementTime.toISOString(), - sensorId: m.sensor_id, - }; - } - } + if ( + !lastMeasurements[m.sensor_id] || + lastMeasurements[m.sensor_id].createdAt < measurementTime.toISOString() + ) { + lastMeasurements[m.sensor_id] = { + value: m.value, + createdAt: measurementTime.toISOString(), + sensorId: m.sensor_id, + } + } + } - // Track measurements that update device location (those with explicit locations) - const deviceLocationUpdates = getLocationUpdates(measurements); - const locations = await findOrCreateLocations(deviceLocationUpdates); - - // First, update device locations for all measurements with explicit locations - // This ensures the location history is complete before we infer locations - await addLocationUpdates(deviceLocationUpdates, device.id, locations); + // Track measurements that update device location (those with explicit locations) + const deviceLocationUpdates = getLocationUpdates(measurements) + const locations = await findOrCreateLocations(deviceLocationUpdates) - // Note that the insertion of measurements and update of sensors need to be in one - // transaction, since otherwise other updates could get in between and the data would be - // inconsistent. This shouldn't be a problem for the updates above. - await drizzleClient.transaction(async (tx) => { - // Now process each measurement and infer locations if needed - await insertMeasurementsWithLocation(measurements, locations, device.id, tx); - // Update sensor lastMeasurement values - await updateLastMeasurements(lastMeasurements, tx); - }); -} + // First, update device locations for all measurements with explicit locations + // This ensures the location history is complete before we infer locations + await addLocationUpdates(deviceLocationUpdates, device.id, locations) + // Note that the insertion of measurements and update of sensors need to be in one + // transaction, since otherwise other updates could get in between and the data would be + // inconsistent. This shouldn't be a problem for the updates above. 
+ await drizzleClient.transaction(async (tx) => { + // Now process each measurement and infer locations if needed + await insertMeasurementsWithLocation(measurements, locations, device.id, tx) + // Update sensor lastMeasurement values + await updateLastMeasurements(lastMeasurements, tx) + }) +} export async function insertMeasurements(measurements: any[]): Promise { - const measurementInserts = measurements.map(measurement => ({ - sensorId: measurement.sensor_id, - value: measurement.value, - time: measurement.createdAt || new Date(), - })); + const measurementInserts = measurements.map((measurement) => ({ + sensorId: measurement.sensor_id, + value: measurement.value, + time: measurement.createdAt || new Date(), + })) - await drizzleClient.insert(measurement).values(measurementInserts); + await drizzleClient.insert(measurement).values(measurementInserts) } export async function deleteMeasurementsForSensor(sensorId: string) { - return await drizzleClient.delete(measurement).where(eq(measurement.sensorId, sensorId)); + return await drizzleClient + .delete(measurement) + .where(eq(measurement.sensorId, sensorId)) } export async function deleteMeasurementsForTime(date: Date) { - return await drizzleClient.delete(measurement).where(eq(measurement.time, date)); + return await drizzleClient + .delete(measurement) + .where(eq(measurement.time, date)) } - diff --git a/app/models/measurement.stream.server.ts b/app/models/measurement.stream.server.ts new file mode 100644 index 00000000..734014fe --- /dev/null +++ b/app/models/measurement.stream.server.ts @@ -0,0 +1,60 @@ +import { pg } from '~/db.server' + +/** + * Stream measurements in batches from Postgres + * @param sensorIds list of sensor IDs + * @param fromDate start of date range + * @param toDate end of date range + * @param bbox optional bounding box [lngSW, latSW, lngNE, latNE] + * @param batchSize number of rows per batch + */ +export async function* streamMeasurements( + sensorIds: string[], + fromDate: Date, + toDate: Date, + bbox?: any, + batchSize = 1000, +) { + // Build parameterized query values array preventing sql injections + const values: any[] = [ + sensorIds, // $1 - array of sensor IDs + fromDate instanceof Date ? fromDate.toISOString() : fromDate, // $2 - start date as ISO string + toDate instanceof Date ? toDate.toISOString() : toDate, // $3 - end date as ISO string + ] + + // check if sensor_id is in the array and filter by date range + let conditions = `m.sensor_id = ANY($1::text[]) AND m.time BETWEEN $2 AND $3` + + if (bbox) { + const [lngSW, latSW, lngNE, latNE] = bbox + values.push(lngSW, latSW, lngNE, latNE) + const idx = values.length - 4 // start index of bbox params in values (0-based -> $n numbering) + // NOTE: pg placeholders are 1-based, so use idx + 1 .. 
idx + 4 + conditions += ` AND ( + m.location_id IS NULL OR + ST_Contains( + ST_MakeEnvelope($${idx + 1}::double precision, $${idx + 2}::double precision, $${idx + 3}::double precision, $${idx + 4}::double precision, 4326), + l.location + ) + )` + } + + const sqlQuery = ` + SELECT + m.sensor_id, + m.time, + m.value, + l.location, + m.location_id + FROM measurement m + LEFT JOIN location l ON m.location_id = l.id + WHERE ${conditions} + ORDER BY m.time ASC + ` + + const cursor = pg.unsafe(sqlQuery, values).cursor(batchSize) + + for await (const rows of cursor) { + yield rows + } +} diff --git a/app/models/sensor.server.ts b/app/models/sensor.server.ts index d452452d..49fa0b3f 100644 --- a/app/models/sensor.server.ts +++ b/app/models/sensor.server.ts @@ -1,8 +1,10 @@ -import { eq, sql } from 'drizzle-orm' +import { eq, sql, inArray, and } from 'drizzle-orm' import { drizzleClient } from '~/db.server' +import { type BoxesDataQueryParams } from '~/lib/api-schemas/boxes-data-query-schema' import { type Measurement, sensor, + device, type Sensor, type SensorWithLatestMeasurement, } from '~/schema' @@ -200,3 +202,93 @@ export function getSensor(id: Sensor['id']) { export function deleteSensor(id: Sensor['id']) { return drizzleClient.delete(sensor).where(eq(sensor.id, id)) } + +/** + * Find matching devices+their sensors based on phenomenon or grouptag and device-level filters. + * Returns sensorsMap (sensorId -> augmented sensor metadata) and sensorIds array. + */ +export async function findMatchingSensors(params: BoxesDataQueryParams) { + const { boxId, exposure, phenomenon, grouptag } = params + + // Build device-level conditions + const deviceConditions = [] + + if (grouptag) { + deviceConditions.push(sql`${grouptag} = ANY(${device.tags})`) + } + + if (boxId) { + deviceConditions.push(inArray(device.id, boxId)) + } + + if (exposure) { + deviceConditions.push(inArray(device.exposure, exposure)) + } + + // Build sensor-level conditions + const sensorConditions = [] + + if (phenomenon) { + sensorConditions.push(eq(sensor.title, phenomenon)) + } + + const rows = await drizzleClient + .select({ + deviceId: device.id, + deviceName: device.name, + deviceExposure: device.exposure, + deviceLat: device.latitude, + deviceLon: device.longitude, + sensorId: sensor.id, + sensorTitle: sensor.title, + sensorUnit: sensor.unit, + sensorType: sensor.sensorType, + }) + .from(device) + .innerJoin(sensor, eq(sensor.deviceId, device.id)) + .where( + and( + sensorConditions.length > 0 ? and(...sensorConditions) : undefined, + deviceConditions.length > 0 ? 
and(...deviceConditions) : undefined, + ), + ) + + if (!rows || rows.length === 0) { + throw new Response('No senseBoxes found', { status: 404 }) + } + + const sensorsMap: Record< + string, + { + sensorId: string + boxId: string + boxName: string + exposure: string | null + lat: number + lon: number + height?: number + phenomenon: string | null + unit: string | null + sensorType: string | null + } + > = {} + + for (const r of rows) { + if (r.sensorId) { + sensorsMap[r.sensorId] = { + sensorId: r.sensorId, + boxId: r.deviceId, + boxName: r.deviceName, + exposure: r.deviceExposure, + lat: r.deviceLat, + lon: r.deviceLon, + height: undefined, + phenomenon: r.sensorTitle, + unit: r.sensorUnit, + sensorType: r.sensorType, + } + } + } + + return { sensorsMap, sensorIds: Object.keys(sensorsMap) } +} diff --git a/app/routes/api.boxes.$deviceId.data.$sensorId.ts b/app/routes/api.boxes.$deviceId.data.$sensorId.ts index 8a4bbd93..d5e6722c 100644 --- a/app/routes/api.boxes.$deviceId.data.$sensorId.ts +++ b/app/routes/api.boxes.$deviceId.data.$sensorId.ts @@ -1,10 +1,17 @@ -import { type Params, type LoaderFunction, type LoaderFunctionArgs } from "react-router"; -import { type TransformedMeasurement, transformOutliers } from "~/lib/outlier-transform"; -import { getMeasurements } from "~/models/sensor.server"; -import { type Measurement } from "~/schema"; -import { convertToCsv } from "~/utils/csv"; -import { parseDateParam, parseEnumParam } from "~/utils/param-utils"; -import { StandardResponse } from "~/utils/response-utils"; +import { + type Params, + type LoaderFunction, + type LoaderFunctionArgs, +} from 'react-router' +import { + type TransformedMeasurement, + transformOutliers, +} from '~/lib/outlier-transform' +import { getMeasurements } from '~/models/sensor.server' +import { type Measurement } from '~/schema' +import { convertToCsv } from '~/utils/csv' +import { parseDateParam, parseEnumParam } from '~/utils/param-utils' +import { StandardResponse } from '~/utils/response-utils' /** * @openapi @@ -139,121 +146,151 @@ import { StandardResponse } from "~/utils/response-utils"; */ export const loader: LoaderFunction = async ({ - request, - params, + request, + params, }: LoaderFunctionArgs): Promise => { - try { + try { + const collected = collectParameters(request, params) + if (collected instanceof Response) return collected + const { + sensorId, + outliers, + outlierWindow, + fromDate, + toDate, + format, + download, + delimiter, + } = collected - const collected = collectParameters(request, params); - if (collected instanceof Response) - return collected; - // eslint-disable-next-line @typescript-eslint/no-unused-vars - const {deviceId, sensorId, outliers, outlierWindow, fromDate, toDate, format, download, delimiter} = collected; + let meas: Measurement[] | TransformedMeasurement[] = await getMeasurements( + sensorId, + fromDate.toISOString(), + toDate.toISOString(), + ) + if (meas == null) return StandardResponse.notFound('Device not found.') - let meas: Measurement[] | TransformedMeasurement[] = await getMeasurements(sensorId, fromDate.toISOString(), toDate.toISOString()); - if (meas == null) - return StandardResponse.notFound("Device not found."); - - if (outliers) - meas = transformOutliers(meas, outlierWindow, outliers == "replace"); + if (outliers) + meas = transformOutliers(meas, outlierWindow, outliers == 'replace') - let headers: HeadersInit = { - "content-type": format == "json" ? 
"application/json; charset=utf-8" : "text/csv; charset=utf-8", - }; - if (download) - headers["Content-Disposition"] = `attachment; filename=${sensorId}.${format}`; + let headers: HeadersInit = { + 'content-type': + format == 'json' + ? 'application/json; charset=utf-8' + : 'text/csv; charset=utf-8', + } + if (download) + headers['Content-Disposition'] = + `attachment; filename=${sensorId}.${format}` - const responseInit: ResponseInit = { - status: 200, - headers: headers, - }; + const responseInit: ResponseInit = { + status: 200, + headers: headers, + } - if (format == "json") - return Response.json(meas, responseInit); - else { - const csv = getCsv(meas, delimiter == "comma" ? "," : ";"); - return new Response(csv, responseInit) - } - - } catch (err) { - console.warn(err); - return StandardResponse.internalServerError(); - } -}; + if (format == 'json') return Response.json(meas, responseInit) + else { + const csv = getCsv(meas, delimiter == 'comma' ? ',' : ';') + return new Response(csv, responseInit) + } + } catch (err) { + console.warn(err) + return StandardResponse.internalServerError() + } +} -function collectParameters(request: Request, params: Params): - Response | { - deviceId: string, - sensorId: string, - outliers: string | null, - outlierWindow: number, - fromDate: Date, - toDate: Date, - format: string | null, - download: boolean | null, - delimiter: string - } { - // deviceId is there for legacy reasons - const deviceId = params.deviceId; - if (deviceId === undefined) - return StandardResponse.badRequest("Invalid device id specified"); - const sensorId = params.sensorId; - if (sensorId === undefined) - return StandardResponse.badRequest("Invalid sensor id specified"); +function collectParameters( + request: Request, + params: Params, +): + | Response + | { + deviceId: string + sensorId: string + outliers: string | null + outlierWindow: number + fromDate: Date + toDate: Date + format: string | null + download: boolean | null + delimiter: string + } { + // deviceId is there for legacy reasons + const deviceId = params.deviceId + if (deviceId === undefined) + return StandardResponse.badRequest('Invalid device id specified') + const sensorId = params.sensorId + if (sensorId === undefined) + return StandardResponse.badRequest('Invalid sensor id specified') - const url = new URL(request.url); + const url = new URL(request.url) - const outliers = parseEnumParam(url, "outliers", ["replace", "mark"], null) - if (outliers instanceof Response) - return outliers; + const outliers = parseEnumParam(url, 'outliers', ['replace', 'mark'], null) + if (outliers instanceof Response) return outliers - const outlierWindowParam = url.searchParams.get("outlier-window") - let outlierWindow: number = 15; - if (outlierWindowParam !== null) { - if (Number.isNaN(outlierWindowParam) || Number(outlierWindowParam) < 1 || Number(outlierWindowParam) > 50) - return StandardResponse.badRequest("Illegal value for parameter outlier-window. Allowed values: numbers between 1 and 50"); - outlierWindow = Number(outlierWindowParam); - } + const outlierWindowParam = url.searchParams.get('outlier-window') + let outlierWindow: number = 15 + if (outlierWindowParam !== null) { + if ( + Number.isNaN(outlierWindowParam) || + Number(outlierWindowParam) < 1 || + Number(outlierWindowParam) > 50 + ) + return StandardResponse.badRequest( + 'Illegal value for parameter outlier-window. 
Allowed values: numbers between 1 and 50', + ) + outlierWindow = Number(outlierWindowParam) + } - const fromDate = parseDateParam(url, "from-date", new Date(new Date().setDate(new Date().getDate() - 2))) - if (fromDate instanceof Response) - return fromDate + const fromDate = parseDateParam( + url, + 'from-date', + new Date(new Date().setDate(new Date().getDate() - 2)), + ) + if (fromDate instanceof Response) return fromDate - const toDate = parseDateParam(url, "to-date", new Date()) - if (toDate instanceof Response) - return toDate + const toDate = parseDateParam(url, 'to-date', new Date()) + if (toDate instanceof Response) return toDate - const format = parseEnumParam(url, "format", ["json", "csv"], "json"); - if (format instanceof Response) - return format + const format = parseEnumParam(url, 'format', ['json', 'csv'], 'json') + if (format instanceof Response) return format - const downloadParam = parseEnumParam(url, "download", ["true", "false"], null) - if (downloadParam instanceof Response) - return downloadParam - const download = downloadParam == null - ? null - : (downloadParam === "true"); + const downloadParam = parseEnumParam(url, 'download', ['true', 'false'], null) + if (downloadParam instanceof Response) return downloadParam + const download = downloadParam == null ? null : downloadParam === 'true' - const delimiter = parseEnumParam(url, "delimiter", ["comma", "semicolon"], "comma"); - if (delimiter instanceof Response) - return delimiter; + const delimiter = parseEnumParam( + url, + 'delimiter', + ['comma', 'semicolon'], + 'comma', + ) + if (delimiter instanceof Response) return delimiter - return { - deviceId, - sensorId, - outliers, - outlierWindow, - fromDate, - toDate, - format, - download, - delimiter - }; + return { + deviceId, + sensorId, + outliers, + outlierWindow, + fromDate, + toDate, + format, + download, + delimiter, + } } -function getCsv(meas: Measurement[] | TransformedMeasurement[], delimiter: string): string { - return convertToCsv(["createdAt", "value"], meas, [ - measurement => measurement.time.toString(), - measurement => measurement.value?.toString() ?? "null" - ], delimiter) -} \ No newline at end of file +function getCsv( + meas: Measurement[] | TransformedMeasurement[], + delimiter: string, +): string { + return convertToCsv( + ['createdAt', 'value'], + meas, + [ + (measurement) => measurement.time.toString(), + (measurement) => measurement.value?.toString() ?? 
'null', + ], + delimiter, + ) +} diff --git a/app/routes/api.boxes.data.ts b/app/routes/api.boxes.data.ts new file mode 100644 index 00000000..6d500a6a --- /dev/null +++ b/app/routes/api.boxes.data.ts @@ -0,0 +1,134 @@ +import { type LoaderFunctionArgs, type ActionFunctionArgs } from 'react-router' +import { parseBoxesDataQuery } from '~/lib/api-schemas/boxes-data-query-schema' +import { transformMeasurement } from '~/lib/measurement-service.server' +import { streamMeasurements } from '~/models/measurement.stream.server' +import { findMatchingSensors } from '~/models/sensor.server' +import { escapeCSVValue } from '~/utils/csv' +import { StandardResponse } from '~/utils/response-utils' + +function createDownloadFilename( + date: Date, + action: string, + params: string[], + format: string, +) { + return `opensensemap_org-${action}-${encodeURI(params.join('-'))}-${date + .toISOString() + .replace(/-|:|\.\d*Z/g, '') + .replace('T', '_')}.${format}` +} + +export async function loader({ request }: LoaderFunctionArgs) { + try { + const params = await parseBoxesDataQuery(request) + + const { sensorsMap, sensorIds } = await findMatchingSensors(params) + + if (sensorIds.length === 0) { + return StandardResponse.notFound('No matching sensors found') + } + + const headers = new Headers() + headers.set( + 'Content-Type', + params.format === 'csv' ? 'text/csv' : 'application/json', + ) + if (params.download) { + const filename = createDownloadFilename( + new Date(), + 'download', + [params.phenomenon || 'data'], + params.format, + ) + headers.set('Content-Disposition', `attachment; filename=${filename}`) + } + + const delimiterChar = params.delimiter === 'semicolon' ? ';' : ',' + + const stream = new ReadableStream({ + async start(controller) { + try { + const encoder = new TextEncoder() + let isFirst = true + + // Write CSV header or JSON opening bracket + if (params.format === 'csv') { + const header = params.columns.join(delimiterChar) + '\n' + controller.enqueue(encoder.encode(header)) + } else { + controller.enqueue(encoder.encode('[')) + } + + for await (const batch of streamMeasurements( + sensorIds, + params.fromDate, + params.toDate, + params.bbox, + )) { + for (const measurement of batch) { + const transformed = transformMeasurement( + { + sensorId: measurement.sensor_id, + createdAt: measurement.time + ? new Date(measurement.time) + : null, + value: measurement.value, + locationId: measurement.location_id ?? 
null, + }, + sensorsMap, + {}, + params.columns, + ) + + let line: string + if (params.format === 'csv') { + line = + params.columns + .map((col: string) => + escapeCSVValue( + (transformed as Record)[col], + delimiterChar, + ), + ) + .join(delimiterChar) + '\n' + } else { + // Format as JSON + if (!isFirst) { + line = ',' + JSON.stringify(transformed) + } else { + line = JSON.stringify(transformed) + isFirst = false + } + } + + controller.enqueue(encoder.encode(line)) + } + } + + // Close JSON array + if (params.format === 'json') { + controller.enqueue(encoder.encode(']')) + } + + controller.close() + } catch (error) { + console.error('Stream error:', error) + controller.error(error) + } + }, + }) + + return new Response(stream, { headers }) + } catch (err) { + if (err instanceof Response) throw err + return StandardResponse.internalServerError() + } +} + +export async function action(args: ActionFunctionArgs) { + return loader({ + request: args.request, + params: args.params as any, + context: args.context as any, + }) +} diff --git a/app/routes/api.ts b/app/routes/api.ts index 2a15d59d..394f28d0 100644 --- a/app/routes/api.ts +++ b/app/routes/api.ts @@ -1,224 +1,224 @@ -import { type LoaderFunctionArgs } from "react-router"; +import { type LoaderFunctionArgs } from 'react-router' -type RouteInfo = { path: string; method: "GET" | "PUT" | "POST" | "DELETE" }; +type RouteInfo = { path: string; method: 'GET' | 'PUT' | 'POST' | 'DELETE' } const routes: { noauth: RouteInfo[]; auth: RouteInfo[] } = { - noauth: [ - { - path: "/", - method: "GET", - }, - { - path: "/stats", - method: "GET", - }, - { - path: "/tags", - method: "GET", - }, - // { - // path: `statistics/idw`, - // method: "GET", + noauth: [ + { + path: '/', + method: 'GET', + }, + { + path: '/stats', + method: 'GET', + }, + { + path: '/tags', + method: 'GET', + }, + // { + // path: `statistics/idw`, + // method: "GET", - // }, - // { - // path: `statistics/descriptive`, - // method: "GET", + // }, + // { + // path: `statistics/descriptive`, + // method: "GET", - // }, - { - path: `boxes`, - method: "GET", - }, - // { - // path: `boxes/data`, - // method: "GET", - // }, + // }, + { + path: `boxes`, + method: 'GET', + }, + { + path: `boxes/data`, + method: 'GET', + }, - // { - // path: `boxes/:boxId`, - // method: "GET", - // }, - { - path: `boxes/:boxId/sensors`, - method: "GET", - }, - { - path: `boxes/:boxId/sensors/:sensorId`, - method: "GET", - }, - // { - // path: `boxes/:boxId/data/:sensorId`, - // method: "GET", - // }, - // { - // path: `boxes/:boxId/locations`, - // method: "GET", - // }, - // { - // path: `boxes/data`, - // method: "POST", - // }, - { - path: `boxes/:boxId/data`, - method: "POST", - }, - { - path: `boxes/:boxId/:sensorId`, - method: "POST", - }, - { - path: `users/register`, - method: "POST", - }, - { - path: `users/request-password-reset`, - method: "POST", - }, - { - path: `users/password-reset`, - method: "POST", - }, - { - path: `users/confirm-email`, - method: "POST", - }, - { - path: `users/sign-in`, - method: "POST", - }, - ], - auth: [ - { - path: `users/refresh-auth`, - method: "POST", - }, - { - path: `users/me`, - method: "GET", - }, - { - path: `users/me`, - method: "PUT", - }, - { - path: `users/me/boxes`, - method: "GET", - }, - { - path: `users/me/boxes/:boxId`, - method: "GET", - }, - // { - // path: `boxes/:boxId/script`, - // method: "GET", - // }, - { - path: `boxes`, - method: "POST", - }, - { - path: `boxes/claim`, - method: "POST", - }, - { - path: `boxes/transfer`, - method: "POST", 
- }, - { - path: `boxes/transfer`, - method: "DELETE", - }, - { - path: `boxes/transfer/:boxId`, - method: "GET", - }, - { - path: `boxes/transfer/:boxId`, - method: "PUT", - }, - // { - // path: `boxes/:boxId`, - // method: "PUT", - // }, - { - path: `boxes/:boxId`, - method: "DELETE", - }, - // { - // path: `boxes/:boxId/:sensorId/measurements`, - // method: "DELETE", - // }, - { - path: `users/sign-out`, - method: "POST", - }, - { - path: `users/me`, - method: "DELETE", - }, - { - path: `users/me/resend-email-confirmation`, - method: "POST", - }, - ], - // management: [ - // { - // path: `${managementPath}/boxes`, - // method: "GET", - // }, - // { - // path: `${managementPath}/boxes/:boxId`, - // method: "GET", - // }, - // { - // path: `${managementPath}/boxes/:boxId`, - // method: "PUT", - // }, - // { - // path: `${managementPath}/boxes/delete`, - // method: "POST", - // }, - // { - // path: `${managementPath}/users`, - // method: "GET", - // }, - // { - // path: `${managementPath}/users/:userId`, - // method: "GET", - // }, - // { - // path: `${managementPath}/users/:userId`, - // method: "PUT", - // }, - // { - // path: `${managementPath}/users/delete`, - // method: "POST", - // }, - // { - // path: `${managementPath}/users/:userId/exec`, - // method: "POST", - // }, - // ], -}; + // { + // path: `boxes/:boxId`, + // method: "GET", + // }, + { + path: `boxes/:boxId/sensors`, + method: 'GET', + }, + { + path: `boxes/:boxId/sensors/:sensorId`, + method: 'GET', + }, + // { + // path: `boxes/:boxId/data/:sensorId`, + // method: "GET", + // }, + // { + // path: `boxes/:boxId/locations`, + // method: "GET", + // }, + // { + // path: `boxes/data`, + // method: "POST", + // }, + { + path: `boxes/:boxId/data`, + method: 'POST', + }, + { + path: `boxes/:boxId/:sensorId`, + method: 'POST', + }, + { + path: `users/register`, + method: 'POST', + }, + { + path: `users/request-password-reset`, + method: 'POST', + }, + { + path: `users/password-reset`, + method: 'POST', + }, + { + path: `users/confirm-email`, + method: 'POST', + }, + { + path: `users/sign-in`, + method: 'POST', + }, + ], + auth: [ + { + path: `users/refresh-auth`, + method: 'POST', + }, + { + path: `users/me`, + method: 'GET', + }, + { + path: `users/me`, + method: 'PUT', + }, + { + path: `users/me/boxes`, + method: 'GET', + }, + { + path: `users/me/boxes/:boxId`, + method: 'GET', + }, + // { + // path: `boxes/:boxId/script`, + // method: "GET", + // }, + { + path: `boxes`, + method: 'POST', + }, + { + path: `boxes/claim`, + method: 'POST', + }, + { + path: `boxes/transfer`, + method: 'POST', + }, + { + path: `boxes/transfer`, + method: 'DELETE', + }, + { + path: `boxes/transfer/:boxId`, + method: 'GET', + }, + { + path: `boxes/transfer/:boxId`, + method: 'PUT', + }, + // { + // path: `boxes/:boxId`, + // method: "PUT", + // }, + { + path: `boxes/:boxId`, + method: 'DELETE', + }, + // { + // path: `boxes/:boxId/:sensorId/measurements`, + // method: "DELETE", + // }, + { + path: `users/sign-out`, + method: 'POST', + }, + { + path: `users/me`, + method: 'DELETE', + }, + { + path: `users/me/resend-email-confirmation`, + method: 'POST', + }, + ], + // management: [ + // { + // path: `${managementPath}/boxes`, + // method: "GET", + // }, + // { + // path: `${managementPath}/boxes/:boxId`, + // method: "GET", + // }, + // { + // path: `${managementPath}/boxes/:boxId`, + // method: "PUT", + // }, + // { + // path: `${managementPath}/boxes/delete`, + // method: "POST", + // }, + // { + // path: `${managementPath}/users`, + // method: 
"GET", + // }, + // { + // path: `${managementPath}/users/:userId`, + // method: "GET", + // }, + // { + // path: `${managementPath}/users/:userId`, + // method: "PUT", + // }, + // { + // path: `${managementPath}/users/delete`, + // method: "POST", + // }, + // { + // path: `${managementPath}/users/:userId/exec`, + // method: "POST", + // }, + // ], +} export async function loader({}: LoaderFunctionArgs) { - const lines = [ - `This is the openSenseMap API`, - "You can find a detailed reference at https://docs.opensensemap.org\n", - "Routes requiring no authentication:", - ]; + const lines = [ + `This is the openSenseMap API`, + 'You can find a detailed reference at https://docs.opensensemap.org\n', + 'Routes requiring no authentication:', + ] - for (const r of routes.noauth) lines.push(`${r.method}\t${r.path}`); + for (const r of routes.noauth) lines.push(`${r.method}\t${r.path}`) - lines.push("\nRoutes requiring valid authentication through JWT:"); + lines.push('\nRoutes requiring valid authentication through JWT:') - for (const r of routes.auth) lines.push(`${r.method}\t${r.path}`); + for (const r of routes.auth) lines.push(`${r.method}\t${r.path}`) - return new Response(lines.join("\n"), { - status: 200, - headers: { - "Content-Type": "text/plain; charset=utf-8", - }, - }); + return new Response(lines.join('\n'), { + status: 200, + headers: { + 'Content-Type': 'text/plain; charset=utf-8', + }, + }) } diff --git a/app/routes/explore.$deviceId.$sensorId.$.tsx b/app/routes/explore.$deviceId.$sensorId.$.tsx index 0a12a675..62015785 100644 --- a/app/routes/explore.$deviceId.$sensorId.$.tsx +++ b/app/routes/explore.$deviceId.$sensorId.$.tsx @@ -2,9 +2,12 @@ import { addDays } from 'date-fns' import { redirect, type LoaderFunctionArgs, useLoaderData } from 'react-router' import Graph from '~/components/device-detail/graph' import MobileBoxView from '~/components/map/layers/mobile/mobile-box-view' -import { categorizeIntoTrips, type LocationPoint} from '~/lib/mobile-box-helper' +import { + categorizeIntoTrips, + type LocationPoint, +} from '~/lib/mobile-box-helper' import { getDevice } from '~/models/device.server' -import { getMeasurement } from '~/models/measurement.server' +import { getMeasurement } from '~/models/measurement.query.server' import { getSensor } from '~/models/sensor.server' import { type SensorWithMeasurementData } from '~/schema' diff --git a/app/routes/explore.tsx b/app/routes/explore.tsx index 88cc6478..fa3584cd 100644 --- a/app/routes/explore.tsx +++ b/app/routes/explore.tsx @@ -1,423 +1,426 @@ /* eslint-disable @typescript-eslint/no-unused-vars */ -import { type FeatureCollection, type Point } from "geojson"; -import mapboxglcss from "mapbox-gl/dist/mapbox-gl.css?url"; -import { useState, useRef } from "react"; +import { type FeatureCollection, type Point } from 'geojson' +import mapboxglcss from 'mapbox-gl/dist/mapbox-gl.css?url' +import { useState, useRef } from 'react' import { - type MapLayerMouseEvent, - type MapRef, - MapProvider, - Layer, - Source, - Marker, -} from "react-map-gl"; + type MapLayerMouseEvent, + type MapRef, + MapProvider, + Layer, + Source, + Marker, +} from 'react-map-gl' import { - Outlet, - useNavigate, - useSearchParams, - useLoaderData, - useParams, - type LoaderFunctionArgs, - type LinksFunction, -} from "react-router"; -import type Supercluster from "supercluster"; -import ErrorMessage from "~/components/error-message"; -import Header from "~/components/header"; -import Map from "~/components/map"; -import { phenomenonLayers, defaultLayer } 
from "~/components/map/layers"; -import BoxMarker from "~/components/map/layers/cluster/box-marker"; -import ClusterLayer from "~/components/map/layers/cluster/cluster-layer"; -import Legend, { type LegendValue } from "~/components/map/legend"; -import i18next from "~/i18next.server"; -import { getDevices, getDevicesWithSensors } from "~/models/device.server"; -import { getMeasurement } from "~/models/measurement.server"; -import { getProfileByUserId } from "~/models/profile.server"; -import { getSensors } from "~/models/sensor.server"; -import { type Device } from "~/schema"; -import { getFilteredDevices } from "~/utils"; -import { getCSV, getJSON, getTXT } from "~/utils/file-exports"; -import { getUser, getUserSession } from "~/utils/session.server"; + Outlet, + useNavigate, + useSearchParams, + useLoaderData, + useParams, + type LoaderFunctionArgs, + type LinksFunction, +} from 'react-router' +import type Supercluster from 'supercluster' +import ErrorMessage from '~/components/error-message' +import Header from '~/components/header' +import Map from '~/components/map' +import { phenomenonLayers, defaultLayer } from '~/components/map/layers' +import BoxMarker from '~/components/map/layers/cluster/box-marker' +import ClusterLayer from '~/components/map/layers/cluster/cluster-layer' +import Legend, { type LegendValue } from '~/components/map/legend' +import i18next from '~/i18next.server' +import { getDevices, getDevicesWithSensors } from '~/models/device.server' +import { getMeasurement } from '~/models/measurement.query.server' +import { getProfileByUserId } from '~/models/profile.server' +import { getSensors } from '~/models/sensor.server' +import { type Device } from '~/schema' +import { getFilteredDevices } from '~/utils' +import { getCSV, getJSON, getTXT } from '~/utils/file-exports' +import { getUser, getUserSession } from '~/utils/session.server' export async function action({ request }: { request: Request }) { - const deviceLimit = 50; - const sensorIds: Array = []; - const measurements: Array = []; - const formdata = await request.formData(); - const deviceIds = (formdata.get("devices") as string).split(","); - const format = formdata.get("format") as string; - const aggregate = formdata.get("aggregate") as string; - const includeFields = { - title: formdata.get("title") === "on", - unit: formdata.get("unit") === "on", - value: formdata.get("value") === "on", - timestamp: formdata.get("timestamp") === "on", - }; - - if (deviceIds.length >= deviceLimit) { - return Response.json({ - error: "error", - link: "https://archive.opensensemap.org/", - }); - } - for (const device of deviceIds) { - const sensors = await getSensors(device); - for (const sensor of sensors) { - sensorIds.push(sensor.id); - const measurement = await getMeasurement(sensor.id, aggregate); - measurement.map((m: any) => { - m["title"] = sensor.title; - m["unit"] = sensor.unit; - }); - - measurements.push(measurement); - } - } - - let content = ""; - let contentType = "text/plain"; - let fileName = ""; - - if (format === "csv") { - const result = getCSV(measurements, includeFields); - content = result.content; - fileName = result.fileName; - contentType = result.contentType; - } else if (format === "json") { - const result = getJSON(measurements, includeFields); - content = result.content; - fileName = result.fileName; - contentType = result.contentType; - } else { - // txt format - const result = getTXT(measurements, includeFields); - content = result.content; - fileName = result.fileName; - contentType = 
result.contentType; - } - - return Response.json({ - href: `data:${contentType};charset=utf-8,${encodeURIComponent(content)}`, - download: fileName, - }); + const deviceLimit = 50 + const sensorIds: Array = [] + const measurements: Array = [] + const formdata = await request.formData() + const deviceIds = (formdata.get('devices') as string).split(',') + const format = formdata.get('format') as string + const aggregate = formdata.get('aggregate') as string + const includeFields = { + title: formdata.get('title') === 'on', + unit: formdata.get('unit') === 'on', + value: formdata.get('value') === 'on', + timestamp: formdata.get('timestamp') === 'on', + } + + if (deviceIds.length >= deviceLimit) { + return Response.json({ + error: 'error', + link: 'https://archive.opensensemap.org/', + }) + } + for (const device of deviceIds) { + const sensors = await getSensors(device) + for (const sensor of sensors) { + sensorIds.push(sensor.id) + const measurement = await getMeasurement(sensor.id, aggregate) + measurement.map((m: any) => { + m['title'] = sensor.title + m['unit'] = sensor.unit + }) + + measurements.push(measurement) + } + } + + let content = '' + let contentType = 'text/plain' + let fileName = '' + + if (format === 'csv') { + const result = getCSV(measurements, includeFields) + content = result.content + fileName = result.fileName + contentType = result.contentType + } else if (format === 'json') { + const result = getJSON(measurements, includeFields) + content = result.content + fileName = result.fileName + contentType = result.contentType + } else { + // txt format + const result = getTXT(measurements, includeFields) + content = result.content + fileName = result.fileName + contentType = result.contentType + } + + return Response.json({ + href: `data:${contentType};charset=utf-8,${encodeURIComponent(content)}`, + download: fileName, + }) } export type DeviceClusterProperties = - | Supercluster.PointFeature - | Supercluster.PointFeature< - Supercluster.ClusterProperties & { - categories: { - [x: number]: number; - }; - } - >; + | Supercluster.PointFeature + | Supercluster.PointFeature< + Supercluster.ClusterProperties & { + categories: { + [x: number]: number + } + } + > export async function loader({ request }: LoaderFunctionArgs) { - //* Get filter params - let locale = await i18next.getLocale(request); - const url = new URL(request.url); - const filterParams = url.search; - const urlFilterParams = new URLSearchParams(url.search); - - // check if sensors are queried - if not get devices only to reduce load - const devices = !urlFilterParams.get("phenomenon") - ? await getDevices("geojson") - : await getDevicesWithSensors(); - - const session = await getUserSession(request); - const message = session.get("global_message") || null; - - var filteredDevices = getFilteredDevices(devices, urlFilterParams); - - const user = await getUser(request); - //const phenomena = await getPhenomena(); - - if (user) { - const profile = await getProfileByUserId(user.id); - const userLocale = user.language - ? 
user.language.split(/[_-]/)[0].toLowerCase() - : "en"; - return { - devices, - user, - profile, - filteredDevices, - filterParams, - locale: userLocale, - //phenomena - }; - } - return { - devices, - user, - profile: null, - filterParams, - filteredDevices, - message, - locale - //phenomena, - }; + //* Get filter params + let locale = await i18next.getLocale(request) + const url = new URL(request.url) + const filterParams = url.search + const urlFilterParams = new URLSearchParams(url.search) + + // check if sensors are queried - if not get devices only to reduce load + const devices = !urlFilterParams.get('phenomenon') + ? await getDevices('geojson') + : await getDevicesWithSensors() + + const session = await getUserSession(request) + const message = session.get('global_message') || null + + var filteredDevices = getFilteredDevices(devices, urlFilterParams) + + const user = await getUser(request) + //const phenomena = await getPhenomena(); + + if (user) { + const profile = await getProfileByUserId(user.id) + const userLocale = user.language + ? user.language.split(/[_-]/)[0].toLowerCase() + : 'en' + return { + devices, + user, + profile, + filteredDevices, + filterParams, + locale: userLocale, + //phenomena + } + } + return { + devices, + user, + profile: null, + filterParams, + filteredDevices, + message, + locale, + //phenomena, + } } export const links: LinksFunction = () => { - return [ - { - rel: "stylesheet", - href: mapboxglcss, - }, - ]; -}; + return [ + { + rel: 'stylesheet', + href: mapboxglcss, + }, + ] +} // This is for the live data display. The 21-06-2023 works with the seed Data, for Production take now minus 10 minutes -let currentDate = new Date("2023-06-21T14:13:11.024Z"); -if (process.env.NODE_ENV === "production") { - currentDate = new Date(Date.now() - 1000 * 600); +let currentDate = new Date('2023-06-21T14:13:11.024Z') +if (process.env.NODE_ENV === 'production') { + currentDate = new Date(Date.now() - 1000 * 600) } export default function Explore() { - // data from our loader - const { devices, user, profile, filterParams, filteredDevices, message,locale } = - useLoaderData(); - - const mapRef = useRef(null); - - // get map bounds - const [, setViewState] = useState({ - longitude: 7.628202, - latitude: 51.961563, - zoom: 2, - }); - const navigate = useNavigate(); - // const [showSearch, setShowSearch] = useState(false); - const [selectedPheno, setSelectedPheno] = useState( - undefined, - ); - const [searchParams] = useSearchParams(); - const [filteredData, setFilteredData] = useState< - GeoJSON.FeatureCollection - >({ - type: "FeatureCollection", - features: [], - }); - - //listen to search params change - // useEffect(() => { - // //filters devices for pheno - // if (searchParams.has("mapPheno") && searchParams.get("mapPheno") != "all") { - // let sensorsFiltered: any = []; - // let currentParam = searchParams.get("mapPheno"); - // //check if pheno exists in sensor-wiki data - // let pheno = data.phenomena.filter( - // (pheno: any) => pheno.slug == currentParam?.toString(), - // ); - // if (pheno[0]) { - // setSelectedPheno(pheno[0]); - // data.devices.features.forEach((device: any) => { - // device.properties.sensors.forEach((sensor: Sensor) => { - // if ( - // sensor.sensorWikiPhenomenon == currentParam && - // sensor.lastMeasurement - // ) { - // const lastMeasurementDate = new Date( - // //@ts-ignore - // sensor.lastMeasurement.createdAt, - // ); - // //take only measurements in the last 10mins - // //@ts-ignore - // if (currentDate < lastMeasurementDate) { - // 
sensorsFiltered.push({ - // ...device, - // properties: { - // ...device.properties, - // sensor: { - // ...sensor, - // lastMeasurement: { - // //@ts-ignore - // value: parseFloat(sensor.lastMeasurement.value), - // //@ts-ignore - // createdAt: sensor.lastMeasurement.createdAt, - // }, - // }, - // }, - // }); - // } - // } - // }); - // return false; - // }); - // setFilteredData({ - // type: "FeatureCollection", - // features: sensorsFiltered, - // }); - // } - // } else { - // setSelectedPheno(undefined); - // } - // // eslint-disable-next-line react-hooks/exhaustive-deps - // }, [searchParams]); - - function calculateLabelPositions(length: number): string[] { - const positions: string[] = []; - for (let i = length - 1; i >= 0; i--) { - const position = - i === length - 1 ? "95%" : `${((i / (length - 1)) * 100).toFixed(0)}%`; - positions.push(position); - } - return positions; - } - - const legendLabels = () => { - const values = - //@ts-ignore - phenomenonLayers[selectedPheno.slug].paint["circle-color"].slice(3); - const numbers = values.filter( - (v: number | string) => typeof v === "number", - ); - const colors = values.filter((v: number | string) => typeof v === "string"); - const positions = calculateLabelPositions(numbers.length); - - const legend: LegendValue[] = []; - const length = numbers.length; - for (let i = 0; i < length; i++) { - const legendObj: LegendValue = { - value: numbers[i], - color: colors[i], - position: positions[i], - }; - legend.push(legendObj); - } - return legend; - }; - - // // /** - // // * Focus the search input when the search overlay is displayed - // // */ - // // const focusSearchInput = () => { - // // searchRef.current?.focus(); - // // }; - - // /** - // * Display the search overlay when the ctrl + k key combination is pressed - // */ - // useHotkeys([ - // [ - // "ctrl+K", - // () => { - // setShowSearch(!showSearch); - // setTimeout(() => { - // focusSearchInput(); - // }, 100); - // }, - // ], - // ]); - - const onMapClick = (e: MapLayerMouseEvent) => { - if (e.features && e.features.length > 0) { - const feature = e.features[0]; - - if (feature.layer.id === "phenomenon-layer") { - void navigate( - `/explore/${feature.properties?.id}?${searchParams.toString()}`, - ); - } - } - }; - - const handleMouseMove = (e: mapboxgl.MapLayerMouseEvent) => { - if (e.features && e.features.length > 0) { - mapRef!.current!.getCanvas().style.cursor = "pointer"; - } else { - mapRef!.current!.getCanvas().style.cursor = ""; - } - }; - - //* fly to sensebox location when url inludes deviceId - const { deviceId } = useParams(); - var deviceLoc: any; - let selectedDevice: any; - if (deviceId) { - selectedDevice = (devices as any).features.find( - (device: any) => device.properties.id === deviceId, - ); - deviceLoc = [ - selectedDevice?.properties.latitude, - selectedDevice?.properties.longitude, - ]; - } - - const buildLayerFromPheno = (selectedPheno: any) => { - //TODO: ADD VALUES TO DEFAULTLAYER FROM selectedPheno.ROV or min/max from values. - return defaultLayer; - }; - - return ( -
- -
- {selectedPheno && ( - - )} - setViewState(evt.viewState)} - interactiveLayerIds={selectedPheno ? ["phenomenon-layer"] : []} - onClick={onMapClick} - onMouseMove={handleMouseMove} - ref={mapRef} - initialViewState={ - deviceId - ? { latitude: deviceLoc[0], longitude: deviceLoc[1], zoom: 10 } - : { latitude: 7, longitude: 52, zoom: 2 } - } - > - {!selectedPheno && ( - } - /> - )} - {selectedPheno && ( - } - cluster={false} - > - - - )} - - {/* Render BoxMarker for the selected device */} - {selectedDevice && deviceId && ( - - - - )} - - {/* () + + const mapRef = useRef(null) + + // get map bounds + const [, setViewState] = useState({ + longitude: 7.628202, + latitude: 51.961563, + zoom: 2, + }) + const navigate = useNavigate() + // const [showSearch, setShowSearch] = useState(false); + const [selectedPheno, setSelectedPheno] = useState(undefined) + const [searchParams] = useSearchParams() + const [filteredData, setFilteredData] = useState< + GeoJSON.FeatureCollection + >({ + type: 'FeatureCollection', + features: [], + }) + + //listen to search params change + // useEffect(() => { + // //filters devices for pheno + // if (searchParams.has("mapPheno") && searchParams.get("mapPheno") != "all") { + // let sensorsFiltered: any = []; + // let currentParam = searchParams.get("mapPheno"); + // //check if pheno exists in sensor-wiki data + // let pheno = data.phenomena.filter( + // (pheno: any) => pheno.slug == currentParam?.toString(), + // ); + // if (pheno[0]) { + // setSelectedPheno(pheno[0]); + // data.devices.features.forEach((device: any) => { + // device.properties.sensors.forEach((sensor: Sensor) => { + // if ( + // sensor.sensorWikiPhenomenon == currentParam && + // sensor.lastMeasurement + // ) { + // const lastMeasurementDate = new Date( + // //@ts-ignore + // sensor.lastMeasurement.createdAt, + // ); + // //take only measurements in the last 10mins + // //@ts-ignore + // if (currentDate < lastMeasurementDate) { + // sensorsFiltered.push({ + // ...device, + // properties: { + // ...device.properties, + // sensor: { + // ...sensor, + // lastMeasurement: { + // //@ts-ignore + // value: parseFloat(sensor.lastMeasurement.value), + // //@ts-ignore + // createdAt: sensor.lastMeasurement.createdAt, + // }, + // }, + // }, + // }); + // } + // } + // }); + // return false; + // }); + // setFilteredData({ + // type: "FeatureCollection", + // features: sensorsFiltered, + // }); + // } + // } else { + // setSelectedPheno(undefined); + // } + // // eslint-disable-next-line react-hooks/exhaustive-deps + // }, [searchParams]); + + function calculateLabelPositions(length: number): string[] { + const positions: string[] = [] + for (let i = length - 1; i >= 0; i--) { + const position = + i === length - 1 ? 
'95%' : `${((i / (length - 1)) * 100).toFixed(0)}%` + positions.push(position) + } + return positions + } + + const legendLabels = () => { + const values = + //@ts-ignore + phenomenonLayers[selectedPheno.slug].paint['circle-color'].slice(3) + const numbers = values.filter((v: number | string) => typeof v === 'number') + const colors = values.filter((v: number | string) => typeof v === 'string') + const positions = calculateLabelPositions(numbers.length) + + const legend: LegendValue[] = [] + const length = numbers.length + for (let i = 0; i < length; i++) { + const legendObj: LegendValue = { + value: numbers[i], + color: colors[i], + position: positions[i], + } + legend.push(legendObj) + } + return legend + } + + // // /** + // // * Focus the search input when the search overlay is displayed + // // */ + // // const focusSearchInput = () => { + // // searchRef.current?.focus(); + // // }; + + // /** + // * Display the search overlay when the ctrl + k key combination is pressed + // */ + // useHotkeys([ + // [ + // "ctrl+K", + // () => { + // setShowSearch(!showSearch); + // setTimeout(() => { + // focusSearchInput(); + // }, 100); + // }, + // ], + // ]); + + const onMapClick = (e: MapLayerMouseEvent) => { + if (e.features && e.features.length > 0) { + const feature = e.features[0] + + if (feature.layer.id === 'phenomenon-layer') { + void navigate( + `/explore/${feature.properties?.id}?${searchParams.toString()}`, + ) + } + } + } + + const handleMouseMove = (e: mapboxgl.MapLayerMouseEvent) => { + if (e.features && e.features.length > 0) { + mapRef!.current!.getCanvas().style.cursor = 'pointer' + } else { + mapRef!.current!.getCanvas().style.cursor = '' + } + } + + //* fly to sensebox location when url inludes deviceId + const { deviceId } = useParams() + var deviceLoc: any + let selectedDevice: any + if (deviceId) { + selectedDevice = (devices as any).features.find( + (device: any) => device.properties.id === deviceId, + ) + deviceLoc = [ + selectedDevice?.properties.latitude, + selectedDevice?.properties.longitude, + ] + } + + const buildLayerFromPheno = (selectedPheno: any) => { + //TODO: ADD VALUES TO DEFAULTLAYER FROM selectedPheno.ROV or min/max from values. + return defaultLayer + } + + return ( +
+ +
+ {selectedPheno && ( + + )} + setViewState(evt.viewState)} + interactiveLayerIds={selectedPheno ? ['phenomenon-layer'] : []} + onClick={onMapClick} + onMouseMove={handleMouseMove} + ref={mapRef} + initialViewState={ + deviceId + ? { latitude: deviceLoc[0], longitude: deviceLoc[1], zoom: 10 } + : { latitude: 7, longitude: 52, zoom: 2 } + } + > + {!selectedPheno && ( + } + /> + )} + {selectedPheno && ( + } + cluster={false} + > + + + )} + + {/* Render BoxMarker for the selected device */} + {selectedDevice && deviceId && ( + + + + )} + + {/* */} - - - -
- ); + +
+ +
+ ) } export function ErrorBoundary() { - return ( -
- -
- ); + return ( +
+ +
+ ) } diff --git a/app/utils/csv.ts b/app/utils/csv.ts index cb5e5a62..92d6aa5f 100644 --- a/app/utils/csv.ts +++ b/app/utils/csv.ts @@ -4,14 +4,26 @@ * @param data The data as an array of arbitrary data rows * @param dataSelectors Selectors that picks data out of a data row and converts it into a string. * Order should be the same as for the headers. - * @returns + * @returns */ -export const convertToCsv = (headers: string[], data: DataRow[], - dataSelectors: ((row: DataRow) => string)[], delimiter = ",") => { +export const convertToCsv = ( + headers: string[], + data: DataRow[], + dataSelectors: ((row: DataRow) => string)[], + delimiter = ',', +) => { + const rows: string[] = data.map((dataRow) => + headers.map((_, i) => dataSelectors[i](dataRow)).join(delimiter), + ) - const rows: string[] = data.map(dataRow => - headers.map((_, i) => dataSelectors[i](dataRow)).join(delimiter) - ) + return [headers.join(delimiter), ...rows].join('\n') +} - return [headers.join(delimiter), ...rows].join("\n") -} \ No newline at end of file +export function escapeCSVValue(value: any, delimiter: string): string { + if (value === null || value === undefined) return '' + const str = String(value) + if (str.includes(delimiter) || str.includes('"') || str.includes('\n')) { + return `"${str.replace(/"/g, '""')}"` + } + return str +} diff --git a/package-lock.json b/package-lock.json index 6f0f6fa1..40e72e98 100644 --- a/package-lock.json +++ b/package-lock.json @@ -154,6 +154,7 @@ "@types/node": "^22.15.35", "@types/node-fetch": "^2.6.13", "@types/nodemailer": "^7.0.1", + "@types/pg": "^8.15.6", "@types/react": "19.1.8", "@types/react-dom": "19.2.2", "@types/source-map-support": "^0.5.10", @@ -8997,6 +8998,17 @@ "integrity": "sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==", "license": "MIT" }, + "node_modules/@types/pg": { + "version": "8.15.6", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.15.6.tgz", + "integrity": "sha512-NoaMtzhxOrubeL/7UZuNTrejB4MPAJ0RpxZqXQf2qXuVlTPuG6Y8p4u9dKRaue4yjmC7ZhzVO2/Yyyn25znrPQ==", + "dev": true, + "dependencies": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^2.2.0" + } + }, "node_modules/@types/prismjs": { "version": "1.26.5", "resolved": "https://registry.npmjs.org/@types/prismjs/-/prismjs-1.26.5.tgz", diff --git a/package.json b/package.json index b2ef34e1..499d06fb 100644 --- a/package.json +++ b/package.json @@ -176,6 +176,7 @@ "@types/node": "^22.15.35", "@types/node-fetch": "^2.6.13", "@types/nodemailer": "^7.0.1", + "@types/pg": "^8.15.6", "@types/react": "19.1.8", "@types/react-dom": "19.2.2", "@types/source-map-support": "^0.5.10", diff --git a/tests/routes/api.boxes.data.spec.ts b/tests/routes/api.boxes.data.spec.ts new file mode 100644 index 00000000..e2b35427 --- /dev/null +++ b/tests/routes/api.boxes.data.spec.ts @@ -0,0 +1,466 @@ +import { eq } from 'drizzle-orm' +import { type AppLoadContext } from 'react-router' +import { generateTestUserCredentials } from 'tests/data/generate_test_user' +import { describe, it, expect, beforeAll } from 'vitest' +import { BASE_URL } from 'vitest.setup' +import { drizzleClient } from '~/db.server' +import { createToken } from '~/lib/jwt' +import { registerUser } from '~/lib/user-service.server' +import { createDevice, deleteDevice } from '~/models/device.server' +import { deleteUserByEmail } from '~/models/user.server' +import { + loader as boxesDataLoader, + action as boxesDataAction, +} from '~/routes/api.boxes.data' +import { device, measurement, 
sensor, type User } from '~/schema' + +const BOXES_DATA_TEST_USER = generateTestUserCredentials() + +const TEST_BOX = { + name: 'Download Box', + exposure: 'indoor' as const, + expiresAt: null, + tags: [], + latitude: 51.5, + longitude: 7.5, + model: 'luftdaten.info' as const, + mqttEnabled: false, + ttnEnabled: false, +} + +describe('openSenseMap API: /boxes/data', () => { + let jwt = '' + let user: User + let deviceId = '' + let outdoorDeviceId = '' + let sensorId = '' + + const expectedMeasurementsCount = 10 + + beforeAll(async () => { + await deleteUserByEmail(BOXES_DATA_TEST_USER.email) + + const testUser = await registerUser( + BOXES_DATA_TEST_USER.name, + BOXES_DATA_TEST_USER.email, + BOXES_DATA_TEST_USER.password, + 'en_US', + ) + user = testUser as User + const t = await createToken(user) + jwt = t.token + + const device = await createDevice(TEST_BOX, user.id) + deviceId = device.id + + const outdoorDevice = await createDevice( + { + ...TEST_BOX, + name: 'Download Box Outdoor', + exposure: 'outdoor', + }, + user.id, + ) + outdoorDeviceId = outdoorDevice.id + + const [outdoorSensor] = await drizzleClient + .insert(sensor) + .values({ + title: 'Temperatur', + unit: '°C', + sensorType: 'HDC1080', + deviceId: outdoorDevice.id, + status: 'active', + }) + .returning() + + const outdoorMeasurements = [] + for (let i = 0; i < 5; i++) { + outdoorMeasurements.push({ + sensorId: outdoorSensor.id, + time: new Date(Date.now() - i * 60000), + value: 15 + Math.random() * 5, + }) + } + + await drizzleClient.insert(measurement).values(outdoorMeasurements) + + const [createdSensor] = await drizzleClient + .insert(sensor) + .values({ + title: 'Temperatur', + unit: '°C', + sensorType: 'HDC1080', + deviceId: device.id, + status: 'active', + }) + .returning() + + sensorId = createdSensor.id + + // Create test measurements + const now = new Date() + const measurements = [] + for (let i = 0; i < expectedMeasurementsCount; i++) { + measurements.push({ + sensorId: sensorId, + time: new Date(now.getTime() - i * 60000), // 1 minute apart + value: 20 + Math.random() * 10, // 20-30°C + }) + } + + await drizzleClient.insert(measurement).values(measurements) + }) + + // --------------------------- + // CSV (default) + // --------------------------- + it('GET /boxes/data CSV', async () => { + const url = `${BASE_URL}/api/boxes/data?boxid=${deviceId}&phenomenon=Temperatur` + const req = new Request(url, { + headers: { Authorization: `Bearer ${jwt}` }, + }) + + const res = await boxesDataLoader({ + request: req, + params: {}, + context: {} as AppLoadContext, + }) + const text = await res.text() + + expect(res.status).toBe(200) + expect(text).not.toBe('') + expect(res.headers.get('content-type')).toBe('text/csv') + + // Check that CSV has header and data rows + const lines = text.trim().split('\n') + expect(lines.length).toBeGreaterThan(1) // At least header + 1 data row + }) + + it('GET /boxes/data CSV with format=csv', async () => { + const url = `${BASE_URL}/api/boxes/data?boxid=${deviceId}&phenomenon=Temperatur&format=csv` + const req = new Request(url, { + headers: { Authorization: `Bearer ${jwt}` }, + }) + + const res = await boxesDataLoader({ + request: req, + params: {}, + context: {} as AppLoadContext, + }) + const text = await res.text() + + expect(res.status).toBe(200) + expect(text).not.toBe('') + expect(res.headers.get('content-type')).toBe('text/csv') + }) + + // --------------------------- + // JSON + // --------------------------- + it('GET /boxes/data JSON', async () => { + const url = 
`${BASE_URL}/api/boxes/data?boxid=${deviceId}&phenomenon=Temperatur&format=json&columns=sensorId,value,lat,lon` + const req = new Request(url, { + headers: { Authorization: `Bearer ${jwt}` }, + }) + + const res = await boxesDataLoader({ + request: req, + params: {}, + context: {} as AppLoadContext, + }) + expect(res.status).toBe(200) + expect(res.headers.get('content-type')).toBe('application/json') + + const body = await res.json() + expect(Array.isArray(body)).toBe(true) + expect(body.length).toBeGreaterThan(0) + + for (const m of body) { + expect(m.sensorId).toBeDefined() + expect(m.value).toBeDefined() + expect(m.lat).toBeDefined() + expect(m.lon).toBeDefined() + } + }) + + // --------------------------- + // Multiple box IDs + // --------------------------- + it('GET /boxes/data CSV with multiple boxids', async () => { + const url = `${BASE_URL}/api/boxes/data?boxid=${deviceId},${deviceId}&phenomenon=Temperatur` + const req = new Request(url, { + headers: { Authorization: `Bearer ${jwt}` }, + }) + + const res = await boxesDataLoader({ + request: req, + params: {}, + context: {} as AppLoadContext, + }) + const text = await res.text() + const lines = text.trim().split('\n').slice(1) // Skip header + + expect(res.status).toBe(200) + expect(lines).toHaveLength(expectedMeasurementsCount) + }) + + // --------------------------- + // POST CSV + // --------------------------- + it('POST /boxes/data CSV', async () => { + const req = new Request( + `${BASE_URL}/boxes/data?boxid=${deviceId}&phenomenon=Temperatur`, + { + method: 'POST', + headers: { + Authorization: `Bearer ${jwt}`, + }, + }, + ) + + const response = await boxesDataAction({ + request: req, + params: {}, + context: {} as AppLoadContext, + }) + + const text = (await response.text()).trim() + const lines = text.split('\n').slice(1) + + expect(response.status).toBe(200) + expect(lines).toHaveLength(expectedMeasurementsCount) + }) + + // --------------------------- + // Exposure filtering + // --------------------------- + it('GET /boxes/data with exposure filter', async () => { + const from = new Date(Date.now() - 100 * 864e5).toISOString() + const to = new Date().toISOString() + + const req = new Request( + `${BASE_URL}/boxes/data/?bbox=-180,-90,180,90&phenomenon=Temperatur&exposure=indoor&columns=exposure&from-date=${from}&to-date=${to}`, + { headers: { Authorization: `Bearer ${jwt}` } }, + ) + + const res = await boxesDataLoader({ + request: req, + params: {}, + context: {} as AppLoadContext, + }) + const text = (await res.text()).trim() + const [header, ...lines] = text.split('\n') + + expect(res.status).toBe(200) + expect(header).toBe('exposure') + expect(lines).toHaveLength(expectedMeasurementsCount) + + for (const line of lines.slice(0, -1)) { + expect(line).toBe('indoor') + } + }) + + it('GET /boxes/data with multiple exposure filters', async () => { + const from = new Date(Date.now() - 100 * 864e5).toISOString() + const to = new Date().toISOString() + + const url = + `${BASE_URL}/boxes/data/?` + + `bbox=-180,-90,180,90` + + `&phenomenon=Temperatur` + + `&exposure=indoor,outdoor` + + `&columns=exposure` + + `&from-date=${from}` + + `&to-date=${to}` + + const req = new Request(url, { + headers: { Authorization: `Bearer ${jwt}` }, + }) + + const res = await boxesDataLoader({ + request: req, + params: {}, + context: {} as AppLoadContext, + }) + + expect(res.status).toBe(200) + expect(res.headers.get('content-type')).toBe('text/csv') + + const text = (await res.text()).trim() + const [header, ...lines] = text.split('\n') + + 
expect(header).toBe('exposure') + + expect(lines).toHaveLength(expectedMeasurementsCount + 5) + + let sawIndoor = false + let sawOutdoor = false + + for (const line of lines.slice(0, -1)) { + if (line === 'indoor') sawIndoor = true + if (line === 'outdoor') sawOutdoor = true + if (sawIndoor && sawOutdoor) break + } + + expect(sawIndoor).toBe(true) + expect(sawOutdoor).toBe(true) + }) + + // --------------------------- + // content-disposition header + // --------------------------- + it('GET /boxes/data should include content-disposition by default', async () => { + const req = new Request( + `${BASE_URL}/boxes/data/?boxid=${deviceId},${deviceId}&phenomenon=Temperatur`, + { headers: { Authorization: `Bearer ${jwt}` } }, + ) + + const res = await boxesDataLoader({ + request: req, + params: {}, + context: {} as AppLoadContext, + }) + const cd = res.headers.get('content-disposition') + + expect(cd).toMatch(/opensensemap_org-download-Temperatur/) + }) + + it('GET /boxes/data should NOT include content-disposition when download=false', async () => { + const req = new Request( + `${BASE_URL}/boxes/data/?boxid=${deviceId},${deviceId}&phenomenon=Temperatur&download=false`, + { headers: { Authorization: `Bearer ${jwt}` } }, + ) + + const res = await boxesDataLoader({ + request: req, + params: {}, + context: {} as AppLoadContext, + }) + + const cd = res.headers.get('content-disposition') + + expect(cd).toBeNull() + }) + + // --------------------------- + // Bounding box validation + // --------------------------- + it('GET /boxes/data invalid bbox (too many values)', async () => { + const req = new Request( + `${BASE_URL}/boxes/data/?boxid=${deviceId}&phenomenon=Temperatur&bbox=1,2,3,4,5`, + { headers: { Authorization: `Bearer ${jwt}` } }, + ) + + let res: Response + try { + res = await boxesDataLoader({ + request: req, + params: {}, + context: {} as AppLoadContext, + }) + } catch (response) { + res = response as Response + } + + expect(res.status).toBe(422) + + const json = await res.json() + expect(json.code).toBe('Unprocessable Content') + }) + + it('should allow to specify bounding boxes with area greater than a single hemisphere', async () => { + const req = new Request( + `${BASE_URL}/boxes/data/?phenomenon=Temperatur&bbox=-180,-90,180,90`, + ) + + const res = await boxesDataLoader({ + request: req, + params: {}, + context: {} as AppLoadContext, + }) + + expect(res.status).toBe(200) + + expect(res.headers.get('content-type')).toContain('text/csv') + + const bodyText = await res.text() + + const lines = bodyText.split('\n') + expect(lines.length).toBeGreaterThan(1) + }) + + it('GET /boxes/data invalid bbox (too few values)', async () => { + const req = new Request( + `${BASE_URL}/boxes/data/?boxid=${deviceId}&phenomenon=Temperatur&bbox=1,2,3`, + { headers: { Authorization: `Bearer ${jwt}` } }, + ) + + let res: Response + try { + res = await boxesDataLoader({ + request: req, + params: {}, + context: {} as AppLoadContext, + }) + } catch (response) { + res = response as Response + } + + expect(res.status).toBe(422) + }) + + it('GET /boxes/data invalid bbox (not floats)', async () => { + const req = new Request( + `${BASE_URL}/boxes/data/?boxid=${deviceId}&phenomenon=Temperatur&bbox=1,2,east,4`, + { headers: { Authorization: `Bearer ${jwt}` } }, + ) + + let res: Response + try { + res = await boxesDataLoader({ + request: req, + params: {}, + context: {} as AppLoadContext, + }) + } catch (response) { + res = response as Response + } + + expect(res.status).toBe(422) + }) + + it('GET /boxes/data JSON 
by grouptag', async () => { + const GROUPTAG = 'bytag' + + // Add tag to device + await drizzleClient + .update(device) + .set({ tags: [GROUPTAG] }) + .where(eq(device.id, deviceId)) + + const url = `${BASE_URL}/api/boxes/data?grouptag=${GROUPTAG}&format=json&columns=sensorId,value&phenomenon=Temperatur` + const req = new Request(url, { + headers: { Authorization: `Bearer ${jwt}` }, + }) + + const res = await boxesDataLoader({ + request: req, + params: {}, + context: {} as AppLoadContext, + }) + + expect(res.status).toBe(200) + expect(res.headers.get('content-type')).toBe('application/json') + + const body = await res.json() + expect(Array.isArray(body)).toBe(true) + expect(body).toHaveLength(expectedMeasurementsCount) + }) + + afterAll(async () => { + await deleteDevice({ id: deviceId }) + await deleteDevice({ id: outdoorDeviceId }) + await deleteUserByEmail(BOXES_DATA_TEST_USER.email) + }) +})
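
For reference, a minimal client-side sketch of calling the new GET /boxes/data endpoint follows. It is illustrative only: the https://api.opensensemap.org base URL, the fetch-based client, and the downloadTemperatureJson helper name are assumptions, while the query parameters (boxid, phenomenon, format, columns, download) mirror those exercised by the tests above.

// Illustrative sketch only — not part of this change.
// Assumption: an openSenseMap API instance is reachable at the base URL below.
const API_BASE = 'https://api.opensensemap.org'

async function downloadTemperatureJson(boxId: string) {
  const params = new URLSearchParams({
    boxid: boxId,
    phenomenon: 'Temperatur',
    format: 'json',
    columns: 'sensorId,value,lat,lon',
    // download=false skips the content-disposition header so the body can be consumed in place
    download: 'false',
  })

  // The endpoint streams its response; with format=json the complete body is a single JSON array.
  const res = await fetch(`${API_BASE}/boxes/data?${params.toString()}`)
  if (!res.ok) throw new Error(`boxes/data request failed with status ${res.status}`)

  const rows = (await res.json()) as Array<Record<string, unknown>>
  console.log(`received ${rows.length} measurements`)
  return rows
}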