// 23531 lines, 753 KiB, Dart
// GENERATED CODE - DO NOT MODIFY BY HAND
|
|
|
|
part of 'model.dart';
|
|
|
|
// **************************************************************************
|
|
// SqfEntityGenerator
|
|
// **************************************************************************
|
|
|
|
// ignore_for_file: non_constant_identifier_names
|
|
|
|
// These classes were generated by SqfEntity
|
|
// Copyright (c) 2019, All rights reserved. Use of this source code is governed by a
|
|
// Apache license that can be found in the LICENSE file.
|
|
|
|
// To use these SqfEntity classes do following:
|
|
// - import model.dart into where to use
|
|
// - start typing ex:DbAthlete.select()... (add a few filters with fluent methods)...(add orderBy/orderBydesc if you want)...
|
|
// - and then just put end of filters / or end of only select() toSingle() / or toList()
|
|
// - you can select one or return List<yourObject> by your filters and orders
|
|
// - also you can batch update or batch delete by using delete/update methods instead of tosingle/tolist methods
|
|
// Enjoy.. Huseyin Tokpunar
|
|
|
|
// BEGIN TABLES
|
|
// DbAthlete TABLE
|
|
/// SqfEntity table metadata for the `athletes` table.
class TableDbAthlete extends SqfEntityTableBase {
  TableDbAthlete() {
    // declare properties of EntityTable
    tableName = 'athletes';
    primaryKeyName = 'id';
    primaryKeyType = PrimaryKeyType.integer_auto_incremental;
    useSoftDeleting = false;
    // when useSoftDeleting is true, creates a field named 'isDeleted' on the table,
    // and sets that field to '1' when an item is deleted (does not hard delete)

    // declare fields (order defines the column order of the generated table)
    fields = [
      SqfEntityFieldBase('state', DbType.text, defaultValue: 'new'),
      SqfEntityFieldBase('firstName', DbType.text),
      SqfEntityFieldBase('lastName', DbType.text),
      SqfEntityFieldBase('stravaUsername', DbType.text),
      SqfEntityFieldBase('photoPath', DbType.text),
      SqfEntityFieldBase('stravaId', DbType.integer),
      SqfEntityFieldBase('geoState', DbType.text),
      SqfEntityFieldBase('downloadInterval', DbType.integer),
      SqfEntityFieldBase('recordAggregationCount', DbType.integer),
      SqfEntityFieldBase('uuid', DbType.text),
    ];
    super.init();
  }

  static SqfEntityTableBase? _instance;

  /// Lazily-created singleton instance of this table definition.
  static SqfEntityTableBase get getInstance => _instance ??= TableDbAthlete();
}
|
|
|
|
// DbActivity TABLE
|
|
/// SqfEntity table metadata for the `activities` table.
///
/// Holds per-activity summary metrics and a cascading foreign key to
/// the owning athlete (`athletesId`).
class TableDbActivity extends SqfEntityTableBase {
  TableDbActivity() {
    // declare properties of EntityTable
    tableName = 'activities';
    primaryKeyName = 'id';
    primaryKeyType = PrimaryKeyType.integer_auto_incremental;
    useSoftDeleting = false;
    // when useSoftDeleting is true, creates a field named 'isDeleted' on the table,
    // and sets that field to '1' when an item is deleted (does not hard delete)

    // declare fields (order defines the column order of the generated table)
    fields = [
      SqfEntityFieldBase('state', DbType.text, defaultValue: 'new'),
      SqfEntityFieldBase('path', DbType.text),
      SqfEntityFieldBase('stravaId', DbType.integer),
      SqfEntityFieldBase('name', DbType.text),
      SqfEntityFieldBase('movingTime', DbType.integer),
      SqfEntityFieldBase('type', DbType.text),
      SqfEntityFieldBase('distance', DbType.integer),
      SqfEntityFieldBase('serialNumber', DbType.integer),
      SqfEntityFieldBase('timeCreated', DbType.datetime,
          minValue: DateTime.parse('1900-01-01')),
      SqfEntityFieldBase('sportName', DbType.text),
      SqfEntityFieldBase('sport', DbType.text),
      SqfEntityFieldBase('subSport', DbType.text),
      SqfEntityFieldBase('timeStamp', DbType.datetime,
          minValue: DateTime.parse('1900-01-01')),
      SqfEntityFieldBase('startTime', DbType.datetime,
          minValue: DateTime.parse('1900-01-01')),
      SqfEntityFieldBase('startPositionLat', DbType.real),
      SqfEntityFieldBase('startPositionLong', DbType.real),
      SqfEntityFieldBase('event', DbType.text),
      SqfEntityFieldBase('eventType', DbType.text),
      SqfEntityFieldBase('eventGroup', DbType.integer),
      SqfEntityFieldBase('totalDistance', DbType.integer),
      SqfEntityFieldBase('totalStrides', DbType.integer),
      SqfEntityFieldBase('totalCalories', DbType.integer),
      SqfEntityFieldBase('avgSpeed', DbType.real),
      SqfEntityFieldBase('avgSpeedByMeasurements', DbType.real),
      SqfEntityFieldBase('avgSpeedBySpeed', DbType.real),
      SqfEntityFieldBase('avgSpeedByDistance', DbType.real),
      SqfEntityFieldBase('sdevSpeed', DbType.real),
      SqfEntityFieldBase('sdevPace', DbType.real),
      SqfEntityFieldBase('minSpeed', DbType.real),
      SqfEntityFieldBase('maxSpeed', DbType.real),
      SqfEntityFieldBase('totalAscent', DbType.integer),
      SqfEntityFieldBase('totalDescent', DbType.integer),
      SqfEntityFieldBase('maxRunningCadence', DbType.integer),
      SqfEntityFieldBase('trigger', DbType.text),
      SqfEntityFieldBase('avgTemperature', DbType.integer),
      SqfEntityFieldBase('maxTemperature', DbType.integer),
      SqfEntityFieldBase('avgFractionalCadence', DbType.real),
      SqfEntityFieldBase('maxFractionalCadence', DbType.real),
      SqfEntityFieldBase('totalFractionalCycles', DbType.real),
      SqfEntityFieldBase('avgStanceTimePercent', DbType.real),
      SqfEntityFieldBase('avgStanceTime', DbType.real),
      SqfEntityFieldBase('avgHeartRate', DbType.integer),
      SqfEntityFieldBase('maxHeartRate', DbType.integer),
      SqfEntityFieldBase('avgRunningCadence', DbType.real),
      SqfEntityFieldBase('avgVerticalOscillation', DbType.real),
      SqfEntityFieldBase('totalElapsedTime', DbType.integer),
      SqfEntityFieldBase('totalTimerTime', DbType.integer),
      SqfEntityFieldBase('totalTrainingEffect', DbType.integer),
      SqfEntityFieldBase('necLat', DbType.real),
      SqfEntityFieldBase('necLong', DbType.real),
      SqfEntityFieldBase('swcLat', DbType.real),
      SqfEntityFieldBase('swcLong', DbType.real),
      SqfEntityFieldBase('firstLapIndex', DbType.integer),
      SqfEntityFieldBase('numLaps', DbType.integer),
      SqfEntityFieldBase('numSessions', DbType.integer),
      SqfEntityFieldBase('localTimestamp', DbType.datetime,
          minValue: DateTime.parse('1900-01-01')),
      SqfEntityFieldBase('avgPower', DbType.real),
      SqfEntityFieldBase('sdevPower', DbType.real),
      SqfEntityFieldBase('minPower', DbType.integer),
      SqfEntityFieldBase('maxPower', DbType.integer),
      SqfEntityFieldBase('minHeartRate', DbType.integer),
      SqfEntityFieldBase('sdevHeartRate', DbType.real),
      SqfEntityFieldBase('avgGroundTime', DbType.real),
      SqfEntityFieldBase('sdevGroundTime', DbType.real),
      SqfEntityFieldBase('avgLegSpringStiffness', DbType.real),
      SqfEntityFieldBase('sdevLegSpringStiffness', DbType.real),
      SqfEntityFieldBase('avgFormPower', DbType.real),
      SqfEntityFieldBase('sdevFormPower', DbType.real),
      SqfEntityFieldBase('avgPowerRatio', DbType.real),
      SqfEntityFieldBase('sdevPowerRatio', DbType.real),
      SqfEntityFieldBase('avgStrideRatio', DbType.real),
      SqfEntityFieldBase('sdevStrideRatio', DbType.real),
      SqfEntityFieldBase('avgStrydCadence', DbType.real),
      SqfEntityFieldBase('sdevStrydCadence', DbType.real),
      SqfEntityFieldBase('sdevVerticalOscillation', DbType.real),
      SqfEntityFieldBase('cp', DbType.real),
      SqfEntityFieldBase('ftp', DbType.real),
      SqfEntityFieldBase('nonParsable', DbType.bool),
      SqfEntityFieldBase('excluded', DbType.bool),
      SqfEntityFieldBase('manual', DbType.bool),
      SqfEntityFieldRelationshipBase(
          TableDbAthlete.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'athletesId',
          defaultValue: 0),
    ];
    super.init();
  }

  static SqfEntityTableBase? _instance;

  /// Lazily-created singleton instance of this table definition.
  static SqfEntityTableBase get getInstance => _instance ??= TableDbActivity();
}
|
|
|
|
// DbEvent TABLE
|
|
/// SqfEntity table metadata for the `events` table.
///
/// Stores per-record data points with cascading foreign keys to the
/// owning activity (`activitiesId`) and lap (`lapsId`).
class TableDbEvent extends SqfEntityTableBase {
  TableDbEvent() {
    // declare properties of EntityTable
    tableName = 'events';
    primaryKeyName = 'id';
    primaryKeyType = PrimaryKeyType.integer_auto_incremental;
    useSoftDeleting = false;
    // when useSoftDeleting is true, creates a field named 'isDeleted' on the table,
    // and sets that field to '1' when an item is deleted (does not hard delete)

    // declare fields (order defines the column order of the generated table)
    fields = [
      SqfEntityFieldBase('event', DbType.text),
      SqfEntityFieldBase('eventType', DbType.text),
      SqfEntityFieldBase('eventGroup', DbType.integer),
      SqfEntityFieldBase('timerTrigger', DbType.text),
      SqfEntityFieldBase('timeStamp', DbType.datetime,
          minValue: DateTime.parse('1900-01-01')),
      SqfEntityFieldBase('positionLat', DbType.real),
      SqfEntityFieldBase('positionLong', DbType.real),
      SqfEntityFieldBase('distance', DbType.real),
      SqfEntityFieldBase('altitude', DbType.real),
      SqfEntityFieldBase('speed', DbType.real),
      SqfEntityFieldBase('heartRate', DbType.integer),
      SqfEntityFieldBase('cadence', DbType.real),
      SqfEntityFieldBase('fractionalCadence', DbType.real),
      SqfEntityFieldBase('power', DbType.integer),
      SqfEntityFieldBase('strydCadence', DbType.real),
      SqfEntityFieldBase('groundTime', DbType.real),
      SqfEntityFieldBase('verticalOscillation', DbType.real),
      SqfEntityFieldBase('formPower', DbType.integer),
      SqfEntityFieldBase('legSpringStiffness', DbType.real),
      SqfEntityFieldBase('data', DbType.real),
      SqfEntityFieldRelationshipBase(
          TableDbActivity.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'activitiesId',
          defaultValue: 0),
      SqfEntityFieldRelationshipBase(TableDbLap.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'lapsId',
          defaultValue: 0),
    ];
    super.init();
  }

  static SqfEntityTableBase? _instance;

  /// Lazily-created singleton instance of this table definition.
  static SqfEntityTableBase get getInstance => _instance ??= TableDbEvent();
}
|
|
|
|
// DbLap TABLE
|
|
/// SqfEntity table metadata for the `laps` table.
///
/// Holds per-lap summary metrics and a cascading foreign key to the
/// owning activity (`activitiesId`).
class TableDbLap extends SqfEntityTableBase {
  TableDbLap() {
    // declare properties of EntityTable
    tableName = 'laps';
    primaryKeyName = 'id';
    primaryKeyType = PrimaryKeyType.integer_auto_incremental;
    useSoftDeleting = false;
    // when useSoftDeleting is true, creates a field named 'isDeleted' on the table,
    // and sets that field to '1' when an item is deleted (does not hard delete)

    // declare fields (order defines the column order of the generated table)
    fields = [
      SqfEntityFieldBase('timeStamp', DbType.datetime,
          minValue: DateTime.parse('1900-01-01')),
      SqfEntityFieldBase('startTime', DbType.datetime,
          minValue: DateTime.parse('1900-01-01')),
      SqfEntityFieldBase('startPositionLat', DbType.real),
      SqfEntityFieldBase('startPositionLong', DbType.real),
      SqfEntityFieldBase('endPositionLat', DbType.real),
      SqfEntityFieldBase('endPositionLong', DbType.real),
      SqfEntityFieldBase('avgHeartRate', DbType.integer),
      SqfEntityFieldBase('maxHeartRate', DbType.integer),
      SqfEntityFieldBase('avgRunningCadence', DbType.real),
      SqfEntityFieldBase('event', DbType.text),
      SqfEntityFieldBase('eventType', DbType.text),
      SqfEntityFieldBase('eventGroup', DbType.integer),
      SqfEntityFieldBase('sport', DbType.text),
      SqfEntityFieldBase('subSport', DbType.text),
      SqfEntityFieldBase('avgVerticalOscillation', DbType.real),
      SqfEntityFieldBase('totalElapsedTime', DbType.integer),
      SqfEntityFieldBase('totalTimerTime', DbType.integer),
      SqfEntityFieldBase('totalDistance', DbType.integer),
      SqfEntityFieldBase('totalStrides', DbType.integer),
      SqfEntityFieldBase('totalCalories', DbType.integer),
      SqfEntityFieldBase('avgSpeed', DbType.real),
      SqfEntityFieldBase('avgSpeedByMeasurements', DbType.real),
      SqfEntityFieldBase('avgSpeedBySpeed', DbType.real),
      SqfEntityFieldBase('avgSpeedByDistance', DbType.real),
      SqfEntityFieldBase('sdevSpeed', DbType.real),
      SqfEntityFieldBase('sdevPace', DbType.real),
      SqfEntityFieldBase('minSpeed', DbType.real),
      SqfEntityFieldBase('maxSpeed', DbType.real),
      SqfEntityFieldBase('totalAscent', DbType.integer),
      SqfEntityFieldBase('totalDescent', DbType.integer),
      SqfEntityFieldBase('avgStanceTimePercent', DbType.real),
      SqfEntityFieldBase('avgStanceTime', DbType.real),
      SqfEntityFieldBase('maxRunningCadence', DbType.integer),
      SqfEntityFieldBase('intensity', DbType.integer),
      SqfEntityFieldBase('lapTrigger', DbType.text),
      SqfEntityFieldBase('avgTemperature', DbType.integer),
      SqfEntityFieldBase('maxTemperature', DbType.integer),
      SqfEntityFieldBase('avgFractionalCadence', DbType.real),
      SqfEntityFieldBase('maxFractionalCadence', DbType.real),
      SqfEntityFieldBase('totalFractionalCycles', DbType.real),
      SqfEntityFieldBase('avgPower', DbType.real),
      SqfEntityFieldBase('minPower', DbType.integer),
      SqfEntityFieldBase('maxPower', DbType.integer),
      SqfEntityFieldBase('sdevPower', DbType.real),
      SqfEntityFieldBase('minHeartRate', DbType.integer),
      SqfEntityFieldBase('sdevHeartRate', DbType.real),
      SqfEntityFieldBase('avgGroundTime', DbType.real),
      SqfEntityFieldBase('sdevGroundTime', DbType.real),
      SqfEntityFieldBase('avgLegSpringStiffness', DbType.real),
      SqfEntityFieldBase('sdevLegSpringStiffness', DbType.real),
      SqfEntityFieldBase('avgFormPower', DbType.real),
      SqfEntityFieldBase('sdevFormPower', DbType.real),
      SqfEntityFieldBase('avgStrydCadence', DbType.real),
      SqfEntityFieldBase('sdevStrydCadence', DbType.real),
      SqfEntityFieldBase('sdevVerticalOscillation', DbType.real),
      SqfEntityFieldBase('avgPowerRatio', DbType.real),
      SqfEntityFieldBase('sdevPowerRatio', DbType.real),
      SqfEntityFieldBase('avgStrideRatio', DbType.real),
      SqfEntityFieldBase('sdevStrideRatio', DbType.real),
      SqfEntityFieldBase('cp', DbType.real),
      SqfEntityFieldBase('ftp', DbType.real),
      SqfEntityFieldBase('movingTime', DbType.integer),
      SqfEntityFieldRelationshipBase(
          TableDbActivity.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'activitiesId',
          defaultValue: 0),
    ];
    super.init();
  }

  static SqfEntityTableBase? _instance;

  /// Lazily-created singleton instance of this table definition.
  static SqfEntityTableBase get getInstance => _instance ??= TableDbLap();
}
|
|
|
|
// DbInterval TABLE
|
|
/// SqfEntity table metadata for the `intervals` table.
///
/// Holds per-interval summary metrics plus cascading foreign keys to the
/// bounding events (`firstRecordId`, `lastRecordId`), the owning athlete
/// (`athletesId`), and the owning activity (`activitiesId`).
class TableDbInterval extends SqfEntityTableBase {
  TableDbInterval() {
    // declare properties of EntityTable
    tableName = 'intervals';
    primaryKeyName = 'id';
    primaryKeyType = PrimaryKeyType.integer_auto_incremental;
    useSoftDeleting = false;
    // when useSoftDeleting is true, creates a field named 'isDeleted' on the table,
    // and sets that field to '1' when an item is deleted (does not hard delete)

    // declare fields (order defines the column order of the generated table)
    fields = [
      SqfEntityFieldBase('timeStamp', DbType.datetime,
          minValue: DateTime.parse('1900-01-01')),
      SqfEntityFieldBase('duration', DbType.integer),
      SqfEntityFieldBase('avgPower', DbType.real),
      SqfEntityFieldBase('minPower', DbType.integer),
      SqfEntityFieldBase('maxPower', DbType.integer),
      SqfEntityFieldBase('sdevPower', DbType.real),
      SqfEntityFieldBase('avgSpeed', DbType.real),
      SqfEntityFieldBase('avgSpeedByMeasurements', DbType.real),
      SqfEntityFieldBase('avgSpeedBySpeed', DbType.real),
      SqfEntityFieldBase('avgSpeedByDistance', DbType.real),
      SqfEntityFieldBase('minSpeed', DbType.real),
      SqfEntityFieldBase('maxSpeed', DbType.real),
      SqfEntityFieldBase('sdevSpeed', DbType.real),
      SqfEntityFieldBase('sdevPace', DbType.real),
      SqfEntityFieldBase('distance', DbType.integer),
      SqfEntityFieldBase('avgHeartRate', DbType.integer),
      SqfEntityFieldBase('minHeartRate', DbType.integer),
      SqfEntityFieldBase('maxHeartRate', DbType.integer),
      SqfEntityFieldBase('sdevHeartRate', DbType.real),
      SqfEntityFieldBase('avgCadence', DbType.real),
      SqfEntityFieldBase('minCadence', DbType.real),
      SqfEntityFieldBase('maxCadence', DbType.real),
      SqfEntityFieldBase('sdevCadence', DbType.real),
      SqfEntityFieldBase('avgStrydCadence', DbType.real),
      SqfEntityFieldBase('minStrydCadence', DbType.real),
      SqfEntityFieldBase('maxStrydCadence', DbType.real),
      SqfEntityFieldBase('sdevStrydCadence', DbType.real),
      SqfEntityFieldBase('avgGroundTime', DbType.real),
      SqfEntityFieldBase('minGroundTime', DbType.real),
      SqfEntityFieldBase('maxGroundTime', DbType.real),
      SqfEntityFieldBase('sdevGroundTime', DbType.real),
      SqfEntityFieldBase('avgVerticalOscillation', DbType.real),
      SqfEntityFieldBase('minVerticalOscillation', DbType.real),
      SqfEntityFieldBase('maxVerticalOscillation', DbType.real),
      SqfEntityFieldBase('sdevVerticalOscillation', DbType.real),
      SqfEntityFieldBase('avgFormPower', DbType.real),
      SqfEntityFieldBase('maxFormPower', DbType.integer),
      SqfEntityFieldBase('minFormPower', DbType.integer),
      SqfEntityFieldBase('sdevFormPower', DbType.real),
      SqfEntityFieldBase('avgLegSpringStiffness', DbType.real),
      SqfEntityFieldBase('maxLegSpringStiffness', DbType.real),
      SqfEntityFieldBase('minLegSpringStiffness', DbType.real),
      SqfEntityFieldBase('sdevLegSpringStiffness', DbType.real),
      SqfEntityFieldBase('totalAscent', DbType.integer),
      SqfEntityFieldBase('totalDescent', DbType.integer),
      SqfEntityFieldBase('cp', DbType.real),
      SqfEntityFieldBase('ftp', DbType.real),
      SqfEntityFieldBase('movingTime', DbType.integer),
      SqfEntityFieldRelationshipBase(
          TableDbEvent.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'firstRecordId',
          defaultValue: 0),
      SqfEntityFieldRelationshipBase(
          TableDbEvent.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'lastRecordId',
          defaultValue: 0),
      SqfEntityFieldRelationshipBase(
          TableDbAthlete.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'athletesId',
          defaultValue: 0),
      SqfEntityFieldRelationshipBase(
          TableDbActivity.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'activitiesId',
          defaultValue: 0),
    ];
    super.init();
  }

  static SqfEntityTableBase? _instance;

  /// Lazily-created singleton instance of this table definition.
  static SqfEntityTableBase get getInstance => _instance ??= TableDbInterval();
}
|
|
|
|
// DbWeight TABLE
|
|
/// SqfEntity table metadata for the `weights` table.
class TableDbWeight extends SqfEntityTableBase {
  TableDbWeight() {
    // declare properties of EntityTable
    tableName = 'weights';
    primaryKeyName = 'id';
    primaryKeyType = PrimaryKeyType.integer_auto_incremental;
    useSoftDeleting = false;
    // when useSoftDeleting is true, creates a field named 'isDeleted' on the table,
    // and sets that field to '1' when an item is deleted (does not hard delete)

    // declare fields (order defines the column order of the generated table)
    fields = [
      SqfEntityFieldBase('date', DbType.date,
          minValue: DateTime.parse('1900-01-01')),
      SqfEntityFieldBase('value', DbType.real),
      SqfEntityFieldRelationshipBase(
          TableDbAthlete.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'athletesId',
          defaultValue: 0),
    ];
    super.init();
  }

  static SqfEntityTableBase? _instance;

  /// Lazily-created singleton instance of this table definition.
  static SqfEntityTableBase get getInstance => _instance ??= TableDbWeight();
}
|
|
|
|
// DbHeartRateZoneSchema TABLE
|
|
/// SqfEntity table metadata for the `heartRateZoneSchemata` table.
class TableDbHeartRateZoneSchema extends SqfEntityTableBase {
  TableDbHeartRateZoneSchema() {
    // declare properties of EntityTable
    tableName = 'heartRateZoneSchemata';
    primaryKeyName = 'id';
    primaryKeyType = PrimaryKeyType.integer_auto_incremental;
    useSoftDeleting = false;
    // when useSoftDeleting is true, creates a field named 'isDeleted' on the table,
    // and sets that field to '1' when an item is deleted (does not hard delete)

    // declare fields (order defines the column order of the generated table)
    fields = [
      SqfEntityFieldBase('date', DbType.date,
          minValue: DateTime.parse('1900-01-01')),
      SqfEntityFieldBase('name', DbType.text),
      SqfEntityFieldBase('base', DbType.integer),
      SqfEntityFieldRelationshipBase(
          TableDbAthlete.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'athletesId',
          defaultValue: 0),
    ];
    super.init();
  }

  static SqfEntityTableBase? _instance;

  /// Lazily-created singleton instance of this table definition.
  static SqfEntityTableBase get getInstance =>
      _instance ??= TableDbHeartRateZoneSchema();
}
|
|
|
|
// DbHeartRateZone TABLE
|
|
/// SqfEntity table metadata for the `heartRateZone` table.
class TableDbHeartRateZone extends SqfEntityTableBase {
  TableDbHeartRateZone() {
    // declare properties of EntityTable
    tableName = 'heartRateZone';
    primaryKeyName = 'id';
    primaryKeyType = PrimaryKeyType.integer_auto_incremental;
    useSoftDeleting = false;
    // when useSoftDeleting is true, creates a field named 'isDeleted' on the table,
    // and sets that field to '1' when an item is deleted (does not hard delete)

    // declare fields (order defines the column order of the generated table)
    fields = [
      SqfEntityFieldBase('name', DbType.text),
      SqfEntityFieldBase('lowerPercentage', DbType.integer),
      SqfEntityFieldBase('upperPercentage', DbType.integer),
      SqfEntityFieldBase('lowerLimit', DbType.integer),
      SqfEntityFieldBase('upperLimit', DbType.integer),
      SqfEntityFieldBase('color', DbType.integer),
      SqfEntityFieldRelationshipBase(
          TableDbHeartRateZoneSchema.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'heartRateZoneSchemataId',
          defaultValue: 0),
    ];
    super.init();
  }

  static SqfEntityTableBase? _instance;

  /// Lazily-created singleton instance of this table definition.
  static SqfEntityTableBase get getInstance =>
      _instance ??= TableDbHeartRateZone();
}
|
|
|
|
// DbPowerZoneSchema TABLE
|
|
/// SqfEntity table metadata for the `powerZoneSchemata` table.
class TableDbPowerZoneSchema extends SqfEntityTableBase {
  TableDbPowerZoneSchema() {
    // declare properties of EntityTable
    tableName = 'powerZoneSchemata';
    primaryKeyName = 'id';
    primaryKeyType = PrimaryKeyType.integer_auto_incremental;
    useSoftDeleting = false;
    // when useSoftDeleting is true, creates a field named 'isDeleted' on the table,
    // and sets that field to '1' when an item is deleted (does not hard delete)

    // declare fields (order defines the column order of the generated table)
    fields = [
      SqfEntityFieldBase('date', DbType.date,
          minValue: DateTime.parse('1900-01-01')),
      SqfEntityFieldBase('name', DbType.text),
      SqfEntityFieldBase('base', DbType.integer),
      SqfEntityFieldRelationshipBase(
          TableDbAthlete.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'athletesId',
          defaultValue: 0),
    ];
    super.init();
  }

  static SqfEntityTableBase? _instance;

  /// Lazily-created singleton instance of this table definition.
  static SqfEntityTableBase get getInstance =>
      _instance ??= TableDbPowerZoneSchema();
}
|
|
|
|
// DbPowerZone TABLE
|
|
/// SqfEntity table metadata for the `powerZone` table.
class TableDbPowerZone extends SqfEntityTableBase {
  TableDbPowerZone() {
    // declare properties of EntityTable
    tableName = 'powerZone';
    primaryKeyName = 'id';
    primaryKeyType = PrimaryKeyType.integer_auto_incremental;
    useSoftDeleting = false;
    // when useSoftDeleting is true, creates a field named 'isDeleted' on the table,
    // and sets that field to '1' when an item is deleted (does not hard delete)

    // declare fields (order defines the column order of the generated table)
    fields = [
      SqfEntityFieldBase('name', DbType.text),
      SqfEntityFieldBase('lowerPercentage', DbType.integer),
      SqfEntityFieldBase('upperPercentage', DbType.integer),
      SqfEntityFieldBase('lowerLimit', DbType.integer),
      SqfEntityFieldBase('upperLimit', DbType.integer),
      SqfEntityFieldBase('color', DbType.integer),
      SqfEntityFieldRelationshipBase(
          TableDbPowerZoneSchema.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'powerZoneSchemataId',
          defaultValue: 0),
    ];
    super.init();
  }

  static SqfEntityTableBase? _instance;

  /// Lazily-created singleton instance of this table definition.
  static SqfEntityTableBase get getInstance => _instance ??= TableDbPowerZone();
}
|
|
|
|
// DbTag TABLE
|
|
/// SqfEntity table metadata for the `tags` table.
class TableDbTag extends SqfEntityTableBase {
  TableDbTag() {
    // declare properties of EntityTable
    tableName = 'tags';
    primaryKeyName = 'id';
    primaryKeyType = PrimaryKeyType.integer_auto_incremental;
    useSoftDeleting = false;
    // when useSoftDeleting is true, creates a field named 'isDeleted' on the table,
    // and sets that field to '1' when an item is deleted (does not hard delete)

    // declare fields (order defines the column order of the generated table)
    fields = [
      SqfEntityFieldBase('name', DbType.text),
      SqfEntityFieldBase('color', DbType.integer),
      SqfEntityFieldBase('sortOrder', DbType.integer),
      SqfEntityFieldBase('system', DbType.bool),
      SqfEntityFieldRelationshipBase(
          TableDbTagGroup.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'tagGroupsId',
          defaultValue: 0),
    ];
    super.init();
  }

  static SqfEntityTableBase? _instance;

  /// Lazily-created singleton instance of this table definition.
  static SqfEntityTableBase get getInstance => _instance ??= TableDbTag();
}
|
|
|
|
// DbTagGroup TABLE
|
|
/// SqfEntity table metadata for the `tagGroups` table.
class TableDbTagGroup extends SqfEntityTableBase {
  TableDbTagGroup() {
    // declare properties of EntityTable
    tableName = 'tagGroups';
    primaryKeyName = 'id';
    primaryKeyType = PrimaryKeyType.integer_auto_incremental;
    useSoftDeleting = false;
    // when useSoftDeleting is true, creates a field named 'isDeleted' on the table,
    // and sets that field to '1' when an item is deleted (does not hard delete)

    // declare fields (order defines the column order of the generated table)
    fields = [
      SqfEntityFieldBase('name', DbType.text),
      SqfEntityFieldBase('color', DbType.integer),
      SqfEntityFieldBase('system', DbType.bool),
      SqfEntityFieldRelationshipBase(
          TableDbAthlete.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'athletesId',
          defaultValue: 0),
    ];
    super.init();
  }

  static SqfEntityTableBase? _instance;

  /// Lazily-created singleton instance of this table definition.
  static SqfEntityTableBase get getInstance => _instance ??= TableDbTagGroup();
}
|
|
|
|
// DbLapTagging TABLE
|
|
/// SqfEntity table metadata for the `lapTaggings` join table
/// (associates tags with laps).
class TableDbLapTagging extends SqfEntityTableBase {
  TableDbLapTagging() {
    // declare properties of EntityTable
    tableName = 'lapTaggings';
    primaryKeyName = 'id';
    primaryKeyType = PrimaryKeyType.integer_auto_incremental;
    useSoftDeleting = false;
    // when useSoftDeleting is true, creates a field named 'isDeleted' on the table,
    // and sets that field to '1' when an item is deleted (does not hard delete)

    // declare fields (order defines the column order of the generated table)
    fields = [
      SqfEntityFieldBase('system', DbType.bool),
      SqfEntityFieldRelationshipBase(TableDbTag.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'tagsId',
          defaultValue: 0),
      SqfEntityFieldRelationshipBase(TableDbLap.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'lapsId',
          defaultValue: 0),
    ];
    super.init();
  }

  static SqfEntityTableBase? _instance;

  /// Lazily-created singleton instance of this table definition.
  static SqfEntityTableBase get getInstance => _instance ??= TableDbLapTagging();
}
|
|
|
|
// DbActivityTagging TABLE
|
|
/// SqfEntity table metadata for the `activityTaggings` join table
/// (associates tags with activities).
class TableDbActivityTagging extends SqfEntityTableBase {
  TableDbActivityTagging() {
    // declare properties of EntityTable
    tableName = 'activityTaggings';
    primaryKeyName = 'id';
    primaryKeyType = PrimaryKeyType.integer_auto_incremental;
    useSoftDeleting = false;
    // when useSoftDeleting is true, creates a field named 'isDeleted' on the table,
    // and sets that field to '1' when an item is deleted (does not hard delete)

    // declare fields (order defines the column order of the generated table)
    fields = [
      SqfEntityFieldBase('system', DbType.bool),
      SqfEntityFieldRelationshipBase(TableDbTag.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'tagsId',
          defaultValue: 0),
      SqfEntityFieldRelationshipBase(
          TableDbActivity.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'activitiesId',
          defaultValue: 0),
    ];
    super.init();
  }

  static SqfEntityTableBase? _instance;

  /// Lazily-created singleton instance of this table definition.
  static SqfEntityTableBase get getInstance =>
      _instance ??= TableDbActivityTagging();
}
|
|
|
|
// DbIntervalTagging TABLE
|
|
/// SqfEntity table metadata for the `intervalTaggings` join table
/// (associates tags with intervals).
class TableDbIntervalTagging extends SqfEntityTableBase {
  TableDbIntervalTagging() {
    // declare properties of EntityTable
    tableName = 'intervalTaggings';
    primaryKeyName = 'id';
    primaryKeyType = PrimaryKeyType.integer_auto_incremental;
    useSoftDeleting = false;
    // when useSoftDeleting is true, creates a field named 'isDeleted' on the table,
    // and sets that field to '1' when an item is deleted (does not hard delete)

    // declare fields (order defines the column order of the generated table)
    fields = [
      SqfEntityFieldBase('system', DbType.bool),
      SqfEntityFieldRelationshipBase(TableDbTag.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'tagsId',
          defaultValue: 0),
      SqfEntityFieldRelationshipBase(
          TableDbInterval.getInstance, DeleteRule.CASCADE,
          relationType: RelationType.ONE_TO_MANY,
          fieldName: 'intervalsId',
          defaultValue: 0),
    ];
    super.init();
  }

  static SqfEntityTableBase? _instance;

  /// Lazily-created singleton instance of this table definition.
  static SqfEntityTableBase get getInstance =>
      _instance ??= TableDbIntervalTagging();
}
|
|
|
|
// DbLog TABLE
|
|
/// SqfEntity table metadata for the `logs` table (application log entries).
class TableDbLog extends SqfEntityTableBase {
  TableDbLog() {
    // declare properties of EntityTable
    tableName = 'logs';
    primaryKeyName = 'id';
    primaryKeyType = PrimaryKeyType.integer_auto_incremental;
    useSoftDeleting = false;
    // when useSoftDeleting is true, creates a field named 'isDeleted' on the table,
    // and sets that field to '1' when an item is deleted (does not hard delete)

    // declare fields (order defines the column order of the generated table)
    fields = [
      SqfEntityFieldBase('dateTime', DbType.datetime,
          minValue: DateTime.parse('1900-01-01')),
      SqfEntityFieldBase('message', DbType.text),
      SqfEntityFieldBase('method', DbType.text),
      SqfEntityFieldBase('comment', DbType.text),
      SqfEntityFieldBase('stackTrace', DbType.text),
    ];
    super.init();
  }

  static SqfEntityTableBase? _instance;

  /// Lazily-created singleton instance of this table definition.
  static SqfEntityTableBase get getInstance => _instance ??= TableDbLog();
}
|
|
// END TABLES
|
|
|
|
// BEGIN SEQUENCES
|
|
// END SEQUENCES
|
|
|
|
// BEGIN DATABASE MODEL
|
|
/// Database model provider: wires all generated table definitions into a
/// single SqfEntity model, using connection settings taken from the
/// `encrateia` model declaration in model.dart.
class DbEncrateia extends SqfEntityModelProvider {
  DbEncrateia() {
    databaseName = encrateia.databaseName;
    password = encrateia.password;
    dbVersion = encrateia.dbVersion;
    preSaveAction = encrateia.preSaveAction;
    logFunction = encrateia.logFunction;
    // All tables that belong to this database model.
    databaseTables = [
      TableDbAthlete.getInstance,
      TableDbActivity.getInstance,
      TableDbEvent.getInstance,
      TableDbLap.getInstance,
      TableDbInterval.getInstance,
      TableDbWeight.getInstance,
      TableDbHeartRateZoneSchema.getInstance,
      TableDbHeartRateZone.getInstance,
      TableDbPowerZoneSchema.getInstance,
      TableDbPowerZone.getInstance,
      TableDbTag.getInstance,
      TableDbTagGroup.getInstance,
      TableDbLapTagging.getInstance,
      TableDbActivityTagging.getInstance,
      TableDbIntervalTagging.getInstance,
      TableDbLog.getInstance,
    ];

    bundledDatabasePath = encrateia
        .bundledDatabasePath; //'assets/sample.db'; // This value is optional. When bundledDatabasePath is empty then EntityBase creates a new database when initializing the database
    databasePath = encrateia.databasePath;
  }

  /// Returns the controller map for this model; no controllers are
  /// generated for this model, so the map is empty.
  Map<String, dynamic> getControllers() => <String, dynamic>{};
}
|
|
// END DATABASE MODEL
|
|
|
|
// BEGIN ENTITIES
|
|
// region DbAthlete
|
|
/// Generated SqfEntity model for the `athletes` table.
///
/// Every member below was produced by SqfEntityGenerator — regenerate the
/// model instead of editing this class by hand.
class DbAthlete extends TableBase {
  /// Default constructor; every column is an optional named parameter.
  /// Applies column defaults via [_setDefaultValues].
  DbAthlete(
      {this.id,
      this.state,
      this.firstName,
      this.lastName,
      this.stravaUsername,
      this.photoPath,
      this.stravaId,
      this.geoState,
      this.downloadInterval,
      this.recordAggregationCount,
      this.uuid}) {
    _setDefaultValues();
    softDeleteActivated = false;
  }

  /// Positional constructor without the primary key (typically for inserts).
  DbAthlete.withFields(
      this.state,
      this.firstName,
      this.lastName,
      this.stravaUsername,
      this.photoPath,
      this.stravaId,
      this.geoState,
      this.downloadInterval,
      this.recordAggregationCount,
      this.uuid) {
    _setDefaultValues();
  }

  /// Positional constructor including the primary key [id].
  DbAthlete.withId(
      this.id,
      this.state,
      this.firstName,
      this.lastName,
      this.stravaUsername,
      this.photoPath,
      this.stravaId,
      this.geoState,
      this.downloadInterval,
      this.recordAggregationCount,
      this.uuid) {
    _setDefaultValues();
  }

  // fromMap v2.0
  /// Builds a [DbAthlete] from a database row / JSON map [o].
  ///
  /// Keys that are absent or null leave the corresponding field untouched
  /// (i.e. null, or its column default when [setDefaultValues] is true).
  /// Numeric columns are parsed leniently via `int.tryParse(...toString())`.
  DbAthlete.fromMap(Map<String, dynamic> o, {bool setDefaultValues = true}) {
    if (setDefaultValues) {
      _setDefaultValues();
    }
    id = int.tryParse(o['id'].toString());
    if (o['state'] != null) {
      state = o['state'].toString();
    }
    if (o['firstName'] != null) {
      firstName = o['firstName'].toString();
    }
    if (o['lastName'] != null) {
      lastName = o['lastName'].toString();
    }
    if (o['stravaUsername'] != null) {
      stravaUsername = o['stravaUsername'].toString();
    }
    if (o['photoPath'] != null) {
      photoPath = o['photoPath'].toString();
    }
    if (o['stravaId'] != null) {
      stravaId = int.tryParse(o['stravaId'].toString());
    }
    if (o['geoState'] != null) {
      geoState = o['geoState'].toString();
    }
    if (o['downloadInterval'] != null) {
      downloadInterval = int.tryParse(o['downloadInterval'].toString());
    }
    if (o['recordAggregationCount'] != null) {
      recordAggregationCount =
          int.tryParse(o['recordAggregationCount'].toString());
    }
    if (o['uuid'] != null) {
      uuid = o['uuid'].toString();
    }
  }

  // FIELDS (DbAthlete)
  int? id; // primary key (auto-increment)
  String? state; // column default: 'new'
  String? firstName;
  String? lastName;
  String? stravaUsername;
  String? photoPath;
  int? stravaId;
  String? geoState;
  int? downloadInterval;
  int? recordAggregationCount;
  String? uuid;

  // end FIELDS (DbAthlete)

  // COLLECTIONS & VIRTUALS (DbAthlete)
  /// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbActivities', 'plField2'..]) or so on..
  List<DbActivity>? plDbActivities;

  /// get DbActivity(s) filtered by id=athletesId
  /// Returns null when this athlete has no [id] yet (unsaved).
  DbActivityFilterBuilder? getDbActivities(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    if (id == null) {
      return null;
    }
    return DbActivity()
        .select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
        .athletesId
        .equals(id)
        .and;
  }

  /// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbIntervals', 'plField2'..]) or so on..
  List<DbInterval>? plDbIntervals;

  /// get DbInterval(s) filtered by id=athletesId
  /// Returns null when this athlete has no [id] yet (unsaved).
  DbIntervalFilterBuilder? getDbIntervals(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    if (id == null) {
      return null;
    }
    return DbInterval()
        .select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
        .athletesId
        .equals(id)
        .and;
  }

  /// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbWeights', 'plField2'..]) or so on..
  List<DbWeight>? plDbWeights;

  /// get DbWeight(s) filtered by id=athletesId
  /// Returns null when this athlete has no [id] yet (unsaved).
  DbWeightFilterBuilder? getDbWeights(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    if (id == null) {
      return null;
    }
    return DbWeight()
        .select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
        .athletesId
        .equals(id)
        .and;
  }

  /// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbHeartRateZoneSchemas', 'plField2'..]) or so on..
  List<DbHeartRateZoneSchema>? plDbHeartRateZoneSchemas;

  /// get DbHeartRateZoneSchema(s) filtered by id=athletesId
  /// Returns null when this athlete has no [id] yet (unsaved).
  DbHeartRateZoneSchemaFilterBuilder? getDbHeartRateZoneSchemas(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    if (id == null) {
      return null;
    }
    return DbHeartRateZoneSchema()
        .select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
        .athletesId
        .equals(id)
        .and;
  }

  /// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbPowerZoneSchemas', 'plField2'..]) or so on..
  List<DbPowerZoneSchema>? plDbPowerZoneSchemas;

  /// get DbPowerZoneSchema(s) filtered by id=athletesId
  /// Returns null when this athlete has no [id] yet (unsaved).
  DbPowerZoneSchemaFilterBuilder? getDbPowerZoneSchemas(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    if (id == null) {
      return null;
    }
    return DbPowerZoneSchema()
        .select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
        .athletesId
        .equals(id)
        .and;
  }

  /// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbTagGroups', 'plField2'..]) or so on..
  List<DbTagGroup>? plDbTagGroups;

  /// get DbTagGroup(s) filtered by id=athletesId
  /// Returns null when this athlete has no [id] yet (unsaved).
  DbTagGroupFilterBuilder? getDbTagGroups(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    if (id == null) {
      return null;
    }
    return DbTagGroup()
        .select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
        .athletesId
        .equals(id)
        .and;
  }

  // END COLLECTIONS & VIRTUALS (DbAthlete)

  static const bool _softDeleteActivated = false;
  DbAthleteManager? __mnDbAthlete;

  /// Lazily-created manager that performs the raw database operations.
  DbAthleteManager get _mnDbAthlete {
    return __mnDbAthlete = __mnDbAthlete ?? DbAthleteManager();
  }

  // METHODS
  /// Converts this object to a column->value map.
  ///
  /// When [forView] is true, null columns are omitted; otherwise every
  /// column is present (null or not). `id` is always included.
  @override
  Map<String, dynamic> toMap(
      {bool forQuery = false, bool forJson = false, bool forView = false}) {
    final map = <String, dynamic>{};
    map['id'] = id;
    if (state != null || !forView) {
      map['state'] = state;
    }
    if (firstName != null || !forView) {
      map['firstName'] = firstName;
    }
    if (lastName != null || !forView) {
      map['lastName'] = lastName;
    }
    if (stravaUsername != null || !forView) {
      map['stravaUsername'] = stravaUsername;
    }
    if (photoPath != null || !forView) {
      map['photoPath'] = photoPath;
    }
    if (stravaId != null || !forView) {
      map['stravaId'] = stravaId;
    }
    if (geoState != null || !forView) {
      map['geoState'] = geoState;
    }
    if (downloadInterval != null || !forView) {
      map['downloadInterval'] = downloadInterval;
    }
    if (recordAggregationCount != null || !forView) {
      map['recordAggregationCount'] = recordAggregationCount;
    }
    if (uuid != null || !forView) {
      map['uuid'] = uuid;
    }

    return map;
  }

  /// Like [toMap], but (unless [forQuery]) also queries and embeds every
  /// child collection (activities, intervals, weights, zone schemas, tag
  /// groups) as a list of maps.
  ///
  /// NOTE(review): the `getDb...()!` calls assume [id] is non-null; calling
  /// this on an unsaved object would throw — confirm callers always save
  /// first.
  @override
  Future<Map<String, dynamic>> toMapWithChildren(
      [bool forQuery = false,
      bool forJson = false,
      bool forView = false]) async {
    final map = <String, dynamic>{};
    map['id'] = id;
    if (state != null || !forView) {
      map['state'] = state;
    }
    if (firstName != null || !forView) {
      map['firstName'] = firstName;
    }
    if (lastName != null || !forView) {
      map['lastName'] = lastName;
    }
    if (stravaUsername != null || !forView) {
      map['stravaUsername'] = stravaUsername;
    }
    if (photoPath != null || !forView) {
      map['photoPath'] = photoPath;
    }
    if (stravaId != null || !forView) {
      map['stravaId'] = stravaId;
    }
    if (geoState != null || !forView) {
      map['geoState'] = geoState;
    }
    if (downloadInterval != null || !forView) {
      map['downloadInterval'] = downloadInterval;
    }
    if (recordAggregationCount != null || !forView) {
      map['recordAggregationCount'] = recordAggregationCount;
    }
    if (uuid != null || !forView) {
      map['uuid'] = uuid;
    }

    // COLLECTIONS (DbAthlete)
    if (!forQuery) {
      map['DbActivities'] = await getDbActivities()!.toMapList();
    }
    if (!forQuery) {
      map['DbIntervals'] = await getDbIntervals()!.toMapList();
    }
    if (!forQuery) {
      map['DbWeights'] = await getDbWeights()!.toMapList();
    }
    if (!forQuery) {
      map['DbHeartRateZoneSchemas'] =
          await getDbHeartRateZoneSchemas()!.toMapList();
    }
    if (!forQuery) {
      map['DbPowerZoneSchemas'] = await getDbPowerZoneSchemas()!.toMapList();
    }
    if (!forQuery) {
      map['DbTagGroups'] = await getDbTagGroups()!.toMapList();
    }
    // END COLLECTIONS (DbAthlete)

    return map;
  }

  /// This method returns Json String [DbAthlete]
  @override
  String toJson() {
    return json.encode(toMap(forJson: true));
  }

  /// This method returns Json String [DbAthlete] including child collections.
  @override
  Future<String> toJsonWithChilds() async {
    return json.encode(await toMapWithChildren(false, true));
  }

  /// Positional argument list for inserts (primary key excluded);
  /// order matches the column list used by [upsert].
  @override
  List<dynamic> toArgs() {
    return [
      state,
      firstName,
      lastName,
      stravaUsername,
      photoPath,
      stravaId,
      geoState,
      downloadInterval,
      recordAggregationCount,
      uuid
    ];
  }

  /// Positional argument list including the primary key as first element.
  @override
  List<dynamic> toArgsWithIds() {
    return [
      id,
      state,
      firstName,
      lastName,
      stravaUsername,
      photoPath,
      stravaId,
      geoState,
      downloadInterval,
      recordAggregationCount,
      uuid
    ];
  }

  /// Fetches [uri] over HTTP and decodes the body as a JSON list of
  /// [DbAthlete]. Returns null (after logging) on any error.
  static Future<List<DbAthlete>?> fromWebUrl(Uri uri,
      {Map<String, String>? headers}) async {
    try {
      final response = await http.get(uri, headers: headers);
      return await fromJson(response.body);
    } catch (e) {
      debugPrint(
          'SQFENTITY ERROR DbAthlete.fromWebUrl: ErrorMessage: ${e.toString()}');
      return null;
    }
  }

  /// POSTs this object (as JSON) to [uri].
  Future<http.Response> postUrl(Uri uri, {Map<String, String>? headers}) {
    return http.post(uri, headers: headers, body: toJson());
  }

  /// Decodes [jsonBody] (a JSON array) into a list of [DbAthlete].
  /// Returns an empty list (after logging) if decoding/mapping fails.
  static Future<List<DbAthlete>> fromJson(String jsonBody) async {
    final Iterable list = await json.decode(jsonBody) as Iterable;
    var objList = <DbAthlete>[];
    try {
      objList = list
          .map((dbathlete) =>
              DbAthlete.fromMap(dbathlete as Map<String, dynamic>))
          .toList();
    } catch (e) {
      debugPrint(
          'SQFENTITY ERROR DbAthlete.fromJson: ErrorMessage: ${e.toString()}');
    }
    return objList;
  }

  /// Maps raw rows in [data] to [DbAthlete] objects, optionally preloading
  /// the child collections named in [preloadFields] (all of them when null).
  static Future<List<DbAthlete>> fromMapList(List<dynamic> data,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields,
      bool setDefaultValues = true}) async {
    final List<DbAthlete> objList = <DbAthlete>[];
    loadedFields = loadedFields ?? [];
    for (final map in data) {
      final obj = DbAthlete.fromMap(map as Map<String, dynamic>,
          setDefaultValues: setDefaultValues);
      // final List<String> _loadedFields = List<String>.from(loadedFields);

      // RELATIONSHIPS PRELOAD CHILD
      if (preload) {
        loadedFields = loadedFields ?? [];
        if (/*!_loadedfields!.contains('athletes.plDbActivities') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbActivities'))) {
          /*_loadedfields!.add('athletes.plDbActivities'); */ obj
              .plDbActivities =
          obj.plDbActivities ??
              await obj.getDbActivities()!.toList(
                  preload: preload,
                  preloadFields: preloadFields,
                  loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('athletes.plDbIntervals') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbIntervals'))) {
          /*_loadedfields!.add('athletes.plDbIntervals'); */ obj.plDbIntervals =
              obj.plDbIntervals ??
                  await obj.getDbIntervals()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('athletes.plDbWeights') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbWeights'))) {
          /*_loadedfields!.add('athletes.plDbWeights'); */ obj.plDbWeights =
              obj.plDbWeights ??
                  await obj.getDbWeights()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('athletes.plDbHeartRateZoneSchemas') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbHeartRateZoneSchemas'))) {
          /*_loadedfields!.add('athletes.plDbHeartRateZoneSchemas'); */ obj
              .plDbHeartRateZoneSchemas =
          obj.plDbHeartRateZoneSchemas ??
              await obj.getDbHeartRateZoneSchemas()!.toList(
                  preload: preload,
                  preloadFields: preloadFields,
                  loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('athletes.plDbPowerZoneSchemas') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbPowerZoneSchemas'))) {
          /*_loadedfields!.add('athletes.plDbPowerZoneSchemas'); */ obj
              .plDbPowerZoneSchemas =
          obj.plDbPowerZoneSchemas ??
              await obj.getDbPowerZoneSchemas()!.toList(
                  preload: preload,
                  preloadFields: preloadFields,
                  loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('athletes.plDbTagGroups') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbTagGroups'))) {
          /*_loadedfields!.add('athletes.plDbTagGroups'); */ obj.plDbTagGroups =
              obj.plDbTagGroups ??
                  await obj.getDbTagGroups()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

      objList.add(obj);
    }
    return objList;
  }

  /// returns DbAthlete by ID if exist, otherwise returns null
  /// Primary Keys: int? id
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: getById(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: getById(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns>returns [DbAthlete] if exist, otherwise returns null
  Future<DbAthlete?> getById(int? id,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    if (id == null) {
      return null;
    }
    DbAthlete? obj;
    final data = await _mnDbAthlete.getById([id]);
    if (data.length != 0) {
      obj = DbAthlete.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD CHILD
      if (preload) {
        loadedFields = loadedFields ?? [];
        if (/*!_loadedfields!.contains('athletes.plDbActivities') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbActivities'))) {
          /*_loadedfields!.add('athletes.plDbActivities'); */ obj
              .plDbActivities =
          obj.plDbActivities ??
              await obj.getDbActivities()!.toList(
                  preload: preload,
                  preloadFields: preloadFields,
                  loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('athletes.plDbIntervals') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbIntervals'))) {
          /*_loadedfields!.add('athletes.plDbIntervals'); */ obj.plDbIntervals =
              obj.plDbIntervals ??
                  await obj.getDbIntervals()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('athletes.plDbWeights') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbWeights'))) {
          /*_loadedfields!.add('athletes.plDbWeights'); */ obj.plDbWeights =
              obj.plDbWeights ??
                  await obj.getDbWeights()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('athletes.plDbHeartRateZoneSchemas') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbHeartRateZoneSchemas'))) {
          /*_loadedfields!.add('athletes.plDbHeartRateZoneSchemas'); */ obj
              .plDbHeartRateZoneSchemas =
          obj.plDbHeartRateZoneSchemas ??
              await obj.getDbHeartRateZoneSchemas()!.toList(
                  preload: preload,
                  preloadFields: preloadFields,
                  loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('athletes.plDbPowerZoneSchemas') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbPowerZoneSchemas'))) {
          /*_loadedfields!.add('athletes.plDbPowerZoneSchemas'); */ obj
              .plDbPowerZoneSchemas =
          obj.plDbPowerZoneSchemas ??
              await obj.getDbPowerZoneSchemas()!.toList(
                  preload: preload,
                  preloadFields: preloadFields,
                  loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('athletes.plDbTagGroups') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbTagGroups'))) {
          /*_loadedfields!.add('athletes.plDbTagGroups'); */ obj.plDbTagGroups =
              obj.plDbTagGroups ??
                  await obj.getDbTagGroups()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD
    } else {
      obj = null;
    }
    return obj;
  }

  /// Saves the (DbAthlete) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
  /// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
  /// <returns>Returns id
  @override
  Future<int?> save({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      id = await _mnDbAthlete.insert(this, ignoreBatch);
    } else {
      await _mnDbAthlete.update(this);
    }

    return id;
  }

  /// Saves the (DbAthlete) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
  /// Unlike [save], database errors propagate to the caller.
  /// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
  /// <returns>Returns id
  @override
  Future<int?> saveOrThrow({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      id = await _mnDbAthlete.insertOrThrow(this, ignoreBatch);

      isInsert = true;
    } else {
      // id= await _upsert(); // removed in sqfentity_gen 1.3.0+6
      await _mnDbAthlete.updateOrThrow(this);
    }

    return id;
  }

  /// saveAs DbAthlete. Returns a new Primary Key value of DbAthlete
  /// (clears [id] first so [save] performs an insert).

  /// <returns>Returns a new Primary Key value of DbAthlete
  @override
  Future<int?> saveAs({bool ignoreBatch = true}) async {
    id = null;

    return save(ignoreBatch: ignoreBatch);
  }

  /// saveAll method saves the sent List<DbAthlete> as a bulk in one transaction
  /// Returns a <List<BoolResult>>
  static Future<List<dynamic>> saveAll(List<DbAthlete> dbathletes,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    List<dynamic>? result = [];
    // If there is no open transaction, start one
    final isStartedBatch = await DbEncrateia().batchStart();
    for (final obj in dbathletes) {
      await obj.save(ignoreBatch: false);
    }
    if (!isStartedBatch) {
      result = await DbEncrateia().batchCommit(
          exclusive: exclusive,
          noResult: noResult,
          continueOnError: continueOnError);
      // Back-fill the generated primary keys by position.
      for (int i = 0; i < dbathletes.length; i++) {
        if (dbathletes[i].id == null) {
          dbathletes[i].id = result![i] as int;
        }
      }
    }
    return result!;
  }

  /// Updates if the record exists, otherwise adds a new row
  /// (single `INSERT OR REPLACE` statement; sets [saveResult] either way).
  /// <returns>Returns id
  @override
  Future<int?> upsert({bool ignoreBatch = true}) async {
    try {
      final result = await _mnDbAthlete.rawInsert(
          'INSERT OR REPLACE INTO athletes (id, state, firstName, lastName, stravaUsername, photoPath, stravaId, geoState, downloadInterval, recordAggregationCount, uuid) VALUES (?,?,?,?,?,?,?,?,?,?,?)',
          [
            id,
            state,
            firstName,
            lastName,
            stravaUsername,
            photoPath,
            stravaId,
            geoState,
            downloadInterval,
            recordAggregationCount,
            uuid
          ],
          ignoreBatch);
      if (result! > 0) {
        saveResult = BoolResult(
            success: true,
            successMessage: 'DbAthlete id=$id updated successfully');
      } else {
        saveResult = BoolResult(
            success: false, errorMessage: 'DbAthlete id=$id did not update');
      }
      return id;
    } catch (e) {
      saveResult = BoolResult(
          success: false,
          errorMessage: 'DbAthlete Save failed. Error: ${e.toString()}');
      return null;
    }
  }

  /// inserts or replaces the sent List<<DbAthlete>> as a bulk in one transaction.
  /// upsertAll() method is faster then saveAll() method. upsertAll() should be used when you are sure that the primary key is greater than zero
  /// Returns a BoolCommitResult
  @override
  Future<BoolCommitResult> upsertAll(List<DbAthlete> dbathletes,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    final results = await _mnDbAthlete.rawInsertAll(
        'INSERT OR REPLACE INTO athletes (id, state, firstName, lastName, stravaUsername, photoPath, stravaId, geoState, downloadInterval, recordAggregationCount, uuid) VALUES (?,?,?,?,?,?,?,?,?,?,?)',
        dbathletes,
        exclusive: exclusive,
        noResult: noResult,
        continueOnError: continueOnError);
    return results;
  }

  /// Deletes DbAthlete
  ///
  /// Child rows (activities, intervals, weights, zone schemas, tag groups)
  /// are deleted first; deletion stops early and reports the failure if any
  /// child delete fails.

  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    debugPrint('SQFENTITIY: delete DbAthlete invoked (id=$id)');
    var result = BoolResult(success: false);
    {
      result = await DbActivity()
          .select()
          .athletesId
          .equals(id)
          .and
          .delete(hardDelete);
    }
    if (!result.success) {
      return result;
    }
    {
      result = await DbInterval()
          .select()
          .athletesId
          .equals(id)
          .and
          .delete(hardDelete);
    }
    if (!result.success) {
      return result;
    }
    {
      result = await DbWeight()
          .select()
          .athletesId
          .equals(id)
          .and
          .delete(hardDelete);
    }
    if (!result.success) {
      return result;
    }
    {
      result = await DbHeartRateZoneSchema()
          .select()
          .athletesId
          .equals(id)
          .and
          .delete(hardDelete);
    }
    if (!result.success) {
      return result;
    }
    {
      result = await DbPowerZoneSchema()
          .select()
          .athletesId
          .equals(id)
          .and
          .delete(hardDelete);
    }
    if (!result.success) {
      return result;
    }
    {
      result = await DbTagGroup()
          .select()
          .athletesId
          .equals(id)
          .and
          .delete(hardDelete);
    }
    if (!result.success) {
      return result;
    }
    // Soft-delete is disabled for this table, so the else branch below is
    // effectively dead unless the model is regenerated with useSoftDeleting.
    if (!_softDeleteActivated || hardDelete) {
      return _mnDbAthlete
          .delete(QueryParams(whereString: 'id=?', whereArguments: [id]));
    } else {
      return _mnDbAthlete.updateBatch(
          QueryParams(whereString: 'id=?', whereArguments: [id]),
          {'isDeleted': 1});
    }
  }

  @override
  Future<BoolResult> recover([bool recoverChilds = true]) {
    // not implemented because:
    final msg =
        'set useSoftDeleting:true in the table definition of [DbAthlete] to use this feature';
    throw UnimplementedError(msg);
  }

  /// Starts a fluent query on the `athletes` table.
  @override
  DbAthleteFilterBuilder select(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    return DbAthleteFilterBuilder(this, getIsDeleted)
      ..qparams.selectColumns = columnsToSelect;
  }

  /// Starts a fluent query with `SELECT DISTINCT`.
  @override
  DbAthleteFilterBuilder distinct(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    return DbAthleteFilterBuilder(this, getIsDeleted)
      ..qparams.selectColumns = columnsToSelect
      ..qparams.distinct = true;
  }

  /// Applies column default values to still-null fields.
  void _setDefaultValues() {
    state = state ?? 'new';
  }

  /// Clears the primary key if the last operation was an insert
  /// (used when rolling back a failed batch).
  @override
  void rollbackPk() {
    if (isInsert == true) {
      id = null;
    }
  }

  // END METHODS
  // BEGIN CUSTOM CODE
  /*
      you can define customCode property of your SqfEntityTable constant. For example:
      const tablePerson = SqfEntityTable(
      tableName: 'person',
      primaryKeyName: 'id',
      primaryKeyType: PrimaryKeyType.integer_auto_incremental,
      fields: [
        SqfEntityField('firstName', DbType.text),
        SqfEntityField('lastName', DbType.text),
      ],
      customCode: '''
       String fullName()
       {
         return '$firstName $lastName';
       }
      ''');
  */
  // END CUSTOM CODE
}
|
|
// endregion dbathlete
|
|
|
|
// region DbAthleteField
|
|
/// Typed column accessor for `athletes` queries.
///
/// Each filter operator simply delegates to [FilterBase] and casts the
/// result back to [DbAthleteFilterBuilder] (or [DbAthleteField] for [not])
/// so the fluent chain keeps its concrete type.
class DbAthleteField extends FilterBase {
  DbAthleteField(DbAthleteFilterBuilder dbathleteFB) : super(dbathleteFB);

  /// Delegates to [FilterBase.equals]; returns the typed builder.
  @override
  DbAthleteFilterBuilder equals(dynamic pValue) =>
      super.equals(pValue) as DbAthleteFilterBuilder;

  /// Delegates to [FilterBase.equalsOrNull]; returns the typed builder.
  @override
  DbAthleteFilterBuilder equalsOrNull(dynamic pValue) =>
      super.equalsOrNull(pValue) as DbAthleteFilterBuilder;

  /// Delegates to [FilterBase.isNull]; returns the typed builder.
  @override
  DbAthleteFilterBuilder isNull() => super.isNull() as DbAthleteFilterBuilder;

  /// Delegates to [FilterBase.contains]; returns the typed builder.
  @override
  DbAthleteFilterBuilder contains(dynamic pValue) =>
      super.contains(pValue) as DbAthleteFilterBuilder;

  /// Delegates to [FilterBase.startsWith]; returns the typed builder.
  @override
  DbAthleteFilterBuilder startsWith(dynamic pValue) =>
      super.startsWith(pValue) as DbAthleteFilterBuilder;

  /// Delegates to [FilterBase.endsWith]; returns the typed builder.
  @override
  DbAthleteFilterBuilder endsWith(dynamic pValue) =>
      super.endsWith(pValue) as DbAthleteFilterBuilder;

  /// Delegates to [FilterBase.between]; returns the typed builder.
  @override
  DbAthleteFilterBuilder between(dynamic pFirst, dynamic pLast) =>
      super.between(pFirst, pLast) as DbAthleteFilterBuilder;

  /// Delegates to [FilterBase.greaterThan]; returns the typed builder.
  @override
  DbAthleteFilterBuilder greaterThan(dynamic pValue) =>
      super.greaterThan(pValue) as DbAthleteFilterBuilder;

  /// Delegates to [FilterBase.lessThan]; returns the typed builder.
  @override
  DbAthleteFilterBuilder lessThan(dynamic pValue) =>
      super.lessThan(pValue) as DbAthleteFilterBuilder;

  /// Delegates to [FilterBase.greaterThanOrEquals]; returns the typed builder.
  @override
  DbAthleteFilterBuilder greaterThanOrEquals(dynamic pValue) =>
      super.greaterThanOrEquals(pValue) as DbAthleteFilterBuilder;

  /// Delegates to [FilterBase.lessThanOrEquals]; returns the typed builder.
  @override
  DbAthleteFilterBuilder lessThanOrEquals(dynamic pValue) =>
      super.lessThanOrEquals(pValue) as DbAthleteFilterBuilder;

  /// Delegates to [FilterBase.inValues]; returns the typed builder.
  @override
  DbAthleteFilterBuilder inValues(dynamic pValue) =>
      super.inValues(pValue) as DbAthleteFilterBuilder;

  /// Delegates to [FilterBase.not]; returns the typed field accessor.
  @override
  DbAthleteField get not => super.not as DbAthleteField;
}
|
|
// endregion DbAthleteField
|
|
|
|
// region DbAthleteFilterBuilder
|
|
class DbAthleteFilterBuilder extends ConjunctionBase {
|
|
  /// Creates a filter builder bound to [obj]'s manager; copies the
  /// soft-delete setting from the source object at construction time.
  DbAthleteFilterBuilder(DbAthlete obj, bool? getIsDeleted)
      : super(obj, getIsDeleted) {
    _mnDbAthlete = obj._mnDbAthlete;
    _softDeleteActivated = obj.softDeleteActivated;
  }

  // Copied from the source DbAthlete in the constructor above.
  bool _softDeleteActivated = false;
  DbAthleteManager? _mnDbAthlete;
|
|
|
|
  // Fluent chain helpers: each one forwards to ConjunctionBase and returns
  // `this` so the call chain keeps the concrete DbAthleteFilterBuilder type.

  /// put the sql keyword 'AND'
  @override
  DbAthleteFilterBuilder get and {
    super.and;
    return this;
  }

  /// put the sql keyword 'OR'
  @override
  DbAthleteFilterBuilder get or {
    super.or;
    return this;
  }

  /// open parentheses
  @override
  DbAthleteFilterBuilder get startBlock {
    super.startBlock;
    return this;
  }

  /// String whereCriteria, write raw query without 'where' keyword. Like this: 'field1 like 'test%' and field2 = 3'
  @override
  DbAthleteFilterBuilder where(String? whereCriteria,
      {dynamic parameterValue}) {
    super.where(whereCriteria, parameterValue: parameterValue);
    return this;
  }

  /// page = page number,
  /// pagesize = row(s) per page
  @override
  DbAthleteFilterBuilder page(int page, int pagesize) {
    super.page(page, pagesize);
    return this;
  }

  /// int count = LIMIT
  @override
  DbAthleteFilterBuilder top(int count) {
    super.top(count);
    return this;
  }

  /// close parentheses
  @override
  DbAthleteFilterBuilder get endBlock {
    super.endBlock;
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbAthleteFilterBuilder orderBy(dynamic argFields) {
    super.orderBy(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbAthleteFilterBuilder orderByDesc(dynamic argFields) {
    super.orderByDesc(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbAthleteFilterBuilder groupBy(dynamic argFields) {
    super.groupBy(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbAthleteFilterBuilder having(dynamic argFields) {
    super.having(argFields);
    return this;
  }
|
|
|
|
  /// Builds a [DbAthleteField] bound to this builder for column [colName]
  /// of type [dbtype].
  ///
  /// NOTE(review): the [field] parameter is never read — a fresh
  /// DbAthleteField is created on every call, so the `_x = _setField(_x, ...)`
  /// caching pattern in the getters below caches nothing (generated code).
  DbAthleteField _setField(
      DbAthleteField? field, String colName, DbType dbtype) {
    return DbAthleteField(this)
      ..param = DbParameter(
          dbType: dbtype, columnName: colName, wStartBlock: openedBlock);
  }
|
|
|
|
  // Column accessors: one getter per `athletes` column, each producing a
  // DbAthleteField via _setField so conditions can be chained fluently,
  // e.g. DbAthlete().select().firstName.equals('x').

  DbAthleteField? _id;
  DbAthleteField get id {
    return _id = _setField(_id, 'id', DbType.integer);
  }

  DbAthleteField? _state;
  DbAthleteField get state {
    return _state = _setField(_state, 'state', DbType.text);
  }

  DbAthleteField? _firstName;
  DbAthleteField get firstName {
    return _firstName = _setField(_firstName, 'firstName', DbType.text);
  }

  DbAthleteField? _lastName;
  DbAthleteField get lastName {
    return _lastName = _setField(_lastName, 'lastName', DbType.text);
  }

  DbAthleteField? _stravaUsername;
  DbAthleteField get stravaUsername {
    return _stravaUsername =
        _setField(_stravaUsername, 'stravaUsername', DbType.text);
  }

  DbAthleteField? _photoPath;
  DbAthleteField get photoPath {
    return _photoPath = _setField(_photoPath, 'photoPath', DbType.text);
  }

  DbAthleteField? _stravaId;
  DbAthleteField get stravaId {
    return _stravaId = _setField(_stravaId, 'stravaId', DbType.integer);
  }

  DbAthleteField? _geoState;
  DbAthleteField get geoState {
    return _geoState = _setField(_geoState, 'geoState', DbType.text);
  }

  DbAthleteField? _downloadInterval;
  DbAthleteField get downloadInterval {
    return _downloadInterval =
        _setField(_downloadInterval, 'downloadInterval', DbType.integer);
  }

  DbAthleteField? _recordAggregationCount;
  DbAthleteField get recordAggregationCount {
    return _recordAggregationCount = _setField(
        _recordAggregationCount, 'recordAggregationCount', DbType.integer);
  }

  DbAthleteField? _uuid;
  DbAthleteField get uuid {
    return _uuid = _setField(_uuid, 'uuid', DbType.text);
  }
|
|
|
|
  /// Deletes List<DbAthlete> bulk by query.
  ///
  /// First cascade-deletes rows in every child table that references
  /// 'athletes' via 'athletesId' (DeleteRule.CASCADE), using a primary-key
  /// sub-select built from the current filter. If any child delete fails,
  /// that failing result is returned immediately and the parent rows are
  /// left untouched.
  ///
  /// When soft deleting is active and [hardDelete] is false, matching rows
  /// are flagged with isDeleted=1 instead of being removed.
  ///
  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    buildParameters();
    var r = BoolResult(success: false);
    // NOTE(review): toListPrimaryKeySQL(false) is invoked once per child
    // table below but always produces the same sql/args pair for the current
    // filter — presumably emitted repeatedly by the generator for uniformity.
    // Delete sub records where in (DbActivity) according to DeleteRule.CASCADE
    final idListDbActivityBYathletesId = toListPrimaryKeySQL(false);
    final resDbActivityBYathletesId = await DbActivity()
        .select()
        .where('athletesId IN (${idListDbActivityBYathletesId['sql']})',
            parameterValue: idListDbActivityBYathletesId['args'])
        .delete(hardDelete);
    if (!resDbActivityBYathletesId.success) {
      return resDbActivityBYathletesId;
    }
    // Delete sub records where in (DbInterval) according to DeleteRule.CASCADE
    final idListDbIntervalBYathletesId = toListPrimaryKeySQL(false);
    final resDbIntervalBYathletesId = await DbInterval()
        .select()
        .where('athletesId IN (${idListDbIntervalBYathletesId['sql']})',
            parameterValue: idListDbIntervalBYathletesId['args'])
        .delete(hardDelete);
    if (!resDbIntervalBYathletesId.success) {
      return resDbIntervalBYathletesId;
    }
    // Delete sub records where in (DbWeight) according to DeleteRule.CASCADE
    final idListDbWeightBYathletesId = toListPrimaryKeySQL(false);
    final resDbWeightBYathletesId = await DbWeight()
        .select()
        .where('athletesId IN (${idListDbWeightBYathletesId['sql']})',
            parameterValue: idListDbWeightBYathletesId['args'])
        .delete(hardDelete);
    if (!resDbWeightBYathletesId.success) {
      return resDbWeightBYathletesId;
    }
    // Delete sub records where in (DbHeartRateZoneSchema) according to DeleteRule.CASCADE
    final idListDbHeartRateZoneSchemaBYathletesId = toListPrimaryKeySQL(false);
    final resDbHeartRateZoneSchemaBYathletesId = await DbHeartRateZoneSchema()
        .select()
        .where(
            'athletesId IN (${idListDbHeartRateZoneSchemaBYathletesId['sql']})',
            parameterValue: idListDbHeartRateZoneSchemaBYathletesId['args'])
        .delete(hardDelete);
    if (!resDbHeartRateZoneSchemaBYathletesId.success) {
      return resDbHeartRateZoneSchemaBYathletesId;
    }
    // Delete sub records where in (DbPowerZoneSchema) according to DeleteRule.CASCADE
    final idListDbPowerZoneSchemaBYathletesId = toListPrimaryKeySQL(false);
    final resDbPowerZoneSchemaBYathletesId = await DbPowerZoneSchema()
        .select()
        .where('athletesId IN (${idListDbPowerZoneSchemaBYathletesId['sql']})',
            parameterValue: idListDbPowerZoneSchemaBYathletesId['args'])
        .delete(hardDelete);
    if (!resDbPowerZoneSchemaBYathletesId.success) {
      return resDbPowerZoneSchemaBYathletesId;
    }
    // Delete sub records where in (DbTagGroup) according to DeleteRule.CASCADE
    final idListDbTagGroupBYathletesId = toListPrimaryKeySQL(false);
    final resDbTagGroupBYathletesId = await DbTagGroup()
        .select()
        .where('athletesId IN (${idListDbTagGroupBYathletesId['sql']})',
            parameterValue: idListDbTagGroupBYathletesId['args'])
        .delete(hardDelete);
    if (!resDbTagGroupBYathletesId.success) {
      return resDbTagGroupBYathletesId;
    }

    // All child deletes succeeded: now soft-delete (flag) or hard-delete the
    // matching athlete rows themselves.
    if (_softDeleteActivated && !hardDelete) {
      r = await _mnDbAthlete!.updateBatch(qparams, {'isDeleted': 1});
    } else {
      r = await _mnDbAthlete!.delete(qparams);
    }
    return r;
  }
|
|
|
|
  /// Batch-updates all rows matching the current filter.
  ///
  /// using:
  /// update({'fieldName': Value})
  /// fieldName must be String. Value is dynamic, it can be any of the (int, bool, String.. )
  @override
  Future<BoolResult> update(Map<String, dynamic> values) {
    buildParameters();
    // SQLite UPDATE does not accept LIMIT/OFFSET directly, so when either is
    // set the WHERE clause is rewritten as a primary-key sub-select that
    // carries the LIMIT/OFFSET.
    if (qparams.limit! > 0 || qparams.offset! > 0) {
      qparams.whereString =
          'id IN (SELECT id from athletes ${qparams.whereString!.isNotEmpty ? 'WHERE ${qparams.whereString}' : ''}${qparams.limit! > 0 ? ' LIMIT ${qparams.limit}' : ''}${qparams.offset! > 0 ? ' OFFSET ${qparams.offset}' : ''})';
    }
    return _mnDbAthlete!.updateBatch(qparams, values);
  }
|
|
|
|
  /// This method always returns [DbAthlete] Obj if exist, otherwise returns null.
  ///
  /// Runs the current query with LIMIT 1 and maps the first row (if any).
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toSingle(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent
  ///
  /// <returns> DbAthlete?
  @override
  Future<DbAthlete?> toSingle(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    buildParameters(pSize: 1);
    final objFuture = _mnDbAthlete!.toList(qparams);
    final data = await objFuture;
    DbAthlete? obj;
    if (data.isNotEmpty) {
      obj = DbAthlete.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD CHILD
      // For each child collection: load it only when preloading is on and it
      // is either unrestricted (preloadFields == null) or explicitly listed.
      // The '?? ' keeps an already-populated collection instead of re-querying.
      // Children are loaded with loadParents:false to avoid cycling back to
      // this athlete. (The /* ... */ fragments are disabled generator code.)
      if (preload) {
        loadedFields = loadedFields ?? [];
        if (/*!_loadedfields!.contains('athletes.plDbActivities') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbActivities'))) {
          /*_loadedfields!.add('athletes.plDbActivities'); */ obj
                  .plDbActivities =
              obj.plDbActivities ??
                  await obj.getDbActivities()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('athletes.plDbIntervals') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbIntervals'))) {
          /*_loadedfields!.add('athletes.plDbIntervals'); */ obj.plDbIntervals =
              obj.plDbIntervals ??
                  await obj.getDbIntervals()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('athletes.plDbWeights') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbWeights'))) {
          /*_loadedfields!.add('athletes.plDbWeights'); */ obj.plDbWeights =
              obj.plDbWeights ??
                  await obj.getDbWeights()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('athletes.plDbHeartRateZoneSchemas') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbHeartRateZoneSchemas'))) {
          /*_loadedfields!.add('athletes.plDbHeartRateZoneSchemas'); */ obj
                  .plDbHeartRateZoneSchemas =
              obj.plDbHeartRateZoneSchemas ??
                  await obj.getDbHeartRateZoneSchemas()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('athletes.plDbPowerZoneSchemas') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbPowerZoneSchemas'))) {
          /*_loadedfields!.add('athletes.plDbPowerZoneSchemas'); */ obj
                  .plDbPowerZoneSchemas =
              obj.plDbPowerZoneSchemas ??
                  await obj.getDbPowerZoneSchemas()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('athletes.plDbTagGroups') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbTagGroups'))) {
          /*_loadedfields!.add('athletes.plDbTagGroups'); */ obj.plDbTagGroups =
              obj.plDbTagGroups ??
                  await obj.getDbTagGroups()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

    } else {
      obj = null;
    }
    return obj;
  }
|
|
|
|
  /// This method always returns a non-null [DbAthlete].
  ///
  /// Delegates to [toSingle]; when no row matches, returns a fresh empty
  /// DbAthlete() instead of null.
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toSingle(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent
  ///
  /// <returns> DbAthlete
  @override
  Future<DbAthlete> toSingleOrDefault(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    return await toSingle(
            preload: preload,
            preloadFields: preloadFields,
            loadParents: loadParents,
            loadedFields: loadedFields) ??
        DbAthlete();
  }
|
|
|
|
  /// This method returns int. [DbAthlete]
  ///
  /// Counts rows matching the current filter via 'SELECT COUNT(1)'.
  /// The optional [dbathleteCount] callback also receives the count.
  /// <returns>int
  @override
  Future<int> toCount([VoidCallback Function(int c)? dbathleteCount]) async {
    buildParameters();
    // Replace the select list with an aggregate; the single result row
    // carries the count in column 'CNT'.
    qparams.selectColumns = ['COUNT(1) AS CNT'];
    final dbathletesFuture = await _mnDbAthlete!.toList(qparams);
    final int count = dbathletesFuture[0]['CNT'] as int;
    if (dbathleteCount != null) {
      dbathleteCount(count);
    }
    return count;
  }
|
|
|
|
  /// This method returns List<DbAthlete> [DbAthlete]
  ///
  /// Runs the current query and maps every row through DbAthlete.fromMapList.
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toList(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toList(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent
  ///
  /// <returns>List<DbAthlete>
  @override
  Future<List<DbAthlete>> toList(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    final data = await toMapList();
    // Default values are only applied when the full column set was selected;
    // with an explicit column projection missing fields stay null.
    final List<DbAthlete> dbathletesData = await DbAthlete.fromMapList(data,
        preload: preload,
        preloadFields: preloadFields,
        loadParents: loadParents,
        loadedFields: loadedFields,
        setDefaultValues: qparams.selectColumns == null);
    return dbathletesData;
  }
|
|
|
|
  /// This method returns Json String [DbAthlete]
  ///
  /// Serializes all matching rows (without child relations) to a JSON array.
  @override
  Future<String> toJson() async {
    final list = <dynamic>[];
    final data = await toList();
    for (var o in data) {
      // forJson:true produces JSON-safe values (e.g. for DateTime fields).
      list.add(o.toMap(forJson: true));
    }
    return json.encode(list);
  }
|
|
|
|
  /// This method returns Json String. [DbAthlete]
  ///
  /// Like [toJson], but each object is serialized together with its child
  /// relations via toMapWithChildren.
  @override
  Future<String> toJsonWithChilds() async {
    final list = <dynamic>[];
    final data = await toList();
    for (var o in data) {
      list.add(await o.toMapWithChildren(false, true));
    }
    return json.encode(list);
  }
|
|
|
|
  /// This method returns List<dynamic>. [DbAthlete]
  ///
  /// Executes the current query and returns the raw row maps without
  /// converting them to DbAthlete objects.
  /// <returns>List<dynamic>
  @override
  Future<List<dynamic>> toMapList() async {
    buildParameters();
    return await _mnDbAthlete!.toList(qparams);
  }
|
|
|
|
  /// This method returns Primary Key List SQL and Parameters retVal = Map<String,dynamic>. [DbAthlete]
  ///
  /// Builds (but does not execute) a 'SELECT id' statement for the current
  /// filter — used e.g. by [delete] to cascade into child tables.
  /// retVal['sql'] = SQL statement string, retVal['args'] = whereArguments List<dynamic>;
  /// <returns>List<String>
  @override
  Map<String, dynamic> toListPrimaryKeySQL([bool buildParams = true]) {
    final Map<String, dynamic> _retVal = <String, dynamic>{};
    // Pass buildParams:false when qparams has already been prepared by the
    // caller (avoids rebuilding the WHERE clause).
    if (buildParams) {
      buildParameters();
    }
    _retVal['sql'] = 'SELECT `id` FROM athletes WHERE ${qparams.whereString}';
    _retVal['args'] = qparams.whereArguments;
    return _retVal;
  }
|
|
|
|
  /// This method returns Primary Key List<int>.
  ///
  /// Executes the current query selecting only the 'id' column and collects
  /// the values into a list.
  /// <returns>List<int>
  @override
  Future<List<int>> toListPrimaryKey([bool buildParams = true]) async {
    if (buildParams) {
      buildParameters();
    }
    final List<int> idData = <int>[];
    // Restrict the projection to the primary key only.
    qparams.selectColumns = ['id'];
    final idFuture = await _mnDbAthlete!.toList(qparams);

    final int count = idFuture.length;
    for (int i = 0; i < count; i++) {
      idData.add(idFuture[i]['id'] as int);
    }
    return idData;
  }
|
|
|
|
  /// Returns List<dynamic> for selected columns. Use this method for 'groupBy' with min,max,avg.. [DbAthlete]
  ///
  /// Executes the current query and returns each raw row map as-is, useful
  /// for aggregate/grouped projections that do not map onto DbAthlete.
  /// Sample usage: (see EXAMPLE 4.2 at https://github.com/hhtokpinar/sqfEntity#group-by)
  @override
  Future<List<dynamic>> toListObject() async {
    buildParameters();

    final objectFuture = _mnDbAthlete!.toList(qparams);

    final List<dynamic> objectsData = <dynamic>[];
    final data = await objectFuture;
    final int count = data.length;
    for (int i = 0; i < count; i++) {
      objectsData.add(data[i]);
    }
    return objectsData;
  }
|
|
|
|
  /// Returns List<String> for selected first column.
  ///
  /// Requires that the caller set columnsToSelect; the first selected
  /// column of every row is converted with toString(). The optional
  /// [listString] callback also receives the resulting list.
  /// Sample usage: await DbAthlete.select(columnsToSelect: ['columnName']).toListString()
  @override
  Future<List<String>> toListString(
      [VoidCallback Function(List<String> o)? listString]) async {
    buildParameters();

    final objectFuture = _mnDbAthlete!.toList(qparams);

    final List<String> objectsData = <String>[];
    final data = await objectFuture;
    final int count = data.length;
    for (int i = 0; i < count; i++) {
      // NOTE(review): assumes selectColumns was set by the caller; a null
      // selectColumns would throw here.
      objectsData.add(data[i][qparams.selectColumns![0]].toString());
    }
    if (listString != null) {
      listString(objectsData);
    }
    return objectsData;
  }
|
|
}
|
|
// endregion DbAthleteFilterBuilder
|
|
|
|
// region DbAthleteFields
|
|
/// Static, lazily-cached [TableField] descriptors for the 'athletes' table.
///
/// Each getter builds its TableField once via SqlSyntax.setField and caches
/// it in a private static backing field; used to reference columns in
/// queries (e.g. orderBy, groupBy) without string literals.
class DbAthleteFields {
  // 'id' column (INTEGER primary key).
  static TableField? _fId;

  static TableField get id {
    return _fId = _fId ?? SqlSyntax.setField(_fId, 'id', DbType.integer);
  }

  // 'state' column (TEXT).
  static TableField? _fState;

  static TableField get state {
    return _fState =
        _fState ?? SqlSyntax.setField(_fState, 'state', DbType.text);
  }

  // 'firstName' column (TEXT).
  static TableField? _fFirstName;

  static TableField get firstName {
    return _fFirstName = _fFirstName ??
        SqlSyntax.setField(_fFirstName, 'firstName', DbType.text);
  }

  // 'lastName' column (TEXT).
  static TableField? _fLastName;

  static TableField get lastName {
    return _fLastName =
        _fLastName ?? SqlSyntax.setField(_fLastName, 'lastName', DbType.text);
  }

  // 'stravaUsername' column (TEXT).
  static TableField? _fStravaUsername;

  static TableField get stravaUsername {
    return _fStravaUsername = _fStravaUsername ??
        SqlSyntax.setField(_fStravaUsername, 'stravaUsername', DbType.text);
  }

  // 'photoPath' column (TEXT).
  static TableField? _fPhotoPath;

  static TableField get photoPath {
    return _fPhotoPath = _fPhotoPath ??
        SqlSyntax.setField(_fPhotoPath, 'photoPath', DbType.text);
  }

  // 'stravaId' column (INTEGER).
  static TableField? _fStravaId;

  static TableField get stravaId {
    return _fStravaId = _fStravaId ??
        SqlSyntax.setField(_fStravaId, 'stravaId', DbType.integer);
  }

  // 'geoState' column (TEXT).
  static TableField? _fGeoState;

  static TableField get geoState {
    return _fGeoState =
        _fGeoState ?? SqlSyntax.setField(_fGeoState, 'geoState', DbType.text);
  }

  // 'downloadInterval' column (INTEGER).
  static TableField? _fDownloadInterval;

  static TableField get downloadInterval {
    return _fDownloadInterval = _fDownloadInterval ??
        SqlSyntax.setField(
            _fDownloadInterval, 'downloadInterval', DbType.integer);
  }

  // 'recordAggregationCount' column (INTEGER).
  static TableField? _fRecordAggregationCount;

  static TableField get recordAggregationCount {
    return _fRecordAggregationCount = _fRecordAggregationCount ??
        SqlSyntax.setField(
            _fRecordAggregationCount, 'recordAggregationCount', DbType.integer);
  }

  // 'uuid' column (TEXT).
  static TableField? _fUuid;

  static TableField get uuid {
    return _fUuid = _fUuid ?? SqlSyntax.setField(_fUuid, 'uuid', DbType.text);
  }
}
|
|
// endregion DbAthleteFields
|
|
|
|
//region DbAthleteManager
|
|
/// Low-level data-access manager for the 'athletes' table.
///
/// Thin [SqfEntityProvider] subclass binding the shared [DbEncrateia]
/// database model to this table's name, primary-key list, and the WHERE
/// template used for single-row lookups.
class DbAthleteManager extends SqfEntityProvider {
  DbAthleteManager()
      : super(DbEncrateia(),
            tableName: _tableName,
            primaryKeyList: _primaryKeyList,
            whereStr: _whereStr);
  static const String _tableName = 'athletes';
  static const List<String> _primaryKeyList = ['id'];
  // Parameterized WHERE clause for lookups by primary key.
  static const String _whereStr = 'id=?';
}
|
|
|
|
//endregion DbAthleteManager
|
|
// region DbActivity
|
|
class DbActivity extends TableBase {
|
|
DbActivity(
|
|
{this.id,
|
|
this.state,
|
|
this.path,
|
|
this.stravaId,
|
|
this.name,
|
|
this.movingTime,
|
|
this.type,
|
|
this.distance,
|
|
this.serialNumber,
|
|
this.timeCreated,
|
|
this.sportName,
|
|
this.sport,
|
|
this.subSport,
|
|
this.timeStamp,
|
|
this.startTime,
|
|
this.startPositionLat,
|
|
this.startPositionLong,
|
|
this.event,
|
|
this.eventType,
|
|
this.eventGroup,
|
|
this.totalDistance,
|
|
this.totalStrides,
|
|
this.totalCalories,
|
|
this.avgSpeed,
|
|
this.avgSpeedByMeasurements,
|
|
this.avgSpeedBySpeed,
|
|
this.avgSpeedByDistance,
|
|
this.sdevSpeed,
|
|
this.sdevPace,
|
|
this.minSpeed,
|
|
this.maxSpeed,
|
|
this.totalAscent,
|
|
this.totalDescent,
|
|
this.maxRunningCadence,
|
|
this.trigger,
|
|
this.avgTemperature,
|
|
this.maxTemperature,
|
|
this.avgFractionalCadence,
|
|
this.maxFractionalCadence,
|
|
this.totalFractionalCycles,
|
|
this.avgStanceTimePercent,
|
|
this.avgStanceTime,
|
|
this.avgHeartRate,
|
|
this.maxHeartRate,
|
|
this.avgRunningCadence,
|
|
this.avgVerticalOscillation,
|
|
this.totalElapsedTime,
|
|
this.totalTimerTime,
|
|
this.totalTrainingEffect,
|
|
this.necLat,
|
|
this.necLong,
|
|
this.swcLat,
|
|
this.swcLong,
|
|
this.firstLapIndex,
|
|
this.numLaps,
|
|
this.numSessions,
|
|
this.localTimestamp,
|
|
this.avgPower,
|
|
this.sdevPower,
|
|
this.minPower,
|
|
this.maxPower,
|
|
this.minHeartRate,
|
|
this.sdevHeartRate,
|
|
this.avgGroundTime,
|
|
this.sdevGroundTime,
|
|
this.avgLegSpringStiffness,
|
|
this.sdevLegSpringStiffness,
|
|
this.avgFormPower,
|
|
this.sdevFormPower,
|
|
this.avgPowerRatio,
|
|
this.sdevPowerRatio,
|
|
this.avgStrideRatio,
|
|
this.sdevStrideRatio,
|
|
this.avgStrydCadence,
|
|
this.sdevStrydCadence,
|
|
this.sdevVerticalOscillation,
|
|
this.cp,
|
|
this.ftp,
|
|
this.nonParsable,
|
|
this.excluded,
|
|
this.manual,
|
|
this.athletesId}) {
|
|
_setDefaultValues();
|
|
softDeleteActivated = false;
|
|
}
|
|
DbActivity.withFields(
|
|
this.state,
|
|
this.path,
|
|
this.stravaId,
|
|
this.name,
|
|
this.movingTime,
|
|
this.type,
|
|
this.distance,
|
|
this.serialNumber,
|
|
this.timeCreated,
|
|
this.sportName,
|
|
this.sport,
|
|
this.subSport,
|
|
this.timeStamp,
|
|
this.startTime,
|
|
this.startPositionLat,
|
|
this.startPositionLong,
|
|
this.event,
|
|
this.eventType,
|
|
this.eventGroup,
|
|
this.totalDistance,
|
|
this.totalStrides,
|
|
this.totalCalories,
|
|
this.avgSpeed,
|
|
this.avgSpeedByMeasurements,
|
|
this.avgSpeedBySpeed,
|
|
this.avgSpeedByDistance,
|
|
this.sdevSpeed,
|
|
this.sdevPace,
|
|
this.minSpeed,
|
|
this.maxSpeed,
|
|
this.totalAscent,
|
|
this.totalDescent,
|
|
this.maxRunningCadence,
|
|
this.trigger,
|
|
this.avgTemperature,
|
|
this.maxTemperature,
|
|
this.avgFractionalCadence,
|
|
this.maxFractionalCadence,
|
|
this.totalFractionalCycles,
|
|
this.avgStanceTimePercent,
|
|
this.avgStanceTime,
|
|
this.avgHeartRate,
|
|
this.maxHeartRate,
|
|
this.avgRunningCadence,
|
|
this.avgVerticalOscillation,
|
|
this.totalElapsedTime,
|
|
this.totalTimerTime,
|
|
this.totalTrainingEffect,
|
|
this.necLat,
|
|
this.necLong,
|
|
this.swcLat,
|
|
this.swcLong,
|
|
this.firstLapIndex,
|
|
this.numLaps,
|
|
this.numSessions,
|
|
this.localTimestamp,
|
|
this.avgPower,
|
|
this.sdevPower,
|
|
this.minPower,
|
|
this.maxPower,
|
|
this.minHeartRate,
|
|
this.sdevHeartRate,
|
|
this.avgGroundTime,
|
|
this.sdevGroundTime,
|
|
this.avgLegSpringStiffness,
|
|
this.sdevLegSpringStiffness,
|
|
this.avgFormPower,
|
|
this.sdevFormPower,
|
|
this.avgPowerRatio,
|
|
this.sdevPowerRatio,
|
|
this.avgStrideRatio,
|
|
this.sdevStrideRatio,
|
|
this.avgStrydCadence,
|
|
this.sdevStrydCadence,
|
|
this.sdevVerticalOscillation,
|
|
this.cp,
|
|
this.ftp,
|
|
this.nonParsable,
|
|
this.excluded,
|
|
this.manual,
|
|
this.athletesId) {
|
|
_setDefaultValues();
|
|
}
|
|
DbActivity.withId(
|
|
this.id,
|
|
this.state,
|
|
this.path,
|
|
this.stravaId,
|
|
this.name,
|
|
this.movingTime,
|
|
this.type,
|
|
this.distance,
|
|
this.serialNumber,
|
|
this.timeCreated,
|
|
this.sportName,
|
|
this.sport,
|
|
this.subSport,
|
|
this.timeStamp,
|
|
this.startTime,
|
|
this.startPositionLat,
|
|
this.startPositionLong,
|
|
this.event,
|
|
this.eventType,
|
|
this.eventGroup,
|
|
this.totalDistance,
|
|
this.totalStrides,
|
|
this.totalCalories,
|
|
this.avgSpeed,
|
|
this.avgSpeedByMeasurements,
|
|
this.avgSpeedBySpeed,
|
|
this.avgSpeedByDistance,
|
|
this.sdevSpeed,
|
|
this.sdevPace,
|
|
this.minSpeed,
|
|
this.maxSpeed,
|
|
this.totalAscent,
|
|
this.totalDescent,
|
|
this.maxRunningCadence,
|
|
this.trigger,
|
|
this.avgTemperature,
|
|
this.maxTemperature,
|
|
this.avgFractionalCadence,
|
|
this.maxFractionalCadence,
|
|
this.totalFractionalCycles,
|
|
this.avgStanceTimePercent,
|
|
this.avgStanceTime,
|
|
this.avgHeartRate,
|
|
this.maxHeartRate,
|
|
this.avgRunningCadence,
|
|
this.avgVerticalOscillation,
|
|
this.totalElapsedTime,
|
|
this.totalTimerTime,
|
|
this.totalTrainingEffect,
|
|
this.necLat,
|
|
this.necLong,
|
|
this.swcLat,
|
|
this.swcLong,
|
|
this.firstLapIndex,
|
|
this.numLaps,
|
|
this.numSessions,
|
|
this.localTimestamp,
|
|
this.avgPower,
|
|
this.sdevPower,
|
|
this.minPower,
|
|
this.maxPower,
|
|
this.minHeartRate,
|
|
this.sdevHeartRate,
|
|
this.avgGroundTime,
|
|
this.sdevGroundTime,
|
|
this.avgLegSpringStiffness,
|
|
this.sdevLegSpringStiffness,
|
|
this.avgFormPower,
|
|
this.sdevFormPower,
|
|
this.avgPowerRatio,
|
|
this.sdevPowerRatio,
|
|
this.avgStrideRatio,
|
|
this.sdevStrideRatio,
|
|
this.avgStrydCadence,
|
|
this.sdevStrydCadence,
|
|
this.sdevVerticalOscillation,
|
|
this.cp,
|
|
this.ftp,
|
|
this.nonParsable,
|
|
this.excluded,
|
|
this.manual,
|
|
this.athletesId) {
|
|
_setDefaultValues();
|
|
}
|
|
// fromMap v2.0
|
|
DbActivity.fromMap(Map<String, dynamic> o, {bool setDefaultValues = true}) {
|
|
if (setDefaultValues) {
|
|
_setDefaultValues();
|
|
}
|
|
id = int.tryParse(o['id'].toString());
|
|
if (o['state'] != null) {
|
|
state = o['state'].toString();
|
|
}
|
|
if (o['path'] != null) {
|
|
path = o['path'].toString();
|
|
}
|
|
if (o['stravaId'] != null) {
|
|
stravaId = int.tryParse(o['stravaId'].toString());
|
|
}
|
|
if (o['name'] != null) {
|
|
name = o['name'].toString();
|
|
}
|
|
if (o['movingTime'] != null) {
|
|
movingTime = int.tryParse(o['movingTime'].toString());
|
|
}
|
|
if (o['type'] != null) {
|
|
type = o['type'].toString();
|
|
}
|
|
if (o['distance'] != null) {
|
|
distance = int.tryParse(o['distance'].toString());
|
|
}
|
|
if (o['serialNumber'] != null) {
|
|
serialNumber = int.tryParse(o['serialNumber'].toString());
|
|
}
|
|
if (o['timeCreated'] != null) {
|
|
timeCreated = int.tryParse(o['timeCreated'].toString()) != null
|
|
? DateTime.fromMillisecondsSinceEpoch(
|
|
int.tryParse(o['timeCreated'].toString())!)
|
|
: DateTime.tryParse(o['timeCreated'].toString());
|
|
}
|
|
if (o['sportName'] != null) {
|
|
sportName = o['sportName'].toString();
|
|
}
|
|
if (o['sport'] != null) {
|
|
sport = o['sport'].toString();
|
|
}
|
|
if (o['subSport'] != null) {
|
|
subSport = o['subSport'].toString();
|
|
}
|
|
if (o['timeStamp'] != null) {
|
|
timeStamp = int.tryParse(o['timeStamp'].toString()) != null
|
|
? DateTime.fromMillisecondsSinceEpoch(
|
|
int.tryParse(o['timeStamp'].toString())!)
|
|
: DateTime.tryParse(o['timeStamp'].toString());
|
|
}
|
|
if (o['startTime'] != null) {
|
|
startTime = int.tryParse(o['startTime'].toString()) != null
|
|
? DateTime.fromMillisecondsSinceEpoch(
|
|
int.tryParse(o['startTime'].toString())!)
|
|
: DateTime.tryParse(o['startTime'].toString());
|
|
}
|
|
if (o['startPositionLat'] != null) {
|
|
startPositionLat = double.tryParse(o['startPositionLat'].toString());
|
|
}
|
|
if (o['startPositionLong'] != null) {
|
|
startPositionLong = double.tryParse(o['startPositionLong'].toString());
|
|
}
|
|
if (o['event'] != null) {
|
|
event = o['event'].toString();
|
|
}
|
|
if (o['eventType'] != null) {
|
|
eventType = o['eventType'].toString();
|
|
}
|
|
if (o['eventGroup'] != null) {
|
|
eventGroup = int.tryParse(o['eventGroup'].toString());
|
|
}
|
|
if (o['totalDistance'] != null) {
|
|
totalDistance = int.tryParse(o['totalDistance'].toString());
|
|
}
|
|
if (o['totalStrides'] != null) {
|
|
totalStrides = int.tryParse(o['totalStrides'].toString());
|
|
}
|
|
if (o['totalCalories'] != null) {
|
|
totalCalories = int.tryParse(o['totalCalories'].toString());
|
|
}
|
|
if (o['avgSpeed'] != null) {
|
|
avgSpeed = double.tryParse(o['avgSpeed'].toString());
|
|
}
|
|
if (o['avgSpeedByMeasurements'] != null) {
|
|
avgSpeedByMeasurements =
|
|
double.tryParse(o['avgSpeedByMeasurements'].toString());
|
|
}
|
|
if (o['avgSpeedBySpeed'] != null) {
|
|
avgSpeedBySpeed = double.tryParse(o['avgSpeedBySpeed'].toString());
|
|
}
|
|
if (o['avgSpeedByDistance'] != null) {
|
|
avgSpeedByDistance = double.tryParse(o['avgSpeedByDistance'].toString());
|
|
}
|
|
if (o['sdevSpeed'] != null) {
|
|
sdevSpeed = double.tryParse(o['sdevSpeed'].toString());
|
|
}
|
|
if (o['sdevPace'] != null) {
|
|
sdevPace = double.tryParse(o['sdevPace'].toString());
|
|
}
|
|
if (o['minSpeed'] != null) {
|
|
minSpeed = double.tryParse(o['minSpeed'].toString());
|
|
}
|
|
if (o['maxSpeed'] != null) {
|
|
maxSpeed = double.tryParse(o['maxSpeed'].toString());
|
|
}
|
|
if (o['totalAscent'] != null) {
|
|
totalAscent = int.tryParse(o['totalAscent'].toString());
|
|
}
|
|
if (o['totalDescent'] != null) {
|
|
totalDescent = int.tryParse(o['totalDescent'].toString());
|
|
}
|
|
if (o['maxRunningCadence'] != null) {
|
|
maxRunningCadence = int.tryParse(o['maxRunningCadence'].toString());
|
|
}
|
|
if (o['trigger'] != null) {
|
|
trigger = o['trigger'].toString();
|
|
}
|
|
if (o['avgTemperature'] != null) {
|
|
avgTemperature = int.tryParse(o['avgTemperature'].toString());
|
|
}
|
|
if (o['maxTemperature'] != null) {
|
|
maxTemperature = int.tryParse(o['maxTemperature'].toString());
|
|
}
|
|
if (o['avgFractionalCadence'] != null) {
|
|
avgFractionalCadence =
|
|
double.tryParse(o['avgFractionalCadence'].toString());
|
|
}
|
|
if (o['maxFractionalCadence'] != null) {
|
|
maxFractionalCadence =
|
|
double.tryParse(o['maxFractionalCadence'].toString());
|
|
}
|
|
if (o['totalFractionalCycles'] != null) {
|
|
totalFractionalCycles =
|
|
double.tryParse(o['totalFractionalCycles'].toString());
|
|
}
|
|
if (o['avgStanceTimePercent'] != null) {
|
|
avgStanceTimePercent =
|
|
double.tryParse(o['avgStanceTimePercent'].toString());
|
|
}
|
|
if (o['avgStanceTime'] != null) {
|
|
avgStanceTime = double.tryParse(o['avgStanceTime'].toString());
|
|
}
|
|
if (o['avgHeartRate'] != null) {
|
|
avgHeartRate = int.tryParse(o['avgHeartRate'].toString());
|
|
}
|
|
if (o['maxHeartRate'] != null) {
|
|
maxHeartRate = int.tryParse(o['maxHeartRate'].toString());
|
|
}
|
|
if (o['avgRunningCadence'] != null) {
|
|
avgRunningCadence = double.tryParse(o['avgRunningCadence'].toString());
|
|
}
|
|
if (o['avgVerticalOscillation'] != null) {
|
|
avgVerticalOscillation =
|
|
double.tryParse(o['avgVerticalOscillation'].toString());
|
|
}
|
|
if (o['totalElapsedTime'] != null) {
|
|
totalElapsedTime = int.tryParse(o['totalElapsedTime'].toString());
|
|
}
|
|
if (o['totalTimerTime'] != null) {
|
|
totalTimerTime = int.tryParse(o['totalTimerTime'].toString());
|
|
}
|
|
if (o['totalTrainingEffect'] != null) {
|
|
totalTrainingEffect = int.tryParse(o['totalTrainingEffect'].toString());
|
|
}
|
|
if (o['necLat'] != null) {
|
|
necLat = double.tryParse(o['necLat'].toString());
|
|
}
|
|
if (o['necLong'] != null) {
|
|
necLong = double.tryParse(o['necLong'].toString());
|
|
}
|
|
if (o['swcLat'] != null) {
|
|
swcLat = double.tryParse(o['swcLat'].toString());
|
|
}
|
|
if (o['swcLong'] != null) {
|
|
swcLong = double.tryParse(o['swcLong'].toString());
|
|
}
|
|
if (o['firstLapIndex'] != null) {
|
|
firstLapIndex = int.tryParse(o['firstLapIndex'].toString());
|
|
}
|
|
if (o['numLaps'] != null) {
|
|
numLaps = int.tryParse(o['numLaps'].toString());
|
|
}
|
|
if (o['numSessions'] != null) {
|
|
numSessions = int.tryParse(o['numSessions'].toString());
|
|
}
|
|
if (o['localTimestamp'] != null) {
|
|
localTimestamp = int.tryParse(o['localTimestamp'].toString()) != null
|
|
? DateTime.fromMillisecondsSinceEpoch(
|
|
int.tryParse(o['localTimestamp'].toString())!)
|
|
: DateTime.tryParse(o['localTimestamp'].toString());
|
|
}
|
|
if (o['avgPower'] != null) {
|
|
avgPower = double.tryParse(o['avgPower'].toString());
|
|
}
|
|
if (o['sdevPower'] != null) {
|
|
sdevPower = double.tryParse(o['sdevPower'].toString());
|
|
}
|
|
if (o['minPower'] != null) {
|
|
minPower = int.tryParse(o['minPower'].toString());
|
|
}
|
|
if (o['maxPower'] != null) {
|
|
maxPower = int.tryParse(o['maxPower'].toString());
|
|
}
|
|
if (o['minHeartRate'] != null) {
|
|
minHeartRate = int.tryParse(o['minHeartRate'].toString());
|
|
}
|
|
if (o['sdevHeartRate'] != null) {
|
|
sdevHeartRate = double.tryParse(o['sdevHeartRate'].toString());
|
|
}
|
|
if (o['avgGroundTime'] != null) {
|
|
avgGroundTime = double.tryParse(o['avgGroundTime'].toString());
|
|
}
|
|
if (o['sdevGroundTime'] != null) {
|
|
sdevGroundTime = double.tryParse(o['sdevGroundTime'].toString());
|
|
}
|
|
if (o['avgLegSpringStiffness'] != null) {
|
|
avgLegSpringStiffness =
|
|
double.tryParse(o['avgLegSpringStiffness'].toString());
|
|
}
|
|
if (o['sdevLegSpringStiffness'] != null) {
|
|
sdevLegSpringStiffness =
|
|
double.tryParse(o['sdevLegSpringStiffness'].toString());
|
|
}
|
|
if (o['avgFormPower'] != null) {
|
|
avgFormPower = double.tryParse(o['avgFormPower'].toString());
|
|
}
|
|
if (o['sdevFormPower'] != null) {
|
|
sdevFormPower = double.tryParse(o['sdevFormPower'].toString());
|
|
}
|
|
if (o['avgPowerRatio'] != null) {
|
|
avgPowerRatio = double.tryParse(o['avgPowerRatio'].toString());
|
|
}
|
|
if (o['sdevPowerRatio'] != null) {
|
|
sdevPowerRatio = double.tryParse(o['sdevPowerRatio'].toString());
|
|
}
|
|
if (o['avgStrideRatio'] != null) {
|
|
avgStrideRatio = double.tryParse(o['avgStrideRatio'].toString());
|
|
}
|
|
if (o['sdevStrideRatio'] != null) {
|
|
sdevStrideRatio = double.tryParse(o['sdevStrideRatio'].toString());
|
|
}
|
|
if (o['avgStrydCadence'] != null) {
|
|
avgStrydCadence = double.tryParse(o['avgStrydCadence'].toString());
|
|
}
|
|
if (o['sdevStrydCadence'] != null) {
|
|
sdevStrydCadence = double.tryParse(o['sdevStrydCadence'].toString());
|
|
}
|
|
if (o['sdevVerticalOscillation'] != null) {
|
|
sdevVerticalOscillation =
|
|
double.tryParse(o['sdevVerticalOscillation'].toString());
|
|
}
|
|
if (o['cp'] != null) {
|
|
cp = double.tryParse(o['cp'].toString());
|
|
}
|
|
if (o['ftp'] != null) {
|
|
ftp = double.tryParse(o['ftp'].toString());
|
|
}
|
|
if (o['nonParsable'] != null) {
|
|
nonParsable = o['nonParsable'].toString() == '1' ||
|
|
o['nonParsable'].toString() == 'true';
|
|
}
|
|
if (o['excluded'] != null) {
|
|
excluded =
|
|
o['excluded'].toString() == '1' || o['excluded'].toString() == 'true';
|
|
}
|
|
if (o['manual'] != null) {
|
|
manual =
|
|
o['manual'].toString() == '1' || o['manual'].toString() == 'true';
|
|
}
|
|
athletesId = int.tryParse(o['athletesId'].toString());
|
|
|
|
// RELATIONSHIPS FromMAP
|
|
plDbAthlete = o['dbAthlete'] != null
|
|
? DbAthlete.fromMap(o['dbAthlete'] as Map<String, dynamic>)
|
|
: null;
|
|
// END RELATIONSHIPS FromMAP
|
|
}
|
|
  // FIELDS (DbActivity)
  // One Dart field per column of the underlying activities table
  // (presumably — table definition is outside this section; confirm against
  // the TableDbActivity declaration). All fields are nullable because rows
  // may be partially populated from queries or views.

  // Primary key (integer, auto-incremental).
  int? id;
  // Workflow/lifecycle state of the record.
  String? state;
  // Local file path of the source activity file.
  String? path;
  // Strava's identifier for this activity, when synced.
  int? stravaId;
  String? name;
  int? movingTime;
  String? type;
  int? distance;
  int? serialNumber;
  DateTime? timeCreated;
  String? sportName;
  String? sport;
  String? subSport;
  DateTime? timeStamp;
  DateTime? startTime;
  double? startPositionLat;
  double? startPositionLong;
  String? event;
  String? eventType;
  int? eventGroup;
  int? totalDistance;
  int? totalStrides;
  int? totalCalories;

  // Speed/pace aggregates. Units are not visible here — TODO confirm
  // (likely m/s or min/km depending on the importer).
  double? avgSpeed;
  double? avgSpeedByMeasurements;
  double? avgSpeedBySpeed;
  double? avgSpeedByDistance;
  double? sdevSpeed;
  double? sdevPace;
  double? minSpeed;
  double? maxSpeed;

  int? totalAscent;
  int? totalDescent;
  int? maxRunningCadence;
  String? trigger;
  int? avgTemperature;
  int? maxTemperature;
  double? avgFractionalCadence;
  double? maxFractionalCadence;
  double? totalFractionalCycles;
  double? avgStanceTimePercent;
  double? avgStanceTime;
  int? avgHeartRate;
  int? maxHeartRate;
  double? avgRunningCadence;
  double? avgVerticalOscillation;
  int? totalElapsedTime;
  int? totalTimerTime;
  int? totalTrainingEffect;

  // Bounding box of the recorded track (north-east / south-west corners).
  double? necLat;
  double? necLong;
  double? swcLat;
  double? swcLong;

  int? firstLapIndex;
  int? numLaps;
  int? numSessions;
  DateTime? localTimestamp;

  // Power / running-dynamics aggregates (avg and standard deviation pairs).
  double? avgPower;
  double? sdevPower;
  int? minPower;
  int? maxPower;
  int? minHeartRate;
  double? sdevHeartRate;
  double? avgGroundTime;
  double? sdevGroundTime;
  double? avgLegSpringStiffness;
  double? sdevLegSpringStiffness;
  double? avgFormPower;
  double? sdevFormPower;
  double? avgPowerRatio;
  double? sdevPowerRatio;
  double? avgStrideRatio;
  double? sdevStrideRatio;
  double? avgStrydCadence;
  double? sdevStrydCadence;
  double? sdevVerticalOscillation;

  // Critical power and functional threshold power estimates.
  double? cp;
  double? ftp;

  // Stored as 0/1 in SQLite; mapped to bool here.
  bool? nonParsable;
  bool? excluded;
  bool? manual;

  // Foreign key to the parent athletes row (see plDbAthlete).
  int? athletesId;

  // end FIELDS (DbActivity)
|
|
|
|
// RELATIONSHIPS (DbActivity)
|
|
/// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbAthlete', 'plField2'..]) or so on..
|
|
DbAthlete? plDbAthlete;
|
|
|
|
/// get DbAthlete By AthletesId
|
|
Future<DbAthlete?> getDbAthlete(
|
|
{bool loadParents = false, List<String>? loadedFields}) async {
|
|
final _obj = await DbAthlete().getById(athletesId,
|
|
loadParents: loadParents, loadedFields: loadedFields);
|
|
return _obj;
|
|
}
|
|
// END RELATIONSHIPS (DbActivity)
|
|
|
|
// COLLECTIONS & VIRTUALS (DbActivity)
|
|
/// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbEvents', 'plField2'..]) or so on..
|
|
List<DbEvent>? plDbEvents;
|
|
|
|
/// get DbEvent(s) filtered by id=activitiesId
|
|
DbEventFilterBuilder? getDbEvents(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
if (id == null) {
|
|
return null;
|
|
}
|
|
return DbEvent()
|
|
.select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
|
|
.activitiesId
|
|
.equals(id)
|
|
.and;
|
|
}
|
|
|
|
/// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbLaps', 'plField2'..]) or so on..
|
|
List<DbLap>? plDbLaps;
|
|
|
|
/// get DbLap(s) filtered by id=activitiesId
|
|
DbLapFilterBuilder? getDbLaps(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
if (id == null) {
|
|
return null;
|
|
}
|
|
return DbLap()
|
|
.select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
|
|
.activitiesId
|
|
.equals(id)
|
|
.and;
|
|
}
|
|
|
|
/// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbIntervals', 'plField2'..]) or so on..
|
|
List<DbInterval>? plDbIntervals;
|
|
|
|
/// get DbInterval(s) filtered by id=activitiesId
|
|
DbIntervalFilterBuilder? getDbIntervals(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
if (id == null) {
|
|
return null;
|
|
}
|
|
return DbInterval()
|
|
.select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
|
|
.activitiesId
|
|
.equals(id)
|
|
.and;
|
|
}
|
|
|
|
/// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbActivityTaggings', 'plField2'..]) or so on..
|
|
List<DbActivityTagging>? plDbActivityTaggings;
|
|
|
|
/// get DbActivityTagging(s) filtered by id=activitiesId
|
|
DbActivityTaggingFilterBuilder? getDbActivityTaggings(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
if (id == null) {
|
|
return null;
|
|
}
|
|
return DbActivityTagging()
|
|
.select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
|
|
.activitiesId
|
|
.equals(id)
|
|
.and;
|
|
}
|
|
|
|
// END COLLECTIONS & VIRTUALS (DbActivity)
|
|
|
|
static const bool _softDeleteActivated = false;
|
|
DbActivityManager? __mnDbActivity;
|
|
|
|
DbActivityManager get _mnDbActivity {
|
|
return __mnDbActivity = __mnDbActivity ?? DbActivityManager();
|
|
}
|
|
|
|
// METHODS
|
|
@override
|
|
Map<String, dynamic> toMap(
|
|
{bool forQuery = false, bool forJson = false, bool forView = false}) {
|
|
final map = <String, dynamic>{};
|
|
map['id'] = id;
|
|
if (state != null || !forView) {
|
|
map['state'] = state;
|
|
}
|
|
if (path != null || !forView) {
|
|
map['path'] = path;
|
|
}
|
|
if (stravaId != null || !forView) {
|
|
map['stravaId'] = stravaId;
|
|
}
|
|
if (name != null || !forView) {
|
|
map['name'] = name;
|
|
}
|
|
if (movingTime != null || !forView) {
|
|
map['movingTime'] = movingTime;
|
|
}
|
|
if (type != null || !forView) {
|
|
map['type'] = type;
|
|
}
|
|
if (distance != null || !forView) {
|
|
map['distance'] = distance;
|
|
}
|
|
if (serialNumber != null || !forView) {
|
|
map['serialNumber'] = serialNumber;
|
|
}
|
|
if (timeCreated != null) {
|
|
map['timeCreated'] = forJson
|
|
? timeCreated!.toString()
|
|
: forQuery
|
|
? timeCreated!.millisecondsSinceEpoch
|
|
: timeCreated;
|
|
} else if (timeCreated != null || !forView) {
|
|
map['timeCreated'] = null;
|
|
}
|
|
if (sportName != null || !forView) {
|
|
map['sportName'] = sportName;
|
|
}
|
|
if (sport != null || !forView) {
|
|
map['sport'] = sport;
|
|
}
|
|
if (subSport != null || !forView) {
|
|
map['subSport'] = subSport;
|
|
}
|
|
if (timeStamp != null) {
|
|
map['timeStamp'] = forJson
|
|
? timeStamp!.toString()
|
|
: forQuery
|
|
? timeStamp!.millisecondsSinceEpoch
|
|
: timeStamp;
|
|
} else if (timeStamp != null || !forView) {
|
|
map['timeStamp'] = null;
|
|
}
|
|
if (startTime != null) {
|
|
map['startTime'] = forJson
|
|
? startTime!.toString()
|
|
: forQuery
|
|
? startTime!.millisecondsSinceEpoch
|
|
: startTime;
|
|
} else if (startTime != null || !forView) {
|
|
map['startTime'] = null;
|
|
}
|
|
if (startPositionLat != null || !forView) {
|
|
map['startPositionLat'] = startPositionLat;
|
|
}
|
|
if (startPositionLong != null || !forView) {
|
|
map['startPositionLong'] = startPositionLong;
|
|
}
|
|
if (event != null || !forView) {
|
|
map['event'] = event;
|
|
}
|
|
if (eventType != null || !forView) {
|
|
map['eventType'] = eventType;
|
|
}
|
|
if (eventGroup != null || !forView) {
|
|
map['eventGroup'] = eventGroup;
|
|
}
|
|
if (totalDistance != null || !forView) {
|
|
map['totalDistance'] = totalDistance;
|
|
}
|
|
if (totalStrides != null || !forView) {
|
|
map['totalStrides'] = totalStrides;
|
|
}
|
|
if (totalCalories != null || !forView) {
|
|
map['totalCalories'] = totalCalories;
|
|
}
|
|
if (avgSpeed != null || !forView) {
|
|
map['avgSpeed'] = avgSpeed;
|
|
}
|
|
if (avgSpeedByMeasurements != null || !forView) {
|
|
map['avgSpeedByMeasurements'] = avgSpeedByMeasurements;
|
|
}
|
|
if (avgSpeedBySpeed != null || !forView) {
|
|
map['avgSpeedBySpeed'] = avgSpeedBySpeed;
|
|
}
|
|
if (avgSpeedByDistance != null || !forView) {
|
|
map['avgSpeedByDistance'] = avgSpeedByDistance;
|
|
}
|
|
if (sdevSpeed != null || !forView) {
|
|
map['sdevSpeed'] = sdevSpeed;
|
|
}
|
|
if (sdevPace != null || !forView) {
|
|
map['sdevPace'] = sdevPace;
|
|
}
|
|
if (minSpeed != null || !forView) {
|
|
map['minSpeed'] = minSpeed;
|
|
}
|
|
if (maxSpeed != null || !forView) {
|
|
map['maxSpeed'] = maxSpeed;
|
|
}
|
|
if (totalAscent != null || !forView) {
|
|
map['totalAscent'] = totalAscent;
|
|
}
|
|
if (totalDescent != null || !forView) {
|
|
map['totalDescent'] = totalDescent;
|
|
}
|
|
if (maxRunningCadence != null || !forView) {
|
|
map['maxRunningCadence'] = maxRunningCadence;
|
|
}
|
|
if (trigger != null || !forView) {
|
|
map['trigger'] = trigger;
|
|
}
|
|
if (avgTemperature != null || !forView) {
|
|
map['avgTemperature'] = avgTemperature;
|
|
}
|
|
if (maxTemperature != null || !forView) {
|
|
map['maxTemperature'] = maxTemperature;
|
|
}
|
|
if (avgFractionalCadence != null || !forView) {
|
|
map['avgFractionalCadence'] = avgFractionalCadence;
|
|
}
|
|
if (maxFractionalCadence != null || !forView) {
|
|
map['maxFractionalCadence'] = maxFractionalCadence;
|
|
}
|
|
if (totalFractionalCycles != null || !forView) {
|
|
map['totalFractionalCycles'] = totalFractionalCycles;
|
|
}
|
|
if (avgStanceTimePercent != null || !forView) {
|
|
map['avgStanceTimePercent'] = avgStanceTimePercent;
|
|
}
|
|
if (avgStanceTime != null || !forView) {
|
|
map['avgStanceTime'] = avgStanceTime;
|
|
}
|
|
if (avgHeartRate != null || !forView) {
|
|
map['avgHeartRate'] = avgHeartRate;
|
|
}
|
|
if (maxHeartRate != null || !forView) {
|
|
map['maxHeartRate'] = maxHeartRate;
|
|
}
|
|
if (avgRunningCadence != null || !forView) {
|
|
map['avgRunningCadence'] = avgRunningCadence;
|
|
}
|
|
if (avgVerticalOscillation != null || !forView) {
|
|
map['avgVerticalOscillation'] = avgVerticalOscillation;
|
|
}
|
|
if (totalElapsedTime != null || !forView) {
|
|
map['totalElapsedTime'] = totalElapsedTime;
|
|
}
|
|
if (totalTimerTime != null || !forView) {
|
|
map['totalTimerTime'] = totalTimerTime;
|
|
}
|
|
if (totalTrainingEffect != null || !forView) {
|
|
map['totalTrainingEffect'] = totalTrainingEffect;
|
|
}
|
|
if (necLat != null || !forView) {
|
|
map['necLat'] = necLat;
|
|
}
|
|
if (necLong != null || !forView) {
|
|
map['necLong'] = necLong;
|
|
}
|
|
if (swcLat != null || !forView) {
|
|
map['swcLat'] = swcLat;
|
|
}
|
|
if (swcLong != null || !forView) {
|
|
map['swcLong'] = swcLong;
|
|
}
|
|
if (firstLapIndex != null || !forView) {
|
|
map['firstLapIndex'] = firstLapIndex;
|
|
}
|
|
if (numLaps != null || !forView) {
|
|
map['numLaps'] = numLaps;
|
|
}
|
|
if (numSessions != null || !forView) {
|
|
map['numSessions'] = numSessions;
|
|
}
|
|
if (localTimestamp != null) {
|
|
map['localTimestamp'] = forJson
|
|
? localTimestamp!.toString()
|
|
: forQuery
|
|
? localTimestamp!.millisecondsSinceEpoch
|
|
: localTimestamp;
|
|
} else if (localTimestamp != null || !forView) {
|
|
map['localTimestamp'] = null;
|
|
}
|
|
if (avgPower != null || !forView) {
|
|
map['avgPower'] = avgPower;
|
|
}
|
|
if (sdevPower != null || !forView) {
|
|
map['sdevPower'] = sdevPower;
|
|
}
|
|
if (minPower != null || !forView) {
|
|
map['minPower'] = minPower;
|
|
}
|
|
if (maxPower != null || !forView) {
|
|
map['maxPower'] = maxPower;
|
|
}
|
|
if (minHeartRate != null || !forView) {
|
|
map['minHeartRate'] = minHeartRate;
|
|
}
|
|
if (sdevHeartRate != null || !forView) {
|
|
map['sdevHeartRate'] = sdevHeartRate;
|
|
}
|
|
if (avgGroundTime != null || !forView) {
|
|
map['avgGroundTime'] = avgGroundTime;
|
|
}
|
|
if (sdevGroundTime != null || !forView) {
|
|
map['sdevGroundTime'] = sdevGroundTime;
|
|
}
|
|
if (avgLegSpringStiffness != null || !forView) {
|
|
map['avgLegSpringStiffness'] = avgLegSpringStiffness;
|
|
}
|
|
if (sdevLegSpringStiffness != null || !forView) {
|
|
map['sdevLegSpringStiffness'] = sdevLegSpringStiffness;
|
|
}
|
|
if (avgFormPower != null || !forView) {
|
|
map['avgFormPower'] = avgFormPower;
|
|
}
|
|
if (sdevFormPower != null || !forView) {
|
|
map['sdevFormPower'] = sdevFormPower;
|
|
}
|
|
if (avgPowerRatio != null || !forView) {
|
|
map['avgPowerRatio'] = avgPowerRatio;
|
|
}
|
|
if (sdevPowerRatio != null || !forView) {
|
|
map['sdevPowerRatio'] = sdevPowerRatio;
|
|
}
|
|
if (avgStrideRatio != null || !forView) {
|
|
map['avgStrideRatio'] = avgStrideRatio;
|
|
}
|
|
if (sdevStrideRatio != null || !forView) {
|
|
map['sdevStrideRatio'] = sdevStrideRatio;
|
|
}
|
|
if (avgStrydCadence != null || !forView) {
|
|
map['avgStrydCadence'] = avgStrydCadence;
|
|
}
|
|
if (sdevStrydCadence != null || !forView) {
|
|
map['sdevStrydCadence'] = sdevStrydCadence;
|
|
}
|
|
if (sdevVerticalOscillation != null || !forView) {
|
|
map['sdevVerticalOscillation'] = sdevVerticalOscillation;
|
|
}
|
|
if (cp != null || !forView) {
|
|
map['cp'] = cp;
|
|
}
|
|
if (ftp != null || !forView) {
|
|
map['ftp'] = ftp;
|
|
}
|
|
if (nonParsable != null) {
|
|
map['nonParsable'] = forQuery ? (nonParsable! ? 1 : 0) : nonParsable;
|
|
} else if (nonParsable != null || !forView) {
|
|
map['nonParsable'] = null;
|
|
}
|
|
if (excluded != null) {
|
|
map['excluded'] = forQuery ? (excluded! ? 1 : 0) : excluded;
|
|
} else if (excluded != null || !forView) {
|
|
map['excluded'] = null;
|
|
}
|
|
if (manual != null) {
|
|
map['manual'] = forQuery ? (manual! ? 1 : 0) : manual;
|
|
} else if (manual != null || !forView) {
|
|
map['manual'] = null;
|
|
}
|
|
if (athletesId != null) {
|
|
map['athletesId'] = forView
|
|
? plDbAthlete == null
|
|
? athletesId
|
|
: plDbAthlete!.state
|
|
: athletesId;
|
|
} else if (athletesId != null || !forView) {
|
|
map['athletesId'] = null;
|
|
}
|
|
|
|
return map;
|
|
}
|
|
|
|
@override
|
|
Future<Map<String, dynamic>> toMapWithChildren(
|
|
[bool forQuery = false,
|
|
bool forJson = false,
|
|
bool forView = false]) async {
|
|
final map = <String, dynamic>{};
|
|
map['id'] = id;
|
|
if (state != null || !forView) {
|
|
map['state'] = state;
|
|
}
|
|
if (path != null || !forView) {
|
|
map['path'] = path;
|
|
}
|
|
if (stravaId != null || !forView) {
|
|
map['stravaId'] = stravaId;
|
|
}
|
|
if (name != null || !forView) {
|
|
map['name'] = name;
|
|
}
|
|
if (movingTime != null || !forView) {
|
|
map['movingTime'] = movingTime;
|
|
}
|
|
if (type != null || !forView) {
|
|
map['type'] = type;
|
|
}
|
|
if (distance != null || !forView) {
|
|
map['distance'] = distance;
|
|
}
|
|
if (serialNumber != null || !forView) {
|
|
map['serialNumber'] = serialNumber;
|
|
}
|
|
if (timeCreated != null) {
|
|
map['timeCreated'] = forJson
|
|
? timeCreated!.toString()
|
|
: forQuery
|
|
? timeCreated!.millisecondsSinceEpoch
|
|
: timeCreated;
|
|
} else if (timeCreated != null || !forView) {
|
|
map['timeCreated'] = null;
|
|
}
|
|
if (sportName != null || !forView) {
|
|
map['sportName'] = sportName;
|
|
}
|
|
if (sport != null || !forView) {
|
|
map['sport'] = sport;
|
|
}
|
|
if (subSport != null || !forView) {
|
|
map['subSport'] = subSport;
|
|
}
|
|
if (timeStamp != null) {
|
|
map['timeStamp'] = forJson
|
|
? timeStamp!.toString()
|
|
: forQuery
|
|
? timeStamp!.millisecondsSinceEpoch
|
|
: timeStamp;
|
|
} else if (timeStamp != null || !forView) {
|
|
map['timeStamp'] = null;
|
|
}
|
|
if (startTime != null) {
|
|
map['startTime'] = forJson
|
|
? startTime!.toString()
|
|
: forQuery
|
|
? startTime!.millisecondsSinceEpoch
|
|
: startTime;
|
|
} else if (startTime != null || !forView) {
|
|
map['startTime'] = null;
|
|
}
|
|
if (startPositionLat != null || !forView) {
|
|
map['startPositionLat'] = startPositionLat;
|
|
}
|
|
if (startPositionLong != null || !forView) {
|
|
map['startPositionLong'] = startPositionLong;
|
|
}
|
|
if (event != null || !forView) {
|
|
map['event'] = event;
|
|
}
|
|
if (eventType != null || !forView) {
|
|
map['eventType'] = eventType;
|
|
}
|
|
if (eventGroup != null || !forView) {
|
|
map['eventGroup'] = eventGroup;
|
|
}
|
|
if (totalDistance != null || !forView) {
|
|
map['totalDistance'] = totalDistance;
|
|
}
|
|
if (totalStrides != null || !forView) {
|
|
map['totalStrides'] = totalStrides;
|
|
}
|
|
if (totalCalories != null || !forView) {
|
|
map['totalCalories'] = totalCalories;
|
|
}
|
|
if (avgSpeed != null || !forView) {
|
|
map['avgSpeed'] = avgSpeed;
|
|
}
|
|
if (avgSpeedByMeasurements != null || !forView) {
|
|
map['avgSpeedByMeasurements'] = avgSpeedByMeasurements;
|
|
}
|
|
if (avgSpeedBySpeed != null || !forView) {
|
|
map['avgSpeedBySpeed'] = avgSpeedBySpeed;
|
|
}
|
|
if (avgSpeedByDistance != null || !forView) {
|
|
map['avgSpeedByDistance'] = avgSpeedByDistance;
|
|
}
|
|
if (sdevSpeed != null || !forView) {
|
|
map['sdevSpeed'] = sdevSpeed;
|
|
}
|
|
if (sdevPace != null || !forView) {
|
|
map['sdevPace'] = sdevPace;
|
|
}
|
|
if (minSpeed != null || !forView) {
|
|
map['minSpeed'] = minSpeed;
|
|
}
|
|
if (maxSpeed != null || !forView) {
|
|
map['maxSpeed'] = maxSpeed;
|
|
}
|
|
if (totalAscent != null || !forView) {
|
|
map['totalAscent'] = totalAscent;
|
|
}
|
|
if (totalDescent != null || !forView) {
|
|
map['totalDescent'] = totalDescent;
|
|
}
|
|
if (maxRunningCadence != null || !forView) {
|
|
map['maxRunningCadence'] = maxRunningCadence;
|
|
}
|
|
if (trigger != null || !forView) {
|
|
map['trigger'] = trigger;
|
|
}
|
|
if (avgTemperature != null || !forView) {
|
|
map['avgTemperature'] = avgTemperature;
|
|
}
|
|
if (maxTemperature != null || !forView) {
|
|
map['maxTemperature'] = maxTemperature;
|
|
}
|
|
if (avgFractionalCadence != null || !forView) {
|
|
map['avgFractionalCadence'] = avgFractionalCadence;
|
|
}
|
|
if (maxFractionalCadence != null || !forView) {
|
|
map['maxFractionalCadence'] = maxFractionalCadence;
|
|
}
|
|
if (totalFractionalCycles != null || !forView) {
|
|
map['totalFractionalCycles'] = totalFractionalCycles;
|
|
}
|
|
if (avgStanceTimePercent != null || !forView) {
|
|
map['avgStanceTimePercent'] = avgStanceTimePercent;
|
|
}
|
|
if (avgStanceTime != null || !forView) {
|
|
map['avgStanceTime'] = avgStanceTime;
|
|
}
|
|
if (avgHeartRate != null || !forView) {
|
|
map['avgHeartRate'] = avgHeartRate;
|
|
}
|
|
if (maxHeartRate != null || !forView) {
|
|
map['maxHeartRate'] = maxHeartRate;
|
|
}
|
|
if (avgRunningCadence != null || !forView) {
|
|
map['avgRunningCadence'] = avgRunningCadence;
|
|
}
|
|
if (avgVerticalOscillation != null || !forView) {
|
|
map['avgVerticalOscillation'] = avgVerticalOscillation;
|
|
}
|
|
if (totalElapsedTime != null || !forView) {
|
|
map['totalElapsedTime'] = totalElapsedTime;
|
|
}
|
|
if (totalTimerTime != null || !forView) {
|
|
map['totalTimerTime'] = totalTimerTime;
|
|
}
|
|
if (totalTrainingEffect != null || !forView) {
|
|
map['totalTrainingEffect'] = totalTrainingEffect;
|
|
}
|
|
if (necLat != null || !forView) {
|
|
map['necLat'] = necLat;
|
|
}
|
|
if (necLong != null || !forView) {
|
|
map['necLong'] = necLong;
|
|
}
|
|
if (swcLat != null || !forView) {
|
|
map['swcLat'] = swcLat;
|
|
}
|
|
if (swcLong != null || !forView) {
|
|
map['swcLong'] = swcLong;
|
|
}
|
|
if (firstLapIndex != null || !forView) {
|
|
map['firstLapIndex'] = firstLapIndex;
|
|
}
|
|
if (numLaps != null || !forView) {
|
|
map['numLaps'] = numLaps;
|
|
}
|
|
if (numSessions != null || !forView) {
|
|
map['numSessions'] = numSessions;
|
|
}
|
|
if (localTimestamp != null) {
|
|
map['localTimestamp'] = forJson
|
|
? localTimestamp!.toString()
|
|
: forQuery
|
|
? localTimestamp!.millisecondsSinceEpoch
|
|
: localTimestamp;
|
|
} else if (localTimestamp != null || !forView) {
|
|
map['localTimestamp'] = null;
|
|
}
|
|
if (avgPower != null || !forView) {
|
|
map['avgPower'] = avgPower;
|
|
}
|
|
if (sdevPower != null || !forView) {
|
|
map['sdevPower'] = sdevPower;
|
|
}
|
|
if (minPower != null || !forView) {
|
|
map['minPower'] = minPower;
|
|
}
|
|
if (maxPower != null || !forView) {
|
|
map['maxPower'] = maxPower;
|
|
}
|
|
if (minHeartRate != null || !forView) {
|
|
map['minHeartRate'] = minHeartRate;
|
|
}
|
|
if (sdevHeartRate != null || !forView) {
|
|
map['sdevHeartRate'] = sdevHeartRate;
|
|
}
|
|
if (avgGroundTime != null || !forView) {
|
|
map['avgGroundTime'] = avgGroundTime;
|
|
}
|
|
if (sdevGroundTime != null || !forView) {
|
|
map['sdevGroundTime'] = sdevGroundTime;
|
|
}
|
|
if (avgLegSpringStiffness != null || !forView) {
|
|
map['avgLegSpringStiffness'] = avgLegSpringStiffness;
|
|
}
|
|
if (sdevLegSpringStiffness != null || !forView) {
|
|
map['sdevLegSpringStiffness'] = sdevLegSpringStiffness;
|
|
}
|
|
if (avgFormPower != null || !forView) {
|
|
map['avgFormPower'] = avgFormPower;
|
|
}
|
|
if (sdevFormPower != null || !forView) {
|
|
map['sdevFormPower'] = sdevFormPower;
|
|
}
|
|
if (avgPowerRatio != null || !forView) {
|
|
map['avgPowerRatio'] = avgPowerRatio;
|
|
}
|
|
if (sdevPowerRatio != null || !forView) {
|
|
map['sdevPowerRatio'] = sdevPowerRatio;
|
|
}
|
|
if (avgStrideRatio != null || !forView) {
|
|
map['avgStrideRatio'] = avgStrideRatio;
|
|
}
|
|
if (sdevStrideRatio != null || !forView) {
|
|
map['sdevStrideRatio'] = sdevStrideRatio;
|
|
}
|
|
if (avgStrydCadence != null || !forView) {
|
|
map['avgStrydCadence'] = avgStrydCadence;
|
|
}
|
|
if (sdevStrydCadence != null || !forView) {
|
|
map['sdevStrydCadence'] = sdevStrydCadence;
|
|
}
|
|
if (sdevVerticalOscillation != null || !forView) {
|
|
map['sdevVerticalOscillation'] = sdevVerticalOscillation;
|
|
}
|
|
if (cp != null || !forView) {
|
|
map['cp'] = cp;
|
|
}
|
|
if (ftp != null || !forView) {
|
|
map['ftp'] = ftp;
|
|
}
|
|
if (nonParsable != null) {
|
|
map['nonParsable'] = forQuery ? (nonParsable! ? 1 : 0) : nonParsable;
|
|
} else if (nonParsable != null || !forView) {
|
|
map['nonParsable'] = null;
|
|
}
|
|
if (excluded != null) {
|
|
map['excluded'] = forQuery ? (excluded! ? 1 : 0) : excluded;
|
|
} else if (excluded != null || !forView) {
|
|
map['excluded'] = null;
|
|
}
|
|
if (manual != null) {
|
|
map['manual'] = forQuery ? (manual! ? 1 : 0) : manual;
|
|
} else if (manual != null || !forView) {
|
|
map['manual'] = null;
|
|
}
|
|
if (athletesId != null) {
|
|
map['athletesId'] = forView
|
|
? plDbAthlete == null
|
|
? athletesId
|
|
: plDbAthlete!.state
|
|
: athletesId;
|
|
} else if (athletesId != null || !forView) {
|
|
map['athletesId'] = null;
|
|
}
|
|
|
|
// COLLECTIONS (DbActivity)
|
|
if (!forQuery) {
|
|
map['DbEvents'] = await getDbEvents()!.toMapList();
|
|
}
|
|
if (!forQuery) {
|
|
map['DbLaps'] = await getDbLaps()!.toMapList();
|
|
}
|
|
if (!forQuery) {
|
|
map['DbIntervals'] = await getDbIntervals()!.toMapList();
|
|
}
|
|
if (!forQuery) {
|
|
map['DbActivityTaggings'] = await getDbActivityTaggings()!.toMapList();
|
|
}
|
|
// END COLLECTIONS (DbActivity)
|
|
|
|
return map;
|
|
}
|
|
|
|
/// This method returns Json String [DbActivity]
|
|
@override
|
|
String toJson() {
|
|
return json.encode(toMap(forJson: true));
|
|
}
|
|
|
|
/// This method returns Json String [DbActivity]
|
|
@override
|
|
Future<String> toJsonWithChilds() async {
|
|
return json.encode(await toMapWithChildren(false, true));
|
|
}
|
|
|
|
@override
|
|
List<dynamic> toArgs() {
|
|
return [
|
|
state,
|
|
path,
|
|
stravaId,
|
|
name,
|
|
movingTime,
|
|
type,
|
|
distance,
|
|
serialNumber,
|
|
timeCreated != null ? timeCreated!.millisecondsSinceEpoch : null,
|
|
sportName,
|
|
sport,
|
|
subSport,
|
|
timeStamp != null ? timeStamp!.millisecondsSinceEpoch : null,
|
|
startTime != null ? startTime!.millisecondsSinceEpoch : null,
|
|
startPositionLat,
|
|
startPositionLong,
|
|
event,
|
|
eventType,
|
|
eventGroup,
|
|
totalDistance,
|
|
totalStrides,
|
|
totalCalories,
|
|
avgSpeed,
|
|
avgSpeedByMeasurements,
|
|
avgSpeedBySpeed,
|
|
avgSpeedByDistance,
|
|
sdevSpeed,
|
|
sdevPace,
|
|
minSpeed,
|
|
maxSpeed,
|
|
totalAscent,
|
|
totalDescent,
|
|
maxRunningCadence,
|
|
trigger,
|
|
avgTemperature,
|
|
maxTemperature,
|
|
avgFractionalCadence,
|
|
maxFractionalCadence,
|
|
totalFractionalCycles,
|
|
avgStanceTimePercent,
|
|
avgStanceTime,
|
|
avgHeartRate,
|
|
maxHeartRate,
|
|
avgRunningCadence,
|
|
avgVerticalOscillation,
|
|
totalElapsedTime,
|
|
totalTimerTime,
|
|
totalTrainingEffect,
|
|
necLat,
|
|
necLong,
|
|
swcLat,
|
|
swcLong,
|
|
firstLapIndex,
|
|
numLaps,
|
|
numSessions,
|
|
localTimestamp != null ? localTimestamp!.millisecondsSinceEpoch : null,
|
|
avgPower,
|
|
sdevPower,
|
|
minPower,
|
|
maxPower,
|
|
minHeartRate,
|
|
sdevHeartRate,
|
|
avgGroundTime,
|
|
sdevGroundTime,
|
|
avgLegSpringStiffness,
|
|
sdevLegSpringStiffness,
|
|
avgFormPower,
|
|
sdevFormPower,
|
|
avgPowerRatio,
|
|
sdevPowerRatio,
|
|
avgStrideRatio,
|
|
sdevStrideRatio,
|
|
avgStrydCadence,
|
|
sdevStrydCadence,
|
|
sdevVerticalOscillation,
|
|
cp,
|
|
ftp,
|
|
nonParsable,
|
|
excluded,
|
|
manual,
|
|
athletesId
|
|
];
|
|
}
|
|
|
|
@override
|
|
List<dynamic> toArgsWithIds() {
|
|
return [
|
|
id,
|
|
state,
|
|
path,
|
|
stravaId,
|
|
name,
|
|
movingTime,
|
|
type,
|
|
distance,
|
|
serialNumber,
|
|
timeCreated != null ? timeCreated!.millisecondsSinceEpoch : null,
|
|
sportName,
|
|
sport,
|
|
subSport,
|
|
timeStamp != null ? timeStamp!.millisecondsSinceEpoch : null,
|
|
startTime != null ? startTime!.millisecondsSinceEpoch : null,
|
|
startPositionLat,
|
|
startPositionLong,
|
|
event,
|
|
eventType,
|
|
eventGroup,
|
|
totalDistance,
|
|
totalStrides,
|
|
totalCalories,
|
|
avgSpeed,
|
|
avgSpeedByMeasurements,
|
|
avgSpeedBySpeed,
|
|
avgSpeedByDistance,
|
|
sdevSpeed,
|
|
sdevPace,
|
|
minSpeed,
|
|
maxSpeed,
|
|
totalAscent,
|
|
totalDescent,
|
|
maxRunningCadence,
|
|
trigger,
|
|
avgTemperature,
|
|
maxTemperature,
|
|
avgFractionalCadence,
|
|
maxFractionalCadence,
|
|
totalFractionalCycles,
|
|
avgStanceTimePercent,
|
|
avgStanceTime,
|
|
avgHeartRate,
|
|
maxHeartRate,
|
|
avgRunningCadence,
|
|
avgVerticalOscillation,
|
|
totalElapsedTime,
|
|
totalTimerTime,
|
|
totalTrainingEffect,
|
|
necLat,
|
|
necLong,
|
|
swcLat,
|
|
swcLong,
|
|
firstLapIndex,
|
|
numLaps,
|
|
numSessions,
|
|
localTimestamp != null ? localTimestamp!.millisecondsSinceEpoch : null,
|
|
avgPower,
|
|
sdevPower,
|
|
minPower,
|
|
maxPower,
|
|
minHeartRate,
|
|
sdevHeartRate,
|
|
avgGroundTime,
|
|
sdevGroundTime,
|
|
avgLegSpringStiffness,
|
|
sdevLegSpringStiffness,
|
|
avgFormPower,
|
|
sdevFormPower,
|
|
avgPowerRatio,
|
|
sdevPowerRatio,
|
|
avgStrideRatio,
|
|
sdevStrideRatio,
|
|
avgStrydCadence,
|
|
sdevStrydCadence,
|
|
sdevVerticalOscillation,
|
|
cp,
|
|
ftp,
|
|
nonParsable,
|
|
excluded,
|
|
manual,
|
|
athletesId
|
|
];
|
|
}
|
|
|
|
static Future<List<DbActivity>?> fromWebUrl(Uri uri,
|
|
{Map<String, String>? headers}) async {
|
|
try {
|
|
final response = await http.get(uri, headers: headers);
|
|
return await fromJson(response.body);
|
|
} catch (e) {
|
|
debugPrint(
|
|
'SQFENTITY ERROR DbActivity.fromWebUrl: ErrorMessage: ${e.toString()}');
|
|
return null;
|
|
}
|
|
}
|
|
|
|
Future<http.Response> postUrl(Uri uri, {Map<String, String>? headers}) {
|
|
return http.post(uri, headers: headers, body: toJson());
|
|
}
|
|
|
|
static Future<List<DbActivity>> fromJson(String jsonBody) async {
|
|
final Iterable list = await json.decode(jsonBody) as Iterable;
|
|
var objList = <DbActivity>[];
|
|
try {
|
|
objList = list
|
|
.map((dbactivity) =>
|
|
DbActivity.fromMap(dbactivity as Map<String, dynamic>))
|
|
.toList();
|
|
} catch (e) {
|
|
debugPrint(
|
|
'SQFENTITY ERROR DbActivity.fromJson: ErrorMessage: ${e.toString()}');
|
|
}
|
|
return objList;
|
|
}
|
|
|
|
static Future<List<DbActivity>> fromMapList(List<dynamic> data,
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields,
|
|
bool setDefaultValues = true}) async {
|
|
final List<DbActivity> objList = <DbActivity>[];
|
|
loadedFields = loadedFields ?? [];
|
|
for (final map in data) {
|
|
final obj = DbActivity.fromMap(map as Map<String, dynamic>,
|
|
setDefaultValues: setDefaultValues);
|
|
// final List<String> _loadedFields = List<String>.from(loadedFields);
|
|
|
|
// RELATIONSHIPS PRELOAD CHILD
|
|
if (preload) {
|
|
loadedFields = loadedFields ?? [];
|
|
if (/*!_loadedfields!.contains('activities.plDbEvents') && */ (preloadFields ==
|
|
null ||
|
|
preloadFields.contains('plDbEvents'))) {
|
|
/*_loadedfields!.add('activities.plDbEvents'); */ obj.plDbEvents =
|
|
obj.plDbEvents ??
|
|
await obj.getDbEvents()!.toList(
|
|
preload: preload,
|
|
preloadFields: preloadFields,
|
|
loadParents: false /*, loadedFields:_loadedFields*/);
|
|
}
|
|
if (/*!_loadedfields!.contains('activities.plDbLaps') && */ (preloadFields ==
|
|
null ||
|
|
preloadFields.contains('plDbLaps'))) {
|
|
/*_loadedfields!.add('activities.plDbLaps'); */ obj.plDbLaps =
|
|
obj.plDbLaps ??
|
|
await obj.getDbLaps()!.toList(
|
|
preload: preload,
|
|
preloadFields: preloadFields,
|
|
loadParents: false /*, loadedFields:_loadedFields*/);
|
|
}
|
|
if (/*!_loadedfields!.contains('activities.plDbIntervals') && */ (preloadFields ==
|
|
null ||
|
|
preloadFields.contains('plDbIntervals'))) {
|
|
/*_loadedfields!.add('activities.plDbIntervals'); */ obj
|
|
.plDbIntervals =
|
|
obj.plDbIntervals ??
|
|
await obj.getDbIntervals()!.toList(
|
|
preload: preload,
|
|
preloadFields: preloadFields,
|
|
loadParents: false /*, loadedFields:_loadedFields*/);
|
|
}
|
|
if (/*!_loadedfields!.contains('activities.plDbActivityTaggings') && */ (preloadFields ==
|
|
null ||
|
|
preloadFields.contains('plDbActivityTaggings'))) {
|
|
/*_loadedfields!.add('activities.plDbActivityTaggings'); */ obj
|
|
.plDbActivityTaggings =
|
|
obj.plDbActivityTaggings ??
|
|
await obj.getDbActivityTaggings()!.toList(
|
|
preload: preload,
|
|
preloadFields: preloadFields,
|
|
loadParents: false /*, loadedFields:_loadedFields*/);
|
|
}
|
|
} // END RELATIONSHIPS PRELOAD CHILD
|
|
|
|
// RELATIONSHIPS PRELOAD
|
|
if (preload || loadParents) {
|
|
loadedFields = loadedFields ?? [];
|
|
if ((preloadFields == null ||
|
|
loadParents ||
|
|
preloadFields.contains('plDbAthlete'))) {
|
|
obj.plDbAthlete = obj.plDbAthlete ??
|
|
await obj.getDbAthlete(loadParents: loadParents);
|
|
}
|
|
} // END RELATIONSHIPS PRELOAD
|
|
|
|
objList.add(obj);
|
|
}
|
|
return objList;
|
|
}
|
|
|
|
  /// returns DbActivity by ID if exist, otherwise returns null
  /// Primary Keys: int? id
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: getById(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: getById(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns>returns [DbActivity] if exist, otherwise returns null
  Future<DbActivity?> getById(int? id,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    // A null key can never match a row; short-circuit before hitting the DB.
    if (id == null) {
      return null;
    }
    DbActivity? obj;
    final data = await _mnDbActivity.getById([id]);
    if (data.length != 0) {
      obj = DbActivity.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD CHILD
      // Child collections are loaded lazily only when requested: either
      // preloadFields is null (load everything) or it names the collection.
      // Each assignment keeps an already-populated collection untouched (??).
      if (preload) {
        loadedFields = loadedFields ?? [];
        if (/*!_loadedfields!.contains('activities.plDbEvents') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbEvents'))) {
          /*_loadedfields!.add('activities.plDbEvents'); */ obj.plDbEvents =
              obj.plDbEvents ??
                  await obj.getDbEvents()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('activities.plDbLaps') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbLaps'))) {
          /*_loadedfields!.add('activities.plDbLaps'); */ obj.plDbLaps =
              obj.plDbLaps ??
                  await obj.getDbLaps()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('activities.plDbIntervals') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbIntervals'))) {
          /*_loadedfields!.add('activities.plDbIntervals'); */ obj
                  .plDbIntervals =
              obj.plDbIntervals ??
                  await obj.getDbIntervals()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('activities.plDbActivityTaggings') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbActivityTaggings'))) {
          /*_loadedfields!.add('activities.plDbActivityTaggings'); */ obj
                  .plDbActivityTaggings =
              obj.plDbActivityTaggings ??
                  await obj.getDbActivityTaggings()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

      // RELATIONSHIPS PRELOAD
      // Parent (DbAthlete) is loaded when preloading or when explicitly
      // walking up the parent chain via loadParents.
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbAthlete'))) {
          obj.plDbAthlete = obj.plDbAthlete ??
              await obj.getDbAthlete(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD

    } else {
      obj = null;
    }
    return obj;
  }
|
|
|
|
/// Saves the (DbActivity) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
|
|
/// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
|
|
/// <returns>Returns id
|
|
@override
|
|
Future<int?> save({bool ignoreBatch = true}) async {
|
|
if (id == null || id == 0) {
|
|
id = await _mnDbActivity.insert(this, ignoreBatch);
|
|
} else {
|
|
await _mnDbActivity.update(this);
|
|
}
|
|
|
|
return id;
|
|
}
|
|
|
|
/// Saves the (DbActivity) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
|
|
/// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
|
|
/// <returns>Returns id
|
|
@override
|
|
Future<int?> saveOrThrow({bool ignoreBatch = true}) async {
|
|
if (id == null || id == 0) {
|
|
id = await _mnDbActivity.insertOrThrow(this, ignoreBatch);
|
|
|
|
isInsert = true;
|
|
} else {
|
|
// id= await _upsert(); // removed in sqfentity_gen 1.3.0+6
|
|
await _mnDbActivity.updateOrThrow(this);
|
|
}
|
|
|
|
return id;
|
|
}
|
|
|
|
  /// saveAs DbActivity. Returns a new Primary Key value of DbActivity

  /// <returns>Returns a new Primary Key value of DbActivity
  @override
  Future<int?> saveAs({bool ignoreBatch = true}) async {
    // Clearing the primary key forces save() down the insert path,
    // creating a copy of this row instead of updating the original.
    id = null;

    return save(ignoreBatch: ignoreBatch);
  }
|
|
|
|
  /// saveAll method saves the sent List<DbActivity> as a bulk in one transaction
  /// Returns a <List<BoolResult>>
  static Future<List<dynamic>> saveAll(List<DbActivity> dbactivities,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    List<dynamic>? result = [];
    // If there is no open transaction, start one
    final isStartedBatch = await DbEncrateia().batchStart();
    for (final obj in dbactivities) {
      // ignoreBatch:false queues each save into the open batch.
      await obj.save(ignoreBatch: false);
    }
    // Only commit here if the batch was opened by this call; otherwise the
    // outer caller owns the transaction and will commit it.
    if (!isStartedBatch) {
      result = await DbEncrateia().batchCommit(
          exclusive: exclusive,
          noResult: noResult,
          continueOnError: continueOnError);
      // Back-fill generated primary keys: batchCommit returns one result per
      // queued statement, in the same order as dbactivities.
      for (int i = 0; i < dbactivities.length; i++) {
        if (dbactivities[i].id == null) {
          dbactivities[i].id = result![i] as int;
        }
      }
    }
    return result!;
  }
|
|
|
|
  /// Updates if the record exists, otherwise adds a new row
  /// <returns>Returns id
  ///
  /// Implemented via SQLite's `INSERT OR REPLACE`; the positional argument
  /// list below must stay in exactly the same order as the column list in
  /// the SQL string. DateTime fields are stored as millisecondsSinceEpoch.
  /// The outcome (success or error) is recorded in [saveResult].
  @override
  Future<int?> upsert({bool ignoreBatch = true}) async {
    try {
      final result = await _mnDbActivity.rawInsert(
          'INSERT OR REPLACE INTO activities (id, state, path, stravaId, name, movingTime, type, distance, serialNumber, timeCreated, sportName, sport, subSport, timeStamp, startTime, startPositionLat, startPositionLong, event, eventType, eventGroup, totalDistance, totalStrides, totalCalories, avgSpeed, avgSpeedByMeasurements, avgSpeedBySpeed, avgSpeedByDistance, sdevSpeed, sdevPace, minSpeed, maxSpeed, totalAscent, totalDescent, maxRunningCadence, trigger, avgTemperature, maxTemperature, avgFractionalCadence, maxFractionalCadence, totalFractionalCycles, avgStanceTimePercent, avgStanceTime, avgHeartRate, maxHeartRate, avgRunningCadence, avgVerticalOscillation, totalElapsedTime, totalTimerTime, totalTrainingEffect, necLat, necLong, swcLat, swcLong, firstLapIndex, numLaps, numSessions, localTimestamp, avgPower, sdevPower, minPower, maxPower, minHeartRate, sdevHeartRate, avgGroundTime, sdevGroundTime, avgLegSpringStiffness, sdevLegSpringStiffness, avgFormPower, sdevFormPower, avgPowerRatio, sdevPowerRatio, avgStrideRatio, sdevStrideRatio, avgStrydCadence, sdevStrydCadence, sdevVerticalOscillation, cp, ftp, nonParsable, excluded, manual, athletesId) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)',
          [
            id,
            state,
            path,
            stravaId,
            name,
            movingTime,
            type,
            distance,
            serialNumber,
            // DateTime columns are persisted as epoch milliseconds.
            timeCreated != null ? timeCreated!.millisecondsSinceEpoch : null,
            sportName,
            sport,
            subSport,
            timeStamp != null ? timeStamp!.millisecondsSinceEpoch : null,
            startTime != null ? startTime!.millisecondsSinceEpoch : null,
            startPositionLat,
            startPositionLong,
            event,
            eventType,
            eventGroup,
            totalDistance,
            totalStrides,
            totalCalories,
            avgSpeed,
            avgSpeedByMeasurements,
            avgSpeedBySpeed,
            avgSpeedByDistance,
            sdevSpeed,
            sdevPace,
            minSpeed,
            maxSpeed,
            totalAscent,
            totalDescent,
            maxRunningCadence,
            trigger,
            avgTemperature,
            maxTemperature,
            avgFractionalCadence,
            maxFractionalCadence,
            totalFractionalCycles,
            avgStanceTimePercent,
            avgStanceTime,
            avgHeartRate,
            maxHeartRate,
            avgRunningCadence,
            avgVerticalOscillation,
            totalElapsedTime,
            totalTimerTime,
            totalTrainingEffect,
            necLat,
            necLong,
            swcLat,
            swcLong,
            firstLapIndex,
            numLaps,
            numSessions,
            localTimestamp != null
                ? localTimestamp!.millisecondsSinceEpoch
                : null,
            avgPower,
            sdevPower,
            minPower,
            maxPower,
            minHeartRate,
            sdevHeartRate,
            avgGroundTime,
            sdevGroundTime,
            avgLegSpringStiffness,
            sdevLegSpringStiffness,
            avgFormPower,
            sdevFormPower,
            avgPowerRatio,
            sdevPowerRatio,
            avgStrideRatio,
            sdevStrideRatio,
            avgStrydCadence,
            sdevStrydCadence,
            sdevVerticalOscillation,
            cp,
            ftp,
            nonParsable,
            excluded,
            manual,
            athletesId
          ],
          ignoreBatch);
      if (result! > 0) {
        saveResult = BoolResult(
            success: true,
            successMessage: 'DbActivity id=$id updated successfully');
      } else {
        saveResult = BoolResult(
            success: false, errorMessage: 'DbActivity id=$id did not update');
      }
      return id;
    } catch (e) {
      // Failure is reported via saveResult rather than rethrown.
      saveResult = BoolResult(
          success: false,
          errorMessage: 'DbActivity Save failed. Error: ${e.toString()}');
      return null;
    }
  }
|
|
|
|
  /// inserts or replaces the sent List<<DbActivity>> as a bulk in one transaction.
  /// upsertAll() method is faster then saveAll() method. upsertAll() should be used when you are sure that the primary key is greater than zero
  /// Returns a BoolCommitResult
  ///
  /// The column list and the '?' placeholders must stay aligned with
  /// DbActivity's field order consumed by rawInsertAll.
  @override
  Future<BoolCommitResult> upsertAll(List<DbActivity> dbactivities,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    final results = await _mnDbActivity.rawInsertAll(
        'INSERT OR REPLACE INTO activities (id, state, path, stravaId, name, movingTime, type, distance, serialNumber, timeCreated, sportName, sport, subSport, timeStamp, startTime, startPositionLat, startPositionLong, event, eventType, eventGroup, totalDistance, totalStrides, totalCalories, avgSpeed, avgSpeedByMeasurements, avgSpeedBySpeed, avgSpeedByDistance, sdevSpeed, sdevPace, minSpeed, maxSpeed, totalAscent, totalDescent, maxRunningCadence, trigger, avgTemperature, maxTemperature, avgFractionalCadence, maxFractionalCadence, totalFractionalCycles, avgStanceTimePercent, avgStanceTime, avgHeartRate, maxHeartRate, avgRunningCadence, avgVerticalOscillation, totalElapsedTime, totalTimerTime, totalTrainingEffect, necLat, necLong, swcLat, swcLong, firstLapIndex, numLaps, numSessions, localTimestamp, avgPower, sdevPower, minPower, maxPower, minHeartRate, sdevHeartRate, avgGroundTime, sdevGroundTime, avgLegSpringStiffness, sdevLegSpringStiffness, avgFormPower, sdevFormPower, avgPowerRatio, sdevPowerRatio, avgStrideRatio, sdevStrideRatio, avgStrydCadence, sdevStrydCadence, sdevVerticalOscillation, cp, ftp, nonParsable, excluded, manual, athletesId) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)',
        dbactivities,
        exclusive: exclusive,
        noResult: noResult,
        continueOnError: continueOnError);
    return results;
  }
|
|
|
|
  /// Deletes DbActivity

  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  ///
  /// Child rows (events, laps, intervals, taggings) are deleted first,
  /// cascade-style; the method aborts and returns the failing result as soon
  /// as one child delete does not succeed.
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    debugPrint('SQFENTITIY: delete DbActivity invoked (id=$id)');
    var result = BoolResult(success: false);
    {
      result = await DbEvent()
          .select()
          .activitiesId
          .equals(id)
          .and
          .delete(hardDelete);
    }
    if (!result.success) {
      return result;
    }
    {
      result =
          await DbLap().select().activitiesId.equals(id).and.delete(hardDelete);
    }
    if (!result.success) {
      return result;
    }
    {
      result = await DbInterval()
          .select()
          .activitiesId
          .equals(id)
          .and
          .delete(hardDelete);
    }
    if (!result.success) {
      return result;
    }
    {
      result = await DbActivityTagging()
          .select()
          .activitiesId
          .equals(id)
          .and
          .delete(hardDelete);
    }
    if (!result.success) {
      return result;
    }
    if (!_softDeleteActivated || hardDelete) {
      // Hard delete: physically remove the row.
      return _mnDbActivity
          .delete(QueryParams(whereString: 'id=?', whereArguments: [id]));
    } else {
      // Soft delete: keep the row but flag it as deleted.
      return _mnDbActivity.updateBatch(
          QueryParams(whereString: 'id=?', whereArguments: [id]),
          {'isDeleted': 1});
    }
  }
|
|
|
|
@override
|
|
Future<BoolResult> recover([bool recoverChilds = true]) {
|
|
// not implemented because:
|
|
final msg =
|
|
'set useSoftDeleting:true in the table definition of [DbActivity] to use this feature';
|
|
throw UnimplementedError(msg);
|
|
}
|
|
|
|
@override
|
|
DbActivityFilterBuilder select(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
return DbActivityFilterBuilder(this, getIsDeleted)
|
|
..qparams.selectColumns = columnsToSelect;
|
|
}
|
|
|
|
@override
|
|
DbActivityFilterBuilder distinct(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
return DbActivityFilterBuilder(this, getIsDeleted)
|
|
..qparams.selectColumns = columnsToSelect
|
|
..qparams.distinct = true;
|
|
}
|
|
|
|
void _setDefaultValues() {
|
|
state = state ?? 'new';
|
|
athletesId = athletesId ?? 0;
|
|
}
|
|
|
|
  /// Discards the primary key generated by a failed batch insert so the
  /// object can be saved again cleanly. Does nothing for objects that were
  /// updated rather than inserted (isInsert is only set by saveOrThrow).
  @override
  void rollbackPk() {
    // '== true' also handles a null isInsert safely.
    if (isInsert == true) {
      id = null;
    }
  }
|
|
|
|
// END METHODS
|
|
// BEGIN CUSTOM CODE
|
|
/*
|
|
you can define customCode property of your SqfEntityTable constant. For example:
|
|
const tablePerson = SqfEntityTable(
|
|
tableName: 'person',
|
|
primaryKeyName: 'id',
|
|
primaryKeyType: PrimaryKeyType.integer_auto_incremental,
|
|
fields: [
|
|
SqfEntityField('firstName', DbType.text),
|
|
SqfEntityField('lastName', DbType.text),
|
|
],
|
|
customCode: '''
|
|
String fullName()
|
|
{
|
|
return '$firstName $lastName';
|
|
}
|
|
''');
|
|
*/
|
|
// END CUSTOM CODE
|
|
}
|
|
// endregion dbactivity
|
|
|
|
// region DbActivityField
|
|
/// A single column reference inside a DbActivity query.
///
/// Every comparison override simply delegates to [FilterBase] and narrows the
/// return type so fluent chains keep producing a [DbActivityFilterBuilder].
class DbActivityField extends FilterBase {
  DbActivityField(DbActivityFilterBuilder dbactivityFB) : super(dbactivityFB);

  @override
  DbActivityFilterBuilder equals(dynamic pValue) =>
      super.equals(pValue) as DbActivityFilterBuilder;

  @override
  DbActivityFilterBuilder equalsOrNull(dynamic pValue) =>
      super.equalsOrNull(pValue) as DbActivityFilterBuilder;

  @override
  DbActivityFilterBuilder isNull() =>
      super.isNull() as DbActivityFilterBuilder;

  @override
  DbActivityFilterBuilder contains(dynamic pValue) =>
      super.contains(pValue) as DbActivityFilterBuilder;

  @override
  DbActivityFilterBuilder startsWith(dynamic pValue) =>
      super.startsWith(pValue) as DbActivityFilterBuilder;

  @override
  DbActivityFilterBuilder endsWith(dynamic pValue) =>
      super.endsWith(pValue) as DbActivityFilterBuilder;

  @override
  DbActivityFilterBuilder between(dynamic pFirst, dynamic pLast) =>
      super.between(pFirst, pLast) as DbActivityFilterBuilder;

  @override
  DbActivityFilterBuilder greaterThan(dynamic pValue) =>
      super.greaterThan(pValue) as DbActivityFilterBuilder;

  @override
  DbActivityFilterBuilder lessThan(dynamic pValue) =>
      super.lessThan(pValue) as DbActivityFilterBuilder;

  @override
  DbActivityFilterBuilder greaterThanOrEquals(dynamic pValue) =>
      super.greaterThanOrEquals(pValue) as DbActivityFilterBuilder;

  @override
  DbActivityFilterBuilder lessThanOrEquals(dynamic pValue) =>
      super.lessThanOrEquals(pValue) as DbActivityFilterBuilder;

  @override
  DbActivityFilterBuilder inValues(dynamic pValue) =>
      super.inValues(pValue) as DbActivityFilterBuilder;

  /// Negates the next comparison applied to this field.
  @override
  DbActivityField get not => super.not as DbActivityField;
}
|
|
// endregion DbActivityField
|
|
|
|
// region DbActivityFilterBuilder
|
|
class DbActivityFilterBuilder extends ConjunctionBase {
|
|
  DbActivityFilterBuilder(DbActivity obj, bool? getIsDeleted)
      : super(obj, getIsDeleted) {
    // Borrow the data-access manager and soft-delete setting from the
    // DbActivity instance that created this builder.
    _mnDbActivity = obj._mnDbActivity;
    _softDeleteActivated = obj.softDeleteActivated;
  }

  // Whether delete() should flag rows (isDeleted=1) instead of removing them.
  bool _softDeleteActivated = false;
  // Manager that executes the query this builder assembles.
  DbActivityManager? _mnDbActivity;
|
|
|
|
  /// put the sql keyword 'AND'
  @override
  DbActivityFilterBuilder get and {
    super.and;
    return this;
  }

  /// put the sql keyword 'OR'
  @override
  DbActivityFilterBuilder get or {
    super.or;
    return this;
  }

  /// open parentheses
  @override
  DbActivityFilterBuilder get startBlock {
    super.startBlock;
    return this;
  }

  /// String whereCriteria, write raw query without 'where' keyword. Like this: 'field1 like 'test%' and field2 = 3'
  @override
  DbActivityFilterBuilder where(String? whereCriteria,
      {dynamic parameterValue}) {
    super.where(whereCriteria, parameterValue: parameterValue);
    return this;
  }

  /// page = page number,
  /// pagesize = row(s) per page
  @override
  DbActivityFilterBuilder page(int page, int pagesize) {
    super.page(page, pagesize);
    return this;
  }

  /// int count = LIMIT
  @override
  DbActivityFilterBuilder top(int count) {
    super.top(count);
    return this;
  }

  /// close parentheses
  @override
  DbActivityFilterBuilder get endBlock {
    super.endBlock;
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbActivityFilterBuilder orderBy(dynamic argFields) {
    super.orderBy(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbActivityFilterBuilder orderByDesc(dynamic argFields) {
    super.orderByDesc(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbActivityFilterBuilder groupBy(dynamic argFields) {
    super.groupBy(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbActivityFilterBuilder having(dynamic argFields) {
    super.having(argFields);
    return this;
  }
|
|
|
|
DbActivityField _setField(
|
|
DbActivityField? field, String colName, DbType dbtype) {
|
|
return DbActivityField(this)
|
|
..param = DbParameter(
|
|
dbType: dbtype, columnName: colName, wStartBlock: openedBlock);
|
|
}
|
|
|
|
DbActivityField? _id;
|
|
DbActivityField get id {
|
|
return _id = _setField(_id, 'id', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _state;
|
|
DbActivityField get state {
|
|
return _state = _setField(_state, 'state', DbType.text);
|
|
}
|
|
|
|
DbActivityField? _path;
|
|
DbActivityField get path {
|
|
return _path = _setField(_path, 'path', DbType.text);
|
|
}
|
|
|
|
DbActivityField? _stravaId;
|
|
DbActivityField get stravaId {
|
|
return _stravaId = _setField(_stravaId, 'stravaId', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _name;
|
|
DbActivityField get name {
|
|
return _name = _setField(_name, 'name', DbType.text);
|
|
}
|
|
|
|
DbActivityField? _movingTime;
|
|
DbActivityField get movingTime {
|
|
return _movingTime = _setField(_movingTime, 'movingTime', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _type;
|
|
DbActivityField get type {
|
|
return _type = _setField(_type, 'type', DbType.text);
|
|
}
|
|
|
|
DbActivityField? _distance;
|
|
DbActivityField get distance {
|
|
return _distance = _setField(_distance, 'distance', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _serialNumber;
|
|
DbActivityField get serialNumber {
|
|
return _serialNumber =
|
|
_setField(_serialNumber, 'serialNumber', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _timeCreated;
|
|
DbActivityField get timeCreated {
|
|
return _timeCreated =
|
|
_setField(_timeCreated, 'timeCreated', DbType.datetime);
|
|
}
|
|
|
|
DbActivityField? _sportName;
|
|
DbActivityField get sportName {
|
|
return _sportName = _setField(_sportName, 'sportName', DbType.text);
|
|
}
|
|
|
|
DbActivityField? _sport;
|
|
DbActivityField get sport {
|
|
return _sport = _setField(_sport, 'sport', DbType.text);
|
|
}
|
|
|
|
DbActivityField? _subSport;
|
|
DbActivityField get subSport {
|
|
return _subSport = _setField(_subSport, 'subSport', DbType.text);
|
|
}
|
|
|
|
DbActivityField? _timeStamp;
|
|
DbActivityField get timeStamp {
|
|
return _timeStamp = _setField(_timeStamp, 'timeStamp', DbType.datetime);
|
|
}
|
|
|
|
DbActivityField? _startTime;
|
|
DbActivityField get startTime {
|
|
return _startTime = _setField(_startTime, 'startTime', DbType.datetime);
|
|
}
|
|
|
|
DbActivityField? _startPositionLat;
|
|
DbActivityField get startPositionLat {
|
|
return _startPositionLat =
|
|
_setField(_startPositionLat, 'startPositionLat', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _startPositionLong;
|
|
DbActivityField get startPositionLong {
|
|
return _startPositionLong =
|
|
_setField(_startPositionLong, 'startPositionLong', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _event;
|
|
DbActivityField get event {
|
|
return _event = _setField(_event, 'event', DbType.text);
|
|
}
|
|
|
|
DbActivityField? _eventType;
|
|
DbActivityField get eventType {
|
|
return _eventType = _setField(_eventType, 'eventType', DbType.text);
|
|
}
|
|
|
|
DbActivityField? _eventGroup;
|
|
DbActivityField get eventGroup {
|
|
return _eventGroup = _setField(_eventGroup, 'eventGroup', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _totalDistance;
|
|
DbActivityField get totalDistance {
|
|
return _totalDistance =
|
|
_setField(_totalDistance, 'totalDistance', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _totalStrides;
|
|
DbActivityField get totalStrides {
|
|
return _totalStrides =
|
|
_setField(_totalStrides, 'totalStrides', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _totalCalories;
|
|
DbActivityField get totalCalories {
|
|
return _totalCalories =
|
|
_setField(_totalCalories, 'totalCalories', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _avgSpeed;
|
|
DbActivityField get avgSpeed {
|
|
return _avgSpeed = _setField(_avgSpeed, 'avgSpeed', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _avgSpeedByMeasurements;
|
|
DbActivityField get avgSpeedByMeasurements {
|
|
return _avgSpeedByMeasurements = _setField(
|
|
_avgSpeedByMeasurements, 'avgSpeedByMeasurements', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _avgSpeedBySpeed;
|
|
DbActivityField get avgSpeedBySpeed {
|
|
return _avgSpeedBySpeed =
|
|
_setField(_avgSpeedBySpeed, 'avgSpeedBySpeed', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _avgSpeedByDistance;
|
|
DbActivityField get avgSpeedByDistance {
|
|
return _avgSpeedByDistance =
|
|
_setField(_avgSpeedByDistance, 'avgSpeedByDistance', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _sdevSpeed;
|
|
DbActivityField get sdevSpeed {
|
|
return _sdevSpeed = _setField(_sdevSpeed, 'sdevSpeed', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _sdevPace;
|
|
DbActivityField get sdevPace {
|
|
return _sdevPace = _setField(_sdevPace, 'sdevPace', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _minSpeed;
|
|
DbActivityField get minSpeed {
|
|
return _minSpeed = _setField(_minSpeed, 'minSpeed', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _maxSpeed;
|
|
DbActivityField get maxSpeed {
|
|
return _maxSpeed = _setField(_maxSpeed, 'maxSpeed', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _totalAscent;
|
|
DbActivityField get totalAscent {
|
|
return _totalAscent =
|
|
_setField(_totalAscent, 'totalAscent', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _totalDescent;
|
|
DbActivityField get totalDescent {
|
|
return _totalDescent =
|
|
_setField(_totalDescent, 'totalDescent', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _maxRunningCadence;
|
|
DbActivityField get maxRunningCadence {
|
|
return _maxRunningCadence =
|
|
_setField(_maxRunningCadence, 'maxRunningCadence', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _trigger;
|
|
DbActivityField get trigger {
|
|
return _trigger = _setField(_trigger, 'trigger', DbType.text);
|
|
}
|
|
|
|
DbActivityField? _avgTemperature;
|
|
DbActivityField get avgTemperature {
|
|
return _avgTemperature =
|
|
_setField(_avgTemperature, 'avgTemperature', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _maxTemperature;
|
|
DbActivityField get maxTemperature {
|
|
return _maxTemperature =
|
|
_setField(_maxTemperature, 'maxTemperature', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _avgFractionalCadence;
|
|
DbActivityField get avgFractionalCadence {
|
|
return _avgFractionalCadence =
|
|
_setField(_avgFractionalCadence, 'avgFractionalCadence', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _maxFractionalCadence;
|
|
DbActivityField get maxFractionalCadence {
|
|
return _maxFractionalCadence =
|
|
_setField(_maxFractionalCadence, 'maxFractionalCadence', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _totalFractionalCycles;
|
|
DbActivityField get totalFractionalCycles {
|
|
return _totalFractionalCycles =
|
|
_setField(_totalFractionalCycles, 'totalFractionalCycles', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _avgStanceTimePercent;
|
|
DbActivityField get avgStanceTimePercent {
|
|
return _avgStanceTimePercent =
|
|
_setField(_avgStanceTimePercent, 'avgStanceTimePercent', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _avgStanceTime;
|
|
DbActivityField get avgStanceTime {
|
|
return _avgStanceTime =
|
|
_setField(_avgStanceTime, 'avgStanceTime', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _avgHeartRate;
|
|
DbActivityField get avgHeartRate {
|
|
return _avgHeartRate =
|
|
_setField(_avgHeartRate, 'avgHeartRate', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _maxHeartRate;
|
|
DbActivityField get maxHeartRate {
|
|
return _maxHeartRate =
|
|
_setField(_maxHeartRate, 'maxHeartRate', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _avgRunningCadence;
|
|
DbActivityField get avgRunningCadence {
|
|
return _avgRunningCadence =
|
|
_setField(_avgRunningCadence, 'avgRunningCadence', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _avgVerticalOscillation;
|
|
DbActivityField get avgVerticalOscillation {
|
|
return _avgVerticalOscillation = _setField(
|
|
_avgVerticalOscillation, 'avgVerticalOscillation', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _totalElapsedTime;
|
|
DbActivityField get totalElapsedTime {
|
|
return _totalElapsedTime =
|
|
_setField(_totalElapsedTime, 'totalElapsedTime', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _totalTimerTime;
|
|
DbActivityField get totalTimerTime {
|
|
return _totalTimerTime =
|
|
_setField(_totalTimerTime, 'totalTimerTime', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _totalTrainingEffect;
|
|
DbActivityField get totalTrainingEffect {
|
|
return _totalTrainingEffect =
|
|
_setField(_totalTrainingEffect, 'totalTrainingEffect', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _necLat;
|
|
DbActivityField get necLat {
|
|
return _necLat = _setField(_necLat, 'necLat', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _necLong;
|
|
DbActivityField get necLong {
|
|
return _necLong = _setField(_necLong, 'necLong', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _swcLat;
|
|
DbActivityField get swcLat {
|
|
return _swcLat = _setField(_swcLat, 'swcLat', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _swcLong;
|
|
DbActivityField get swcLong {
|
|
return _swcLong = _setField(_swcLong, 'swcLong', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _firstLapIndex;
|
|
DbActivityField get firstLapIndex {
|
|
return _firstLapIndex =
|
|
_setField(_firstLapIndex, 'firstLapIndex', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _numLaps;
|
|
DbActivityField get numLaps {
|
|
return _numLaps = _setField(_numLaps, 'numLaps', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _numSessions;
|
|
DbActivityField get numSessions {
|
|
return _numSessions =
|
|
_setField(_numSessions, 'numSessions', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _localTimestamp;
|
|
DbActivityField get localTimestamp {
|
|
return _localTimestamp =
|
|
_setField(_localTimestamp, 'localTimestamp', DbType.datetime);
|
|
}
|
|
|
|
DbActivityField? _avgPower;
|
|
DbActivityField get avgPower {
|
|
return _avgPower = _setField(_avgPower, 'avgPower', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _sdevPower;
|
|
DbActivityField get sdevPower {
|
|
return _sdevPower = _setField(_sdevPower, 'sdevPower', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _minPower;
|
|
DbActivityField get minPower {
|
|
return _minPower = _setField(_minPower, 'minPower', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _maxPower;
|
|
DbActivityField get maxPower {
|
|
return _maxPower = _setField(_maxPower, 'maxPower', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _minHeartRate;
|
|
DbActivityField get minHeartRate {
|
|
return _minHeartRate =
|
|
_setField(_minHeartRate, 'minHeartRate', DbType.integer);
|
|
}
|
|
|
|
DbActivityField? _sdevHeartRate;
|
|
DbActivityField get sdevHeartRate {
|
|
return _sdevHeartRate =
|
|
_setField(_sdevHeartRate, 'sdevHeartRate', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _avgGroundTime;
|
|
DbActivityField get avgGroundTime {
|
|
return _avgGroundTime =
|
|
_setField(_avgGroundTime, 'avgGroundTime', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _sdevGroundTime;
|
|
DbActivityField get sdevGroundTime {
|
|
return _sdevGroundTime =
|
|
_setField(_sdevGroundTime, 'sdevGroundTime', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _avgLegSpringStiffness;
|
|
DbActivityField get avgLegSpringStiffness {
|
|
return _avgLegSpringStiffness =
|
|
_setField(_avgLegSpringStiffness, 'avgLegSpringStiffness', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _sdevLegSpringStiffness;
|
|
DbActivityField get sdevLegSpringStiffness {
|
|
return _sdevLegSpringStiffness = _setField(
|
|
_sdevLegSpringStiffness, 'sdevLegSpringStiffness', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _avgFormPower;
|
|
DbActivityField get avgFormPower {
|
|
return _avgFormPower =
|
|
_setField(_avgFormPower, 'avgFormPower', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _sdevFormPower;
|
|
DbActivityField get sdevFormPower {
|
|
return _sdevFormPower =
|
|
_setField(_sdevFormPower, 'sdevFormPower', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _avgPowerRatio;
|
|
DbActivityField get avgPowerRatio {
|
|
return _avgPowerRatio =
|
|
_setField(_avgPowerRatio, 'avgPowerRatio', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _sdevPowerRatio;
|
|
DbActivityField get sdevPowerRatio {
|
|
return _sdevPowerRatio =
|
|
_setField(_sdevPowerRatio, 'sdevPowerRatio', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _avgStrideRatio;
|
|
DbActivityField get avgStrideRatio {
|
|
return _avgStrideRatio =
|
|
_setField(_avgStrideRatio, 'avgStrideRatio', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _sdevStrideRatio;
|
|
DbActivityField get sdevStrideRatio {
|
|
return _sdevStrideRatio =
|
|
_setField(_sdevStrideRatio, 'sdevStrideRatio', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _avgStrydCadence;
|
|
DbActivityField get avgStrydCadence {
|
|
return _avgStrydCadence =
|
|
_setField(_avgStrydCadence, 'avgStrydCadence', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _sdevStrydCadence;
|
|
DbActivityField get sdevStrydCadence {
|
|
return _sdevStrydCadence =
|
|
_setField(_sdevStrydCadence, 'sdevStrydCadence', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _sdevVerticalOscillation;
|
|
DbActivityField get sdevVerticalOscillation {
|
|
return _sdevVerticalOscillation = _setField(
|
|
_sdevVerticalOscillation, 'sdevVerticalOscillation', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _cp;
|
|
DbActivityField get cp {
|
|
return _cp = _setField(_cp, 'cp', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _ftp;
|
|
DbActivityField get ftp {
|
|
return _ftp = _setField(_ftp, 'ftp', DbType.real);
|
|
}
|
|
|
|
DbActivityField? _nonParsable;
|
|
DbActivityField get nonParsable {
|
|
return _nonParsable = _setField(_nonParsable, 'nonParsable', DbType.bool);
|
|
}
|
|
|
|
DbActivityField? _excluded;
|
|
DbActivityField get excluded {
|
|
return _excluded = _setField(_excluded, 'excluded', DbType.bool);
|
|
}
|
|
|
|
DbActivityField? _manual;
|
|
DbActivityField get manual {
|
|
return _manual = _setField(_manual, 'manual', DbType.bool);
|
|
}
|
|
|
|
DbActivityField? _athletesId;
|
|
DbActivityField get athletesId {
|
|
return _athletesId = _setField(_athletesId, 'athletesId', DbType.integer);
|
|
}
|
|
|
|
/// Deletes List<DbActivity> bulk by query
|
|
///
|
|
/// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
|
|
@override
|
|
Future<BoolResult> delete([bool hardDelete = false]) async {
|
|
buildParameters();
|
|
var r = BoolResult(success: false);
|
|
// Delete sub records where in (DbEvent) according to DeleteRule.CASCADE
|
|
final idListDbEventBYactivitiesId = toListPrimaryKeySQL(false);
|
|
final resDbEventBYactivitiesId = await DbEvent()
|
|
.select()
|
|
.where('activitiesId IN (${idListDbEventBYactivitiesId['sql']})',
|
|
parameterValue: idListDbEventBYactivitiesId['args'])
|
|
.delete(hardDelete);
|
|
if (!resDbEventBYactivitiesId.success) {
|
|
return resDbEventBYactivitiesId;
|
|
}
|
|
// Delete sub records where in (DbLap) according to DeleteRule.CASCADE
|
|
final idListDbLapBYactivitiesId = toListPrimaryKeySQL(false);
|
|
final resDbLapBYactivitiesId = await DbLap()
|
|
.select()
|
|
.where('activitiesId IN (${idListDbLapBYactivitiesId['sql']})',
|
|
parameterValue: idListDbLapBYactivitiesId['args'])
|
|
.delete(hardDelete);
|
|
if (!resDbLapBYactivitiesId.success) {
|
|
return resDbLapBYactivitiesId;
|
|
}
|
|
// Delete sub records where in (DbInterval) according to DeleteRule.CASCADE
|
|
final idListDbIntervalBYactivitiesId = toListPrimaryKeySQL(false);
|
|
final resDbIntervalBYactivitiesId = await DbInterval()
|
|
.select()
|
|
.where('activitiesId IN (${idListDbIntervalBYactivitiesId['sql']})',
|
|
parameterValue: idListDbIntervalBYactivitiesId['args'])
|
|
.delete(hardDelete);
|
|
if (!resDbIntervalBYactivitiesId.success) {
|
|
return resDbIntervalBYactivitiesId;
|
|
}
|
|
// Delete sub records where in (DbActivityTagging) according to DeleteRule.CASCADE
|
|
final idListDbActivityTaggingBYactivitiesId = toListPrimaryKeySQL(false);
|
|
final resDbActivityTaggingBYactivitiesId = await DbActivityTagging()
|
|
.select()
|
|
.where(
|
|
'activitiesId IN (${idListDbActivityTaggingBYactivitiesId['sql']})',
|
|
parameterValue: idListDbActivityTaggingBYactivitiesId['args'])
|
|
.delete(hardDelete);
|
|
if (!resDbActivityTaggingBYactivitiesId.success) {
|
|
return resDbActivityTaggingBYactivitiesId;
|
|
}
|
|
|
|
if (_softDeleteActivated && !hardDelete) {
|
|
r = await _mnDbActivity!.updateBatch(qparams, {'isDeleted': 1});
|
|
} else {
|
|
r = await _mnDbActivity!.delete(qparams);
|
|
}
|
|
return r;
|
|
}
|
|
|
|
/// using:
|
|
/// update({'fieldName': Value})
|
|
/// fieldName must be String. Value is dynamic, it can be any of the (int, bool, String.. )
|
|
@override
|
|
Future<BoolResult> update(Map<String, dynamic> values) {
|
|
buildParameters();
|
|
if (qparams.limit! > 0 || qparams.offset! > 0) {
|
|
qparams.whereString =
|
|
'id IN (SELECT id from activities ${qparams.whereString!.isNotEmpty ? 'WHERE ${qparams.whereString}' : ''}${qparams.limit! > 0 ? ' LIMIT ${qparams.limit}' : ''}${qparams.offset! > 0 ? ' OFFSET ${qparams.offset}' : ''})';
|
|
}
|
|
return _mnDbActivity!.updateBatch(qparams, values);
|
|
}
|
|
|
|
  /// This method always returns [DbActivity] Obj if exist, otherwise returns null
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toSingle(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns> DbActivity?
  @override
  Future<DbActivity?> toSingle(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    // pSize: 1 limits the generated query to a single row.
    buildParameters(pSize: 1);
    final objFuture = _mnDbActivity!.toList(qparams);
    final data = await objFuture;
    DbActivity? obj;
    if (data.isNotEmpty) {
      obj = DbActivity.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD CHILD
      // Each child collection (events, laps, intervals, taggings) is fetched
      // only when `preload` is on and the field is either explicitly requested
      // in `preloadFields` or no field filter was given. The `/* ... */`
      // fragments are cycle-guard bookkeeping left disabled by the generator.
      if (preload) {
        loadedFields = loadedFields ?? [];
        if (/*!_loadedfields!.contains('activities.plDbEvents') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbEvents'))) {
          /*_loadedfields!.add('activities.plDbEvents'); */ obj.plDbEvents =
              obj.plDbEvents ??
                  await obj.getDbEvents()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('activities.plDbLaps') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbLaps'))) {
          /*_loadedfields!.add('activities.plDbLaps'); */ obj.plDbLaps =
              obj.plDbLaps ??
                  await obj.getDbLaps()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('activities.plDbIntervals') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbIntervals'))) {
          /*_loadedfields!.add('activities.plDbIntervals'); */ obj
                  .plDbIntervals =
              obj.plDbIntervals ??
                  await obj.getDbIntervals()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('activities.plDbActivityTaggings') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbActivityTaggings'))) {
          /*_loadedfields!.add('activities.plDbActivityTaggings'); */ obj
                  .plDbActivityTaggings =
              obj.plDbActivityTaggings ??
                  await obj.getDbActivityTaggings()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

      // RELATIONSHIPS PRELOAD
      // The parent athlete row is fetched when preload or loadParents asks
      // for it; loadParents bypasses the preloadFields filter.
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbAthlete'))) {
          obj.plDbAthlete = obj.plDbAthlete ??
              await obj.getDbAthlete(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }
|
|
|
|
/// This method always returns [DbActivity]
|
|
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
|
|
/// ex: toSingle(preload:true) -> Loads all related objects
|
|
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
|
|
/// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
|
|
/// bool loadParents: if true, loads all parent objects until the object has no parent
|
|
|
|
/// <returns> DbActivity?
|
|
@override
|
|
Future<DbActivity> toSingleOrDefault(
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields}) async {
|
|
return await toSingle(
|
|
preload: preload,
|
|
preloadFields: preloadFields,
|
|
loadParents: loadParents,
|
|
loadedFields: loadedFields) ??
|
|
DbActivity();
|
|
}
|
|
|
|
/// This method returns int. [DbActivity]
|
|
/// <returns>int
|
|
@override
|
|
Future<int> toCount([VoidCallback Function(int c)? dbactivityCount]) async {
|
|
buildParameters();
|
|
qparams.selectColumns = ['COUNT(1) AS CNT'];
|
|
final dbactivitiesFuture = await _mnDbActivity!.toList(qparams);
|
|
final int count = dbactivitiesFuture[0]['CNT'] as int;
|
|
if (dbactivityCount != null) {
|
|
dbactivityCount(count);
|
|
}
|
|
return count;
|
|
}
|
|
|
|
/// This method returns List<DbActivity> [DbActivity]
|
|
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
|
|
/// ex: toList(preload:true) -> Loads all related objects
|
|
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
|
|
/// ex: toList(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
|
|
/// bool loadParents: if true, loads all parent objects until the object has no parent
|
|
|
|
/// <returns>List<DbActivity>
|
|
@override
|
|
Future<List<DbActivity>> toList(
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields}) async {
|
|
final data = await toMapList();
|
|
final List<DbActivity> dbactivitiesData = await DbActivity.fromMapList(data,
|
|
preload: preload,
|
|
preloadFields: preloadFields,
|
|
loadParents: loadParents,
|
|
loadedFields: loadedFields,
|
|
setDefaultValues: qparams.selectColumns == null);
|
|
return dbactivitiesData;
|
|
}
|
|
|
|
/// This method returns Json String [DbActivity]
|
|
@override
|
|
Future<String> toJson() async {
|
|
final list = <dynamic>[];
|
|
final data = await toList();
|
|
for (var o in data) {
|
|
list.add(o.toMap(forJson: true));
|
|
}
|
|
return json.encode(list);
|
|
}
|
|
|
|
/// This method returns Json String. [DbActivity]
|
|
@override
|
|
Future<String> toJsonWithChilds() async {
|
|
final list = <dynamic>[];
|
|
final data = await toList();
|
|
for (var o in data) {
|
|
list.add(await o.toMapWithChildren(false, true));
|
|
}
|
|
return json.encode(list);
|
|
}
|
|
|
|
/// This method returns List<dynamic>. [DbActivity]
|
|
/// <returns>List<dynamic>
|
|
@override
|
|
Future<List<dynamic>> toMapList() async {
|
|
buildParameters();
|
|
return await _mnDbActivity!.toList(qparams);
|
|
}
|
|
|
|
/// This method returns Primary Key List SQL and Parameters retVal = Map<String,dynamic>. [DbActivity]
|
|
/// retVal['sql'] = SQL statement string, retVal['args'] = whereArguments List<dynamic>;
|
|
/// <returns>List<String>
|
|
@override
|
|
Map<String, dynamic> toListPrimaryKeySQL([bool buildParams = true]) {
|
|
final Map<String, dynamic> _retVal = <String, dynamic>{};
|
|
if (buildParams) {
|
|
buildParameters();
|
|
}
|
|
_retVal['sql'] = 'SELECT `id` FROM activities WHERE ${qparams.whereString}';
|
|
_retVal['args'] = qparams.whereArguments;
|
|
return _retVal;
|
|
}
|
|
|
|
/// This method returns Primary Key List<int>.
|
|
/// <returns>List<int>
|
|
@override
|
|
Future<List<int>> toListPrimaryKey([bool buildParams = true]) async {
|
|
if (buildParams) {
|
|
buildParameters();
|
|
}
|
|
final List<int> idData = <int>[];
|
|
qparams.selectColumns = ['id'];
|
|
final idFuture = await _mnDbActivity!.toList(qparams);
|
|
|
|
final int count = idFuture.length;
|
|
for (int i = 0; i < count; i++) {
|
|
idData.add(idFuture[i]['id'] as int);
|
|
}
|
|
return idData;
|
|
}
|
|
|
|
/// Returns List<dynamic> for selected columns. Use this method for 'groupBy' with min,max,avg.. [DbActivity]
|
|
/// Sample usage: (see EXAMPLE 4.2 at https://github.com/hhtokpinar/sqfEntity#group-by)
|
|
@override
|
|
Future<List<dynamic>> toListObject() async {
|
|
buildParameters();
|
|
|
|
final objectFuture = _mnDbActivity!.toList(qparams);
|
|
|
|
final List<dynamic> objectsData = <dynamic>[];
|
|
final data = await objectFuture;
|
|
final int count = data.length;
|
|
for (int i = 0; i < count; i++) {
|
|
objectsData.add(data[i]);
|
|
}
|
|
return objectsData;
|
|
}
|
|
|
|
/// Returns List<String> for selected first column
|
|
/// Sample usage: await DbActivity.select(columnsToSelect: ['columnName']).toListString()
|
|
@override
|
|
Future<List<String>> toListString(
|
|
[VoidCallback Function(List<String> o)? listString]) async {
|
|
buildParameters();
|
|
|
|
final objectFuture = _mnDbActivity!.toList(qparams);
|
|
|
|
final List<String> objectsData = <String>[];
|
|
final data = await objectFuture;
|
|
final int count = data.length;
|
|
for (int i = 0; i < count; i++) {
|
|
objectsData.add(data[i][qparams.selectColumns![0]].toString());
|
|
}
|
|
if (listString != null) {
|
|
listString(objectsData);
|
|
}
|
|
return objectsData;
|
|
}
|
|
}
|
|
// endregion DbActivityFilterBuilder
|
|
|
|
// region DbActivityFields
|
|
/// Static [TableField] accessors for every column of the `activities` table,
/// used to compose type-safe queries (e.g. ORDER BY / GROUP BY clauses).
///
/// Each getter lazily builds its descriptor once via [SqlSyntax.setField] and
/// caches it in a private backing field; subsequent reads reuse the cache.
class DbActivityFields {
  static TableField? _fId;
  static TableField get id =>
      _fId ??= SqlSyntax.setField(_fId, 'id', DbType.integer);

  static TableField? _fState;
  static TableField get state =>
      _fState ??= SqlSyntax.setField(_fState, 'state', DbType.text);

  static TableField? _fPath;
  static TableField get path =>
      _fPath ??= SqlSyntax.setField(_fPath, 'path', DbType.text);

  static TableField? _fStravaId;
  static TableField get stravaId =>
      _fStravaId ??= SqlSyntax.setField(_fStravaId, 'stravaId', DbType.integer);

  static TableField? _fName;
  static TableField get name =>
      _fName ??= SqlSyntax.setField(_fName, 'name', DbType.text);

  static TableField? _fMovingTime;
  static TableField get movingTime => _fMovingTime ??=
      SqlSyntax.setField(_fMovingTime, 'movingTime', DbType.integer);

  static TableField? _fType;
  static TableField get type =>
      _fType ??= SqlSyntax.setField(_fType, 'type', DbType.text);

  static TableField? _fDistance;
  static TableField get distance =>
      _fDistance ??= SqlSyntax.setField(_fDistance, 'distance', DbType.integer);

  static TableField? _fSerialNumber;
  static TableField get serialNumber => _fSerialNumber ??=
      SqlSyntax.setField(_fSerialNumber, 'serialNumber', DbType.integer);

  static TableField? _fTimeCreated;
  static TableField get timeCreated => _fTimeCreated ??=
      SqlSyntax.setField(_fTimeCreated, 'timeCreated', DbType.datetime);

  static TableField? _fSportName;
  static TableField get sportName => _fSportName ??=
      SqlSyntax.setField(_fSportName, 'sportName', DbType.text);

  static TableField? _fSport;
  static TableField get sport =>
      _fSport ??= SqlSyntax.setField(_fSport, 'sport', DbType.text);

  static TableField? _fSubSport;
  static TableField get subSport =>
      _fSubSport ??= SqlSyntax.setField(_fSubSport, 'subSport', DbType.text);

  static TableField? _fTimeStamp;
  static TableField get timeStamp => _fTimeStamp ??=
      SqlSyntax.setField(_fTimeStamp, 'timeStamp', DbType.datetime);

  static TableField? _fStartTime;
  static TableField get startTime => _fStartTime ??=
      SqlSyntax.setField(_fStartTime, 'startTime', DbType.datetime);

  static TableField? _fStartPositionLat;
  static TableField get startPositionLat => _fStartPositionLat ??=
      SqlSyntax.setField(_fStartPositionLat, 'startPositionLat', DbType.real);

  static TableField? _fStartPositionLong;
  static TableField get startPositionLong => _fStartPositionLong ??=
      SqlSyntax.setField(_fStartPositionLong, 'startPositionLong', DbType.real);

  static TableField? _fEvent;
  static TableField get event =>
      _fEvent ??= SqlSyntax.setField(_fEvent, 'event', DbType.text);

  static TableField? _fEventType;
  static TableField get eventType => _fEventType ??=
      SqlSyntax.setField(_fEventType, 'eventType', DbType.text);

  static TableField? _fEventGroup;
  static TableField get eventGroup => _fEventGroup ??=
      SqlSyntax.setField(_fEventGroup, 'eventGroup', DbType.integer);

  static TableField? _fTotalDistance;
  static TableField get totalDistance => _fTotalDistance ??=
      SqlSyntax.setField(_fTotalDistance, 'totalDistance', DbType.integer);

  static TableField? _fTotalStrides;
  static TableField get totalStrides => _fTotalStrides ??=
      SqlSyntax.setField(_fTotalStrides, 'totalStrides', DbType.integer);

  static TableField? _fTotalCalories;
  static TableField get totalCalories => _fTotalCalories ??=
      SqlSyntax.setField(_fTotalCalories, 'totalCalories', DbType.integer);

  static TableField? _fAvgSpeed;
  static TableField get avgSpeed =>
      _fAvgSpeed ??= SqlSyntax.setField(_fAvgSpeed, 'avgSpeed', DbType.real);

  static TableField? _fAvgSpeedByMeasurements;
  static TableField get avgSpeedByMeasurements =>
      _fAvgSpeedByMeasurements ??= SqlSyntax.setField(
          _fAvgSpeedByMeasurements, 'avgSpeedByMeasurements', DbType.real);

  static TableField? _fAvgSpeedBySpeed;
  static TableField get avgSpeedBySpeed => _fAvgSpeedBySpeed ??=
      SqlSyntax.setField(_fAvgSpeedBySpeed, 'avgSpeedBySpeed', DbType.real);

  static TableField? _fAvgSpeedByDistance;
  static TableField get avgSpeedByDistance => _fAvgSpeedByDistance ??=
      SqlSyntax.setField(_fAvgSpeedByDistance, 'avgSpeedByDistance', DbType.real);

  static TableField? _fSdevSpeed;
  static TableField get sdevSpeed =>
      _fSdevSpeed ??= SqlSyntax.setField(_fSdevSpeed, 'sdevSpeed', DbType.real);

  static TableField? _fSdevPace;
  static TableField get sdevPace =>
      _fSdevPace ??= SqlSyntax.setField(_fSdevPace, 'sdevPace', DbType.real);

  static TableField? _fMinSpeed;
  static TableField get minSpeed =>
      _fMinSpeed ??= SqlSyntax.setField(_fMinSpeed, 'minSpeed', DbType.real);

  static TableField? _fMaxSpeed;
  static TableField get maxSpeed =>
      _fMaxSpeed ??= SqlSyntax.setField(_fMaxSpeed, 'maxSpeed', DbType.real);

  static TableField? _fTotalAscent;
  static TableField get totalAscent => _fTotalAscent ??=
      SqlSyntax.setField(_fTotalAscent, 'totalAscent', DbType.integer);

  static TableField? _fTotalDescent;
  static TableField get totalDescent => _fTotalDescent ??=
      SqlSyntax.setField(_fTotalDescent, 'totalDescent', DbType.integer);

  static TableField? _fMaxRunningCadence;
  static TableField get maxRunningCadence => _fMaxRunningCadence ??=
      SqlSyntax.setField(_fMaxRunningCadence, 'maxRunningCadence', DbType.integer);

  static TableField? _fTrigger;
  static TableField get trigger =>
      _fTrigger ??= SqlSyntax.setField(_fTrigger, 'trigger', DbType.text);

  static TableField? _fAvgTemperature;
  static TableField get avgTemperature => _fAvgTemperature ??=
      SqlSyntax.setField(_fAvgTemperature, 'avgTemperature', DbType.integer);

  static TableField? _fMaxTemperature;
  static TableField get maxTemperature => _fMaxTemperature ??=
      SqlSyntax.setField(_fMaxTemperature, 'maxTemperature', DbType.integer);

  static TableField? _fAvgFractionalCadence;
  static TableField get avgFractionalCadence =>
      _fAvgFractionalCadence ??= SqlSyntax.setField(
          _fAvgFractionalCadence, 'avgFractionalCadence', DbType.real);

  static TableField? _fMaxFractionalCadence;
  static TableField get maxFractionalCadence =>
      _fMaxFractionalCadence ??= SqlSyntax.setField(
          _fMaxFractionalCadence, 'maxFractionalCadence', DbType.real);

  static TableField? _fTotalFractionalCycles;
  static TableField get totalFractionalCycles =>
      _fTotalFractionalCycles ??= SqlSyntax.setField(
          _fTotalFractionalCycles, 'totalFractionalCycles', DbType.real);

  static TableField? _fAvgStanceTimePercent;
  static TableField get avgStanceTimePercent =>
      _fAvgStanceTimePercent ??= SqlSyntax.setField(
          _fAvgStanceTimePercent, 'avgStanceTimePercent', DbType.real);

  static TableField? _fAvgStanceTime;
  static TableField get avgStanceTime => _fAvgStanceTime ??=
      SqlSyntax.setField(_fAvgStanceTime, 'avgStanceTime', DbType.real);

  static TableField? _fAvgHeartRate;
  static TableField get avgHeartRate => _fAvgHeartRate ??=
      SqlSyntax.setField(_fAvgHeartRate, 'avgHeartRate', DbType.integer);

  static TableField? _fMaxHeartRate;
  static TableField get maxHeartRate => _fMaxHeartRate ??=
      SqlSyntax.setField(_fMaxHeartRate, 'maxHeartRate', DbType.integer);

  static TableField? _fAvgRunningCadence;
  static TableField get avgRunningCadence => _fAvgRunningCadence ??=
      SqlSyntax.setField(_fAvgRunningCadence, 'avgRunningCadence', DbType.real);

  static TableField? _fAvgVerticalOscillation;
  static TableField get avgVerticalOscillation =>
      _fAvgVerticalOscillation ??= SqlSyntax.setField(
          _fAvgVerticalOscillation, 'avgVerticalOscillation', DbType.real);

  static TableField? _fTotalElapsedTime;
  static TableField get totalElapsedTime => _fTotalElapsedTime ??=
      SqlSyntax.setField(_fTotalElapsedTime, 'totalElapsedTime', DbType.integer);

  static TableField? _fTotalTimerTime;
  static TableField get totalTimerTime => _fTotalTimerTime ??=
      SqlSyntax.setField(_fTotalTimerTime, 'totalTimerTime', DbType.integer);

  static TableField? _fTotalTrainingEffect;
  static TableField get totalTrainingEffect =>
      _fTotalTrainingEffect ??= SqlSyntax.setField(
          _fTotalTrainingEffect, 'totalTrainingEffect', DbType.integer);

  static TableField? _fNecLat;
  static TableField get necLat =>
      _fNecLat ??= SqlSyntax.setField(_fNecLat, 'necLat', DbType.real);

  static TableField? _fNecLong;
  static TableField get necLong =>
      _fNecLong ??= SqlSyntax.setField(_fNecLong, 'necLong', DbType.real);

  static TableField? _fSwcLat;
  static TableField get swcLat =>
      _fSwcLat ??= SqlSyntax.setField(_fSwcLat, 'swcLat', DbType.real);

  static TableField? _fSwcLong;
  static TableField get swcLong =>
      _fSwcLong ??= SqlSyntax.setField(_fSwcLong, 'swcLong', DbType.real);

  static TableField? _fFirstLapIndex;
  static TableField get firstLapIndex => _fFirstLapIndex ??=
      SqlSyntax.setField(_fFirstLapIndex, 'firstLapIndex', DbType.integer);

  static TableField? _fNumLaps;
  static TableField get numLaps =>
      _fNumLaps ??= SqlSyntax.setField(_fNumLaps, 'numLaps', DbType.integer);

  static TableField? _fNumSessions;
  static TableField get numSessions => _fNumSessions ??=
      SqlSyntax.setField(_fNumSessions, 'numSessions', DbType.integer);

  static TableField? _fLocalTimestamp;
  static TableField get localTimestamp => _fLocalTimestamp ??=
      SqlSyntax.setField(_fLocalTimestamp, 'localTimestamp', DbType.datetime);

  static TableField? _fAvgPower;
  static TableField get avgPower =>
      _fAvgPower ??= SqlSyntax.setField(_fAvgPower, 'avgPower', DbType.real);

  static TableField? _fSdevPower;
  static TableField get sdevPower =>
      _fSdevPower ??= SqlSyntax.setField(_fSdevPower, 'sdevPower', DbType.real);

  static TableField? _fMinPower;
  static TableField get minPower =>
      _fMinPower ??= SqlSyntax.setField(_fMinPower, 'minPower', DbType.integer);

  static TableField? _fMaxPower;
  static TableField get maxPower =>
      _fMaxPower ??= SqlSyntax.setField(_fMaxPower, 'maxPower', DbType.integer);

  static TableField? _fMinHeartRate;
  static TableField get minHeartRate => _fMinHeartRate ??=
      SqlSyntax.setField(_fMinHeartRate, 'minHeartRate', DbType.integer);

  static TableField? _fSdevHeartRate;
  static TableField get sdevHeartRate => _fSdevHeartRate ??=
      SqlSyntax.setField(_fSdevHeartRate, 'sdevHeartRate', DbType.real);

  static TableField? _fAvgGroundTime;
  static TableField get avgGroundTime => _fAvgGroundTime ??=
      SqlSyntax.setField(_fAvgGroundTime, 'avgGroundTime', DbType.real);

  static TableField? _fSdevGroundTime;
  static TableField get sdevGroundTime => _fSdevGroundTime ??=
      SqlSyntax.setField(_fSdevGroundTime, 'sdevGroundTime', DbType.real);

  static TableField? _fAvgLegSpringStiffness;
  static TableField get avgLegSpringStiffness =>
      _fAvgLegSpringStiffness ??= SqlSyntax.setField(
          _fAvgLegSpringStiffness, 'avgLegSpringStiffness', DbType.real);

  static TableField? _fSdevLegSpringStiffness;
  static TableField get sdevLegSpringStiffness =>
      _fSdevLegSpringStiffness ??= SqlSyntax.setField(
          _fSdevLegSpringStiffness, 'sdevLegSpringStiffness', DbType.real);

  static TableField? _fAvgFormPower;
  static TableField get avgFormPower => _fAvgFormPower ??=
      SqlSyntax.setField(_fAvgFormPower, 'avgFormPower', DbType.real);

  static TableField? _fSdevFormPower;
  static TableField get sdevFormPower => _fSdevFormPower ??=
      SqlSyntax.setField(_fSdevFormPower, 'sdevFormPower', DbType.real);

  static TableField? _fAvgPowerRatio;
  static TableField get avgPowerRatio => _fAvgPowerRatio ??=
      SqlSyntax.setField(_fAvgPowerRatio, 'avgPowerRatio', DbType.real);

  static TableField? _fSdevPowerRatio;
  static TableField get sdevPowerRatio => _fSdevPowerRatio ??=
      SqlSyntax.setField(_fSdevPowerRatio, 'sdevPowerRatio', DbType.real);

  static TableField? _fAvgStrideRatio;
  static TableField get avgStrideRatio => _fAvgStrideRatio ??=
      SqlSyntax.setField(_fAvgStrideRatio, 'avgStrideRatio', DbType.real);

  static TableField? _fSdevStrideRatio;
  static TableField get sdevStrideRatio => _fSdevStrideRatio ??=
      SqlSyntax.setField(_fSdevStrideRatio, 'sdevStrideRatio', DbType.real);

  static TableField? _fAvgStrydCadence;
  static TableField get avgStrydCadence => _fAvgStrydCadence ??=
      SqlSyntax.setField(_fAvgStrydCadence, 'avgStrydCadence', DbType.real);

  static TableField? _fSdevStrydCadence;
  static TableField get sdevStrydCadence => _fSdevStrydCadence ??=
      SqlSyntax.setField(_fSdevStrydCadence, 'sdevStrydCadence', DbType.real);

  static TableField? _fSdevVerticalOscillation;
  static TableField get sdevVerticalOscillation =>
      _fSdevVerticalOscillation ??= SqlSyntax.setField(
          _fSdevVerticalOscillation, 'sdevVerticalOscillation', DbType.real);

  static TableField? _fCp;
  static TableField get cp =>
      _fCp ??= SqlSyntax.setField(_fCp, 'cp', DbType.real);

  static TableField? _fFtp;
  static TableField get ftp =>
      _fFtp ??= SqlSyntax.setField(_fFtp, 'ftp', DbType.real);

  static TableField? _fNonParsable;
  static TableField get nonParsable => _fNonParsable ??=
      SqlSyntax.setField(_fNonParsable, 'nonParsable', DbType.bool);

  static TableField? _fExcluded;
  static TableField get excluded =>
      _fExcluded ??= SqlSyntax.setField(_fExcluded, 'excluded', DbType.bool);

  static TableField? _fManual;
  static TableField get manual =>
      _fManual ??= SqlSyntax.setField(_fManual, 'manual', DbType.bool);

  static TableField? _fAthletesId;
  static TableField get athletesId => _fAthletesId ??=
      SqlSyntax.setField(_fAthletesId, 'athletesId', DbType.integer);
}
|
|
// endregion DbActivityFields
|
|
|
|
//region DbActivityManager
|
|
/// Low-level data-access provider for the `activities` table.
///
/// Wires the table name, primary-key column, and the single-row WHERE
/// template into the generic [SqfEntityProvider] machinery.
class DbActivityManager extends SqfEntityProvider {
  static const String _tableName = 'activities';
  static const List<String> _primaryKeyList = ['id'];
  static const String _whereStr = 'id=?';

  DbActivityManager()
      : super(DbEncrateia(),
            tableName: _tableName,
            primaryKeyList: _primaryKeyList,
            whereStr: _whereStr);
}
|
|
|
|
//endregion DbActivityManager
|
|
// region DbEvent
|
|
class DbEvent extends TableBase {
|
|
DbEvent(
|
|
{this.id,
|
|
this.event,
|
|
this.eventType,
|
|
this.eventGroup,
|
|
this.timerTrigger,
|
|
this.timeStamp,
|
|
this.positionLat,
|
|
this.positionLong,
|
|
this.distance,
|
|
this.altitude,
|
|
this.speed,
|
|
this.heartRate,
|
|
this.cadence,
|
|
this.fractionalCadence,
|
|
this.power,
|
|
this.strydCadence,
|
|
this.groundTime,
|
|
this.verticalOscillation,
|
|
this.formPower,
|
|
this.legSpringStiffness,
|
|
this.data,
|
|
this.activitiesId,
|
|
this.lapsId}) {
|
|
_setDefaultValues();
|
|
softDeleteActivated = false;
|
|
}
|
|
DbEvent.withFields(
|
|
this.event,
|
|
this.eventType,
|
|
this.eventGroup,
|
|
this.timerTrigger,
|
|
this.timeStamp,
|
|
this.positionLat,
|
|
this.positionLong,
|
|
this.distance,
|
|
this.altitude,
|
|
this.speed,
|
|
this.heartRate,
|
|
this.cadence,
|
|
this.fractionalCadence,
|
|
this.power,
|
|
this.strydCadence,
|
|
this.groundTime,
|
|
this.verticalOscillation,
|
|
this.formPower,
|
|
this.legSpringStiffness,
|
|
this.data,
|
|
this.activitiesId,
|
|
this.lapsId) {
|
|
_setDefaultValues();
|
|
}
|
|
DbEvent.withId(
|
|
this.id,
|
|
this.event,
|
|
this.eventType,
|
|
this.eventGroup,
|
|
this.timerTrigger,
|
|
this.timeStamp,
|
|
this.positionLat,
|
|
this.positionLong,
|
|
this.distance,
|
|
this.altitude,
|
|
this.speed,
|
|
this.heartRate,
|
|
this.cadence,
|
|
this.fractionalCadence,
|
|
this.power,
|
|
this.strydCadence,
|
|
this.groundTime,
|
|
this.verticalOscillation,
|
|
this.formPower,
|
|
this.legSpringStiffness,
|
|
this.data,
|
|
this.activitiesId,
|
|
this.lapsId) {
|
|
_setDefaultValues();
|
|
}
|
|
  // fromMap v2.0
  /// Hydrates a [DbEvent] from a column->value map (database row or decoded
  /// JSON object).
  ///
  /// Values are parsed defensively via `toString()` + `tryParse`, so both
  /// native and stringified values are accepted; unparsable values become
  /// null. Keys that are absent/null leave the field untouched (after the
  /// optional defaults pass).
  DbEvent.fromMap(Map<String, dynamic> o, {bool setDefaultValues = true}) {
    if (setDefaultValues) {
      _setDefaultValues();
    }
    id = int.tryParse(o['id'].toString());
    if (o['event'] != null) {
      event = o['event'].toString();
    }
    if (o['eventType'] != null) {
      eventType = o['eventType'].toString();
    }
    if (o['eventGroup'] != null) {
      eventGroup = int.tryParse(o['eventGroup'].toString());
    }
    if (o['timerTrigger'] != null) {
      timerTrigger = o['timerTrigger'].toString();
    }
    if (o['timeStamp'] != null) {
      // Accepts either an epoch-milliseconds integer or a parseable
      // date string; anything else yields null.
      timeStamp = int.tryParse(o['timeStamp'].toString()) != null
          ? DateTime.fromMillisecondsSinceEpoch(
              int.tryParse(o['timeStamp'].toString())!)
          : DateTime.tryParse(o['timeStamp'].toString());
    }
    if (o['positionLat'] != null) {
      positionLat = double.tryParse(o['positionLat'].toString());
    }
    if (o['positionLong'] != null) {
      positionLong = double.tryParse(o['positionLong'].toString());
    }
    if (o['distance'] != null) {
      distance = double.tryParse(o['distance'].toString());
    }
    if (o['altitude'] != null) {
      altitude = double.tryParse(o['altitude'].toString());
    }
    if (o['speed'] != null) {
      speed = double.tryParse(o['speed'].toString());
    }
    if (o['heartRate'] != null) {
      heartRate = int.tryParse(o['heartRate'].toString());
    }
    if (o['cadence'] != null) {
      cadence = double.tryParse(o['cadence'].toString());
    }
    if (o['fractionalCadence'] != null) {
      fractionalCadence = double.tryParse(o['fractionalCadence'].toString());
    }
    if (o['power'] != null) {
      power = int.tryParse(o['power'].toString());
    }
    if (o['strydCadence'] != null) {
      strydCadence = double.tryParse(o['strydCadence'].toString());
    }
    if (o['groundTime'] != null) {
      groundTime = double.tryParse(o['groundTime'].toString());
    }
    if (o['verticalOscillation'] != null) {
      verticalOscillation =
          double.tryParse(o['verticalOscillation'].toString());
    }
    if (o['formPower'] != null) {
      formPower = int.tryParse(o['formPower'].toString());
    }
    if (o['legSpringStiffness'] != null) {
      legSpringStiffness = double.tryParse(o['legSpringStiffness'].toString());
    }
    if (o['data'] != null) {
      data = double.tryParse(o['data'].toString());
    }
    // Foreign keys are parsed unconditionally (null/missing -> null,
    // overriding the 0 set by _setDefaultValues).
    activitiesId = int.tryParse(o['activitiesId'].toString());

    lapsId = int.tryParse(o['lapsId'].toString());

    // RELATIONSHIPS FromMAP
    // Parent objects are only hydrated when embedded in the map
    // (e.g. produced by a preloading query).
    plDbActivity = o['dbActivity'] != null
        ? DbActivity.fromMap(o['dbActivity'] as Map<String, dynamic>)
        : null;
    plDbLap = o['dbLap'] != null
        ? DbLap.fromMap(o['dbLap'] as Map<String, dynamic>)
        : null;
    // END RELATIONSHIPS FromMAP
  }
|
|
  // FIELDS (DbEvent)
  // One nullable member per column of the 'events' table.
  int? id; // primary key, auto-increment
  String? event;
  String? eventType;
  int? eventGroup;
  String? timerTrigger;
  DateTime? timeStamp; // stored as millisecondsSinceEpoch in SQL
  double? positionLat;
  double? positionLong;
  double? distance;
  double? altitude;
  double? speed;
  int? heartRate;
  double? cadence;
  double? fractionalCadence;
  int? power;
  double? strydCadence;
  double? groundTime;
  double? verticalOscillation;
  int? formPower;
  double? legSpringStiffness;
  double? data;
  int? activitiesId; // FK -> activities.id (defaults to 0)
  int? lapsId; // FK -> laps.id (defaults to 0)

  // end FIELDS (DbEvent)
|
|
|
|
// RELATIONSHIPS (DbEvent)
|
|
/// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbActivity', 'plField2'..]) or so on..
|
|
DbActivity? plDbActivity;
|
|
|
|
/// get DbActivity By ActivitiesId
|
|
Future<DbActivity?> getDbActivity(
|
|
{bool loadParents = false, List<String>? loadedFields}) async {
|
|
final _obj = await DbActivity().getById(activitiesId,
|
|
loadParents: loadParents, loadedFields: loadedFields);
|
|
return _obj;
|
|
}
|
|
|
|
/// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbLap', 'plField2'..]) or so on..
|
|
DbLap? plDbLap;
|
|
|
|
/// get DbLap By LapsId
|
|
Future<DbLap?> getDbLap(
|
|
{bool loadParents = false, List<String>? loadedFields}) async {
|
|
final _obj = await DbLap()
|
|
.getById(lapsId, loadParents: loadParents, loadedFields: loadedFields);
|
|
return _obj;
|
|
}
|
|
// END RELATIONSHIPS (DbEvent)
|
|
|
|
// COLLECTIONS & VIRTUALS (DbEvent)
|
|
/// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbIntervals', 'plField2'..]) or so on..
|
|
List<DbInterval>? plDbIntervals;
|
|
|
|
/// get DbInterval(s) filtered by id=firstRecordId
|
|
DbIntervalFilterBuilder? getDbIntervals(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
if (id == null) {
|
|
return null;
|
|
}
|
|
return DbInterval()
|
|
.select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
|
|
.firstRecordId
|
|
.equals(id)
|
|
.and;
|
|
}
|
|
|
|
/// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbIntervalsBylastRecordId', 'plField2'..]) or so on..
|
|
List<DbInterval>? plDbIntervalsBylastRecordId;
|
|
|
|
/// get DbInterval(s) filtered by id=lastRecordId
|
|
DbIntervalFilterBuilder? getDbIntervalsBylastRecordId(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
if (id == null) {
|
|
return null;
|
|
}
|
|
return DbInterval()
|
|
.select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
|
|
.lastRecordId
|
|
.equals(id)
|
|
.and;
|
|
}
|
|
|
|
// END COLLECTIONS & VIRTUALS (DbEvent)
|
|
|
|
static const bool _softDeleteActivated = false;
|
|
DbEventManager? __mnDbEvent;
|
|
|
|
DbEventManager get _mnDbEvent {
|
|
return __mnDbEvent = __mnDbEvent ?? DbEventManager();
|
|
}
|
|
|
|
// METHODS
|
|
@override
|
|
Map<String, dynamic> toMap(
|
|
{bool forQuery = false, bool forJson = false, bool forView = false}) {
|
|
final map = <String, dynamic>{};
|
|
map['id'] = id;
|
|
if (event != null || !forView) {
|
|
map['event'] = event;
|
|
}
|
|
if (eventType != null || !forView) {
|
|
map['eventType'] = eventType;
|
|
}
|
|
if (eventGroup != null || !forView) {
|
|
map['eventGroup'] = eventGroup;
|
|
}
|
|
if (timerTrigger != null || !forView) {
|
|
map['timerTrigger'] = timerTrigger;
|
|
}
|
|
if (timeStamp != null) {
|
|
map['timeStamp'] = forJson
|
|
? timeStamp!.toString()
|
|
: forQuery
|
|
? timeStamp!.millisecondsSinceEpoch
|
|
: timeStamp;
|
|
} else if (timeStamp != null || !forView) {
|
|
map['timeStamp'] = null;
|
|
}
|
|
if (positionLat != null || !forView) {
|
|
map['positionLat'] = positionLat;
|
|
}
|
|
if (positionLong != null || !forView) {
|
|
map['positionLong'] = positionLong;
|
|
}
|
|
if (distance != null || !forView) {
|
|
map['distance'] = distance;
|
|
}
|
|
if (altitude != null || !forView) {
|
|
map['altitude'] = altitude;
|
|
}
|
|
if (speed != null || !forView) {
|
|
map['speed'] = speed;
|
|
}
|
|
if (heartRate != null || !forView) {
|
|
map['heartRate'] = heartRate;
|
|
}
|
|
if (cadence != null || !forView) {
|
|
map['cadence'] = cadence;
|
|
}
|
|
if (fractionalCadence != null || !forView) {
|
|
map['fractionalCadence'] = fractionalCadence;
|
|
}
|
|
if (power != null || !forView) {
|
|
map['power'] = power;
|
|
}
|
|
if (strydCadence != null || !forView) {
|
|
map['strydCadence'] = strydCadence;
|
|
}
|
|
if (groundTime != null || !forView) {
|
|
map['groundTime'] = groundTime;
|
|
}
|
|
if (verticalOscillation != null || !forView) {
|
|
map['verticalOscillation'] = verticalOscillation;
|
|
}
|
|
if (formPower != null || !forView) {
|
|
map['formPower'] = formPower;
|
|
}
|
|
if (legSpringStiffness != null || !forView) {
|
|
map['legSpringStiffness'] = legSpringStiffness;
|
|
}
|
|
if (data != null || !forView) {
|
|
map['data'] = data;
|
|
}
|
|
if (activitiesId != null) {
|
|
map['activitiesId'] = forView
|
|
? plDbActivity == null
|
|
? activitiesId
|
|
: plDbActivity!.state
|
|
: activitiesId;
|
|
} else if (activitiesId != null || !forView) {
|
|
map['activitiesId'] = null;
|
|
}
|
|
if (lapsId != null) {
|
|
map['lapsId'] = forView
|
|
? plDbLap == null
|
|
? lapsId
|
|
: plDbLap!.event
|
|
: lapsId;
|
|
} else if (lapsId != null || !forView) {
|
|
map['lapsId'] = null;
|
|
}
|
|
|
|
return map;
|
|
}
|
|
|
|
@override
|
|
Future<Map<String, dynamic>> toMapWithChildren(
|
|
[bool forQuery = false,
|
|
bool forJson = false,
|
|
bool forView = false]) async {
|
|
final map = <String, dynamic>{};
|
|
map['id'] = id;
|
|
if (event != null || !forView) {
|
|
map['event'] = event;
|
|
}
|
|
if (eventType != null || !forView) {
|
|
map['eventType'] = eventType;
|
|
}
|
|
if (eventGroup != null || !forView) {
|
|
map['eventGroup'] = eventGroup;
|
|
}
|
|
if (timerTrigger != null || !forView) {
|
|
map['timerTrigger'] = timerTrigger;
|
|
}
|
|
if (timeStamp != null) {
|
|
map['timeStamp'] = forJson
|
|
? timeStamp!.toString()
|
|
: forQuery
|
|
? timeStamp!.millisecondsSinceEpoch
|
|
: timeStamp;
|
|
} else if (timeStamp != null || !forView) {
|
|
map['timeStamp'] = null;
|
|
}
|
|
if (positionLat != null || !forView) {
|
|
map['positionLat'] = positionLat;
|
|
}
|
|
if (positionLong != null || !forView) {
|
|
map['positionLong'] = positionLong;
|
|
}
|
|
if (distance != null || !forView) {
|
|
map['distance'] = distance;
|
|
}
|
|
if (altitude != null || !forView) {
|
|
map['altitude'] = altitude;
|
|
}
|
|
if (speed != null || !forView) {
|
|
map['speed'] = speed;
|
|
}
|
|
if (heartRate != null || !forView) {
|
|
map['heartRate'] = heartRate;
|
|
}
|
|
if (cadence != null || !forView) {
|
|
map['cadence'] = cadence;
|
|
}
|
|
if (fractionalCadence != null || !forView) {
|
|
map['fractionalCadence'] = fractionalCadence;
|
|
}
|
|
if (power != null || !forView) {
|
|
map['power'] = power;
|
|
}
|
|
if (strydCadence != null || !forView) {
|
|
map['strydCadence'] = strydCadence;
|
|
}
|
|
if (groundTime != null || !forView) {
|
|
map['groundTime'] = groundTime;
|
|
}
|
|
if (verticalOscillation != null || !forView) {
|
|
map['verticalOscillation'] = verticalOscillation;
|
|
}
|
|
if (formPower != null || !forView) {
|
|
map['formPower'] = formPower;
|
|
}
|
|
if (legSpringStiffness != null || !forView) {
|
|
map['legSpringStiffness'] = legSpringStiffness;
|
|
}
|
|
if (data != null || !forView) {
|
|
map['data'] = data;
|
|
}
|
|
if (activitiesId != null) {
|
|
map['activitiesId'] = forView
|
|
? plDbActivity == null
|
|
? activitiesId
|
|
: plDbActivity!.state
|
|
: activitiesId;
|
|
} else if (activitiesId != null || !forView) {
|
|
map['activitiesId'] = null;
|
|
}
|
|
if (lapsId != null) {
|
|
map['lapsId'] = forView
|
|
? plDbLap == null
|
|
? lapsId
|
|
: plDbLap!.event
|
|
: lapsId;
|
|
} else if (lapsId != null || !forView) {
|
|
map['lapsId'] = null;
|
|
}
|
|
|
|
// COLLECTIONS (DbEvent)
|
|
if (!forQuery) {
|
|
map['DbIntervals'] = await getDbIntervals()!.toMapList();
|
|
}
|
|
if (!forQuery) {
|
|
map['DbIntervals'] = await getDbIntervals()!.toMapList();
|
|
}
|
|
// END COLLECTIONS (DbEvent)
|
|
|
|
return map;
|
|
}
|
|
|
|
/// This method returns Json String [DbEvent]
|
|
@override
|
|
String toJson() {
|
|
return json.encode(toMap(forJson: true));
|
|
}
|
|
|
|
/// This method returns Json String [DbEvent]
|
|
@override
|
|
Future<String> toJsonWithChilds() async {
|
|
return json.encode(await toMapWithChildren(false, true));
|
|
}
|
|
|
|
  /// Positional SQL arguments for this row, excluding the primary key,
  /// in the table's column order (must match the generated INSERT column
  /// list). [timeStamp] is converted to millisecondsSinceEpoch.
  @override
  List<dynamic> toArgs() {
    return [
      event,
      eventType,
      eventGroup,
      timerTrigger,
      timeStamp != null ? timeStamp!.millisecondsSinceEpoch : null,
      positionLat,
      positionLong,
      distance,
      altitude,
      speed,
      heartRate,
      cadence,
      fractionalCadence,
      power,
      strydCadence,
      groundTime,
      verticalOscillation,
      formPower,
      legSpringStiffness,
      data,
      activitiesId,
      lapsId
    ];
  }
|
|
|
|
  /// Positional SQL arguments for this row, including the primary key [id]
  /// first, in the table's column order (used by [upsert]/[upsertAll]).
  /// [timeStamp] is converted to millisecondsSinceEpoch.
  @override
  List<dynamic> toArgsWithIds() {
    return [
      id,
      event,
      eventType,
      eventGroup,
      timerTrigger,
      timeStamp != null ? timeStamp!.millisecondsSinceEpoch : null,
      positionLat,
      positionLong,
      distance,
      altitude,
      speed,
      heartRate,
      cadence,
      fractionalCadence,
      power,
      strydCadence,
      groundTime,
      verticalOscillation,
      formPower,
      legSpringStiffness,
      data,
      activitiesId,
      lapsId
    ];
  }
|
|
|
|
static Future<List<DbEvent>?> fromWebUrl(Uri uri,
|
|
{Map<String, String>? headers}) async {
|
|
try {
|
|
final response = await http.get(uri, headers: headers);
|
|
return await fromJson(response.body);
|
|
} catch (e) {
|
|
debugPrint(
|
|
'SQFENTITY ERROR DbEvent.fromWebUrl: ErrorMessage: ${e.toString()}');
|
|
return null;
|
|
}
|
|
}
|
|
|
|
Future<http.Response> postUrl(Uri uri, {Map<String, String>? headers}) {
|
|
return http.post(uri, headers: headers, body: toJson());
|
|
}
|
|
|
|
static Future<List<DbEvent>> fromJson(String jsonBody) async {
|
|
final Iterable list = await json.decode(jsonBody) as Iterable;
|
|
var objList = <DbEvent>[];
|
|
try {
|
|
objList = list
|
|
.map((dbevent) => DbEvent.fromMap(dbevent as Map<String, dynamic>))
|
|
.toList();
|
|
} catch (e) {
|
|
debugPrint(
|
|
'SQFENTITY ERROR DbEvent.fromJson: ErrorMessage: ${e.toString()}');
|
|
}
|
|
return objList;
|
|
}
|
|
|
|
  /// Hydrates a list of [DbEvent]s from raw row maps, optionally preloading
  /// child DbInterval collections ([preload]) and/or parent objects
  /// ([loadParents]), narrowed by [preloadFields] when given.
  ///
  /// NOTE(review): `obj.getDbIntervals()!` assumes every row map carries a
  /// parseable 'id' (getDbIntervals() returns null otherwise and `!` would
  /// throw) — presumably guaranteed by the query layer; confirm upstream.
  static Future<List<DbEvent>> fromMapList(List<dynamic> data,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields,
      bool setDefaultValues = true}) async {
    final List<DbEvent> objList = <DbEvent>[];
    loadedFields = loadedFields ?? [];
    for (final map in data) {
      final obj = DbEvent.fromMap(map as Map<String, dynamic>,
          setDefaultValues: setDefaultValues);
      // final List<String> _loadedFields = List<String>.from(loadedFields);

      // RELATIONSHIPS PRELOAD CHILD
      if (preload) {
        loadedFields = loadedFields ?? []; // redundant: already non-null above
        if (/*!_loadedfields!.contains('events.plDbIntervals') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbIntervals'))) {
          /*_loadedfields!.add('events.plDbIntervals'); */ obj.plDbIntervals =
              obj.plDbIntervals ??
                  await obj.getDbIntervals()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('events.plDbIntervalsBylastRecordId') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbIntervalsBylastRecordId'))) {
          /*_loadedfields!.add('events.plDbIntervalsBylastRecordId'); */ obj
                  .plDbIntervalsBylastRecordId =
              obj.plDbIntervalsBylastRecordId ??
                  await obj.getDbIntervalsBylastRecordId()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

      // RELATIONSHIPS PRELOAD
      if (preload || loadParents) {
        loadedFields = loadedFields ?? []; // redundant: already non-null above
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbActivity'))) {
          obj.plDbActivity = obj.plDbActivity ??
              await obj.getDbActivity(loadParents: loadParents);
        }
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbLap'))) {
          obj.plDbLap =
              obj.plDbLap ?? await obj.getDbLap(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD

      objList.add(obj);
    }
    return objList;
  }
|
|
|
|
  /// Returns the [DbEvent] with primary key [id], or null when [id] is null
  /// or no such row exists.
  ///
  /// [preload]: also loads the child DbInterval collections.
  /// [preloadFields]: restricts preloading to the named pl* fields
  /// (only meaningful together with preload:true),
  /// e.g. getById(preload:true, preloadFields:['plDbIntervals']).
  /// [loadParents]: recursively loads parent objects ([plDbActivity],
  /// [plDbLap]) up the chain.
  Future<DbEvent?> getById(int? id,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    if (id == null) {
      return null;
    }
    DbEvent? obj;
    final data = await _mnDbEvent.getById([id]);
    if (data.length != 0) {
      obj = DbEvent.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD CHILD
      // getDbIntervals()/...BylastRecordId() cannot return null here:
      // obj was built from a row fetched by this non-null id.
      if (preload) {
        loadedFields = loadedFields ?? [];
        if (/*!_loadedfields!.contains('events.plDbIntervals') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbIntervals'))) {
          /*_loadedfields!.add('events.plDbIntervals'); */ obj.plDbIntervals =
              obj.plDbIntervals ??
                  await obj.getDbIntervals()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('events.plDbIntervalsBylastRecordId') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbIntervalsBylastRecordId'))) {
          /*_loadedfields!.add('events.plDbIntervalsBylastRecordId'); */ obj
                  .plDbIntervalsBylastRecordId =
              obj.plDbIntervalsBylastRecordId ??
                  await obj.getDbIntervalsBylastRecordId()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

      // RELATIONSHIPS PRELOAD
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbActivity'))) {
          obj.plDbActivity = obj.plDbActivity ??
              await obj.getDbActivity(loadParents: loadParents);
        }
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbLap'))) {
          obj.plDbLap =
              obj.plDbLap ?? await obj.getDbLap(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }
|
|
|
|
/// Saves the (DbEvent) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
|
|
/// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
|
|
/// <returns>Returns id
|
|
@override
|
|
Future<int?> save({bool ignoreBatch = true}) async {
|
|
if (id == null || id == 0) {
|
|
id = await _mnDbEvent.insert(this, ignoreBatch);
|
|
} else {
|
|
await _mnDbEvent.update(this);
|
|
}
|
|
|
|
return id;
|
|
}
|
|
|
|
/// Saves the (DbEvent) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
|
|
/// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
|
|
/// <returns>Returns id
|
|
@override
|
|
Future<int?> saveOrThrow({bool ignoreBatch = true}) async {
|
|
if (id == null || id == 0) {
|
|
id = await _mnDbEvent.insertOrThrow(this, ignoreBatch);
|
|
|
|
isInsert = true;
|
|
} else {
|
|
// id= await _upsert(); // removed in sqfentity_gen 1.3.0+6
|
|
await _mnDbEvent.updateOrThrow(this);
|
|
}
|
|
|
|
return id;
|
|
}
|
|
|
|
/// saveAs DbEvent. Returns a new Primary Key value of DbEvent
|
|
|
|
/// <returns>Returns a new Primary Key value of DbEvent
|
|
@override
|
|
Future<int?> saveAs({bool ignoreBatch = true}) async {
|
|
id = null;
|
|
|
|
return save(ignoreBatch: ignoreBatch);
|
|
}
|
|
|
|
  /// Saves all [dbevents] in a single batch/transaction, then back-fills the
  /// new primary keys into objects that had none.
  ///
  /// NOTE(review): the id back-fill indexes `result[i]` positionally, which
  /// assumes batchCommit returns exactly one entry per saved object in
  /// order; with noResult:true this may not hold — confirm against
  /// SqfEntityProvider.batchCommit.
  static Future<List<dynamic>> saveAll(List<DbEvent> dbevents,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    List<dynamic>? result = [];
    // If there is no open transaction, start one
    final isStartedBatch = await DbEncrateia().batchStart();
    for (final obj in dbevents) {
      await obj.save(ignoreBatch: false);
    }
    if (!isStartedBatch) {
      result = await DbEncrateia().batchCommit(
          exclusive: exclusive,
          noResult: noResult,
          continueOnError: continueOnError);
      for (int i = 0; i < dbevents.length; i++) {
        if (dbevents[i].id == null) {
          dbevents[i].id = result![i] as int;
        }
      }
    }
    return result!;
  }
|
|
|
|
  /// Inserts or replaces this row via `INSERT OR REPLACE`, keeping the
  /// current [id]. Sets [saveResult] to describe success/failure and
  /// returns [id] (null on error).
  ///
  /// The argument list must stay in the same order as the column list of
  /// the SQL statement (see [toArgsWithIds]).
  @override
  Future<int?> upsert({bool ignoreBatch = true}) async {
    try {
      final result = await _mnDbEvent.rawInsert(
          'INSERT OR REPLACE INTO events (id, event, eventType, eventGroup, timerTrigger, timeStamp, positionLat, positionLong, distance, altitude, speed, heartRate, cadence, fractionalCadence, power, strydCadence, groundTime, verticalOscillation, formPower, legSpringStiffness, data, activitiesId, lapsId) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)',
          [
            id,
            event,
            eventType,
            eventGroup,
            timerTrigger,
            timeStamp != null ? timeStamp!.millisecondsSinceEpoch : null,
            positionLat,
            positionLong,
            distance,
            altitude,
            speed,
            heartRate,
            cadence,
            fractionalCadence,
            power,
            strydCadence,
            groundTime,
            verticalOscillation,
            formPower,
            legSpringStiffness,
            data,
            activitiesId,
            lapsId
          ],
          ignoreBatch);
      if (result! > 0) {
        saveResult = BoolResult(
            success: true,
            successMessage: 'DbEvent id=$id updated successfully');
      } else {
        saveResult = BoolResult(
            success: false, errorMessage: 'DbEvent id=$id did not update');
      }
      return id;
    } catch (e) {
      saveResult = BoolResult(
          success: false,
          errorMessage: 'DbEvent Save failed. Error: ${e.toString()}');
      return null;
    }
  }
|
|
|
|
  /// Inserts or replaces all [dbevents] as one bulk `INSERT OR REPLACE`
  /// transaction. Faster than [saveAll]; use it only when every object
  /// already carries a valid primary key (> 0).
  ///
  /// Returns a BoolCommitResult describing the batch outcome.
  @override
  Future<BoolCommitResult> upsertAll(List<DbEvent> dbevents,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    final results = await _mnDbEvent.rawInsertAll(
        'INSERT OR REPLACE INTO events (id, event, eventType, eventGroup, timerTrigger, timeStamp, positionLat, positionLong, distance, altitude, speed, heartRate, cadence, fractionalCadence, power, strydCadence, groundTime, verticalOscillation, formPower, legSpringStiffness, data, activitiesId, lapsId) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)',
        dbevents,
        exclusive: exclusive,
        noResult: noResult,
        continueOnError: continueOnError);
    return results;
  }
|
|
|
|
  /// Deletes this [DbEvent] row, first cascading the delete to child
  /// DbInterval rows that reference it via firstRecordId and lastRecordId.
  ///
  /// Aborts (returning the failing BoolResult) if either child delete
  /// fails. Since soft delete is off for this table, the row itself is
  /// always hard-deleted; the updateBatch branch is unreachable dead code
  /// kept by the generator for the soft-delete configuration.
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    debugPrint('SQFENTITIY: delete DbEvent invoked (id=$id)');
    var result = BoolResult(success: false);
    {
      // children linked by firstRecordId
      result = await DbInterval()
          .select()
          .firstRecordId
          .equals(id)
          .and
          .delete(hardDelete);
    }
    if (!result.success) {
      return result;
    }
    {
      // children linked by lastRecordId
      result = await DbInterval()
          .select()
          .lastRecordId
          .equals(id)
          .and
          .delete(hardDelete);
    }
    if (!result.success) {
      return result;
    }
    if (!_softDeleteActivated || hardDelete) {
      return _mnDbEvent
          .delete(QueryParams(whereString: 'id=?', whereArguments: [id]));
    } else {
      return _mnDbEvent.updateBatch(
          QueryParams(whereString: 'id=?', whereArguments: [id]),
          {'isDeleted': 1});
    }
  }
|
|
|
|
@override
|
|
Future<BoolResult> recover([bool recoverChilds = true]) {
|
|
// not implemented because:
|
|
final msg =
|
|
'set useSoftDeleting:true in the table definition of [DbEvent] to use this feature';
|
|
throw UnimplementedError(msg);
|
|
}
|
|
|
|
@override
|
|
DbEventFilterBuilder select(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
return DbEventFilterBuilder(this, getIsDeleted)
|
|
..qparams.selectColumns = columnsToSelect;
|
|
}
|
|
|
|
@override
|
|
DbEventFilterBuilder distinct(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
return DbEventFilterBuilder(this, getIsDeleted)
|
|
..qparams.selectColumns = columnsToSelect
|
|
..qparams.distinct = true;
|
|
}
|
|
|
|
void _setDefaultValues() {
|
|
activitiesId = activitiesId ?? 0;
|
|
lapsId = lapsId ?? 0;
|
|
}
|
|
|
|
@override
|
|
void rollbackPk() {
|
|
if (isInsert == true) {
|
|
id = null;
|
|
}
|
|
}
|
|
|
|
// END METHODS
|
|
// BEGIN CUSTOM CODE
|
|
/*
|
|
you can define customCode property of your SqfEntityTable constant. For example:
|
|
const tablePerson = SqfEntityTable(
|
|
tableName: 'person',
|
|
primaryKeyName: 'id',
|
|
primaryKeyType: PrimaryKeyType.integer_auto_incremental,
|
|
fields: [
|
|
SqfEntityField('firstName', DbType.text),
|
|
SqfEntityField('lastName', DbType.text),
|
|
],
|
|
customCode: '''
|
|
String fullName()
|
|
{
|
|
return '$firstName $lastName';
|
|
}
|
|
''');
|
|
*/
|
|
// END CUSTOM CODE
|
|
}
|
|
// endregion dbevent
|
|
|
|
// region DbEventField
|
|
/// Typed column wrapper for DbEvent queries.
///
/// Each override merely narrows the FilterBase return type to
/// [DbEventFilterBuilder] (or [DbEventField] for [not]) so fluent filter
/// chains stay statically typed; all filtering logic lives in FilterBase.
class DbEventField extends FilterBase {
  DbEventField(DbEventFilterBuilder dbeventFB) : super(dbeventFB);

  @override
  DbEventFilterBuilder equals(dynamic pValue) =>
      super.equals(pValue) as DbEventFilterBuilder;

  @override
  DbEventFilterBuilder equalsOrNull(dynamic pValue) =>
      super.equalsOrNull(pValue) as DbEventFilterBuilder;

  @override
  DbEventFilterBuilder isNull() => super.isNull() as DbEventFilterBuilder;

  @override
  DbEventFilterBuilder contains(dynamic pValue) =>
      super.contains(pValue) as DbEventFilterBuilder;

  @override
  DbEventFilterBuilder startsWith(dynamic pValue) =>
      super.startsWith(pValue) as DbEventFilterBuilder;

  @override
  DbEventFilterBuilder endsWith(dynamic pValue) =>
      super.endsWith(pValue) as DbEventFilterBuilder;

  @override
  DbEventFilterBuilder between(dynamic pFirst, dynamic pLast) =>
      super.between(pFirst, pLast) as DbEventFilterBuilder;

  @override
  DbEventFilterBuilder greaterThan(dynamic pValue) =>
      super.greaterThan(pValue) as DbEventFilterBuilder;

  @override
  DbEventFilterBuilder lessThan(dynamic pValue) =>
      super.lessThan(pValue) as DbEventFilterBuilder;

  @override
  DbEventFilterBuilder greaterThanOrEquals(dynamic pValue) =>
      super.greaterThanOrEquals(pValue) as DbEventFilterBuilder;

  @override
  DbEventFilterBuilder lessThanOrEquals(dynamic pValue) =>
      super.lessThanOrEquals(pValue) as DbEventFilterBuilder;

  @override
  DbEventFilterBuilder inValues(dynamic pValue) =>
      super.inValues(pValue) as DbEventFilterBuilder;

  @override
  DbEventField get not => super.not as DbEventField;
}
|
|
// endregion DbEventField
|
|
|
|
// region DbEventFilterBuilder
class DbEventFilterBuilder extends ConjunctionBase {
  /// Builds fluent filters/queries against the events table on behalf of
  /// [obj]; [getIsDeleted] is the generated soft-delete pass-through
  /// (soft delete is off for this table).
  DbEventFilterBuilder(DbEvent obj, bool? getIsDeleted)
      : super(obj, getIsDeleted) {
    _mnDbEvent = obj._mnDbEvent;
    _softDeleteActivated = obj.softDeleteActivated;
  }

  // Copied from the source DbEvent at construction time.
  bool _softDeleteActivated = false;
  DbEventManager? _mnDbEvent;
|
|
|
|
  // The members below delegate to ConjunctionBase and re-return `this`
  // so the fluent chain keeps the concrete DbEventFilterBuilder type.

  /// put the sql keyword 'AND'
  @override
  DbEventFilterBuilder get and {
    super.and;
    return this;
  }

  /// put the sql keyword 'OR'
  @override
  DbEventFilterBuilder get or {
    super.or;
    return this;
  }

  /// open parentheses
  @override
  DbEventFilterBuilder get startBlock {
    super.startBlock;
    return this;
  }

  /// String whereCriteria, write raw query without 'where' keyword. Like this: 'field1 like 'test%' and field2 = 3'
  @override
  DbEventFilterBuilder where(String? whereCriteria, {dynamic parameterValue}) {
    super.where(whereCriteria, parameterValue: parameterValue);
    return this;
  }

  /// page = page number,
  /// pagesize = row(s) per page
  @override
  DbEventFilterBuilder page(int page, int pagesize) {
    super.page(page, pagesize);
    return this;
  }

  /// int count = LIMIT
  @override
  DbEventFilterBuilder top(int count) {
    super.top(count);
    return this;
  }

  /// close parentheses
  @override
  DbEventFilterBuilder get endBlock {
    super.endBlock;
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbEventFilterBuilder orderBy(dynamic argFields) {
    super.orderBy(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbEventFilterBuilder orderByDesc(dynamic argFields) {
    super.orderByDesc(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbEventFilterBuilder groupBy(dynamic argFields) {
    super.groupBy(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbEventFilterBuilder having(dynamic argFields) {
    super.having(argFields);
    return this;
  }
|
|
|
|
DbEventField _setField(DbEventField? field, String colName, DbType dbtype) {
|
|
return DbEventField(this)
|
|
..param = DbParameter(
|
|
dbType: dbtype, columnName: colName, wStartBlock: openedBlock);
|
|
}
|
|
|
|
DbEventField? _id;
|
|
DbEventField get id {
|
|
return _id = _setField(_id, 'id', DbType.integer);
|
|
}
|
|
|
|
DbEventField? _event;
|
|
DbEventField get event {
|
|
return _event = _setField(_event, 'event', DbType.text);
|
|
}
|
|
|
|
DbEventField? _eventType;
|
|
DbEventField get eventType {
|
|
return _eventType = _setField(_eventType, 'eventType', DbType.text);
|
|
}
|
|
|
|
DbEventField? _eventGroup;
|
|
DbEventField get eventGroup {
|
|
return _eventGroup = _setField(_eventGroup, 'eventGroup', DbType.integer);
|
|
}
|
|
|
|
DbEventField? _timerTrigger;
|
|
DbEventField get timerTrigger {
|
|
return _timerTrigger =
|
|
_setField(_timerTrigger, 'timerTrigger', DbType.text);
|
|
}
|
|
|
|
DbEventField? _timeStamp;
|
|
DbEventField get timeStamp {
|
|
return _timeStamp = _setField(_timeStamp, 'timeStamp', DbType.datetime);
|
|
}
|
|
|
|
DbEventField? _positionLat;
|
|
DbEventField get positionLat {
|
|
return _positionLat = _setField(_positionLat, 'positionLat', DbType.real);
|
|
}
|
|
|
|
DbEventField? _positionLong;
|
|
DbEventField get positionLong {
|
|
return _positionLong =
|
|
_setField(_positionLong, 'positionLong', DbType.real);
|
|
}
|
|
|
|
DbEventField? _distance;
|
|
DbEventField get distance {
|
|
return _distance = _setField(_distance, 'distance', DbType.real);
|
|
}
|
|
|
|
DbEventField? _altitude;
|
|
DbEventField get altitude {
|
|
return _altitude = _setField(_altitude, 'altitude', DbType.real);
|
|
}
|
|
|
|
DbEventField? _speed;
|
|
DbEventField get speed {
|
|
return _speed = _setField(_speed, 'speed', DbType.real);
|
|
}
|
|
|
|
DbEventField? _heartRate;
|
|
DbEventField get heartRate {
|
|
return _heartRate = _setField(_heartRate, 'heartRate', DbType.integer);
|
|
}
|
|
|
|
DbEventField? _cadence;
|
|
DbEventField get cadence {
|
|
return _cadence = _setField(_cadence, 'cadence', DbType.real);
|
|
}
|
|
|
|
DbEventField? _fractionalCadence;
|
|
DbEventField get fractionalCadence {
|
|
return _fractionalCadence =
|
|
_setField(_fractionalCadence, 'fractionalCadence', DbType.real);
|
|
}
|
|
|
|
DbEventField? _power;
|
|
DbEventField get power {
|
|
return _power = _setField(_power, 'power', DbType.integer);
|
|
}
|
|
|
|
DbEventField? _strydCadence;
|
|
DbEventField get strydCadence {
|
|
return _strydCadence =
|
|
_setField(_strydCadence, 'strydCadence', DbType.real);
|
|
}
|
|
|
|
DbEventField? _groundTime;
|
|
DbEventField get groundTime {
|
|
return _groundTime = _setField(_groundTime, 'groundTime', DbType.real);
|
|
}
|
|
|
|
DbEventField? _verticalOscillation;
|
|
DbEventField get verticalOscillation {
|
|
return _verticalOscillation =
|
|
_setField(_verticalOscillation, 'verticalOscillation', DbType.real);
|
|
}
|
|
|
|
DbEventField? _formPower;
|
|
DbEventField get formPower {
|
|
return _formPower = _setField(_formPower, 'formPower', DbType.integer);
|
|
}
|
|
|
|
DbEventField? _legSpringStiffness;
|
|
DbEventField get legSpringStiffness {
|
|
return _legSpringStiffness =
|
|
_setField(_legSpringStiffness, 'legSpringStiffness', DbType.real);
|
|
}
|
|
|
|
DbEventField? _data;
|
|
DbEventField get data {
|
|
return _data = _setField(_data, 'data', DbType.real);
|
|
}
|
|
|
|
DbEventField? _activitiesId;
|
|
DbEventField get activitiesId {
|
|
return _activitiesId =
|
|
_setField(_activitiesId, 'activitiesId', DbType.integer);
|
|
}
|
|
|
|
DbEventField? _lapsId;
|
|
DbEventField get lapsId {
|
|
return _lapsId = _setField(_lapsId, 'lapsId', DbType.integer);
|
|
}
|
|
|
|
  /// Deletes List<DbEvent> bulk by query.
  ///
  /// First cascades the delete to child [DbInterval] rows that reference the
  /// matching events (via `firstRecordId` and `lastRecordId`), then removes
  /// the events themselves. When soft deleting is active and [hardDelete] is
  /// false, rows are only flagged with `isDeleted = 1`.
  ///
  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    buildParameters();
    var r = BoolResult(success: false);
    // Delete sub records where in (DbInterval) according to DeleteRule.CASCADE
    // — intervals whose firstRecordId points at one of the selected events.
    final idListDbIntervalBYfirstRecordId = toListPrimaryKeySQL(false);
    final resDbIntervalBYfirstRecordId = await DbInterval()
        .select()
        .where('firstRecordId IN (${idListDbIntervalBYfirstRecordId['sql']})',
            parameterValue: idListDbIntervalBYfirstRecordId['args'])
        .delete(hardDelete);
    if (!resDbIntervalBYfirstRecordId.success) {
      // Abort early: the cascading child delete failed.
      return resDbIntervalBYfirstRecordId;
    }
    // Delete sub records where in (DbInterval) according to DeleteRule.CASCADE
    // — intervals whose lastRecordId points at one of the selected events.
    // (Same primary-key sub-query as above; the generator rebuilds it.)
    final idListDbIntervalBYlastRecordId = toListPrimaryKeySQL(false);
    final resDbIntervalBYlastRecordId = await DbInterval()
        .select()
        .where('lastRecordId IN (${idListDbIntervalBYlastRecordId['sql']})',
            parameterValue: idListDbIntervalBYlastRecordId['args'])
        .delete(hardDelete);
    if (!resDbIntervalBYlastRecordId.success) {
      return resDbIntervalBYlastRecordId;
    }

    // Soft delete (flag only) when enabled and a hard delete was not forced.
    if (_softDeleteActivated && !hardDelete) {
      r = await _mnDbEvent!.updateBatch(qparams, {'isDeleted': 1});
    } else {
      r = await _mnDbEvent!.delete(qparams);
    }
    return r;
  }
|
|
|
|
  /// using:
  /// update({'fieldName': Value})
  /// fieldName must be String. Value is dynamic, it can be any of the (int, bool, String.. )
  @override
  Future<BoolResult> update(Map<String, dynamic> values) {
    buildParameters();
    // A plain batch UPDATE cannot honour LIMIT/OFFSET, so when either is set
    // the WHERE clause is rewritten to restrict the update to the ids an
    // equivalent sub-select (with the same filter, limit and offset) returns.
    if (qparams.limit! > 0 || qparams.offset! > 0) {
      qparams.whereString =
          'id IN (SELECT id from events ${qparams.whereString!.isNotEmpty ? 'WHERE ${qparams.whereString}' : ''}${qparams.limit! > 0 ? ' LIMIT ${qparams.limit}' : ''}${qparams.offset! > 0 ? ' OFFSET ${qparams.offset}' : ''})';
    }
    return _mnDbEvent!.updateBatch(qparams, values);
  }
|
|
|
|
  /// This method always returns [DbEvent] Obj if exist, otherwise returns null.
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toSingle(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns> DbEvent?
  @override
  Future<DbEvent?> toSingle(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    // Query limited to a single row.
    buildParameters(pSize: 1);
    final objFuture = _mnDbEvent!.toList(qparams);
    final data = await objFuture;
    DbEvent? obj;
    if (data.isNotEmpty) {
      obj = DbEvent.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD CHILD
      // Child collections (DbInterval lists) are fetched only when [preload]
      // is set, and only for fields listed in [preloadFields] (or all when
      // preloadFields is null). The `_loadedfields` cycle guard is disabled
      // (commented out) by the generator.
      if (preload) {
        loadedFields = loadedFields ?? [];
        if (/*!_loadedfields!.contains('events.plDbIntervals') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbIntervals'))) {
          /*_loadedfields!.add('events.plDbIntervals'); */ obj.plDbIntervals =
              obj.plDbIntervals ??
                  await obj.getDbIntervals()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('events.plDbIntervalsBylastRecordId') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbIntervalsBylastRecordId'))) {
          /*_loadedfields!.add('events.plDbIntervalsBylastRecordId'); */ obj
                  .plDbIntervalsBylastRecordId =
              obj.plDbIntervalsBylastRecordId ??
                  await obj.getDbIntervalsBylastRecordId()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

      // RELATIONSHIPS PRELOAD
      // Parent objects (DbActivity, DbLap) are fetched when either [preload]
      // or [loadParents] is set; [loadParents] overrides preloadFields.
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbActivity'))) {
          obj.plDbActivity = obj.plDbActivity ??
              await obj.getDbActivity(loadParents: loadParents);
        }
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbLap'))) {
          obj.plDbLap =
              obj.plDbLap ?? await obj.getDbLap(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }
|
|
|
|
/// This method always returns [DbEvent]
|
|
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
|
|
/// ex: toSingle(preload:true) -> Loads all related objects
|
|
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
|
|
/// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
|
|
/// bool loadParents: if true, loads all parent objects until the object has no parent
|
|
|
|
/// <returns> DbEvent?
|
|
@override
|
|
Future<DbEvent> toSingleOrDefault(
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields}) async {
|
|
return await toSingle(
|
|
preload: preload,
|
|
preloadFields: preloadFields,
|
|
loadParents: loadParents,
|
|
loadedFields: loadedFields) ??
|
|
DbEvent();
|
|
}
|
|
|
|
/// This method returns int. [DbEvent]
|
|
/// <returns>int
|
|
@override
|
|
Future<int> toCount([VoidCallback Function(int c)? dbeventCount]) async {
|
|
buildParameters();
|
|
qparams.selectColumns = ['COUNT(1) AS CNT'];
|
|
final dbeventsFuture = await _mnDbEvent!.toList(qparams);
|
|
final int count = dbeventsFuture[0]['CNT'] as int;
|
|
if (dbeventCount != null) {
|
|
dbeventCount(count);
|
|
}
|
|
return count;
|
|
}
|
|
|
|
/// This method returns List<DbEvent> [DbEvent]
|
|
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
|
|
/// ex: toList(preload:true) -> Loads all related objects
|
|
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
|
|
/// ex: toList(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
|
|
/// bool loadParents: if true, loads all parent objects until the object has no parent
|
|
|
|
/// <returns>List<DbEvent>
|
|
@override
|
|
Future<List<DbEvent>> toList(
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields}) async {
|
|
final data = await toMapList();
|
|
final List<DbEvent> dbeventsData = await DbEvent.fromMapList(data,
|
|
preload: preload,
|
|
preloadFields: preloadFields,
|
|
loadParents: loadParents,
|
|
loadedFields: loadedFields,
|
|
setDefaultValues: qparams.selectColumns == null);
|
|
return dbeventsData;
|
|
}
|
|
|
|
/// This method returns Json String [DbEvent]
|
|
@override
|
|
Future<String> toJson() async {
|
|
final list = <dynamic>[];
|
|
final data = await toList();
|
|
for (var o in data) {
|
|
list.add(o.toMap(forJson: true));
|
|
}
|
|
return json.encode(list);
|
|
}
|
|
|
|
/// This method returns Json String. [DbEvent]
|
|
@override
|
|
Future<String> toJsonWithChilds() async {
|
|
final list = <dynamic>[];
|
|
final data = await toList();
|
|
for (var o in data) {
|
|
list.add(await o.toMapWithChildren(false, true));
|
|
}
|
|
return json.encode(list);
|
|
}
|
|
|
|
/// This method returns List<dynamic>. [DbEvent]
|
|
/// <returns>List<dynamic>
|
|
@override
|
|
Future<List<dynamic>> toMapList() async {
|
|
buildParameters();
|
|
return await _mnDbEvent!.toList(qparams);
|
|
}
|
|
|
|
/// This method returns Primary Key List SQL and Parameters retVal = Map<String,dynamic>. [DbEvent]
|
|
/// retVal['sql'] = SQL statement string, retVal['args'] = whereArguments List<dynamic>;
|
|
/// <returns>List<String>
|
|
@override
|
|
Map<String, dynamic> toListPrimaryKeySQL([bool buildParams = true]) {
|
|
final Map<String, dynamic> _retVal = <String, dynamic>{};
|
|
if (buildParams) {
|
|
buildParameters();
|
|
}
|
|
_retVal['sql'] = 'SELECT `id` FROM events WHERE ${qparams.whereString}';
|
|
_retVal['args'] = qparams.whereArguments;
|
|
return _retVal;
|
|
}
|
|
|
|
/// This method returns Primary Key List<int>.
|
|
/// <returns>List<int>
|
|
@override
|
|
Future<List<int>> toListPrimaryKey([bool buildParams = true]) async {
|
|
if (buildParams) {
|
|
buildParameters();
|
|
}
|
|
final List<int> idData = <int>[];
|
|
qparams.selectColumns = ['id'];
|
|
final idFuture = await _mnDbEvent!.toList(qparams);
|
|
|
|
final int count = idFuture.length;
|
|
for (int i = 0; i < count; i++) {
|
|
idData.add(idFuture[i]['id'] as int);
|
|
}
|
|
return idData;
|
|
}
|
|
|
|
/// Returns List<dynamic> for selected columns. Use this method for 'groupBy' with min,max,avg.. [DbEvent]
|
|
/// Sample usage: (see EXAMPLE 4.2 at https://github.com/hhtokpinar/sqfEntity#group-by)
|
|
@override
|
|
Future<List<dynamic>> toListObject() async {
|
|
buildParameters();
|
|
|
|
final objectFuture = _mnDbEvent!.toList(qparams);
|
|
|
|
final List<dynamic> objectsData = <dynamic>[];
|
|
final data = await objectFuture;
|
|
final int count = data.length;
|
|
for (int i = 0; i < count; i++) {
|
|
objectsData.add(data[i]);
|
|
}
|
|
return objectsData;
|
|
}
|
|
|
|
/// Returns List<String> for selected first column
|
|
/// Sample usage: await DbEvent.select(columnsToSelect: ['columnName']).toListString()
|
|
@override
|
|
Future<List<String>> toListString(
|
|
[VoidCallback Function(List<String> o)? listString]) async {
|
|
buildParameters();
|
|
|
|
final objectFuture = _mnDbEvent!.toList(qparams);
|
|
|
|
final List<String> objectsData = <String>[];
|
|
final data = await objectFuture;
|
|
final int count = data.length;
|
|
for (int i = 0; i < count; i++) {
|
|
objectsData.add(data[i][qparams.selectColumns![0]].toString());
|
|
}
|
|
if (listString != null) {
|
|
listString(objectsData);
|
|
}
|
|
return objectsData;
|
|
}
|
|
}
|
|
// endregion DbEventFilterBuilder
|
|
|
|
// region DbEventFields
|
|
/// Static [TableField] descriptors for the `events` table, one per column.
///
/// Each getter lazily builds its descriptor on first access (`??=`) and
/// returns the cached instance on every later access.
class DbEventFields {
  static TableField? _fId;
  static TableField get id =>
      _fId ??= SqlSyntax.setField(_fId, 'id', DbType.integer);

  static TableField? _fEvent;
  static TableField get event =>
      _fEvent ??= SqlSyntax.setField(_fEvent, 'event', DbType.text);

  static TableField? _fEventType;
  static TableField get eventType =>
      _fEventType ??= SqlSyntax.setField(_fEventType, 'eventType', DbType.text);

  static TableField? _fEventGroup;
  static TableField get eventGroup => _fEventGroup ??=
      SqlSyntax.setField(_fEventGroup, 'eventGroup', DbType.integer);

  static TableField? _fTimerTrigger;
  static TableField get timerTrigger => _fTimerTrigger ??=
      SqlSyntax.setField(_fTimerTrigger, 'timerTrigger', DbType.text);

  static TableField? _fTimeStamp;
  static TableField get timeStamp => _fTimeStamp ??=
      SqlSyntax.setField(_fTimeStamp, 'timeStamp', DbType.datetime);

  static TableField? _fPositionLat;
  static TableField get positionLat => _fPositionLat ??=
      SqlSyntax.setField(_fPositionLat, 'positionLat', DbType.real);

  static TableField? _fPositionLong;
  static TableField get positionLong => _fPositionLong ??=
      SqlSyntax.setField(_fPositionLong, 'positionLong', DbType.real);

  static TableField? _fDistance;
  static TableField get distance =>
      _fDistance ??= SqlSyntax.setField(_fDistance, 'distance', DbType.real);

  static TableField? _fAltitude;
  static TableField get altitude =>
      _fAltitude ??= SqlSyntax.setField(_fAltitude, 'altitude', DbType.real);

  static TableField? _fSpeed;
  static TableField get speed =>
      _fSpeed ??= SqlSyntax.setField(_fSpeed, 'speed', DbType.real);

  static TableField? _fHeartRate;
  static TableField get heartRate => _fHeartRate ??=
      SqlSyntax.setField(_fHeartRate, 'heartRate', DbType.integer);

  static TableField? _fCadence;
  static TableField get cadence =>
      _fCadence ??= SqlSyntax.setField(_fCadence, 'cadence', DbType.real);

  static TableField? _fFractionalCadence;
  static TableField get fractionalCadence => _fFractionalCadence ??=
      SqlSyntax.setField(_fFractionalCadence, 'fractionalCadence', DbType.real);

  static TableField? _fPower;
  static TableField get power =>
      _fPower ??= SqlSyntax.setField(_fPower, 'power', DbType.integer);

  static TableField? _fStrydCadence;
  static TableField get strydCadence => _fStrydCadence ??=
      SqlSyntax.setField(_fStrydCadence, 'strydCadence', DbType.real);

  static TableField? _fGroundTime;
  static TableField get groundTime => _fGroundTime ??=
      SqlSyntax.setField(_fGroundTime, 'groundTime', DbType.real);

  static TableField? _fVerticalOscillation;
  static TableField get verticalOscillation =>
      _fVerticalOscillation ??= SqlSyntax.setField(
          _fVerticalOscillation, 'verticalOscillation', DbType.real);

  static TableField? _fFormPower;
  static TableField get formPower => _fFormPower ??=
      SqlSyntax.setField(_fFormPower, 'formPower', DbType.integer);

  static TableField? _fLegSpringStiffness;
  static TableField get legSpringStiffness =>
      _fLegSpringStiffness ??= SqlSyntax.setField(
          _fLegSpringStiffness, 'legSpringStiffness', DbType.real);

  static TableField? _fData;
  static TableField get data =>
      _fData ??= SqlSyntax.setField(_fData, 'data', DbType.real);

  static TableField? _fActivitiesId;
  static TableField get activitiesId => _fActivitiesId ??=
      SqlSyntax.setField(_fActivitiesId, 'activitiesId', DbType.integer);

  static TableField? _fLapsId;
  static TableField get lapsId =>
      _fLapsId ??= SqlSyntax.setField(_fLapsId, 'lapsId', DbType.integer);
}
|
|
// endregion DbEventFields
|
|
|
|
//region DbEventManager
|
|
/// Low-level data-access manager for the `events` table.
///
/// Wires the table name, the primary-key column list and the default
/// single-row WHERE clause into the shared [SqfEntityProvider] plumbing
/// of the [DbEncrateia] database.
class DbEventManager extends SqfEntityProvider {
  DbEventManager()
      : super(DbEncrateia(),
            tableName: _tableName,
            primaryKeyList: _primaryKeyList,
            whereStr: _whereStr);
  static const String _tableName = 'events';
  static const List<String> _primaryKeyList = ['id'];
  static const String _whereStr = 'id=?';
}
|
|
|
|
//endregion DbEventManager
|
|
// region DbLap
|
|
class DbLap extends TableBase {
|
|
  /// Creates a [DbLap] with every column optional (named parameters).
  ///
  /// Applies default values and disables soft deleting for this instance.
  DbLap(
      {this.id,
      this.timeStamp,
      this.startTime,
      this.startPositionLat,
      this.startPositionLong,
      this.endPositionLat,
      this.endPositionLong,
      this.avgHeartRate,
      this.maxHeartRate,
      this.avgRunningCadence,
      this.event,
      this.eventType,
      this.eventGroup,
      this.sport,
      this.subSport,
      this.avgVerticalOscillation,
      this.totalElapsedTime,
      this.totalTimerTime,
      this.totalDistance,
      this.totalStrides,
      this.totalCalories,
      this.avgSpeed,
      this.avgSpeedByMeasurements,
      this.avgSpeedBySpeed,
      this.avgSpeedByDistance,
      this.sdevSpeed,
      this.sdevPace,
      this.minSpeed,
      this.maxSpeed,
      this.totalAscent,
      this.totalDescent,
      this.avgStanceTimePercent,
      this.avgStanceTime,
      this.maxRunningCadence,
      this.intensity,
      this.lapTrigger,
      this.avgTemperature,
      this.maxTemperature,
      this.avgFractionalCadence,
      this.maxFractionalCadence,
      this.totalFractionalCycles,
      this.avgPower,
      this.minPower,
      this.maxPower,
      this.sdevPower,
      this.minHeartRate,
      this.sdevHeartRate,
      this.avgGroundTime,
      this.sdevGroundTime,
      this.avgLegSpringStiffness,
      this.sdevLegSpringStiffness,
      this.avgFormPower,
      this.sdevFormPower,
      this.avgStrydCadence,
      this.sdevStrydCadence,
      this.sdevVerticalOscillation,
      this.avgPowerRatio,
      this.sdevPowerRatio,
      this.avgStrideRatio,
      this.sdevStrideRatio,
      this.cp,
      this.ftp,
      this.movingTime,
      this.activitiesId}) {
    _setDefaultValues();
    softDeleteActivated = false;
  }

  /// Creates a [DbLap] from positional field values, without a primary key.
  DbLap.withFields(
      this.timeStamp,
      this.startTime,
      this.startPositionLat,
      this.startPositionLong,
      this.endPositionLat,
      this.endPositionLong,
      this.avgHeartRate,
      this.maxHeartRate,
      this.avgRunningCadence,
      this.event,
      this.eventType,
      this.eventGroup,
      this.sport,
      this.subSport,
      this.avgVerticalOscillation,
      this.totalElapsedTime,
      this.totalTimerTime,
      this.totalDistance,
      this.totalStrides,
      this.totalCalories,
      this.avgSpeed,
      this.avgSpeedByMeasurements,
      this.avgSpeedBySpeed,
      this.avgSpeedByDistance,
      this.sdevSpeed,
      this.sdevPace,
      this.minSpeed,
      this.maxSpeed,
      this.totalAscent,
      this.totalDescent,
      this.avgStanceTimePercent,
      this.avgStanceTime,
      this.maxRunningCadence,
      this.intensity,
      this.lapTrigger,
      this.avgTemperature,
      this.maxTemperature,
      this.avgFractionalCadence,
      this.maxFractionalCadence,
      this.totalFractionalCycles,
      this.avgPower,
      this.minPower,
      this.maxPower,
      this.sdevPower,
      this.minHeartRate,
      this.sdevHeartRate,
      this.avgGroundTime,
      this.sdevGroundTime,
      this.avgLegSpringStiffness,
      this.sdevLegSpringStiffness,
      this.avgFormPower,
      this.sdevFormPower,
      this.avgStrydCadence,
      this.sdevStrydCadence,
      this.sdevVerticalOscillation,
      this.avgPowerRatio,
      this.sdevPowerRatio,
      this.avgStrideRatio,
      this.sdevStrideRatio,
      this.cp,
      this.ftp,
      this.movingTime,
      this.activitiesId) {
    _setDefaultValues();
  }

  /// Creates a [DbLap] from positional field values including the primary
  /// key [id].
  DbLap.withId(
      this.id,
      this.timeStamp,
      this.startTime,
      this.startPositionLat,
      this.startPositionLong,
      this.endPositionLat,
      this.endPositionLong,
      this.avgHeartRate,
      this.maxHeartRate,
      this.avgRunningCadence,
      this.event,
      this.eventType,
      this.eventGroup,
      this.sport,
      this.subSport,
      this.avgVerticalOscillation,
      this.totalElapsedTime,
      this.totalTimerTime,
      this.totalDistance,
      this.totalStrides,
      this.totalCalories,
      this.avgSpeed,
      this.avgSpeedByMeasurements,
      this.avgSpeedBySpeed,
      this.avgSpeedByDistance,
      this.sdevSpeed,
      this.sdevPace,
      this.minSpeed,
      this.maxSpeed,
      this.totalAscent,
      this.totalDescent,
      this.avgStanceTimePercent,
      this.avgStanceTime,
      this.maxRunningCadence,
      this.intensity,
      this.lapTrigger,
      this.avgTemperature,
      this.maxTemperature,
      this.avgFractionalCadence,
      this.maxFractionalCadence,
      this.totalFractionalCycles,
      this.avgPower,
      this.minPower,
      this.maxPower,
      this.sdevPower,
      this.minHeartRate,
      this.sdevHeartRate,
      this.avgGroundTime,
      this.sdevGroundTime,
      this.avgLegSpringStiffness,
      this.sdevLegSpringStiffness,
      this.avgFormPower,
      this.sdevFormPower,
      this.avgStrydCadence,
      this.sdevStrydCadence,
      this.sdevVerticalOscillation,
      this.avgPowerRatio,
      this.sdevPowerRatio,
      this.avgStrideRatio,
      this.sdevStrideRatio,
      this.cp,
      this.ftp,
      this.movingTime,
      this.activitiesId) {
    _setDefaultValues();
  }
|
|
  // fromMap v2.0
  /// Builds a [DbLap] from a row/JSON map [o].
  ///
  /// Every column is parsed defensively with `tryParse` (absent or
  /// unparsable values stay null); date columns accept either
  /// milliseconds-since-epoch integers or a date string parsable by
  /// [DateTime.tryParse]. When [setDefaultValues] is true, defaults are
  /// applied before the map values overwrite them.
  DbLap.fromMap(Map<String, dynamic> o, {bool setDefaultValues = true}) {
    if (setDefaultValues) {
      _setDefaultValues();
    }
    id = int.tryParse(o['id'].toString());
    if (o['timeStamp'] != null) {
      timeStamp = int.tryParse(o['timeStamp'].toString()) != null
          ? DateTime.fromMillisecondsSinceEpoch(
              int.tryParse(o['timeStamp'].toString())!)
          : DateTime.tryParse(o['timeStamp'].toString());
    }
    if (o['startTime'] != null) {
      startTime = int.tryParse(o['startTime'].toString()) != null
          ? DateTime.fromMillisecondsSinceEpoch(
              int.tryParse(o['startTime'].toString())!)
          : DateTime.tryParse(o['startTime'].toString());
    }
    if (o['startPositionLat'] != null) {
      startPositionLat = double.tryParse(o['startPositionLat'].toString());
    }
    if (o['startPositionLong'] != null) {
      startPositionLong = double.tryParse(o['startPositionLong'].toString());
    }
    if (o['endPositionLat'] != null) {
      endPositionLat = double.tryParse(o['endPositionLat'].toString());
    }
    if (o['endPositionLong'] != null) {
      endPositionLong = double.tryParse(o['endPositionLong'].toString());
    }
    if (o['avgHeartRate'] != null) {
      avgHeartRate = int.tryParse(o['avgHeartRate'].toString());
    }
    if (o['maxHeartRate'] != null) {
      maxHeartRate = int.tryParse(o['maxHeartRate'].toString());
    }
    if (o['avgRunningCadence'] != null) {
      avgRunningCadence = double.tryParse(o['avgRunningCadence'].toString());
    }
    if (o['event'] != null) {
      event = o['event'].toString();
    }
    if (o['eventType'] != null) {
      eventType = o['eventType'].toString();
    }
    if (o['eventGroup'] != null) {
      eventGroup = int.tryParse(o['eventGroup'].toString());
    }
    if (o['sport'] != null) {
      sport = o['sport'].toString();
    }
    if (o['subSport'] != null) {
      subSport = o['subSport'].toString();
    }
    if (o['avgVerticalOscillation'] != null) {
      avgVerticalOscillation =
          double.tryParse(o['avgVerticalOscillation'].toString());
    }
    if (o['totalElapsedTime'] != null) {
      totalElapsedTime = int.tryParse(o['totalElapsedTime'].toString());
    }
    if (o['totalTimerTime'] != null) {
      totalTimerTime = int.tryParse(o['totalTimerTime'].toString());
    }
    if (o['totalDistance'] != null) {
      totalDistance = int.tryParse(o['totalDistance'].toString());
    }
    if (o['totalStrides'] != null) {
      totalStrides = int.tryParse(o['totalStrides'].toString());
    }
    if (o['totalCalories'] != null) {
      totalCalories = int.tryParse(o['totalCalories'].toString());
    }
    if (o['avgSpeed'] != null) {
      avgSpeed = double.tryParse(o['avgSpeed'].toString());
    }
    if (o['avgSpeedByMeasurements'] != null) {
      avgSpeedByMeasurements =
          double.tryParse(o['avgSpeedByMeasurements'].toString());
    }
    if (o['avgSpeedBySpeed'] != null) {
      avgSpeedBySpeed = double.tryParse(o['avgSpeedBySpeed'].toString());
    }
    if (o['avgSpeedByDistance'] != null) {
      avgSpeedByDistance = double.tryParse(o['avgSpeedByDistance'].toString());
    }
    if (o['sdevSpeed'] != null) {
      sdevSpeed = double.tryParse(o['sdevSpeed'].toString());
    }
    if (o['sdevPace'] != null) {
      sdevPace = double.tryParse(o['sdevPace'].toString());
    }
    if (o['minSpeed'] != null) {
      minSpeed = double.tryParse(o['minSpeed'].toString());
    }
    if (o['maxSpeed'] != null) {
      maxSpeed = double.tryParse(o['maxSpeed'].toString());
    }
    if (o['totalAscent'] != null) {
      totalAscent = int.tryParse(o['totalAscent'].toString());
    }
    if (o['totalDescent'] != null) {
      totalDescent = int.tryParse(o['totalDescent'].toString());
    }
    if (o['avgStanceTimePercent'] != null) {
      avgStanceTimePercent =
          double.tryParse(o['avgStanceTimePercent'].toString());
    }
    if (o['avgStanceTime'] != null) {
      avgStanceTime = double.tryParse(o['avgStanceTime'].toString());
    }
    if (o['maxRunningCadence'] != null) {
      maxRunningCadence = int.tryParse(o['maxRunningCadence'].toString());
    }
    if (o['intensity'] != null) {
      intensity = int.tryParse(o['intensity'].toString());
    }
    if (o['lapTrigger'] != null) {
      lapTrigger = o['lapTrigger'].toString();
    }
    if (o['avgTemperature'] != null) {
      avgTemperature = int.tryParse(o['avgTemperature'].toString());
    }
    if (o['maxTemperature'] != null) {
      maxTemperature = int.tryParse(o['maxTemperature'].toString());
    }
    if (o['avgFractionalCadence'] != null) {
      avgFractionalCadence =
          double.tryParse(o['avgFractionalCadence'].toString());
    }
    if (o['maxFractionalCadence'] != null) {
      maxFractionalCadence =
          double.tryParse(o['maxFractionalCadence'].toString());
    }
    if (o['totalFractionalCycles'] != null) {
      totalFractionalCycles =
          double.tryParse(o['totalFractionalCycles'].toString());
    }
    if (o['avgPower'] != null) {
      avgPower = double.tryParse(o['avgPower'].toString());
    }
    if (o['minPower'] != null) {
      minPower = int.tryParse(o['minPower'].toString());
    }
    if (o['maxPower'] != null) {
      maxPower = int.tryParse(o['maxPower'].toString());
    }
    if (o['sdevPower'] != null) {
      sdevPower = double.tryParse(o['sdevPower'].toString());
    }
    if (o['minHeartRate'] != null) {
      minHeartRate = int.tryParse(o['minHeartRate'].toString());
    }
    if (o['sdevHeartRate'] != null) {
      sdevHeartRate = double.tryParse(o['sdevHeartRate'].toString());
    }
    if (o['avgGroundTime'] != null) {
      avgGroundTime = double.tryParse(o['avgGroundTime'].toString());
    }
    if (o['sdevGroundTime'] != null) {
      sdevGroundTime = double.tryParse(o['sdevGroundTime'].toString());
    }
    if (o['avgLegSpringStiffness'] != null) {
      avgLegSpringStiffness =
          double.tryParse(o['avgLegSpringStiffness'].toString());
    }
    if (o['sdevLegSpringStiffness'] != null) {
      sdevLegSpringStiffness =
          double.tryParse(o['sdevLegSpringStiffness'].toString());
    }
    if (o['avgFormPower'] != null) {
      avgFormPower = double.tryParse(o['avgFormPower'].toString());
    }
    if (o['sdevFormPower'] != null) {
      sdevFormPower = double.tryParse(o['sdevFormPower'].toString());
    }
    if (o['avgStrydCadence'] != null) {
      avgStrydCadence = double.tryParse(o['avgStrydCadence'].toString());
    }
    if (o['sdevStrydCadence'] != null) {
      sdevStrydCadence = double.tryParse(o['sdevStrydCadence'].toString());
    }
    if (o['sdevVerticalOscillation'] != null) {
      sdevVerticalOscillation =
          double.tryParse(o['sdevVerticalOscillation'].toString());
    }
    if (o['avgPowerRatio'] != null) {
      avgPowerRatio = double.tryParse(o['avgPowerRatio'].toString());
    }
    if (o['sdevPowerRatio'] != null) {
      sdevPowerRatio = double.tryParse(o['sdevPowerRatio'].toString());
    }
    if (o['avgStrideRatio'] != null) {
      avgStrideRatio = double.tryParse(o['avgStrideRatio'].toString());
    }
    if (o['sdevStrideRatio'] != null) {
      sdevStrideRatio = double.tryParse(o['sdevStrideRatio'].toString());
    }
    if (o['cp'] != null) {
      cp = double.tryParse(o['cp'].toString());
    }
    if (o['ftp'] != null) {
      ftp = double.tryParse(o['ftp'].toString());
    }
    if (o['movingTime'] != null) {
      movingTime = int.tryParse(o['movingTime'].toString());
    }
    activitiesId = int.tryParse(o['activitiesId'].toString());

    // RELATIONSHIPS FromMAP
    // The parent activity may be embedded in the map as a nested object.
    plDbActivity = o['dbActivity'] != null
        ? DbActivity.fromMap(o['dbActivity'] as Map<String, dynamic>)
        : null;
    // END RELATIONSHIPS FromMAP
  }
|
|
  // FIELDS (DbLap)
  // Column-backed fields of the `laps` table. All are nullable because rows
  // may be partially populated when parsed from a FIT file or a web payload.

  // Primary key (auto-incremental).
  int? id;
  // Device timestamps for the lap record.
  DateTime? timeStamp;
  DateTime? startTime;
  // GPS coordinates at lap boundaries (decimal degrees).
  double? startPositionLat;
  double? startPositionLong;
  double? endPositionLat;
  double? endPositionLong;
  // Heart-rate statistics (bpm).
  int? avgHeartRate;
  int? maxHeartRate;
  double? avgRunningCadence;
  // FIT event metadata for this lap record.
  String? event;
  String? eventType;
  int? eventGroup;
  String? sport;
  String? subSport;
  double? avgVerticalOscillation;
  // Totals over the lap; times presumably in seconds, distance in meters —
  // units are not visible here, confirm against the FIT parser.
  int? totalElapsedTime;
  int? totalTimerTime;
  int? totalDistance;
  int? totalStrides;
  int? totalCalories;
  // Speed statistics computed by different strategies.
  double? avgSpeed;
  double? avgSpeedByMeasurements;
  double? avgSpeedBySpeed;
  double? avgSpeedByDistance;
  double? sdevSpeed;
  double? sdevPace;
  double? minSpeed;
  double? maxSpeed;
  int? totalAscent;
  int? totalDescent;
  // Running-dynamics metrics.
  double? avgStanceTimePercent;
  double? avgStanceTime;
  int? maxRunningCadence;
  int? intensity;
  String? lapTrigger;
  int? avgTemperature;
  int? maxTemperature;
  double? avgFractionalCadence;
  double? maxFractionalCadence;
  double? totalFractionalCycles;
  // Power statistics (watts).
  double? avgPower;
  int? minPower;
  int? maxPower;
  double? sdevPower;
  int? minHeartRate;
  double? sdevHeartRate;
  // Stryd-derived metrics (avg/standard deviation pairs).
  double? avgGroundTime;
  double? sdevGroundTime;
  double? avgLegSpringStiffness;
  double? sdevLegSpringStiffness;
  double? avgFormPower;
  double? sdevFormPower;
  double? avgStrydCadence;
  double? sdevStrydCadence;
  double? sdevVerticalOscillation;
  double? avgPowerRatio;
  double? sdevPowerRatio;
  double? avgStrideRatio;
  double? sdevStrideRatio;
  // Critical power / functional threshold power at the time of the lap.
  double? cp;
  double? ftp;
  int? movingTime;
  // Foreign key to the parent activities row (see plDbActivity).
  int? activitiesId;

  // end FIELDS (DbLap)
|
|
|
|
// RELATIONSHIPS (DbLap)
|
|
/// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbActivity', 'plField2'..]) or so on..
|
|
DbActivity? plDbActivity;
|
|
|
|
/// get DbActivity By ActivitiesId
|
|
Future<DbActivity?> getDbActivity(
|
|
{bool loadParents = false, List<String>? loadedFields}) async {
|
|
final _obj = await DbActivity().getById(activitiesId,
|
|
loadParents: loadParents, loadedFields: loadedFields);
|
|
return _obj;
|
|
}
|
|
// END RELATIONSHIPS (DbLap)
|
|
|
|
// COLLECTIONS & VIRTUALS (DbLap)
|
|
/// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbEvents', 'plField2'..]) or so on..
|
|
List<DbEvent>? plDbEvents;
|
|
|
|
/// get DbEvent(s) filtered by id=lapsId
|
|
DbEventFilterBuilder? getDbEvents(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
if (id == null) {
|
|
return null;
|
|
}
|
|
return DbEvent()
|
|
.select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
|
|
.lapsId
|
|
.equals(id)
|
|
.and;
|
|
}
|
|
|
|
/// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbLapTaggings', 'plField2'..]) or so on..
|
|
List<DbLapTagging>? plDbLapTaggings;
|
|
|
|
/// get DbLapTagging(s) filtered by id=lapsId
|
|
DbLapTaggingFilterBuilder? getDbLapTaggings(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
if (id == null) {
|
|
return null;
|
|
}
|
|
return DbLapTagging()
|
|
.select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
|
|
.lapsId
|
|
.equals(id)
|
|
.and;
|
|
}
|
|
|
|
// END COLLECTIONS & VIRTUALS (DbLap)
|
|
|
|
static const bool _softDeleteActivated = false;
|
|
DbLapManager? __mnDbLap;
|
|
|
|
DbLapManager get _mnDbLap {
|
|
return __mnDbLap = __mnDbLap ?? DbLapManager();
|
|
}
|
|
|
|
// METHODS
|
|
@override
|
|
Map<String, dynamic> toMap(
|
|
{bool forQuery = false, bool forJson = false, bool forView = false}) {
|
|
final map = <String, dynamic>{};
|
|
map['id'] = id;
|
|
if (timeStamp != null) {
|
|
map['timeStamp'] = forJson
|
|
? timeStamp!.toString()
|
|
: forQuery
|
|
? timeStamp!.millisecondsSinceEpoch
|
|
: timeStamp;
|
|
} else if (timeStamp != null || !forView) {
|
|
map['timeStamp'] = null;
|
|
}
|
|
if (startTime != null) {
|
|
map['startTime'] = forJson
|
|
? startTime!.toString()
|
|
: forQuery
|
|
? startTime!.millisecondsSinceEpoch
|
|
: startTime;
|
|
} else if (startTime != null || !forView) {
|
|
map['startTime'] = null;
|
|
}
|
|
if (startPositionLat != null || !forView) {
|
|
map['startPositionLat'] = startPositionLat;
|
|
}
|
|
if (startPositionLong != null || !forView) {
|
|
map['startPositionLong'] = startPositionLong;
|
|
}
|
|
if (endPositionLat != null || !forView) {
|
|
map['endPositionLat'] = endPositionLat;
|
|
}
|
|
if (endPositionLong != null || !forView) {
|
|
map['endPositionLong'] = endPositionLong;
|
|
}
|
|
if (avgHeartRate != null || !forView) {
|
|
map['avgHeartRate'] = avgHeartRate;
|
|
}
|
|
if (maxHeartRate != null || !forView) {
|
|
map['maxHeartRate'] = maxHeartRate;
|
|
}
|
|
if (avgRunningCadence != null || !forView) {
|
|
map['avgRunningCadence'] = avgRunningCadence;
|
|
}
|
|
if (event != null || !forView) {
|
|
map['event'] = event;
|
|
}
|
|
if (eventType != null || !forView) {
|
|
map['eventType'] = eventType;
|
|
}
|
|
if (eventGroup != null || !forView) {
|
|
map['eventGroup'] = eventGroup;
|
|
}
|
|
if (sport != null || !forView) {
|
|
map['sport'] = sport;
|
|
}
|
|
if (subSport != null || !forView) {
|
|
map['subSport'] = subSport;
|
|
}
|
|
if (avgVerticalOscillation != null || !forView) {
|
|
map['avgVerticalOscillation'] = avgVerticalOscillation;
|
|
}
|
|
if (totalElapsedTime != null || !forView) {
|
|
map['totalElapsedTime'] = totalElapsedTime;
|
|
}
|
|
if (totalTimerTime != null || !forView) {
|
|
map['totalTimerTime'] = totalTimerTime;
|
|
}
|
|
if (totalDistance != null || !forView) {
|
|
map['totalDistance'] = totalDistance;
|
|
}
|
|
if (totalStrides != null || !forView) {
|
|
map['totalStrides'] = totalStrides;
|
|
}
|
|
if (totalCalories != null || !forView) {
|
|
map['totalCalories'] = totalCalories;
|
|
}
|
|
if (avgSpeed != null || !forView) {
|
|
map['avgSpeed'] = avgSpeed;
|
|
}
|
|
if (avgSpeedByMeasurements != null || !forView) {
|
|
map['avgSpeedByMeasurements'] = avgSpeedByMeasurements;
|
|
}
|
|
if (avgSpeedBySpeed != null || !forView) {
|
|
map['avgSpeedBySpeed'] = avgSpeedBySpeed;
|
|
}
|
|
if (avgSpeedByDistance != null || !forView) {
|
|
map['avgSpeedByDistance'] = avgSpeedByDistance;
|
|
}
|
|
if (sdevSpeed != null || !forView) {
|
|
map['sdevSpeed'] = sdevSpeed;
|
|
}
|
|
if (sdevPace != null || !forView) {
|
|
map['sdevPace'] = sdevPace;
|
|
}
|
|
if (minSpeed != null || !forView) {
|
|
map['minSpeed'] = minSpeed;
|
|
}
|
|
if (maxSpeed != null || !forView) {
|
|
map['maxSpeed'] = maxSpeed;
|
|
}
|
|
if (totalAscent != null || !forView) {
|
|
map['totalAscent'] = totalAscent;
|
|
}
|
|
if (totalDescent != null || !forView) {
|
|
map['totalDescent'] = totalDescent;
|
|
}
|
|
if (avgStanceTimePercent != null || !forView) {
|
|
map['avgStanceTimePercent'] = avgStanceTimePercent;
|
|
}
|
|
if (avgStanceTime != null || !forView) {
|
|
map['avgStanceTime'] = avgStanceTime;
|
|
}
|
|
if (maxRunningCadence != null || !forView) {
|
|
map['maxRunningCadence'] = maxRunningCadence;
|
|
}
|
|
if (intensity != null || !forView) {
|
|
map['intensity'] = intensity;
|
|
}
|
|
if (lapTrigger != null || !forView) {
|
|
map['lapTrigger'] = lapTrigger;
|
|
}
|
|
if (avgTemperature != null || !forView) {
|
|
map['avgTemperature'] = avgTemperature;
|
|
}
|
|
if (maxTemperature != null || !forView) {
|
|
map['maxTemperature'] = maxTemperature;
|
|
}
|
|
if (avgFractionalCadence != null || !forView) {
|
|
map['avgFractionalCadence'] = avgFractionalCadence;
|
|
}
|
|
if (maxFractionalCadence != null || !forView) {
|
|
map['maxFractionalCadence'] = maxFractionalCadence;
|
|
}
|
|
if (totalFractionalCycles != null || !forView) {
|
|
map['totalFractionalCycles'] = totalFractionalCycles;
|
|
}
|
|
if (avgPower != null || !forView) {
|
|
map['avgPower'] = avgPower;
|
|
}
|
|
if (minPower != null || !forView) {
|
|
map['minPower'] = minPower;
|
|
}
|
|
if (maxPower != null || !forView) {
|
|
map['maxPower'] = maxPower;
|
|
}
|
|
if (sdevPower != null || !forView) {
|
|
map['sdevPower'] = sdevPower;
|
|
}
|
|
if (minHeartRate != null || !forView) {
|
|
map['minHeartRate'] = minHeartRate;
|
|
}
|
|
if (sdevHeartRate != null || !forView) {
|
|
map['sdevHeartRate'] = sdevHeartRate;
|
|
}
|
|
if (avgGroundTime != null || !forView) {
|
|
map['avgGroundTime'] = avgGroundTime;
|
|
}
|
|
if (sdevGroundTime != null || !forView) {
|
|
map['sdevGroundTime'] = sdevGroundTime;
|
|
}
|
|
if (avgLegSpringStiffness != null || !forView) {
|
|
map['avgLegSpringStiffness'] = avgLegSpringStiffness;
|
|
}
|
|
if (sdevLegSpringStiffness != null || !forView) {
|
|
map['sdevLegSpringStiffness'] = sdevLegSpringStiffness;
|
|
}
|
|
if (avgFormPower != null || !forView) {
|
|
map['avgFormPower'] = avgFormPower;
|
|
}
|
|
if (sdevFormPower != null || !forView) {
|
|
map['sdevFormPower'] = sdevFormPower;
|
|
}
|
|
if (avgStrydCadence != null || !forView) {
|
|
map['avgStrydCadence'] = avgStrydCadence;
|
|
}
|
|
if (sdevStrydCadence != null || !forView) {
|
|
map['sdevStrydCadence'] = sdevStrydCadence;
|
|
}
|
|
if (sdevVerticalOscillation != null || !forView) {
|
|
map['sdevVerticalOscillation'] = sdevVerticalOscillation;
|
|
}
|
|
if (avgPowerRatio != null || !forView) {
|
|
map['avgPowerRatio'] = avgPowerRatio;
|
|
}
|
|
if (sdevPowerRatio != null || !forView) {
|
|
map['sdevPowerRatio'] = sdevPowerRatio;
|
|
}
|
|
if (avgStrideRatio != null || !forView) {
|
|
map['avgStrideRatio'] = avgStrideRatio;
|
|
}
|
|
if (sdevStrideRatio != null || !forView) {
|
|
map['sdevStrideRatio'] = sdevStrideRatio;
|
|
}
|
|
if (cp != null || !forView) {
|
|
map['cp'] = cp;
|
|
}
|
|
if (ftp != null || !forView) {
|
|
map['ftp'] = ftp;
|
|
}
|
|
if (movingTime != null || !forView) {
|
|
map['movingTime'] = movingTime;
|
|
}
|
|
if (activitiesId != null) {
|
|
map['activitiesId'] = forView
|
|
? plDbActivity == null
|
|
? activitiesId
|
|
: plDbActivity!.state
|
|
: activitiesId;
|
|
} else if (activitiesId != null || !forView) {
|
|
map['activitiesId'] = null;
|
|
}
|
|
|
|
return map;
|
|
}
|
|
|
|
@override
|
|
Future<Map<String, dynamic>> toMapWithChildren(
|
|
[bool forQuery = false,
|
|
bool forJson = false,
|
|
bool forView = false]) async {
|
|
final map = <String, dynamic>{};
|
|
map['id'] = id;
|
|
if (timeStamp != null) {
|
|
map['timeStamp'] = forJson
|
|
? timeStamp!.toString()
|
|
: forQuery
|
|
? timeStamp!.millisecondsSinceEpoch
|
|
: timeStamp;
|
|
} else if (timeStamp != null || !forView) {
|
|
map['timeStamp'] = null;
|
|
}
|
|
if (startTime != null) {
|
|
map['startTime'] = forJson
|
|
? startTime!.toString()
|
|
: forQuery
|
|
? startTime!.millisecondsSinceEpoch
|
|
: startTime;
|
|
} else if (startTime != null || !forView) {
|
|
map['startTime'] = null;
|
|
}
|
|
if (startPositionLat != null || !forView) {
|
|
map['startPositionLat'] = startPositionLat;
|
|
}
|
|
if (startPositionLong != null || !forView) {
|
|
map['startPositionLong'] = startPositionLong;
|
|
}
|
|
if (endPositionLat != null || !forView) {
|
|
map['endPositionLat'] = endPositionLat;
|
|
}
|
|
if (endPositionLong != null || !forView) {
|
|
map['endPositionLong'] = endPositionLong;
|
|
}
|
|
if (avgHeartRate != null || !forView) {
|
|
map['avgHeartRate'] = avgHeartRate;
|
|
}
|
|
if (maxHeartRate != null || !forView) {
|
|
map['maxHeartRate'] = maxHeartRate;
|
|
}
|
|
if (avgRunningCadence != null || !forView) {
|
|
map['avgRunningCadence'] = avgRunningCadence;
|
|
}
|
|
if (event != null || !forView) {
|
|
map['event'] = event;
|
|
}
|
|
if (eventType != null || !forView) {
|
|
map['eventType'] = eventType;
|
|
}
|
|
if (eventGroup != null || !forView) {
|
|
map['eventGroup'] = eventGroup;
|
|
}
|
|
if (sport != null || !forView) {
|
|
map['sport'] = sport;
|
|
}
|
|
if (subSport != null || !forView) {
|
|
map['subSport'] = subSport;
|
|
}
|
|
if (avgVerticalOscillation != null || !forView) {
|
|
map['avgVerticalOscillation'] = avgVerticalOscillation;
|
|
}
|
|
if (totalElapsedTime != null || !forView) {
|
|
map['totalElapsedTime'] = totalElapsedTime;
|
|
}
|
|
if (totalTimerTime != null || !forView) {
|
|
map['totalTimerTime'] = totalTimerTime;
|
|
}
|
|
if (totalDistance != null || !forView) {
|
|
map['totalDistance'] = totalDistance;
|
|
}
|
|
if (totalStrides != null || !forView) {
|
|
map['totalStrides'] = totalStrides;
|
|
}
|
|
if (totalCalories != null || !forView) {
|
|
map['totalCalories'] = totalCalories;
|
|
}
|
|
if (avgSpeed != null || !forView) {
|
|
map['avgSpeed'] = avgSpeed;
|
|
}
|
|
if (avgSpeedByMeasurements != null || !forView) {
|
|
map['avgSpeedByMeasurements'] = avgSpeedByMeasurements;
|
|
}
|
|
if (avgSpeedBySpeed != null || !forView) {
|
|
map['avgSpeedBySpeed'] = avgSpeedBySpeed;
|
|
}
|
|
if (avgSpeedByDistance != null || !forView) {
|
|
map['avgSpeedByDistance'] = avgSpeedByDistance;
|
|
}
|
|
if (sdevSpeed != null || !forView) {
|
|
map['sdevSpeed'] = sdevSpeed;
|
|
}
|
|
if (sdevPace != null || !forView) {
|
|
map['sdevPace'] = sdevPace;
|
|
}
|
|
if (minSpeed != null || !forView) {
|
|
map['minSpeed'] = minSpeed;
|
|
}
|
|
if (maxSpeed != null || !forView) {
|
|
map['maxSpeed'] = maxSpeed;
|
|
}
|
|
if (totalAscent != null || !forView) {
|
|
map['totalAscent'] = totalAscent;
|
|
}
|
|
if (totalDescent != null || !forView) {
|
|
map['totalDescent'] = totalDescent;
|
|
}
|
|
if (avgStanceTimePercent != null || !forView) {
|
|
map['avgStanceTimePercent'] = avgStanceTimePercent;
|
|
}
|
|
if (avgStanceTime != null || !forView) {
|
|
map['avgStanceTime'] = avgStanceTime;
|
|
}
|
|
if (maxRunningCadence != null || !forView) {
|
|
map['maxRunningCadence'] = maxRunningCadence;
|
|
}
|
|
if (intensity != null || !forView) {
|
|
map['intensity'] = intensity;
|
|
}
|
|
if (lapTrigger != null || !forView) {
|
|
map['lapTrigger'] = lapTrigger;
|
|
}
|
|
if (avgTemperature != null || !forView) {
|
|
map['avgTemperature'] = avgTemperature;
|
|
}
|
|
if (maxTemperature != null || !forView) {
|
|
map['maxTemperature'] = maxTemperature;
|
|
}
|
|
if (avgFractionalCadence != null || !forView) {
|
|
map['avgFractionalCadence'] = avgFractionalCadence;
|
|
}
|
|
if (maxFractionalCadence != null || !forView) {
|
|
map['maxFractionalCadence'] = maxFractionalCadence;
|
|
}
|
|
if (totalFractionalCycles != null || !forView) {
|
|
map['totalFractionalCycles'] = totalFractionalCycles;
|
|
}
|
|
if (avgPower != null || !forView) {
|
|
map['avgPower'] = avgPower;
|
|
}
|
|
if (minPower != null || !forView) {
|
|
map['minPower'] = minPower;
|
|
}
|
|
if (maxPower != null || !forView) {
|
|
map['maxPower'] = maxPower;
|
|
}
|
|
if (sdevPower != null || !forView) {
|
|
map['sdevPower'] = sdevPower;
|
|
}
|
|
if (minHeartRate != null || !forView) {
|
|
map['minHeartRate'] = minHeartRate;
|
|
}
|
|
if (sdevHeartRate != null || !forView) {
|
|
map['sdevHeartRate'] = sdevHeartRate;
|
|
}
|
|
if (avgGroundTime != null || !forView) {
|
|
map['avgGroundTime'] = avgGroundTime;
|
|
}
|
|
if (sdevGroundTime != null || !forView) {
|
|
map['sdevGroundTime'] = sdevGroundTime;
|
|
}
|
|
if (avgLegSpringStiffness != null || !forView) {
|
|
map['avgLegSpringStiffness'] = avgLegSpringStiffness;
|
|
}
|
|
if (sdevLegSpringStiffness != null || !forView) {
|
|
map['sdevLegSpringStiffness'] = sdevLegSpringStiffness;
|
|
}
|
|
if (avgFormPower != null || !forView) {
|
|
map['avgFormPower'] = avgFormPower;
|
|
}
|
|
if (sdevFormPower != null || !forView) {
|
|
map['sdevFormPower'] = sdevFormPower;
|
|
}
|
|
if (avgStrydCadence != null || !forView) {
|
|
map['avgStrydCadence'] = avgStrydCadence;
|
|
}
|
|
if (sdevStrydCadence != null || !forView) {
|
|
map['sdevStrydCadence'] = sdevStrydCadence;
|
|
}
|
|
if (sdevVerticalOscillation != null || !forView) {
|
|
map['sdevVerticalOscillation'] = sdevVerticalOscillation;
|
|
}
|
|
if (avgPowerRatio != null || !forView) {
|
|
map['avgPowerRatio'] = avgPowerRatio;
|
|
}
|
|
if (sdevPowerRatio != null || !forView) {
|
|
map['sdevPowerRatio'] = sdevPowerRatio;
|
|
}
|
|
if (avgStrideRatio != null || !forView) {
|
|
map['avgStrideRatio'] = avgStrideRatio;
|
|
}
|
|
if (sdevStrideRatio != null || !forView) {
|
|
map['sdevStrideRatio'] = sdevStrideRatio;
|
|
}
|
|
if (cp != null || !forView) {
|
|
map['cp'] = cp;
|
|
}
|
|
if (ftp != null || !forView) {
|
|
map['ftp'] = ftp;
|
|
}
|
|
if (movingTime != null || !forView) {
|
|
map['movingTime'] = movingTime;
|
|
}
|
|
if (activitiesId != null) {
|
|
map['activitiesId'] = forView
|
|
? plDbActivity == null
|
|
? activitiesId
|
|
: plDbActivity!.state
|
|
: activitiesId;
|
|
} else if (activitiesId != null || !forView) {
|
|
map['activitiesId'] = null;
|
|
}
|
|
|
|
// COLLECTIONS (DbLap)
|
|
if (!forQuery) {
|
|
map['DbEvents'] = await getDbEvents()!.toMapList();
|
|
}
|
|
if (!forQuery) {
|
|
map['DbLapTaggings'] = await getDbLapTaggings()!.toMapList();
|
|
}
|
|
// END COLLECTIONS (DbLap)
|
|
|
|
return map;
|
|
}
|
|
|
|
/// This method returns Json String [DbLap]
|
|
@override
|
|
String toJson() {
|
|
return json.encode(toMap(forJson: true));
|
|
}
|
|
|
|
/// This method returns Json String [DbLap]
|
|
@override
|
|
Future<String> toJsonWithChilds() async {
|
|
return json.encode(await toMapWithChildren(false, true));
|
|
}
|
|
|
|
@override
|
|
List<dynamic> toArgs() {
|
|
return [
|
|
timeStamp != null ? timeStamp!.millisecondsSinceEpoch : null,
|
|
startTime != null ? startTime!.millisecondsSinceEpoch : null,
|
|
startPositionLat,
|
|
startPositionLong,
|
|
endPositionLat,
|
|
endPositionLong,
|
|
avgHeartRate,
|
|
maxHeartRate,
|
|
avgRunningCadence,
|
|
event,
|
|
eventType,
|
|
eventGroup,
|
|
sport,
|
|
subSport,
|
|
avgVerticalOscillation,
|
|
totalElapsedTime,
|
|
totalTimerTime,
|
|
totalDistance,
|
|
totalStrides,
|
|
totalCalories,
|
|
avgSpeed,
|
|
avgSpeedByMeasurements,
|
|
avgSpeedBySpeed,
|
|
avgSpeedByDistance,
|
|
sdevSpeed,
|
|
sdevPace,
|
|
minSpeed,
|
|
maxSpeed,
|
|
totalAscent,
|
|
totalDescent,
|
|
avgStanceTimePercent,
|
|
avgStanceTime,
|
|
maxRunningCadence,
|
|
intensity,
|
|
lapTrigger,
|
|
avgTemperature,
|
|
maxTemperature,
|
|
avgFractionalCadence,
|
|
maxFractionalCadence,
|
|
totalFractionalCycles,
|
|
avgPower,
|
|
minPower,
|
|
maxPower,
|
|
sdevPower,
|
|
minHeartRate,
|
|
sdevHeartRate,
|
|
avgGroundTime,
|
|
sdevGroundTime,
|
|
avgLegSpringStiffness,
|
|
sdevLegSpringStiffness,
|
|
avgFormPower,
|
|
sdevFormPower,
|
|
avgStrydCadence,
|
|
sdevStrydCadence,
|
|
sdevVerticalOscillation,
|
|
avgPowerRatio,
|
|
sdevPowerRatio,
|
|
avgStrideRatio,
|
|
sdevStrideRatio,
|
|
cp,
|
|
ftp,
|
|
movingTime,
|
|
activitiesId
|
|
];
|
|
}
|
|
|
|
@override
|
|
List<dynamic> toArgsWithIds() {
|
|
return [
|
|
id,
|
|
timeStamp != null ? timeStamp!.millisecondsSinceEpoch : null,
|
|
startTime != null ? startTime!.millisecondsSinceEpoch : null,
|
|
startPositionLat,
|
|
startPositionLong,
|
|
endPositionLat,
|
|
endPositionLong,
|
|
avgHeartRate,
|
|
maxHeartRate,
|
|
avgRunningCadence,
|
|
event,
|
|
eventType,
|
|
eventGroup,
|
|
sport,
|
|
subSport,
|
|
avgVerticalOscillation,
|
|
totalElapsedTime,
|
|
totalTimerTime,
|
|
totalDistance,
|
|
totalStrides,
|
|
totalCalories,
|
|
avgSpeed,
|
|
avgSpeedByMeasurements,
|
|
avgSpeedBySpeed,
|
|
avgSpeedByDistance,
|
|
sdevSpeed,
|
|
sdevPace,
|
|
minSpeed,
|
|
maxSpeed,
|
|
totalAscent,
|
|
totalDescent,
|
|
avgStanceTimePercent,
|
|
avgStanceTime,
|
|
maxRunningCadence,
|
|
intensity,
|
|
lapTrigger,
|
|
avgTemperature,
|
|
maxTemperature,
|
|
avgFractionalCadence,
|
|
maxFractionalCadence,
|
|
totalFractionalCycles,
|
|
avgPower,
|
|
minPower,
|
|
maxPower,
|
|
sdevPower,
|
|
minHeartRate,
|
|
sdevHeartRate,
|
|
avgGroundTime,
|
|
sdevGroundTime,
|
|
avgLegSpringStiffness,
|
|
sdevLegSpringStiffness,
|
|
avgFormPower,
|
|
sdevFormPower,
|
|
avgStrydCadence,
|
|
sdevStrydCadence,
|
|
sdevVerticalOscillation,
|
|
avgPowerRatio,
|
|
sdevPowerRatio,
|
|
avgStrideRatio,
|
|
sdevStrideRatio,
|
|
cp,
|
|
ftp,
|
|
movingTime,
|
|
activitiesId
|
|
];
|
|
}
|
|
|
|
static Future<List<DbLap>?> fromWebUrl(Uri uri,
|
|
{Map<String, String>? headers}) async {
|
|
try {
|
|
final response = await http.get(uri, headers: headers);
|
|
return await fromJson(response.body);
|
|
} catch (e) {
|
|
debugPrint(
|
|
'SQFENTITY ERROR DbLap.fromWebUrl: ErrorMessage: ${e.toString()}');
|
|
return null;
|
|
}
|
|
}
|
|
|
|
Future<http.Response> postUrl(Uri uri, {Map<String, String>? headers}) {
|
|
return http.post(uri, headers: headers, body: toJson());
|
|
}
|
|
|
|
static Future<List<DbLap>> fromJson(String jsonBody) async {
|
|
final Iterable list = await json.decode(jsonBody) as Iterable;
|
|
var objList = <DbLap>[];
|
|
try {
|
|
objList = list
|
|
.map((dblap) => DbLap.fromMap(dblap as Map<String, dynamic>))
|
|
.toList();
|
|
} catch (e) {
|
|
debugPrint(
|
|
'SQFENTITY ERROR DbLap.fromJson: ErrorMessage: ${e.toString()}');
|
|
}
|
|
return objList;
|
|
}
|
|
|
|
  /// Builds a list of [DbLap] objects from raw row maps in [data].
  ///
  /// When [preload] is true, the child collections (plDbEvents,
  /// plDbLapTaggings) are loaded for each row; [preloadFields] restricts
  /// which ones. When [preload] or [loadParents] is true the parent
  /// [DbActivity] is loaded as well. [setDefaultValues] is forwarded to
  /// DbLap.fromMap.
  static Future<List<DbLap>> fromMapList(List<dynamic> data,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields,
      bool setDefaultValues = true}) async {
    final List<DbLap> objList = <DbLap>[];
    loadedFields = loadedFields ?? [];
    for (final map in data) {
      final obj = DbLap.fromMap(map as Map<String, dynamic>,
          setDefaultValues: setDefaultValues);
      // final List<String> _loadedFields = List<String>.from(loadedFields);

      // RELATIONSHIPS PRELOAD CHILD
      if (preload) {
        loadedFields = loadedFields ?? [];
        // Load child DbEvent rows unless preloadFields excludes them.
        if (/*!_loadedfields!.contains('laps.plDbEvents') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbEvents'))) {
          /*_loadedfields!.add('laps.plDbEvents'); */ obj.plDbEvents =
              obj.plDbEvents ??
                  await obj.getDbEvents()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        // Load child DbLapTagging rows unless preloadFields excludes them.
        if (/*!_loadedfields!.contains('laps.plDbLapTaggings') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbLapTaggings'))) {
          /*_loadedfields!.add('laps.plDbLapTaggings'); */ obj.plDbLapTaggings =
              obj.plDbLapTaggings ??
                  await obj.getDbLapTaggings()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

      // RELATIONSHIPS PRELOAD
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        // Load the parent DbActivity unless preloadFields excludes it.
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbActivity'))) {
          obj.plDbActivity = obj.plDbActivity ??
              await obj.getDbActivity(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD

      objList.add(obj);
    }
    return objList;
  }
|
|
|
|
  /// returns DbLap by ID if exist, otherwise returns null
  /// Primary Keys: int? id
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: getById(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: getById(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns>returns [DbLap] if exist, otherwise returns null
  Future<DbLap?> getById(int? id,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    if (id == null) {
      return null;
    }
    DbLap? obj;
    final data = await _mnDbLap.getById([id]);
    if (data.length != 0) {
      obj = DbLap.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD CHILD
      if (preload) {
        loadedFields = loadedFields ?? [];
        // Load child DbEvent rows unless preloadFields excludes them.
        if (/*!_loadedfields!.contains('laps.plDbEvents') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbEvents'))) {
          /*_loadedfields!.add('laps.plDbEvents'); */ obj.plDbEvents =
              obj.plDbEvents ??
                  await obj.getDbEvents()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        // Load child DbLapTagging rows unless preloadFields excludes them.
        if (/*!_loadedfields!.contains('laps.plDbLapTaggings') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbLapTaggings'))) {
          /*_loadedfields!.add('laps.plDbLapTaggings'); */ obj.plDbLapTaggings =
              obj.plDbLapTaggings ??
                  await obj.getDbLapTaggings()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

      // RELATIONSHIPS PRELOAD
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        // Load the parent DbActivity unless preloadFields excludes it.
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbActivity'))) {
          obj.plDbActivity = obj.plDbActivity ??
              await obj.getDbActivity(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }
|
|
|
|
/// Saves the (DbLap) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
|
|
/// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
|
|
/// <returns>Returns id
|
|
@override
|
|
Future<int?> save({bool ignoreBatch = true}) async {
|
|
if (id == null || id == 0) {
|
|
id = await _mnDbLap.insert(this, ignoreBatch);
|
|
} else {
|
|
await _mnDbLap.update(this);
|
|
}
|
|
|
|
return id;
|
|
}
|
|
|
|
/// Saves the (DbLap) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
|
|
/// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
|
|
/// <returns>Returns id
|
|
@override
|
|
Future<int?> saveOrThrow({bool ignoreBatch = true}) async {
|
|
if (id == null || id == 0) {
|
|
id = await _mnDbLap.insertOrThrow(this, ignoreBatch);
|
|
|
|
isInsert = true;
|
|
} else {
|
|
// id= await _upsert(); // removed in sqfentity_gen 1.3.0+6
|
|
await _mnDbLap.updateOrThrow(this);
|
|
}
|
|
|
|
return id;
|
|
}
|
|
|
|
  /// saveAs DbLap. Returns a new Primary Key value of DbLap

  /// <returns>Returns a new Primary Key value of DbLap
  @override
  Future<int?> saveAs({bool ignoreBatch = true}) async {
    // Clearing the primary key forces save() to insert a copy as a new row.
    id = null;

    return save(ignoreBatch: ignoreBatch);
  }
|
|
|
|
/// saveAll method saves the sent List<DbLap> as a bulk in one transaction
|
|
/// Returns a <List<BoolResult>>
|
|
static Future<List<dynamic>> saveAll(List<DbLap> dblaps,
|
|
{bool? exclusive, bool? noResult, bool? continueOnError}) async {
|
|
List<dynamic>? result = [];
|
|
// If there is no open transaction, start one
|
|
final isStartedBatch = await DbEncrateia().batchStart();
|
|
for (final obj in dblaps) {
|
|
await obj.save(ignoreBatch: false);
|
|
}
|
|
if (!isStartedBatch) {
|
|
result = await DbEncrateia().batchCommit(
|
|
exclusive: exclusive,
|
|
noResult: noResult,
|
|
continueOnError: continueOnError);
|
|
for (int i = 0; i < dblaps.length; i++) {
|
|
if (dblaps[i].id == null) {
|
|
dblaps[i].id = result![i] as int;
|
|
}
|
|
}
|
|
}
|
|
return result!;
|
|
}
|
|
|
|
/// Updates if the record exists, otherwise adds a new row
|
|
/// <returns>Returns id
|
|
@override
|
|
Future<int?> upsert({bool ignoreBatch = true}) async {
|
|
try {
|
|
final result = await _mnDbLap.rawInsert(
|
|
'INSERT OR REPLACE INTO laps (id, timeStamp, startTime, startPositionLat, startPositionLong, endPositionLat, endPositionLong, avgHeartRate, maxHeartRate, avgRunningCadence, event, eventType, eventGroup, sport, subSport, avgVerticalOscillation, totalElapsedTime, totalTimerTime, totalDistance, totalStrides, totalCalories, avgSpeed, avgSpeedByMeasurements, avgSpeedBySpeed, avgSpeedByDistance, sdevSpeed, sdevPace, minSpeed, maxSpeed, totalAscent, totalDescent, avgStanceTimePercent, avgStanceTime, maxRunningCadence, intensity, lapTrigger, avgTemperature, maxTemperature, avgFractionalCadence, maxFractionalCadence, totalFractionalCycles, avgPower, minPower, maxPower, sdevPower, minHeartRate, sdevHeartRate, avgGroundTime, sdevGroundTime, avgLegSpringStiffness, sdevLegSpringStiffness, avgFormPower, sdevFormPower, avgStrydCadence, sdevStrydCadence, sdevVerticalOscillation, avgPowerRatio, sdevPowerRatio, avgStrideRatio, sdevStrideRatio, cp, ftp, movingTime, activitiesId) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)',
|
|
[
|
|
id,
|
|
timeStamp != null ? timeStamp!.millisecondsSinceEpoch : null,
|
|
startTime != null ? startTime!.millisecondsSinceEpoch : null,
|
|
startPositionLat,
|
|
startPositionLong,
|
|
endPositionLat,
|
|
endPositionLong,
|
|
avgHeartRate,
|
|
maxHeartRate,
|
|
avgRunningCadence,
|
|
event,
|
|
eventType,
|
|
eventGroup,
|
|
sport,
|
|
subSport,
|
|
avgVerticalOscillation,
|
|
totalElapsedTime,
|
|
totalTimerTime,
|
|
totalDistance,
|
|
totalStrides,
|
|
totalCalories,
|
|
avgSpeed,
|
|
avgSpeedByMeasurements,
|
|
avgSpeedBySpeed,
|
|
avgSpeedByDistance,
|
|
sdevSpeed,
|
|
sdevPace,
|
|
minSpeed,
|
|
maxSpeed,
|
|
totalAscent,
|
|
totalDescent,
|
|
avgStanceTimePercent,
|
|
avgStanceTime,
|
|
maxRunningCadence,
|
|
intensity,
|
|
lapTrigger,
|
|
avgTemperature,
|
|
maxTemperature,
|
|
avgFractionalCadence,
|
|
maxFractionalCadence,
|
|
totalFractionalCycles,
|
|
avgPower,
|
|
minPower,
|
|
maxPower,
|
|
sdevPower,
|
|
minHeartRate,
|
|
sdevHeartRate,
|
|
avgGroundTime,
|
|
sdevGroundTime,
|
|
avgLegSpringStiffness,
|
|
sdevLegSpringStiffness,
|
|
avgFormPower,
|
|
sdevFormPower,
|
|
avgStrydCadence,
|
|
sdevStrydCadence,
|
|
sdevVerticalOscillation,
|
|
avgPowerRatio,
|
|
sdevPowerRatio,
|
|
avgStrideRatio,
|
|
sdevStrideRatio,
|
|
cp,
|
|
ftp,
|
|
movingTime,
|
|
activitiesId
|
|
],
|
|
ignoreBatch);
|
|
if (result! > 0) {
|
|
saveResult = BoolResult(
|
|
success: true, successMessage: 'DbLap id=$id updated successfully');
|
|
} else {
|
|
saveResult = BoolResult(
|
|
success: false, errorMessage: 'DbLap id=$id did not update');
|
|
}
|
|
return id;
|
|
} catch (e) {
|
|
saveResult = BoolResult(
|
|
success: false,
|
|
errorMessage: 'DbLap Save failed. Error: ${e.toString()}');
|
|
return null;
|
|
}
|
|
}
|
|
|
|
  /// Inserts or replaces the sent List<DbLap> as a bulk in one transaction.
  /// upsertAll() is faster than saveAll(); use it only when you are sure every
  /// primary key is greater than zero (rows are written via INSERT OR REPLACE,
  /// so existing ids are overwritten and zero/null ids would collide).
  /// Returns a BoolCommitResult describing the outcome of the batch.
  @override
  Future<BoolCommitResult> upsertAll(List<DbLap> dblaps,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    // The column list below must match the VALUES placeholder count exactly
    // (64 columns, 64 '?') — it is generated from the table definition.
    final results = await _mnDbLap.rawInsertAll(
        'INSERT OR REPLACE INTO laps (id, timeStamp, startTime, startPositionLat, startPositionLong, endPositionLat, endPositionLong, avgHeartRate, maxHeartRate, avgRunningCadence, event, eventType, eventGroup, sport, subSport, avgVerticalOscillation, totalElapsedTime, totalTimerTime, totalDistance, totalStrides, totalCalories, avgSpeed, avgSpeedByMeasurements, avgSpeedBySpeed, avgSpeedByDistance, sdevSpeed, sdevPace, minSpeed, maxSpeed, totalAscent, totalDescent, avgStanceTimePercent, avgStanceTime, maxRunningCadence, intensity, lapTrigger, avgTemperature, maxTemperature, avgFractionalCadence, maxFractionalCadence, totalFractionalCycles, avgPower, minPower, maxPower, sdevPower, minHeartRate, sdevHeartRate, avgGroundTime, sdevGroundTime, avgLegSpringStiffness, sdevLegSpringStiffness, avgFormPower, sdevFormPower, avgStrydCadence, sdevStrydCadence, sdevVerticalOscillation, avgPowerRatio, sdevPowerRatio, avgStrideRatio, sdevStrideRatio, cp, ftp, movingTime, activitiesId) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)',
        dblaps,
        exclusive: exclusive,
        noResult: noResult,
        continueOnError: continueOnError);
    return results;
  }
|
|
|
|
  /// Deletes this DbLap row.
  ///
  /// Child rows are removed first (cascade): DbEvent and DbLapTagging rows
  /// whose `lapsId` matches this lap's id. If either child delete fails, the
  /// failing result is returned and the lap itself is left untouched.
  ///
  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    // NOTE: 'SQFENTITIY' typo below is in the generated runtime string; kept as-is.
    debugPrint('SQFENTITIY: delete DbLap invoked (id=$id)');
    var result = BoolResult(success: false);
    {
      // Cascade: delete dependent DbEvent rows first.
      result =
          await DbEvent().select().lapsId.equals(id).and.delete(hardDelete);
    }
    if (!result.success) {
      return result;
    }
    {
      // Cascade: delete dependent DbLapTagging rows.
      result = await DbLapTagging()
          .select()
          .lapsId
          .equals(id)
          .and
          .delete(hardDelete);
    }
    if (!result.success) {
      return result;
    }
    if (!_softDeleteActivated || hardDelete) {
      // Hard delete: physically remove the row.
      return _mnDbLap
          .delete(QueryParams(whereString: 'id=?', whereArguments: [id]));
    } else {
      // Soft delete: flag the row instead of removing it.
      return _mnDbLap.updateBatch(
          QueryParams(whereString: 'id=?', whereArguments: [id]),
          {'isDeleted': 1});
    }
  }
|
|
|
|
@override
|
|
Future<BoolResult> recover([bool recoverChilds = true]) {
|
|
// not implemented because:
|
|
final msg =
|
|
'set useSoftDeleting:true in the table definition of [DbLap] to use this feature';
|
|
throw UnimplementedError(msg);
|
|
}
|
|
|
|
@override
|
|
DbLapFilterBuilder select(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
return DbLapFilterBuilder(this, getIsDeleted)
|
|
..qparams.selectColumns = columnsToSelect;
|
|
}
|
|
|
|
@override
|
|
DbLapFilterBuilder distinct(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
return DbLapFilterBuilder(this, getIsDeleted)
|
|
..qparams.selectColumns = columnsToSelect
|
|
..qparams.distinct = true;
|
|
}
|
|
|
|
void _setDefaultValues() {
|
|
activitiesId = activitiesId ?? 0;
|
|
}
|
|
|
|
@override
|
|
void rollbackPk() {
|
|
if (isInsert == true) {
|
|
id = null;
|
|
}
|
|
}
|
|
|
|
// END METHODS
|
|
// BEGIN CUSTOM CODE
|
|
/*
|
|
you can define customCode property of your SqfEntityTable constant. For example:
|
|
const tablePerson = SqfEntityTable(
|
|
tableName: 'person',
|
|
primaryKeyName: 'id',
|
|
primaryKeyType: PrimaryKeyType.integer_auto_incremental,
|
|
fields: [
|
|
SqfEntityField('firstName', DbType.text),
|
|
SqfEntityField('lastName', DbType.text),
|
|
],
|
|
customCode: '''
|
|
String fullName()
|
|
{
|
|
return '$firstName $lastName';
|
|
}
|
|
''');
|
|
*/
|
|
// END CUSTOM CODE
|
|
}
|
|
// endregion dblap
|
|
|
|
// region DbLapField
|
|
/// Fluent filter terminal bound to a single `laps` column.
///
/// Every override delegates to [FilterBase] and re-casts the result so the
/// call chain keeps returning the strongly typed [DbLapFilterBuilder].
class DbLapField extends FilterBase {
  DbLapField(DbLapFilterBuilder dblapFB) : super(dblapFB);

  @override
  DbLapFilterBuilder equals(dynamic pValue) =>
      super.equals(pValue) as DbLapFilterBuilder;

  @override
  DbLapFilterBuilder equalsOrNull(dynamic pValue) =>
      super.equalsOrNull(pValue) as DbLapFilterBuilder;

  @override
  DbLapFilterBuilder isNull() => super.isNull() as DbLapFilterBuilder;

  @override
  DbLapFilterBuilder contains(dynamic pValue) =>
      super.contains(pValue) as DbLapFilterBuilder;

  @override
  DbLapFilterBuilder startsWith(dynamic pValue) =>
      super.startsWith(pValue) as DbLapFilterBuilder;

  @override
  DbLapFilterBuilder endsWith(dynamic pValue) =>
      super.endsWith(pValue) as DbLapFilterBuilder;

  @override
  DbLapFilterBuilder between(dynamic pFirst, dynamic pLast) =>
      super.between(pFirst, pLast) as DbLapFilterBuilder;

  @override
  DbLapFilterBuilder greaterThan(dynamic pValue) =>
      super.greaterThan(pValue) as DbLapFilterBuilder;

  @override
  DbLapFilterBuilder lessThan(dynamic pValue) =>
      super.lessThan(pValue) as DbLapFilterBuilder;

  @override
  DbLapFilterBuilder greaterThanOrEquals(dynamic pValue) =>
      super.greaterThanOrEquals(pValue) as DbLapFilterBuilder;

  @override
  DbLapFilterBuilder lessThanOrEquals(dynamic pValue) =>
      super.lessThanOrEquals(pValue) as DbLapFilterBuilder;

  @override
  DbLapFilterBuilder inValues(dynamic pValue) =>
      super.inValues(pValue) as DbLapFilterBuilder;

  /// Negates the next comparison (SQL NOT).
  @override
  DbLapField get not => super.not as DbLapField;
}
|
|
// endregion DbLapField
|
|
|
|
// region DbLapFilterBuilder
|
|
/// Fluent query builder for the `laps` table.
///
/// Obtained via DbLap().select() / distinct(); chain column getters and
/// comparison operators, then finish with toList()/toSingle()/delete()/update().
class DbLapFilterBuilder extends ConjunctionBase {
  DbLapFilterBuilder(DbLap obj, bool? getIsDeleted) : super(obj, getIsDeleted) {
    // Share the owning object's table manager and soft-delete flag so the
    // queries built here run against the same configuration.
    _mnDbLap = obj._mnDbLap;
    _softDeleteActivated = obj.softDeleteActivated;
  }

  // Whether soft delete is enabled for `laps` (copied from the owning DbLap).
  bool _softDeleteActivated = false;
  // Data-access manager for the `laps` table (copied from the owning DbLap).
  DbLapManager? _mnDbLap;
|
|
|
|
  /// Appends the SQL keyword 'AND' to the filter chain.
  @override
  DbLapFilterBuilder get and {
    super.and;
    return this;
  }

  /// Appends the SQL keyword 'OR' to the filter chain.
  @override
  DbLapFilterBuilder get or {
    super.or;
    return this;
  }

  /// Opens a parenthesis group in the WHERE clause.
  @override
  DbLapFilterBuilder get startBlock {
    super.startBlock;
    return this;
  }

  /// Appends a raw WHERE criteria (without the 'where' keyword),
  /// e.g. "field1 like 'test%' and field2 = 3"; `?` placeholders are bound
  /// from [parameterValue].
  @override
  DbLapFilterBuilder where(String? whereCriteria, {dynamic parameterValue}) {
    super.where(whereCriteria, parameterValue: parameterValue);
    return this;
  }

  /// Pagination: [page] = page number (1-based), [pagesize] = rows per page.
  @override
  DbLapFilterBuilder page(int page, int pagesize) {
    super.page(page, pagesize);
    return this;
  }

  /// Limits the result set to the first [count] rows (SQL LIMIT).
  @override
  DbLapFilterBuilder top(int count) {
    super.top(count);
    return this;
  }

  /// Closes the parenthesis group opened by [startBlock].
  @override
  DbLapFilterBuilder get endBlock {
    super.endBlock;
    return this;
  }

  /// ORDER BY ascending. [argFields] may be a String or a List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbLapFilterBuilder orderBy(dynamic argFields) {
    super.orderBy(argFields);
    return this;
  }

  /// ORDER BY descending. [argFields] may be a String or a List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbLapFilterBuilder orderByDesc(dynamic argFields) {
    super.orderByDesc(argFields);
    return this;
  }

  /// GROUP BY. [argFields] may be a String or a List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbLapFilterBuilder groupBy(dynamic argFields) {
    super.groupBy(argFields);
    return this;
  }

  /// HAVING clause. [argFields] may be a String or a List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbLapFilterBuilder having(dynamic argFields) {
    super.having(argFields);
    return this;
  }
|
|
|
|
  /// Builds a [DbLapField] bound to column [colName] with SQL type [dbtype].
  ///
  /// NOTE(review): the [field] parameter is ignored by this generated code —
  /// a fresh DbLapField is created on every call instead of reusing the
  /// cached instance passed in. The signature is kept for generator
  /// compatibility.
  DbLapField _setField(DbLapField? field, String colName, DbType dbtype) {
    return DbLapField(this)
      ..param = DbParameter(
          dbType: dbtype, columnName: colName, wStartBlock: openedBlock);
  }
|
|
|
|
  // -------------------------------------------------------------------------
  // Typed column accessors: each getter returns a DbLapField bound to the
  // corresponding `laps` column, ready for fluent filtering, e.g.
  //   DbLap().select().avgSpeed.greaterThan(3).toList()
  // The cached backing field is reassigned on every access (see _setField).
  // -------------------------------------------------------------------------
  DbLapField? _id;
  DbLapField get id {
    return _id = _setField(_id, 'id', DbType.integer);
  }

  DbLapField? _timeStamp;
  DbLapField get timeStamp {
    return _timeStamp = _setField(_timeStamp, 'timeStamp', DbType.datetime);
  }

  DbLapField? _startTime;
  DbLapField get startTime {
    return _startTime = _setField(_startTime, 'startTime', DbType.datetime);
  }

  DbLapField? _startPositionLat;
  DbLapField get startPositionLat {
    return _startPositionLat =
        _setField(_startPositionLat, 'startPositionLat', DbType.real);
  }

  DbLapField? _startPositionLong;
  DbLapField get startPositionLong {
    return _startPositionLong =
        _setField(_startPositionLong, 'startPositionLong', DbType.real);
  }

  DbLapField? _endPositionLat;
  DbLapField get endPositionLat {
    return _endPositionLat =
        _setField(_endPositionLat, 'endPositionLat', DbType.real);
  }

  DbLapField? _endPositionLong;
  DbLapField get endPositionLong {
    return _endPositionLong =
        _setField(_endPositionLong, 'endPositionLong', DbType.real);
  }

  DbLapField? _avgHeartRate;
  DbLapField get avgHeartRate {
    return _avgHeartRate =
        _setField(_avgHeartRate, 'avgHeartRate', DbType.integer);
  }

  DbLapField? _maxHeartRate;
  DbLapField get maxHeartRate {
    return _maxHeartRate =
        _setField(_maxHeartRate, 'maxHeartRate', DbType.integer);
  }

  DbLapField? _avgRunningCadence;
  DbLapField get avgRunningCadence {
    return _avgRunningCadence =
        _setField(_avgRunningCadence, 'avgRunningCadence', DbType.real);
  }

  DbLapField? _event;
  DbLapField get event {
    return _event = _setField(_event, 'event', DbType.text);
  }

  DbLapField? _eventType;
  DbLapField get eventType {
    return _eventType = _setField(_eventType, 'eventType', DbType.text);
  }

  DbLapField? _eventGroup;
  DbLapField get eventGroup {
    return _eventGroup = _setField(_eventGroup, 'eventGroup', DbType.integer);
  }

  DbLapField? _sport;
  DbLapField get sport {
    return _sport = _setField(_sport, 'sport', DbType.text);
  }

  DbLapField? _subSport;
  DbLapField get subSport {
    return _subSport = _setField(_subSport, 'subSport', DbType.text);
  }

  DbLapField? _avgVerticalOscillation;
  DbLapField get avgVerticalOscillation {
    return _avgVerticalOscillation = _setField(
        _avgVerticalOscillation, 'avgVerticalOscillation', DbType.real);
  }

  DbLapField? _totalElapsedTime;
  DbLapField get totalElapsedTime {
    return _totalElapsedTime =
        _setField(_totalElapsedTime, 'totalElapsedTime', DbType.integer);
  }

  DbLapField? _totalTimerTime;
  DbLapField get totalTimerTime {
    return _totalTimerTime =
        _setField(_totalTimerTime, 'totalTimerTime', DbType.integer);
  }

  DbLapField? _totalDistance;
  DbLapField get totalDistance {
    return _totalDistance =
        _setField(_totalDistance, 'totalDistance', DbType.integer);
  }

  DbLapField? _totalStrides;
  DbLapField get totalStrides {
    return _totalStrides =
        _setField(_totalStrides, 'totalStrides', DbType.integer);
  }

  DbLapField? _totalCalories;
  DbLapField get totalCalories {
    return _totalCalories =
        _setField(_totalCalories, 'totalCalories', DbType.integer);
  }

  DbLapField? _avgSpeed;
  DbLapField get avgSpeed {
    return _avgSpeed = _setField(_avgSpeed, 'avgSpeed', DbType.real);
  }

  DbLapField? _avgSpeedByMeasurements;
  DbLapField get avgSpeedByMeasurements {
    return _avgSpeedByMeasurements = _setField(
        _avgSpeedByMeasurements, 'avgSpeedByMeasurements', DbType.real);
  }

  DbLapField? _avgSpeedBySpeed;
  DbLapField get avgSpeedBySpeed {
    return _avgSpeedBySpeed =
        _setField(_avgSpeedBySpeed, 'avgSpeedBySpeed', DbType.real);
  }

  DbLapField? _avgSpeedByDistance;
  DbLapField get avgSpeedByDistance {
    return _avgSpeedByDistance =
        _setField(_avgSpeedByDistance, 'avgSpeedByDistance', DbType.real);
  }

  DbLapField? _sdevSpeed;
  DbLapField get sdevSpeed {
    return _sdevSpeed = _setField(_sdevSpeed, 'sdevSpeed', DbType.real);
  }

  DbLapField? _sdevPace;
  DbLapField get sdevPace {
    return _sdevPace = _setField(_sdevPace, 'sdevPace', DbType.real);
  }

  DbLapField? _minSpeed;
  DbLapField get minSpeed {
    return _minSpeed = _setField(_minSpeed, 'minSpeed', DbType.real);
  }

  DbLapField? _maxSpeed;
  DbLapField get maxSpeed {
    return _maxSpeed = _setField(_maxSpeed, 'maxSpeed', DbType.real);
  }

  DbLapField? _totalAscent;
  DbLapField get totalAscent {
    return _totalAscent =
        _setField(_totalAscent, 'totalAscent', DbType.integer);
  }

  DbLapField? _totalDescent;
  DbLapField get totalDescent {
    return _totalDescent =
        _setField(_totalDescent, 'totalDescent', DbType.integer);
  }

  DbLapField? _avgStanceTimePercent;
  DbLapField get avgStanceTimePercent {
    return _avgStanceTimePercent =
        _setField(_avgStanceTimePercent, 'avgStanceTimePercent', DbType.real);
  }

  DbLapField? _avgStanceTime;
  DbLapField get avgStanceTime {
    return _avgStanceTime =
        _setField(_avgStanceTime, 'avgStanceTime', DbType.real);
  }

  DbLapField? _maxRunningCadence;
  DbLapField get maxRunningCadence {
    return _maxRunningCadence =
        _setField(_maxRunningCadence, 'maxRunningCadence', DbType.integer);
  }

  DbLapField? _intensity;
  DbLapField get intensity {
    return _intensity = _setField(_intensity, 'intensity', DbType.integer);
  }

  DbLapField? _lapTrigger;
  DbLapField get lapTrigger {
    return _lapTrigger = _setField(_lapTrigger, 'lapTrigger', DbType.text);
  }

  DbLapField? _avgTemperature;
  DbLapField get avgTemperature {
    return _avgTemperature =
        _setField(_avgTemperature, 'avgTemperature', DbType.integer);
  }

  DbLapField? _maxTemperature;
  DbLapField get maxTemperature {
    return _maxTemperature =
        _setField(_maxTemperature, 'maxTemperature', DbType.integer);
  }

  DbLapField? _avgFractionalCadence;
  DbLapField get avgFractionalCadence {
    return _avgFractionalCadence =
        _setField(_avgFractionalCadence, 'avgFractionalCadence', DbType.real);
  }

  DbLapField? _maxFractionalCadence;
  DbLapField get maxFractionalCadence {
    return _maxFractionalCadence =
        _setField(_maxFractionalCadence, 'maxFractionalCadence', DbType.real);
  }

  DbLapField? _totalFractionalCycles;
  DbLapField get totalFractionalCycles {
    return _totalFractionalCycles =
        _setField(_totalFractionalCycles, 'totalFractionalCycles', DbType.real);
  }

  DbLapField? _avgPower;
  DbLapField get avgPower {
    return _avgPower = _setField(_avgPower, 'avgPower', DbType.real);
  }

  DbLapField? _minPower;
  DbLapField get minPower {
    return _minPower = _setField(_minPower, 'minPower', DbType.integer);
  }

  DbLapField? _maxPower;
  DbLapField get maxPower {
    return _maxPower = _setField(_maxPower, 'maxPower', DbType.integer);
  }

  DbLapField? _sdevPower;
  DbLapField get sdevPower {
    return _sdevPower = _setField(_sdevPower, 'sdevPower', DbType.real);
  }

  DbLapField? _minHeartRate;
  DbLapField get minHeartRate {
    return _minHeartRate =
        _setField(_minHeartRate, 'minHeartRate', DbType.integer);
  }

  DbLapField? _sdevHeartRate;
  DbLapField get sdevHeartRate {
    return _sdevHeartRate =
        _setField(_sdevHeartRate, 'sdevHeartRate', DbType.real);
  }

  DbLapField? _avgGroundTime;
  DbLapField get avgGroundTime {
    return _avgGroundTime =
        _setField(_avgGroundTime, 'avgGroundTime', DbType.real);
  }

  DbLapField? _sdevGroundTime;
  DbLapField get sdevGroundTime {
    return _sdevGroundTime =
        _setField(_sdevGroundTime, 'sdevGroundTime', DbType.real);
  }

  DbLapField? _avgLegSpringStiffness;
  DbLapField get avgLegSpringStiffness {
    return _avgLegSpringStiffness =
        _setField(_avgLegSpringStiffness, 'avgLegSpringStiffness', DbType.real);
  }

  DbLapField? _sdevLegSpringStiffness;
  DbLapField get sdevLegSpringStiffness {
    return _sdevLegSpringStiffness = _setField(
        _sdevLegSpringStiffness, 'sdevLegSpringStiffness', DbType.real);
  }

  DbLapField? _avgFormPower;
  DbLapField get avgFormPower {
    return _avgFormPower =
        _setField(_avgFormPower, 'avgFormPower', DbType.real);
  }

  DbLapField? _sdevFormPower;
  DbLapField get sdevFormPower {
    return _sdevFormPower =
        _setField(_sdevFormPower, 'sdevFormPower', DbType.real);
  }

  DbLapField? _avgStrydCadence;
  DbLapField get avgStrydCadence {
    return _avgStrydCadence =
        _setField(_avgStrydCadence, 'avgStrydCadence', DbType.real);
  }

  DbLapField? _sdevStrydCadence;
  DbLapField get sdevStrydCadence {
    return _sdevStrydCadence =
        _setField(_sdevStrydCadence, 'sdevStrydCadence', DbType.real);
  }

  DbLapField? _sdevVerticalOscillation;
  DbLapField get sdevVerticalOscillation {
    return _sdevVerticalOscillation = _setField(
        _sdevVerticalOscillation, 'sdevVerticalOscillation', DbType.real);
  }

  DbLapField? _avgPowerRatio;
  DbLapField get avgPowerRatio {
    return _avgPowerRatio =
        _setField(_avgPowerRatio, 'avgPowerRatio', DbType.real);
  }

  DbLapField? _sdevPowerRatio;
  DbLapField get sdevPowerRatio {
    return _sdevPowerRatio =
        _setField(_sdevPowerRatio, 'sdevPowerRatio', DbType.real);
  }

  DbLapField? _avgStrideRatio;
  DbLapField get avgStrideRatio {
    return _avgStrideRatio =
        _setField(_avgStrideRatio, 'avgStrideRatio', DbType.real);
  }

  DbLapField? _sdevStrideRatio;
  DbLapField get sdevStrideRatio {
    return _sdevStrideRatio =
        _setField(_sdevStrideRatio, 'sdevStrideRatio', DbType.real);
  }

  DbLapField? _cp;
  DbLapField get cp {
    return _cp = _setField(_cp, 'cp', DbType.real);
  }

  DbLapField? _ftp;
  DbLapField get ftp {
    return _ftp = _setField(_ftp, 'ftp', DbType.real);
  }

  DbLapField? _movingTime;
  DbLapField get movingTime {
    return _movingTime = _setField(_movingTime, 'movingTime', DbType.integer);
  }

  DbLapField? _activitiesId;
  DbLapField get activitiesId {
    return _activitiesId =
        _setField(_activitiesId, 'activitiesId', DbType.integer);
  }
|
|
|
|
  /// Deletes List<DbLap> bulk by the current filter.
  ///
  /// Child rows are removed first (DeleteRule.CASCADE): DbEvent and
  /// DbLapTagging rows whose `lapsId` is in the matched lap id set. If a
  /// cascade step fails, its result is returned and the laps themselves are
  /// left untouched.
  ///
  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    buildParameters();
    var r = BoolResult(success: false);
    // Delete sub records where in (DbEvent) according to DeleteRule.CASCADE.
    // toListPrimaryKeySQL(false) reuses the WHERE already built above.
    final idListDbEventBYlapsId = toListPrimaryKeySQL(false);
    final resDbEventBYlapsId = await DbEvent()
        .select()
        .where('lapsId IN (${idListDbEventBYlapsId['sql']})',
            parameterValue: idListDbEventBYlapsId['args'])
        .delete(hardDelete);
    if (!resDbEventBYlapsId.success) {
      return resDbEventBYlapsId;
    }
    // Delete sub records where in (DbLapTagging) according to DeleteRule.CASCADE
    final idListDbLapTaggingBYlapsId = toListPrimaryKeySQL(false);
    final resDbLapTaggingBYlapsId = await DbLapTagging()
        .select()
        .where('lapsId IN (${idListDbLapTaggingBYlapsId['sql']})',
            parameterValue: idListDbLapTaggingBYlapsId['args'])
        .delete(hardDelete);
    if (!resDbLapTaggingBYlapsId.success) {
      return resDbLapTaggingBYlapsId;
    }

    // Soft delete flags rows; hard delete removes them physically.
    if (_softDeleteActivated && !hardDelete) {
      r = await _mnDbLap!.updateBatch(qparams, {'isDeleted': 1});
    } else {
      r = await _mnDbLap!.delete(qparams);
    }
    return r;
  }
|
|
|
|
  /// Bulk-updates all rows matched by the current filter.
  ///
  /// Usage: update({'fieldName': Value})
  /// fieldName must be String. Value is dynamic, it can be any of the (int, bool, String.. )
  @override
  Future<BoolResult> update(Map<String, dynamic> values) {
    buildParameters();
    if (qparams.limit! > 0 || qparams.offset! > 0) {
      // SQLite UPDATE does not support LIMIT/OFFSET directly, so the filter is
      // rewritten as `id IN (SELECT id ... LIMIT/OFFSET ...)`.
      qparams.whereString =
          'id IN (SELECT id from laps ${qparams.whereString!.isNotEmpty ? 'WHERE ${qparams.whereString}' : ''}${qparams.limit! > 0 ? ' LIMIT ${qparams.limit}' : ''}${qparams.offset! > 0 ? ' OFFSET ${qparams.offset}' : ''})';
    }
    return _mnDbLap!.updateBatch(qparams, values);
  }
|
|
|
|
  /// Returns the first [DbLap] matching the filter, or null when none match.
  ///
  /// bool preload: if true, loads related child collections (plDbEvents,
  /// plDbLapTaggings).
  /// List<String> preloadFields: limits preloading to the named pl* fields
  /// (preload must also be true).
  /// bool loadParents: if true, also loads the parent chain (plDbActivity).
  ///
  /// <returns> DbLap?
  @override
  Future<DbLap?> toSingle(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    // Limit the query to a single row.
    buildParameters(pSize: 1);
    final objFuture = _mnDbLap!.toList(qparams);
    final data = await objFuture;
    DbLap? obj;
    if (data.isNotEmpty) {
      obj = DbLap.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD CHILD
      // (the /* ... */ fragments are generator-disabled cycle-guard code)
      if (preload) {
        loadedFields = loadedFields ?? [];
        if (/*!_loadedfields!.contains('laps.plDbEvents') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbEvents'))) {
          /*_loadedfields!.add('laps.plDbEvents'); */ obj.plDbEvents =
              obj.plDbEvents ??
                  await obj.getDbEvents()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('laps.plDbLapTaggings') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbLapTaggings'))) {
          /*_loadedfields!.add('laps.plDbLapTaggings'); */ obj.plDbLapTaggings =
              obj.plDbLapTaggings ??
                  await obj.getDbLapTaggings()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

      // RELATIONSHIPS PRELOAD (parent: DbActivity via activitiesId)
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbActivity'))) {
          obj.plDbActivity = obj.plDbActivity ??
              await obj.getDbActivity(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }
|
|
|
|
/// This method always returns [DbLap]
|
|
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
|
|
/// ex: toSingle(preload:true) -> Loads all related objects
|
|
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
|
|
/// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
|
|
/// bool loadParents: if true, loads all parent objects until the object has no parent
|
|
|
|
/// <returns> DbLap?
|
|
@override
|
|
Future<DbLap> toSingleOrDefault(
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields}) async {
|
|
return await toSingle(
|
|
preload: preload,
|
|
preloadFields: preloadFields,
|
|
loadParents: loadParents,
|
|
loadedFields: loadedFields) ??
|
|
DbLap();
|
|
}
|
|
|
|
  /// Returns the number of rows matching the current filter. [DbLap]
  ///
  /// [dblapCount], when given, is also invoked with the count.
  /// <returns>int
  @override
  Future<int> toCount([VoidCallback Function(int c)? dblapCount]) async {
    buildParameters();
    // Replace the projection with a COUNT so only one row comes back.
    qparams.selectColumns = ['COUNT(1) AS CNT'];
    final dblapsFuture = await _mnDbLap!.toList(qparams);
    final int count = dblapsFuture[0]['CNT'] as int;
    if (dblapCount != null) {
      dblapCount(count);
    }
    return count;
  }
|
|
|
|
  /// Returns all matching rows as List<DbLap>. [DbLap]
  ///
  /// bool preload: if true, loads related child objects for every row.
  /// List<String> preloadFields: limits preloading to the named pl* fields
  /// (preload must also be true).
  /// bool loadParents: if true, loads parent objects up the chain.
  ///
  /// <returns>List<DbLap>
  @override
  Future<List<DbLap>> toList(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    final data = await toMapList();
    // Defaults are only applied when the full column set was selected;
    // a custom projection may legitimately leave fields null.
    final List<DbLap> dblapsData = await DbLap.fromMapList(data,
        preload: preload,
        preloadFields: preloadFields,
        loadParents: loadParents,
        loadedFields: loadedFields,
        setDefaultValues: qparams.selectColumns == null);
    return dblapsData;
  }
|
|
|
|
/// This method returns Json String [DbLap]
|
|
@override
|
|
Future<String> toJson() async {
|
|
final list = <dynamic>[];
|
|
final data = await toList();
|
|
for (var o in data) {
|
|
list.add(o.toMap(forJson: true));
|
|
}
|
|
return json.encode(list);
|
|
}
|
|
|
|
/// This method returns Json String. [DbLap]
|
|
@override
|
|
Future<String> toJsonWithChilds() async {
|
|
final list = <dynamic>[];
|
|
final data = await toList();
|
|
for (var o in data) {
|
|
list.add(await o.toMapWithChildren(false, true));
|
|
}
|
|
return json.encode(list);
|
|
}
|
|
|
|
  /// Returns the raw row maps matching the current filter. [DbLap]
  /// <returns>List<dynamic>
  @override
  Future<List<dynamic>> toMapList() async {
    buildParameters();
    return await _mnDbLap!.toList(qparams);
  }
|
|
|
|
  /// Returns Primary Key List SQL and Parameters retVal = Map<String,dynamic>. [DbLap]
  /// retVal['sql'] = SQL statement string, retVal['args'] = whereArguments List<dynamic>;
  /// Pass buildParams=false to reuse a WHERE clause already built by the caller.
  /// <returns>Map<String, dynamic>
  @override
  Map<String, dynamic> toListPrimaryKeySQL([bool buildParams = true]) {
    final Map<String, dynamic> _retVal = <String, dynamic>{};
    if (buildParams) {
      buildParameters();
    }
    _retVal['sql'] = 'SELECT `id` FROM laps WHERE ${qparams.whereString}';
    _retVal['args'] = qparams.whereArguments;
    return _retVal;
  }
|
|
|
|
/// This method returns Primary Key List<int>.
|
|
/// <returns>List<int>
|
|
@override
|
|
Future<List<int>> toListPrimaryKey([bool buildParams = true]) async {
|
|
if (buildParams) {
|
|
buildParameters();
|
|
}
|
|
final List<int> idData = <int>[];
|
|
qparams.selectColumns = ['id'];
|
|
final idFuture = await _mnDbLap!.toList(qparams);
|
|
|
|
final int count = idFuture.length;
|
|
for (int i = 0; i < count; i++) {
|
|
idData.add(idFuture[i]['id'] as int);
|
|
}
|
|
return idData;
|
|
}
|
|
|
|
/// Returns List<dynamic> for selected columns. Use this method for 'groupBy' with min,max,avg.. [DbLap]
|
|
/// Sample usage: (see EXAMPLE 4.2 at https://github.com/hhtokpinar/sqfEntity#group-by)
|
|
@override
|
|
Future<List<dynamic>> toListObject() async {
|
|
buildParameters();
|
|
|
|
final objectFuture = _mnDbLap!.toList(qparams);
|
|
|
|
final List<dynamic> objectsData = <dynamic>[];
|
|
final data = await objectFuture;
|
|
final int count = data.length;
|
|
for (int i = 0; i < count; i++) {
|
|
objectsData.add(data[i]);
|
|
}
|
|
return objectsData;
|
|
}
|
|
|
|
/// Returns List<String> for selected first column
|
|
/// Sample usage: await DbLap.select(columnsToSelect: ['columnName']).toListString()
|
|
@override
|
|
Future<List<String>> toListString(
|
|
[VoidCallback Function(List<String> o)? listString]) async {
|
|
buildParameters();
|
|
|
|
final objectFuture = _mnDbLap!.toList(qparams);
|
|
|
|
final List<String> objectsData = <String>[];
|
|
final data = await objectFuture;
|
|
final int count = data.length;
|
|
for (int i = 0; i < count; i++) {
|
|
objectsData.add(data[i][qparams.selectColumns![0]].toString());
|
|
}
|
|
if (listString != null) {
|
|
listString(objectsData);
|
|
}
|
|
return objectsData;
|
|
}
|
|
}
|
|
// endregion DbLapFilterBuilder
|
|
|
|
// region DbLapFields
|
|
class DbLapFields {
|
|
  // -------------------------------------------------------------------------
  // Static column descriptors for the `laps` table, lazily created and cached
  // on first access (unlike DbLapFilterBuilder's getters, these DO reuse the
  // cached TableField). Used for raw query construction with SqlSyntax.
  // -------------------------------------------------------------------------
  static TableField? _fId;
  static TableField get id {
    return _fId = _fId ?? SqlSyntax.setField(_fId, 'id', DbType.integer);
  }

  static TableField? _fTimeStamp;
  static TableField get timeStamp {
    return _fTimeStamp = _fTimeStamp ??
        SqlSyntax.setField(_fTimeStamp, 'timeStamp', DbType.datetime);
  }

  static TableField? _fStartTime;
  static TableField get startTime {
    return _fStartTime = _fStartTime ??
        SqlSyntax.setField(_fStartTime, 'startTime', DbType.datetime);
  }

  static TableField? _fStartPositionLat;
  static TableField get startPositionLat {
    return _fStartPositionLat = _fStartPositionLat ??
        SqlSyntax.setField(_fStartPositionLat, 'startPositionLat', DbType.real);
  }

  static TableField? _fStartPositionLong;
  static TableField get startPositionLong {
    return _fStartPositionLong = _fStartPositionLong ??
        SqlSyntax.setField(
            _fStartPositionLong, 'startPositionLong', DbType.real);
  }

  static TableField? _fEndPositionLat;
  static TableField get endPositionLat {
    return _fEndPositionLat = _fEndPositionLat ??
        SqlSyntax.setField(_fEndPositionLat, 'endPositionLat', DbType.real);
  }

  static TableField? _fEndPositionLong;
  static TableField get endPositionLong {
    return _fEndPositionLong = _fEndPositionLong ??
        SqlSyntax.setField(_fEndPositionLong, 'endPositionLong', DbType.real);
  }

  static TableField? _fAvgHeartRate;
  static TableField get avgHeartRate {
    return _fAvgHeartRate = _fAvgHeartRate ??
        SqlSyntax.setField(_fAvgHeartRate, 'avgHeartRate', DbType.integer);
  }

  static TableField? _fMaxHeartRate;
  static TableField get maxHeartRate {
    return _fMaxHeartRate = _fMaxHeartRate ??
        SqlSyntax.setField(_fMaxHeartRate, 'maxHeartRate', DbType.integer);
  }

  static TableField? _fAvgRunningCadence;
  static TableField get avgRunningCadence {
    return _fAvgRunningCadence = _fAvgRunningCadence ??
        SqlSyntax.setField(
            _fAvgRunningCadence, 'avgRunningCadence', DbType.real);
  }

  static TableField? _fEvent;
  static TableField get event {
    return _fEvent =
        _fEvent ?? SqlSyntax.setField(_fEvent, 'event', DbType.text);
  }

  static TableField? _fEventType;
  static TableField get eventType {
    return _fEventType = _fEventType ??
        SqlSyntax.setField(_fEventType, 'eventType', DbType.text);
  }

  static TableField? _fEventGroup;
  static TableField get eventGroup {
    return _fEventGroup = _fEventGroup ??
        SqlSyntax.setField(_fEventGroup, 'eventGroup', DbType.integer);
  }

  static TableField? _fSport;
  static TableField get sport {
    return _fSport =
        _fSport ?? SqlSyntax.setField(_fSport, 'sport', DbType.text);
  }

  static TableField? _fSubSport;
  static TableField get subSport {
    return _fSubSport =
        _fSubSport ?? SqlSyntax.setField(_fSubSport, 'subSport', DbType.text);
  }

  static TableField? _fAvgVerticalOscillation;
  static TableField get avgVerticalOscillation {
    return _fAvgVerticalOscillation = _fAvgVerticalOscillation ??
        SqlSyntax.setField(
            _fAvgVerticalOscillation, 'avgVerticalOscillation', DbType.real);
  }

  static TableField? _fTotalElapsedTime;
  static TableField get totalElapsedTime {
    return _fTotalElapsedTime = _fTotalElapsedTime ??
        SqlSyntax.setField(
            _fTotalElapsedTime, 'totalElapsedTime', DbType.integer);
  }

  static TableField? _fTotalTimerTime;
  static TableField get totalTimerTime {
    return _fTotalTimerTime = _fTotalTimerTime ??
        SqlSyntax.setField(_fTotalTimerTime, 'totalTimerTime', DbType.integer);
  }

  static TableField? _fTotalDistance;
  static TableField get totalDistance {
    return _fTotalDistance = _fTotalDistance ??
        SqlSyntax.setField(_fTotalDistance, 'totalDistance', DbType.integer);
  }

  static TableField? _fTotalStrides;
  static TableField get totalStrides {
    return _fTotalStrides = _fTotalStrides ??
        SqlSyntax.setField(_fTotalStrides, 'totalStrides', DbType.integer);
  }

  static TableField? _fTotalCalories;
  static TableField get totalCalories {
    return _fTotalCalories = _fTotalCalories ??
        SqlSyntax.setField(_fTotalCalories, 'totalCalories', DbType.integer);
  }

  static TableField? _fAvgSpeed;
  static TableField get avgSpeed {
    return _fAvgSpeed =
        _fAvgSpeed ?? SqlSyntax.setField(_fAvgSpeed, 'avgSpeed', DbType.real);
  }

  static TableField? _fAvgSpeedByMeasurements;
  static TableField get avgSpeedByMeasurements {
    return _fAvgSpeedByMeasurements = _fAvgSpeedByMeasurements ??
        SqlSyntax.setField(
            _fAvgSpeedByMeasurements, 'avgSpeedByMeasurements', DbType.real);
  }
|
|
|
|
static TableField? _fAvgSpeedBySpeed;
|
|
static TableField get avgSpeedBySpeed {
|
|
return _fAvgSpeedBySpeed = _fAvgSpeedBySpeed ??
|
|
SqlSyntax.setField(_fAvgSpeedBySpeed, 'avgSpeedBySpeed', DbType.real);
|
|
}
|
|
|
|
static TableField? _fAvgSpeedByDistance;
|
|
static TableField get avgSpeedByDistance {
|
|
return _fAvgSpeedByDistance = _fAvgSpeedByDistance ??
|
|
SqlSyntax.setField(
|
|
_fAvgSpeedByDistance, 'avgSpeedByDistance', DbType.real);
|
|
}
|
|
|
|
static TableField? _fSdevSpeed;
|
|
static TableField get sdevSpeed {
|
|
return _fSdevSpeed = _fSdevSpeed ??
|
|
SqlSyntax.setField(_fSdevSpeed, 'sdevSpeed', DbType.real);
|
|
}
|
|
|
|
static TableField? _fSdevPace;
|
|
static TableField get sdevPace {
|
|
return _fSdevPace =
|
|
_fSdevPace ?? SqlSyntax.setField(_fSdevPace, 'sdevPace', DbType.real);
|
|
}
|
|
|
|
static TableField? _fMinSpeed;
|
|
static TableField get minSpeed {
|
|
return _fMinSpeed =
|
|
_fMinSpeed ?? SqlSyntax.setField(_fMinSpeed, 'minSpeed', DbType.real);
|
|
}
|
|
|
|
static TableField? _fMaxSpeed;
|
|
static TableField get maxSpeed {
|
|
return _fMaxSpeed =
|
|
_fMaxSpeed ?? SqlSyntax.setField(_fMaxSpeed, 'maxSpeed', DbType.real);
|
|
}
|
|
|
|
static TableField? _fTotalAscent;
|
|
static TableField get totalAscent {
|
|
return _fTotalAscent = _fTotalAscent ??
|
|
SqlSyntax.setField(_fTotalAscent, 'totalAscent', DbType.integer);
|
|
}
|
|
|
|
static TableField? _fTotalDescent;
|
|
static TableField get totalDescent {
|
|
return _fTotalDescent = _fTotalDescent ??
|
|
SqlSyntax.setField(_fTotalDescent, 'totalDescent', DbType.integer);
|
|
}
|
|
|
|
static TableField? _fAvgStanceTimePercent;
|
|
static TableField get avgStanceTimePercent {
|
|
return _fAvgStanceTimePercent = _fAvgStanceTimePercent ??
|
|
SqlSyntax.setField(
|
|
_fAvgStanceTimePercent, 'avgStanceTimePercent', DbType.real);
|
|
}
|
|
|
|
static TableField? _fAvgStanceTime;
|
|
static TableField get avgStanceTime {
|
|
return _fAvgStanceTime = _fAvgStanceTime ??
|
|
SqlSyntax.setField(_fAvgStanceTime, 'avgStanceTime', DbType.real);
|
|
}
|
|
|
|
static TableField? _fMaxRunningCadence;
|
|
static TableField get maxRunningCadence {
|
|
return _fMaxRunningCadence = _fMaxRunningCadence ??
|
|
SqlSyntax.setField(
|
|
_fMaxRunningCadence, 'maxRunningCadence', DbType.integer);
|
|
}
|
|
|
|
static TableField? _fIntensity;
|
|
static TableField get intensity {
|
|
return _fIntensity = _fIntensity ??
|
|
SqlSyntax.setField(_fIntensity, 'intensity', DbType.integer);
|
|
}
|
|
|
|
static TableField? _fLapTrigger;
|
|
static TableField get lapTrigger {
|
|
return _fLapTrigger = _fLapTrigger ??
|
|
SqlSyntax.setField(_fLapTrigger, 'lapTrigger', DbType.text);
|
|
}
|
|
|
|
static TableField? _fAvgTemperature;
|
|
static TableField get avgTemperature {
|
|
return _fAvgTemperature = _fAvgTemperature ??
|
|
SqlSyntax.setField(_fAvgTemperature, 'avgTemperature', DbType.integer);
|
|
}
|
|
|
|
static TableField? _fMaxTemperature;
|
|
static TableField get maxTemperature {
|
|
return _fMaxTemperature = _fMaxTemperature ??
|
|
SqlSyntax.setField(_fMaxTemperature, 'maxTemperature', DbType.integer);
|
|
}
|
|
|
|
static TableField? _fAvgFractionalCadence;
|
|
static TableField get avgFractionalCadence {
|
|
return _fAvgFractionalCadence = _fAvgFractionalCadence ??
|
|
SqlSyntax.setField(
|
|
_fAvgFractionalCadence, 'avgFractionalCadence', DbType.real);
|
|
}
|
|
|
|
static TableField? _fMaxFractionalCadence;
|
|
static TableField get maxFractionalCadence {
|
|
return _fMaxFractionalCadence = _fMaxFractionalCadence ??
|
|
SqlSyntax.setField(
|
|
_fMaxFractionalCadence, 'maxFractionalCadence', DbType.real);
|
|
}
|
|
|
|
static TableField? _fTotalFractionalCycles;
|
|
static TableField get totalFractionalCycles {
|
|
return _fTotalFractionalCycles = _fTotalFractionalCycles ??
|
|
SqlSyntax.setField(
|
|
_fTotalFractionalCycles, 'totalFractionalCycles', DbType.real);
|
|
}
|
|
|
|
static TableField? _fAvgPower;
|
|
static TableField get avgPower {
|
|
return _fAvgPower =
|
|
_fAvgPower ?? SqlSyntax.setField(_fAvgPower, 'avgPower', DbType.real);
|
|
}
|
|
|
|
static TableField? _fMinPower;
|
|
static TableField get minPower {
|
|
return _fMinPower = _fMinPower ??
|
|
SqlSyntax.setField(_fMinPower, 'minPower', DbType.integer);
|
|
}
|
|
|
|
static TableField? _fMaxPower;
|
|
static TableField get maxPower {
|
|
return _fMaxPower = _fMaxPower ??
|
|
SqlSyntax.setField(_fMaxPower, 'maxPower', DbType.integer);
|
|
}
|
|
|
|
static TableField? _fSdevPower;
|
|
static TableField get sdevPower {
|
|
return _fSdevPower = _fSdevPower ??
|
|
SqlSyntax.setField(_fSdevPower, 'sdevPower', DbType.real);
|
|
}
|
|
|
|
static TableField? _fMinHeartRate;
|
|
static TableField get minHeartRate {
|
|
return _fMinHeartRate = _fMinHeartRate ??
|
|
SqlSyntax.setField(_fMinHeartRate, 'minHeartRate', DbType.integer);
|
|
}
|
|
|
|
static TableField? _fSdevHeartRate;
|
|
static TableField get sdevHeartRate {
|
|
return _fSdevHeartRate = _fSdevHeartRate ??
|
|
SqlSyntax.setField(_fSdevHeartRate, 'sdevHeartRate', DbType.real);
|
|
}
|
|
|
|
static TableField? _fAvgGroundTime;
|
|
static TableField get avgGroundTime {
|
|
return _fAvgGroundTime = _fAvgGroundTime ??
|
|
SqlSyntax.setField(_fAvgGroundTime, 'avgGroundTime', DbType.real);
|
|
}
|
|
|
|
static TableField? _fSdevGroundTime;
|
|
static TableField get sdevGroundTime {
|
|
return _fSdevGroundTime = _fSdevGroundTime ??
|
|
SqlSyntax.setField(_fSdevGroundTime, 'sdevGroundTime', DbType.real);
|
|
}
|
|
|
|
static TableField? _fAvgLegSpringStiffness;
|
|
static TableField get avgLegSpringStiffness {
|
|
return _fAvgLegSpringStiffness = _fAvgLegSpringStiffness ??
|
|
SqlSyntax.setField(
|
|
_fAvgLegSpringStiffness, 'avgLegSpringStiffness', DbType.real);
|
|
}
|
|
|
|
static TableField? _fSdevLegSpringStiffness;
|
|
static TableField get sdevLegSpringStiffness {
|
|
return _fSdevLegSpringStiffness = _fSdevLegSpringStiffness ??
|
|
SqlSyntax.setField(
|
|
_fSdevLegSpringStiffness, 'sdevLegSpringStiffness', DbType.real);
|
|
}
|
|
|
|
static TableField? _fAvgFormPower;
|
|
static TableField get avgFormPower {
|
|
return _fAvgFormPower = _fAvgFormPower ??
|
|
SqlSyntax.setField(_fAvgFormPower, 'avgFormPower', DbType.real);
|
|
}
|
|
|
|
static TableField? _fSdevFormPower;
|
|
static TableField get sdevFormPower {
|
|
return _fSdevFormPower = _fSdevFormPower ??
|
|
SqlSyntax.setField(_fSdevFormPower, 'sdevFormPower', DbType.real);
|
|
}
|
|
|
|
static TableField? _fAvgStrydCadence;
|
|
static TableField get avgStrydCadence {
|
|
return _fAvgStrydCadence = _fAvgStrydCadence ??
|
|
SqlSyntax.setField(_fAvgStrydCadence, 'avgStrydCadence', DbType.real);
|
|
}
|
|
|
|
static TableField? _fSdevStrydCadence;
|
|
static TableField get sdevStrydCadence {
|
|
return _fSdevStrydCadence = _fSdevStrydCadence ??
|
|
SqlSyntax.setField(_fSdevStrydCadence, 'sdevStrydCadence', DbType.real);
|
|
}
|
|
|
|
static TableField? _fSdevVerticalOscillation;
|
|
static TableField get sdevVerticalOscillation {
|
|
return _fSdevVerticalOscillation = _fSdevVerticalOscillation ??
|
|
SqlSyntax.setField(
|
|
_fSdevVerticalOscillation, 'sdevVerticalOscillation', DbType.real);
|
|
}
|
|
|
|
static TableField? _fAvgPowerRatio;
|
|
static TableField get avgPowerRatio {
|
|
return _fAvgPowerRatio = _fAvgPowerRatio ??
|
|
SqlSyntax.setField(_fAvgPowerRatio, 'avgPowerRatio', DbType.real);
|
|
}
|
|
|
|
static TableField? _fSdevPowerRatio;
|
|
static TableField get sdevPowerRatio {
|
|
return _fSdevPowerRatio = _fSdevPowerRatio ??
|
|
SqlSyntax.setField(_fSdevPowerRatio, 'sdevPowerRatio', DbType.real);
|
|
}
|
|
|
|
static TableField? _fAvgStrideRatio;
|
|
static TableField get avgStrideRatio {
|
|
return _fAvgStrideRatio = _fAvgStrideRatio ??
|
|
SqlSyntax.setField(_fAvgStrideRatio, 'avgStrideRatio', DbType.real);
|
|
}
|
|
|
|
static TableField? _fSdevStrideRatio;
|
|
static TableField get sdevStrideRatio {
|
|
return _fSdevStrideRatio = _fSdevStrideRatio ??
|
|
SqlSyntax.setField(_fSdevStrideRatio, 'sdevStrideRatio', DbType.real);
|
|
}
|
|
|
|
static TableField? _fCp;
|
|
static TableField get cp {
|
|
return _fCp = _fCp ?? SqlSyntax.setField(_fCp, 'cp', DbType.real);
|
|
}
|
|
|
|
static TableField? _fFtp;
|
|
static TableField get ftp {
|
|
return _fFtp = _fFtp ?? SqlSyntax.setField(_fFtp, 'ftp', DbType.real);
|
|
}
|
|
|
|
static TableField? _fMovingTime;
|
|
static TableField get movingTime {
|
|
return _fMovingTime = _fMovingTime ??
|
|
SqlSyntax.setField(_fMovingTime, 'movingTime', DbType.integer);
|
|
}
|
|
|
|
static TableField? _fActivitiesId;
|
|
static TableField get activitiesId {
|
|
return _fActivitiesId = _fActivitiesId ??
|
|
SqlSyntax.setField(_fActivitiesId, 'activitiesId', DbType.integer);
|
|
}
|
|
}
|
|
// endregion DbLapFields
|
|
|
|
//region DbLapManager
|
|
/// Low-level CRUD provider for the `laps` table.
///
/// Thin wrapper around [SqfEntityProvider] that pins the table name and the
/// single-column integer primary key (`id`).
class DbLapManager extends SqfEntityProvider {
  static const String _tableName = 'laps';
  static const List<String> _primaryKeyList = ['id'];
  static const String _whereStr = 'id=?';

  DbLapManager()
      : super(DbEncrateia(),
            tableName: _tableName,
            primaryKeyList: _primaryKeyList,
            whereStr: _whereStr);
}
|
|
|
|
//endregion DbLapManager
|
|
// region DbInterval
|
|
class DbInterval extends TableBase {
|
|
  /// Creates a [DbInterval] row object; every column is an optional named
  /// parameter.
  ///
  /// Applies the generated column defaults via [_setDefaultValues] and then
  /// disables soft deleting on this instance, so deletes are hard deletes.
  DbInterval(
      {this.id, this.timeStamp, this.duration, this.avgPower, this.minPower,
      this.maxPower, this.sdevPower, this.avgSpeed,
      this.avgSpeedByMeasurements, this.avgSpeedBySpeed,
      this.avgSpeedByDistance, this.minSpeed, this.maxSpeed, this.sdevSpeed,
      this.sdevPace, this.distance, this.avgHeartRate, this.minHeartRate,
      this.maxHeartRate, this.sdevHeartRate, this.avgCadence, this.minCadence,
      this.maxCadence, this.sdevCadence, this.avgStrydCadence,
      this.minStrydCadence, this.maxStrydCadence, this.sdevStrydCadence,
      this.avgGroundTime, this.minGroundTime, this.maxGroundTime,
      this.sdevGroundTime, this.avgVerticalOscillation,
      this.minVerticalOscillation, this.maxVerticalOscillation,
      this.sdevVerticalOscillation, this.avgFormPower, this.maxFormPower,
      this.minFormPower, this.sdevFormPower, this.avgLegSpringStiffness,
      this.maxLegSpringStiffness, this.minLegSpringStiffness,
      this.sdevLegSpringStiffness, this.totalAscent, this.totalDescent,
      this.cp, this.ftp, this.movingTime, this.firstRecordId,
      this.lastRecordId, this.athletesId, this.activitiesId}) {
    _setDefaultValues();
    softDeleteActivated = false;
  }
|
|
  /// Creates a [DbInterval] from positional column values (no primary key).
  ///
  /// Parameter order matches the generated column order; applies column
  /// defaults via [_setDefaultValues]. Note: unlike the default constructor,
  /// this one does not touch `softDeleteActivated`.
  DbInterval.withFields(
      this.timeStamp, this.duration, this.avgPower, this.minPower,
      this.maxPower, this.sdevPower, this.avgSpeed,
      this.avgSpeedByMeasurements, this.avgSpeedBySpeed,
      this.avgSpeedByDistance, this.minSpeed, this.maxSpeed, this.sdevSpeed,
      this.sdevPace, this.distance, this.avgHeartRate, this.minHeartRate,
      this.maxHeartRate, this.sdevHeartRate, this.avgCadence, this.minCadence,
      this.maxCadence, this.sdevCadence, this.avgStrydCadence,
      this.minStrydCadence, this.maxStrydCadence, this.sdevStrydCadence,
      this.avgGroundTime, this.minGroundTime, this.maxGroundTime,
      this.sdevGroundTime, this.avgVerticalOscillation,
      this.minVerticalOscillation, this.maxVerticalOscillation,
      this.sdevVerticalOscillation, this.avgFormPower, this.maxFormPower,
      this.minFormPower, this.sdevFormPower, this.avgLegSpringStiffness,
      this.maxLegSpringStiffness, this.minLegSpringStiffness,
      this.sdevLegSpringStiffness, this.totalAscent, this.totalDescent,
      this.cp, this.ftp, this.movingTime, this.firstRecordId,
      this.lastRecordId, this.athletesId, this.activitiesId) {
    _setDefaultValues();
  }
|
|
  /// Creates a [DbInterval] from positional column values including the
  /// primary key [id] (first parameter).
  ///
  /// Parameter order matches the generated column order; applies column
  /// defaults via [_setDefaultValues].
  DbInterval.withId(
      this.id, this.timeStamp, this.duration, this.avgPower, this.minPower,
      this.maxPower, this.sdevPower, this.avgSpeed,
      this.avgSpeedByMeasurements, this.avgSpeedBySpeed,
      this.avgSpeedByDistance, this.minSpeed, this.maxSpeed, this.sdevSpeed,
      this.sdevPace, this.distance, this.avgHeartRate, this.minHeartRate,
      this.maxHeartRate, this.sdevHeartRate, this.avgCadence, this.minCadence,
      this.maxCadence, this.sdevCadence, this.avgStrydCadence,
      this.minStrydCadence, this.maxStrydCadence, this.sdevStrydCadence,
      this.avgGroundTime, this.minGroundTime, this.maxGroundTime,
      this.sdevGroundTime, this.avgVerticalOscillation,
      this.minVerticalOscillation, this.maxVerticalOscillation,
      this.sdevVerticalOscillation, this.avgFormPower, this.maxFormPower,
      this.minFormPower, this.sdevFormPower, this.avgLegSpringStiffness,
      this.maxLegSpringStiffness, this.minLegSpringStiffness,
      this.sdevLegSpringStiffness, this.totalAscent, this.totalDescent,
      this.cp, this.ftp, this.movingTime, this.firstRecordId,
      this.lastRecordId, this.athletesId, this.activitiesId) {
    _setDefaultValues();
  }
|
|
  // fromMap v2.0
  /// Hydrates a [DbInterval] from a database/JSON map [o].
  ///
  /// Every numeric column is parsed defensively with `tryParse`, so an
  /// unparseable value becomes null instead of throwing. [timeStamp] accepts
  /// either an epoch-milliseconds integer or a date-time string. Nested
  /// parent maps (`dbEvent`, `dbAthlete`, `dbActivity`) are hydrated into
  /// the `pl…` relationship fields when present.
  DbInterval.fromMap(Map<String, dynamic> o, {bool setDefaultValues = true}) {
    if (setDefaultValues) {
      _setDefaultValues();
    }
    id = int.tryParse(o['id'].toString());
    if (o['timeStamp'] != null) {
      // Integer payloads are treated as epoch milliseconds; anything else is
      // parsed as a date-time string.
      timeStamp = int.tryParse(o['timeStamp'].toString()) != null
          ? DateTime.fromMillisecondsSinceEpoch(
              int.tryParse(o['timeStamp'].toString())!)
          : DateTime.tryParse(o['timeStamp'].toString());
    }
    if (o['duration'] != null) { duration = int.tryParse(o['duration'].toString()); }
    if (o['avgPower'] != null) { avgPower = double.tryParse(o['avgPower'].toString()); }
    if (o['minPower'] != null) { minPower = int.tryParse(o['minPower'].toString()); }
    if (o['maxPower'] != null) { maxPower = int.tryParse(o['maxPower'].toString()); }
    if (o['sdevPower'] != null) { sdevPower = double.tryParse(o['sdevPower'].toString()); }
    if (o['avgSpeed'] != null) { avgSpeed = double.tryParse(o['avgSpeed'].toString()); }
    if (o['avgSpeedByMeasurements'] != null) {
      avgSpeedByMeasurements =
          double.tryParse(o['avgSpeedByMeasurements'].toString());
    }
    if (o['avgSpeedBySpeed'] != null) { avgSpeedBySpeed = double.tryParse(o['avgSpeedBySpeed'].toString()); }
    if (o['avgSpeedByDistance'] != null) { avgSpeedByDistance = double.tryParse(o['avgSpeedByDistance'].toString()); }
    if (o['minSpeed'] != null) { minSpeed = double.tryParse(o['minSpeed'].toString()); }
    if (o['maxSpeed'] != null) { maxSpeed = double.tryParse(o['maxSpeed'].toString()); }
    if (o['sdevSpeed'] != null) { sdevSpeed = double.tryParse(o['sdevSpeed'].toString()); }
    if (o['sdevPace'] != null) { sdevPace = double.tryParse(o['sdevPace'].toString()); }
    if (o['distance'] != null) { distance = int.tryParse(o['distance'].toString()); }
    if (o['avgHeartRate'] != null) { avgHeartRate = int.tryParse(o['avgHeartRate'].toString()); }
    if (o['minHeartRate'] != null) { minHeartRate = int.tryParse(o['minHeartRate'].toString()); }
    if (o['maxHeartRate'] != null) { maxHeartRate = int.tryParse(o['maxHeartRate'].toString()); }
    if (o['sdevHeartRate'] != null) { sdevHeartRate = double.tryParse(o['sdevHeartRate'].toString()); }
    if (o['avgCadence'] != null) { avgCadence = double.tryParse(o['avgCadence'].toString()); }
    if (o['minCadence'] != null) { minCadence = double.tryParse(o['minCadence'].toString()); }
    if (o['maxCadence'] != null) { maxCadence = double.tryParse(o['maxCadence'].toString()); }
    if (o['sdevCadence'] != null) { sdevCadence = double.tryParse(o['sdevCadence'].toString()); }
    if (o['avgStrydCadence'] != null) { avgStrydCadence = double.tryParse(o['avgStrydCadence'].toString()); }
    if (o['minStrydCadence'] != null) { minStrydCadence = double.tryParse(o['minStrydCadence'].toString()); }
    if (o['maxStrydCadence'] != null) { maxStrydCadence = double.tryParse(o['maxStrydCadence'].toString()); }
    if (o['sdevStrydCadence'] != null) { sdevStrydCadence = double.tryParse(o['sdevStrydCadence'].toString()); }
    if (o['avgGroundTime'] != null) { avgGroundTime = double.tryParse(o['avgGroundTime'].toString()); }
    if (o['minGroundTime'] != null) { minGroundTime = double.tryParse(o['minGroundTime'].toString()); }
    if (o['maxGroundTime'] != null) { maxGroundTime = double.tryParse(o['maxGroundTime'].toString()); }
    if (o['sdevGroundTime'] != null) { sdevGroundTime = double.tryParse(o['sdevGroundTime'].toString()); }
    if (o['avgVerticalOscillation'] != null) {
      avgVerticalOscillation =
          double.tryParse(o['avgVerticalOscillation'].toString());
    }
    if (o['minVerticalOscillation'] != null) {
      minVerticalOscillation =
          double.tryParse(o['minVerticalOscillation'].toString());
    }
    if (o['maxVerticalOscillation'] != null) {
      maxVerticalOscillation =
          double.tryParse(o['maxVerticalOscillation'].toString());
    }
    if (o['sdevVerticalOscillation'] != null) {
      sdevVerticalOscillation =
          double.tryParse(o['sdevVerticalOscillation'].toString());
    }
    if (o['avgFormPower'] != null) { avgFormPower = double.tryParse(o['avgFormPower'].toString()); }
    if (o['maxFormPower'] != null) { maxFormPower = int.tryParse(o['maxFormPower'].toString()); }
    if (o['minFormPower'] != null) { minFormPower = int.tryParse(o['minFormPower'].toString()); }
    if (o['sdevFormPower'] != null) { sdevFormPower = double.tryParse(o['sdevFormPower'].toString()); }
    if (o['avgLegSpringStiffness'] != null) {
      avgLegSpringStiffness =
          double.tryParse(o['avgLegSpringStiffness'].toString());
    }
    if (o['maxLegSpringStiffness'] != null) {
      maxLegSpringStiffness =
          double.tryParse(o['maxLegSpringStiffness'].toString());
    }
    if (o['minLegSpringStiffness'] != null) {
      minLegSpringStiffness =
          double.tryParse(o['minLegSpringStiffness'].toString());
    }
    if (o['sdevLegSpringStiffness'] != null) {
      sdevLegSpringStiffness =
          double.tryParse(o['sdevLegSpringStiffness'].toString());
    }
    if (o['totalAscent'] != null) { totalAscent = int.tryParse(o['totalAscent'].toString()); }
    if (o['totalDescent'] != null) { totalDescent = int.tryParse(o['totalDescent'].toString()); }
    if (o['cp'] != null) { cp = double.tryParse(o['cp'].toString()); }
    if (o['ftp'] != null) { ftp = double.tryParse(o['ftp'].toString()); }
    if (o['movingTime'] != null) { movingTime = int.tryParse(o['movingTime'].toString()); }
    // Foreign keys are parsed unconditionally: a missing key stringifies to
    // 'null', which tryParse maps back to null.
    firstRecordId = int.tryParse(o['firstRecordId'].toString());

    lastRecordId = int.tryParse(o['lastRecordId'].toString());

    athletesId = int.tryParse(o['athletesId'].toString());

    activitiesId = int.tryParse(o['activitiesId'].toString());

    // RELATIONSHIPS FromMAP
    plDbEvent = o['dbEvent'] != null
        ? DbEvent.fromMap(o['dbEvent'] as Map<String, dynamic>)
        : null;
    // NOTE(review): both plDbEvent and plDbEventByLastRecordId are hydrated
    // from the same 'dbEvent' key — looks like a generator quirk; confirm
    // before relying on the last-record relation being distinct here.
    plDbEventByLastRecordId = o['dbEvent'] != null
        ? DbEvent.fromMap(o['dbEvent'] as Map<String, dynamic>)
        : null;
    plDbAthlete = o['dbAthlete'] != null
        ? DbAthlete.fromMap(o['dbAthlete'] as Map<String, dynamic>)
        : null;
    plDbActivity = o['dbActivity'] != null
        ? DbActivity.fromMap(o['dbActivity'] as Map<String, dynamic>)
        : null;
    // END RELATIONSHIPS FromMAP
  }
|
|
  // FIELDS (DbInterval)
  // Primary key and sample window.
  int? id;
  DateTime? timeStamp;
  int? duration;
  // Power statistics.
  double? avgPower;
  int? minPower;
  int? maxPower;
  double? sdevPower;
  // Speed / pace statistics (several averaging strategies are stored).
  double? avgSpeed;
  double? avgSpeedByMeasurements;
  double? avgSpeedBySpeed;
  double? avgSpeedByDistance;
  double? minSpeed;
  double? maxSpeed;
  double? sdevSpeed;
  double? sdevPace;
  int? distance;
  // Heart-rate statistics.
  int? avgHeartRate;
  int? minHeartRate;
  int? maxHeartRate;
  double? sdevHeartRate;
  // Cadence statistics (device cadence and Stryd cadence are separate).
  double? avgCadence;
  double? minCadence;
  double? maxCadence;
  double? sdevCadence;
  double? avgStrydCadence;
  double? minStrydCadence;
  double? maxStrydCadence;
  double? sdevStrydCadence;
  // Ground-contact-time statistics.
  double? avgGroundTime;
  double? minGroundTime;
  double? maxGroundTime;
  double? sdevGroundTime;
  // Vertical-oscillation statistics.
  double? avgVerticalOscillation;
  double? minVerticalOscillation;
  double? maxVerticalOscillation;
  double? sdevVerticalOscillation;
  // Form-power statistics (note: min/max are ints, avg/sdev are doubles).
  double? avgFormPower;
  int? maxFormPower;
  int? minFormPower;
  double? sdevFormPower;
  // Leg-spring-stiffness statistics.
  double? avgLegSpringStiffness;
  double? maxLegSpringStiffness;
  double? minLegSpringStiffness;
  double? sdevLegSpringStiffness;
  // Elevation and thresholds.
  int? totalAscent;
  int? totalDescent;
  double? cp;
  double? ftp;
  int? movingTime;
  // Foreign keys: first/last event record, owning athlete and activity.
  int? firstRecordId;
  int? lastRecordId;
  int? athletesId;
  int? activitiesId;

  // end FIELDS (DbInterval)
|
|
|
|
// RELATIONSHIPS (DbInterval)
|
|
/// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbEvent', 'plField2'..]) or so on..
|
|
DbEvent? plDbEvent;
|
|
|
|
/// get DbEvent By FirstRecordId
|
|
Future<DbEvent?> getDbEvent(
|
|
{bool loadParents = false, List<String>? loadedFields}) async {
|
|
final _obj = await DbEvent().getById(firstRecordId,
|
|
loadParents: loadParents, loadedFields: loadedFields);
|
|
return _obj;
|
|
}
|
|
|
|
/// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbEventByLastRecordId', 'plField2'..]) or so on..
|
|
DbEvent? plDbEventByLastRecordId;
|
|
|
|
/// get DbEvent By LastRecordId
|
|
Future<DbEvent?> getDbEventByLastRecordId(
|
|
{bool loadParents = false, List<String>? loadedFields}) async {
|
|
final _obj = await DbEvent().getById(lastRecordId,
|
|
loadParents: loadParents, loadedFields: loadedFields);
|
|
return _obj;
|
|
}
|
|
|
|
/// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbAthlete', 'plField2'..]) or so on..
|
|
DbAthlete? plDbAthlete;
|
|
|
|
/// get DbAthlete By AthletesId
|
|
Future<DbAthlete?> getDbAthlete(
|
|
{bool loadParents = false, List<String>? loadedFields}) async {
|
|
final _obj = await DbAthlete().getById(athletesId,
|
|
loadParents: loadParents, loadedFields: loadedFields);
|
|
return _obj;
|
|
}
|
|
|
|
/// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbActivity', 'plField2'..]) or so on..
|
|
DbActivity? plDbActivity;
|
|
|
|
/// get DbActivity By ActivitiesId
|
|
Future<DbActivity?> getDbActivity(
|
|
{bool loadParents = false, List<String>? loadedFields}) async {
|
|
final _obj = await DbActivity().getById(activitiesId,
|
|
loadParents: loadParents, loadedFields: loadedFields);
|
|
return _obj;
|
|
}
|
|
// END RELATIONSHIPS (DbInterval)
|
|
|
|
// COLLECTIONS & VIRTUALS (DbInterval)
|
|
/// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbIntervalTaggings', 'plField2'..]) or so on..
|
|
List<DbIntervalTagging>? plDbIntervalTaggings;
|
|
|
|
/// get DbIntervalTagging(s) filtered by id=intervalsId
|
|
DbIntervalTaggingFilterBuilder? getDbIntervalTaggings(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
if (id == null) {
|
|
return null;
|
|
}
|
|
return DbIntervalTagging()
|
|
.select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
|
|
.intervalsId
|
|
.equals(id)
|
|
.and;
|
|
}
|
|
|
|
// END COLLECTIONS & VIRTUALS (DbInterval)
|
|
|
|
static const bool _softDeleteActivated = false;
|
|
DbIntervalManager? __mnDbInterval;
|
|
|
|
DbIntervalManager get _mnDbInterval {
|
|
return __mnDbInterval = __mnDbInterval ?? DbIntervalManager();
|
|
}
|
|
|
|
// METHODS
|
|
@override
|
|
Map<String, dynamic> toMap(
|
|
{bool forQuery = false, bool forJson = false, bool forView = false}) {
|
|
final map = <String, dynamic>{};
|
|
map['id'] = id;
|
|
if (timeStamp != null) {
|
|
map['timeStamp'] = forJson
|
|
? timeStamp!.toString()
|
|
: forQuery
|
|
? timeStamp!.millisecondsSinceEpoch
|
|
: timeStamp;
|
|
} else if (timeStamp != null || !forView) {
|
|
map['timeStamp'] = null;
|
|
}
|
|
if (duration != null || !forView) {
|
|
map['duration'] = duration;
|
|
}
|
|
if (avgPower != null || !forView) {
|
|
map['avgPower'] = avgPower;
|
|
}
|
|
if (minPower != null || !forView) {
|
|
map['minPower'] = minPower;
|
|
}
|
|
if (maxPower != null || !forView) {
|
|
map['maxPower'] = maxPower;
|
|
}
|
|
if (sdevPower != null || !forView) {
|
|
map['sdevPower'] = sdevPower;
|
|
}
|
|
if (avgSpeed != null || !forView) {
|
|
map['avgSpeed'] = avgSpeed;
|
|
}
|
|
if (avgSpeedByMeasurements != null || !forView) {
|
|
map['avgSpeedByMeasurements'] = avgSpeedByMeasurements;
|
|
}
|
|
if (avgSpeedBySpeed != null || !forView) {
|
|
map['avgSpeedBySpeed'] = avgSpeedBySpeed;
|
|
}
|
|
if (avgSpeedByDistance != null || !forView) {
|
|
map['avgSpeedByDistance'] = avgSpeedByDistance;
|
|
}
|
|
if (minSpeed != null || !forView) {
|
|
map['minSpeed'] = minSpeed;
|
|
}
|
|
if (maxSpeed != null || !forView) {
|
|
map['maxSpeed'] = maxSpeed;
|
|
}
|
|
if (sdevSpeed != null || !forView) {
|
|
map['sdevSpeed'] = sdevSpeed;
|
|
}
|
|
if (sdevPace != null || !forView) {
|
|
map['sdevPace'] = sdevPace;
|
|
}
|
|
if (distance != null || !forView) {
|
|
map['distance'] = distance;
|
|
}
|
|
if (avgHeartRate != null || !forView) {
|
|
map['avgHeartRate'] = avgHeartRate;
|
|
}
|
|
if (minHeartRate != null || !forView) {
|
|
map['minHeartRate'] = minHeartRate;
|
|
}
|
|
if (maxHeartRate != null || !forView) {
|
|
map['maxHeartRate'] = maxHeartRate;
|
|
}
|
|
if (sdevHeartRate != null || !forView) {
|
|
map['sdevHeartRate'] = sdevHeartRate;
|
|
}
|
|
if (avgCadence != null || !forView) {
|
|
map['avgCadence'] = avgCadence;
|
|
}
|
|
if (minCadence != null || !forView) {
|
|
map['minCadence'] = minCadence;
|
|
}
|
|
if (maxCadence != null || !forView) {
|
|
map['maxCadence'] = maxCadence;
|
|
}
|
|
if (sdevCadence != null || !forView) {
|
|
map['sdevCadence'] = sdevCadence;
|
|
}
|
|
if (avgStrydCadence != null || !forView) {
|
|
map['avgStrydCadence'] = avgStrydCadence;
|
|
}
|
|
if (minStrydCadence != null || !forView) {
|
|
map['minStrydCadence'] = minStrydCadence;
|
|
}
|
|
if (maxStrydCadence != null || !forView) {
|
|
map['maxStrydCadence'] = maxStrydCadence;
|
|
}
|
|
if (sdevStrydCadence != null || !forView) {
|
|
map['sdevStrydCadence'] = sdevStrydCadence;
|
|
}
|
|
if (avgGroundTime != null || !forView) {
|
|
map['avgGroundTime'] = avgGroundTime;
|
|
}
|
|
if (minGroundTime != null || !forView) {
|
|
map['minGroundTime'] = minGroundTime;
|
|
}
|
|
if (maxGroundTime != null || !forView) {
|
|
map['maxGroundTime'] = maxGroundTime;
|
|
}
|
|
if (sdevGroundTime != null || !forView) {
|
|
map['sdevGroundTime'] = sdevGroundTime;
|
|
}
|
|
if (avgVerticalOscillation != null || !forView) {
|
|
map['avgVerticalOscillation'] = avgVerticalOscillation;
|
|
}
|
|
if (minVerticalOscillation != null || !forView) {
|
|
map['minVerticalOscillation'] = minVerticalOscillation;
|
|
}
|
|
if (maxVerticalOscillation != null || !forView) {
|
|
map['maxVerticalOscillation'] = maxVerticalOscillation;
|
|
}
|
|
if (sdevVerticalOscillation != null || !forView) {
|
|
map['sdevVerticalOscillation'] = sdevVerticalOscillation;
|
|
}
|
|
if (avgFormPower != null || !forView) {
|
|
map['avgFormPower'] = avgFormPower;
|
|
}
|
|
if (maxFormPower != null || !forView) {
|
|
map['maxFormPower'] = maxFormPower;
|
|
}
|
|
if (minFormPower != null || !forView) {
|
|
map['minFormPower'] = minFormPower;
|
|
}
|
|
if (sdevFormPower != null || !forView) {
|
|
map['sdevFormPower'] = sdevFormPower;
|
|
}
|
|
if (avgLegSpringStiffness != null || !forView) {
|
|
map['avgLegSpringStiffness'] = avgLegSpringStiffness;
|
|
}
|
|
if (maxLegSpringStiffness != null || !forView) {
|
|
map['maxLegSpringStiffness'] = maxLegSpringStiffness;
|
|
}
|
|
if (minLegSpringStiffness != null || !forView) {
|
|
map['minLegSpringStiffness'] = minLegSpringStiffness;
|
|
}
|
|
if (sdevLegSpringStiffness != null || !forView) {
|
|
map['sdevLegSpringStiffness'] = sdevLegSpringStiffness;
|
|
}
|
|
if (totalAscent != null || !forView) {
|
|
map['totalAscent'] = totalAscent;
|
|
}
|
|
if (totalDescent != null || !forView) {
|
|
map['totalDescent'] = totalDescent;
|
|
}
|
|
if (cp != null || !forView) {
|
|
map['cp'] = cp;
|
|
}
|
|
if (ftp != null || !forView) {
|
|
map['ftp'] = ftp;
|
|
}
|
|
if (movingTime != null || !forView) {
|
|
map['movingTime'] = movingTime;
|
|
}
|
|
if (firstRecordId != null) {
|
|
map['firstRecordId'] = forView
|
|
? plDbEvent == null
|
|
? firstRecordId
|
|
: plDbEvent!.event
|
|
: firstRecordId;
|
|
} else if (firstRecordId != null || !forView) {
|
|
map['firstRecordId'] = null;
|
|
}
|
|
if (lastRecordId != null) {
|
|
map['lastRecordId'] = forView
|
|
? plDbEvent == null
|
|
? lastRecordId
|
|
: plDbEvent!.event
|
|
: lastRecordId;
|
|
} else if (lastRecordId != null || !forView) {
|
|
map['lastRecordId'] = null;
|
|
}
|
|
if (athletesId != null) {
|
|
map['athletesId'] = forView
|
|
? plDbAthlete == null
|
|
? athletesId
|
|
: plDbAthlete!.state
|
|
: athletesId;
|
|
} else if (athletesId != null || !forView) {
|
|
map['athletesId'] = null;
|
|
}
|
|
if (activitiesId != null) {
|
|
map['activitiesId'] = forView
|
|
? plDbActivity == null
|
|
? activitiesId
|
|
: plDbActivity!.state
|
|
: activitiesId;
|
|
} else if (activitiesId != null || !forView) {
|
|
map['activitiesId'] = null;
|
|
}
|
|
|
|
return map;
|
|
}
|
|
|
|
/// Builds a field map for this [DbInterval], including its child
/// collection (DbIntervalTagging rows) unless [forQuery] is true.
///
/// [forQuery] : DateTime fields are emitted as millisecondsSinceEpoch and
///              the child collection is skipped.
/// [forJson]  : DateTime fields are emitted as String.
/// [forView]  : null-valued fields are omitted, and relation-id fields are
///              replaced with a display value from the preloaded parent
///              object when that parent is loaded.
@override
Future<Map<String, dynamic>> toMapWithChildren(
    [bool forQuery = false,
    bool forJson = false,
    bool forView = false]) async {
  final map = <String, dynamic>{};
  map['id'] = id;
  if (timeStamp != null) {
    map['timeStamp'] = forJson
        ? timeStamp!.toString()
        : forQuery
            ? timeStamp!.millisecondsSinceEpoch
            : timeStamp;
  } else if (timeStamp != null || !forView) {
    // NOTE(review): generated condition; timeStamp is null on this branch,
    // so the test reduces to `!forView`. Same for the relation ids below.
    map['timeStamp'] = null;
  }
  // Scalar columns: emitted as-is; null values are included only when
  // not building a view map.
  if (duration != null || !forView) {
    map['duration'] = duration;
  }
  if (avgPower != null || !forView) {
    map['avgPower'] = avgPower;
  }
  if (minPower != null || !forView) {
    map['minPower'] = minPower;
  }
  if (maxPower != null || !forView) {
    map['maxPower'] = maxPower;
  }
  if (sdevPower != null || !forView) {
    map['sdevPower'] = sdevPower;
  }
  if (avgSpeed != null || !forView) {
    map['avgSpeed'] = avgSpeed;
  }
  if (avgSpeedByMeasurements != null || !forView) {
    map['avgSpeedByMeasurements'] = avgSpeedByMeasurements;
  }
  if (avgSpeedBySpeed != null || !forView) {
    map['avgSpeedBySpeed'] = avgSpeedBySpeed;
  }
  if (avgSpeedByDistance != null || !forView) {
    map['avgSpeedByDistance'] = avgSpeedByDistance;
  }
  if (minSpeed != null || !forView) {
    map['minSpeed'] = minSpeed;
  }
  if (maxSpeed != null || !forView) {
    map['maxSpeed'] = maxSpeed;
  }
  if (sdevSpeed != null || !forView) {
    map['sdevSpeed'] = sdevSpeed;
  }
  if (sdevPace != null || !forView) {
    map['sdevPace'] = sdevPace;
  }
  if (distance != null || !forView) {
    map['distance'] = distance;
  }
  if (avgHeartRate != null || !forView) {
    map['avgHeartRate'] = avgHeartRate;
  }
  if (minHeartRate != null || !forView) {
    map['minHeartRate'] = minHeartRate;
  }
  if (maxHeartRate != null || !forView) {
    map['maxHeartRate'] = maxHeartRate;
  }
  if (sdevHeartRate != null || !forView) {
    map['sdevHeartRate'] = sdevHeartRate;
  }
  if (avgCadence != null || !forView) {
    map['avgCadence'] = avgCadence;
  }
  if (minCadence != null || !forView) {
    map['minCadence'] = minCadence;
  }
  if (maxCadence != null || !forView) {
    map['maxCadence'] = maxCadence;
  }
  if (sdevCadence != null || !forView) {
    map['sdevCadence'] = sdevCadence;
  }
  if (avgStrydCadence != null || !forView) {
    map['avgStrydCadence'] = avgStrydCadence;
  }
  if (minStrydCadence != null || !forView) {
    map['minStrydCadence'] = minStrydCadence;
  }
  if (maxStrydCadence != null || !forView) {
    map['maxStrydCadence'] = maxStrydCadence;
  }
  if (sdevStrydCadence != null || !forView) {
    map['sdevStrydCadence'] = sdevStrydCadence;
  }
  if (avgGroundTime != null || !forView) {
    map['avgGroundTime'] = avgGroundTime;
  }
  if (minGroundTime != null || !forView) {
    map['minGroundTime'] = minGroundTime;
  }
  if (maxGroundTime != null || !forView) {
    map['maxGroundTime'] = maxGroundTime;
  }
  if (sdevGroundTime != null || !forView) {
    map['sdevGroundTime'] = sdevGroundTime;
  }
  if (avgVerticalOscillation != null || !forView) {
    map['avgVerticalOscillation'] = avgVerticalOscillation;
  }
  if (minVerticalOscillation != null || !forView) {
    map['minVerticalOscillation'] = minVerticalOscillation;
  }
  if (maxVerticalOscillation != null || !forView) {
    map['maxVerticalOscillation'] = maxVerticalOscillation;
  }
  if (sdevVerticalOscillation != null || !forView) {
    map['sdevVerticalOscillation'] = sdevVerticalOscillation;
  }
  if (avgFormPower != null || !forView) {
    map['avgFormPower'] = avgFormPower;
  }
  if (maxFormPower != null || !forView) {
    map['maxFormPower'] = maxFormPower;
  }
  if (minFormPower != null || !forView) {
    map['minFormPower'] = minFormPower;
  }
  if (sdevFormPower != null || !forView) {
    map['sdevFormPower'] = sdevFormPower;
  }
  if (avgLegSpringStiffness != null || !forView) {
    map['avgLegSpringStiffness'] = avgLegSpringStiffness;
  }
  if (maxLegSpringStiffness != null || !forView) {
    map['maxLegSpringStiffness'] = maxLegSpringStiffness;
  }
  if (minLegSpringStiffness != null || !forView) {
    map['minLegSpringStiffness'] = minLegSpringStiffness;
  }
  if (sdevLegSpringStiffness != null || !forView) {
    map['sdevLegSpringStiffness'] = sdevLegSpringStiffness;
  }
  if (totalAscent != null || !forView) {
    map['totalAscent'] = totalAscent;
  }
  if (totalDescent != null || !forView) {
    map['totalDescent'] = totalDescent;
  }
  if (cp != null || !forView) {
    map['cp'] = cp;
  }
  if (ftp != null || !forView) {
    map['ftp'] = ftp;
  }
  if (movingTime != null || !forView) {
    map['movingTime'] = movingTime;
  }
  // Relation-id columns: for a view map, replaced by a display value from
  // the preloaded parent when it is available.
  if (firstRecordId != null) {
    map['firstRecordId'] = forView
        ? plDbEvent == null
            ? firstRecordId
            : plDbEvent!.event
        : firstRecordId;
  } else if (firstRecordId != null || !forView) {
    map['firstRecordId'] = null;
  }
  if (lastRecordId != null) {
    map['lastRecordId'] = forView
        ? plDbEvent == null
            ? lastRecordId
            : plDbEvent!.event
        : lastRecordId;
  } else if (lastRecordId != null || !forView) {
    map['lastRecordId'] = null;
  }
  if (athletesId != null) {
    map['athletesId'] = forView
        ? plDbAthlete == null
            ? athletesId
            : plDbAthlete!.state
        : athletesId;
  } else if (athletesId != null || !forView) {
    map['athletesId'] = null;
  }
  if (activitiesId != null) {
    map['activitiesId'] = forView
        ? plDbActivity == null
            ? activitiesId
            : plDbActivity!.state
        : activitiesId;
  } else if (activitiesId != null || !forView) {
    map['activitiesId'] = null;
  }

  // COLLECTIONS (DbInterval)
  if (!forQuery) {
    map['DbIntervalTaggings'] = await getDbIntervalTaggings()!.toMapList();
  }
  // END COLLECTIONS (DbInterval)

  return map;
}
|
|
|
|
/// Returns the JSON encoding of this [DbInterval] (children excluded).
@override
String toJson() => json.encode(toMap(forJson: true));
|
|
|
|
/// Returns the JSON encoding of this [DbInterval] including its child
/// collections.
@override
Future<String> toJsonWithChilds() async =>
    json.encode(await toMapWithChildren(false, true));
|
|
|
|
/// Returns this row's column values (primary key excluded) in the exact
/// column order used by the generated INSERT/UPDATE statements.
///
/// DateTime is flattened to millisecondsSinceEpoch for SQLite storage.
@override
List<dynamic> toArgs() {
  return [
    timeStamp != null ? timeStamp!.millisecondsSinceEpoch : null,
    duration,
    avgPower,
    minPower,
    maxPower,
    sdevPower,
    avgSpeed,
    avgSpeedByMeasurements,
    avgSpeedBySpeed,
    avgSpeedByDistance,
    minSpeed,
    maxSpeed,
    sdevSpeed,
    sdevPace,
    distance,
    avgHeartRate,
    minHeartRate,
    maxHeartRate,
    sdevHeartRate,
    avgCadence,
    minCadence,
    maxCadence,
    sdevCadence,
    avgStrydCadence,
    minStrydCadence,
    maxStrydCadence,
    sdevStrydCadence,
    avgGroundTime,
    minGroundTime,
    maxGroundTime,
    sdevGroundTime,
    avgVerticalOscillation,
    minVerticalOscillation,
    maxVerticalOscillation,
    sdevVerticalOscillation,
    avgFormPower,
    maxFormPower,
    minFormPower,
    sdevFormPower,
    avgLegSpringStiffness,
    maxLegSpringStiffness,
    minLegSpringStiffness,
    sdevLegSpringStiffness,
    totalAscent,
    totalDescent,
    cp,
    ftp,
    movingTime,
    firstRecordId,
    lastRecordId,
    athletesId,
    activitiesId
  ];
}
|
|
|
|
/// Like [toArgs] but with the primary key [id] prepended, matching the
/// column order of the generated INSERT OR REPLACE statement in [upsert].
@override
List<dynamic> toArgsWithIds() {
  return [
    id,
    timeStamp != null ? timeStamp!.millisecondsSinceEpoch : null,
    duration,
    avgPower,
    minPower,
    maxPower,
    sdevPower,
    avgSpeed,
    avgSpeedByMeasurements,
    avgSpeedBySpeed,
    avgSpeedByDistance,
    minSpeed,
    maxSpeed,
    sdevSpeed,
    sdevPace,
    distance,
    avgHeartRate,
    minHeartRate,
    maxHeartRate,
    sdevHeartRate,
    avgCadence,
    minCadence,
    maxCadence,
    sdevCadence,
    avgStrydCadence,
    minStrydCadence,
    maxStrydCadence,
    sdevStrydCadence,
    avgGroundTime,
    minGroundTime,
    maxGroundTime,
    sdevGroundTime,
    avgVerticalOscillation,
    minVerticalOscillation,
    maxVerticalOscillation,
    sdevVerticalOscillation,
    avgFormPower,
    maxFormPower,
    minFormPower,
    sdevFormPower,
    avgLegSpringStiffness,
    maxLegSpringStiffness,
    minLegSpringStiffness,
    sdevLegSpringStiffness,
    totalAscent,
    totalDescent,
    cp,
    ftp,
    movingTime,
    firstRecordId,
    lastRecordId,
    athletesId,
    activitiesId
  ];
}
|
|
|
|
/// Fetches [uri] via HTTP GET and decodes the response body into a list of
/// [DbInterval] objects.
///
/// Returns null (after logging) when the request or decoding fails.
static Future<List<DbInterval>?> fromWebUrl(Uri uri,
    {Map<String, String>? headers}) async {
  try {
    final http.Response response = await http.get(uri, headers: headers);
    final intervals = await fromJson(response.body);
    return intervals;
  } catch (e) {
    debugPrint(
        'SQFENTITY ERROR DbInterval.fromWebUrl: ErrorMessage: ${e.toString()}');
    return null;
  }
}
|
|
|
|
/// POSTs this object's JSON encoding to [uri] and returns the raw response.
Future<http.Response> postUrl(Uri uri, {Map<String, String>? headers}) =>
    http.post(uri, headers: headers, body: toJson());
|
|
|
|
/// Decodes [jsonBody] (a JSON array of objects) into a list of
/// [DbInterval] instances.
///
/// Returns an empty list (after logging the error) when an element cannot
/// be mapped. A [jsonBody] that is not a JSON array still throws, matching
/// the original behavior (the decode happens outside the try block).
static Future<List<DbInterval>> fromJson(String jsonBody) async {
  // json.decode is synchronous; the previous `await` on its result was
  // redundant (await_only_futures).
  final Iterable list = json.decode(jsonBody) as Iterable;
  var objList = <DbInterval>[];
  try {
    objList = list
        .map((dbinterval) =>
            DbInterval.fromMap(dbinterval as Map<String, dynamic>))
        .toList();
  } catch (e) {
    debugPrint(
        'SQFENTITY ERROR DbInterval.fromJson: ErrorMessage: ${e.toString()}');
  }
  return objList;
}
|
|
|
|
/// Converts a list of raw row maps into [DbInterval] objects, optionally
/// preloading related objects.
///
/// [preload]       : also load the DbIntervalTagging child collection.
/// [preloadFields] : restrict preloading to the named pl* fields.
/// [loadParents]   : recursively load parent objects (event, athlete,
///                   activity) regardless of [preloadFields].
/// [setDefaultValues] : apply declared column defaults for absent values.
static Future<List<DbInterval>> fromMapList(List<dynamic> data,
    {bool preload = false,
    List<String>? preloadFields,
    bool loadParents = false,
    List<String>? loadedFields,
    bool setDefaultValues = true}) async {
  final List<DbInterval> objList = <DbInterval>[];
  loadedFields = loadedFields ?? [];
  for (final map in data) {
    final obj = DbInterval.fromMap(map as Map<String, dynamic>,
        setDefaultValues: setDefaultValues);
    // final List<String> _loadedFields = List<String>.from(loadedFields);

    // RELATIONSHIPS PRELOAD CHILD
    if (preload) {
      loadedFields = loadedFields ?? [];
      if (/*!_loadedfields!.contains('intervals.plDbIntervalTaggings') && */ (preloadFields ==
              null ||
          preloadFields.contains('plDbIntervalTaggings'))) {
        /*_loadedfields!.add('intervals.plDbIntervalTaggings'); */ obj
                .plDbIntervalTaggings =
            obj.plDbIntervalTaggings ??
                await obj.getDbIntervalTaggings()!.toList(
                    preload: preload,
                    preloadFields: preloadFields,
                    loadParents: false /*, loadedFields:_loadedFields*/);
      }
    } // END RELATIONSHIPS PRELOAD CHILD

    // RELATIONSHIPS PRELOAD
    // Parent objects are loaded only once (?? keeps an existing value).
    if (preload || loadParents) {
      loadedFields = loadedFields ?? [];
      if ((preloadFields == null ||
          loadParents ||
          preloadFields.contains('plDbEvent'))) {
        obj.plDbEvent =
            obj.plDbEvent ?? await obj.getDbEvent(loadParents: loadParents);
      }
      if ((preloadFields == null ||
          loadParents ||
          preloadFields.contains('plDbEventByLastRecordId'))) {
        obj.plDbEventByLastRecordId = obj.plDbEventByLastRecordId ??
            await obj.getDbEventByLastRecordId(loadParents: loadParents);
      }
      if ((preloadFields == null ||
          loadParents ||
          preloadFields.contains('plDbAthlete'))) {
        obj.plDbAthlete = obj.plDbAthlete ??
            await obj.getDbAthlete(loadParents: loadParents);
      }
      if ((preloadFields == null ||
          loadParents ||
          preloadFields.contains('plDbActivity'))) {
        obj.plDbActivity = obj.plDbActivity ??
            await obj.getDbActivity(loadParents: loadParents);
      }
    } // END RELATIONSHIPS PRELOAD

    objList.add(obj);
  }
  return objList;
}
|
|
|
|
/// Returns the [DbInterval] with the given primary key [id], or null when
/// no such row exists (or [id] is null).
///
/// [preload]       : also load the DbIntervalTagging child collection.
/// [preloadFields] : restrict preloading to the named pl* fields
///                   (only honored when [preload] is true).
/// [loadParents]   : recursively load parent objects (event, athlete,
///                   activity) regardless of [preloadFields].
Future<DbInterval?> getById(int? id,
    {bool preload = false,
    List<String>? preloadFields,
    bool loadParents = false,
    List<String>? loadedFields}) async {
  if (id == null) {
    return null;
  }
  DbInterval? obj;
  final data = await _mnDbInterval.getById([id]);
  // prefer_is_empty: use isNotEmpty instead of `length != 0`.
  // When the query returns no rows, obj simply stays null.
  if (data.isNotEmpty) {
    obj = DbInterval.fromMap(data[0] as Map<String, dynamic>);

    // RELATIONSHIPS PRELOAD CHILD
    if (preload) {
      loadedFields = loadedFields ?? [];
      if (/*!_loadedfields!.contains('intervals.plDbIntervalTaggings') && */ (preloadFields ==
              null ||
          preloadFields.contains('plDbIntervalTaggings'))) {
        /*_loadedfields!.add('intervals.plDbIntervalTaggings'); */ obj
                .plDbIntervalTaggings =
            obj.plDbIntervalTaggings ??
                await obj.getDbIntervalTaggings()!.toList(
                    preload: preload,
                    preloadFields: preloadFields,
                    loadParents: false /*, loadedFields:_loadedFields*/);
      }
    } // END RELATIONSHIPS PRELOAD CHILD

    // RELATIONSHIPS PRELOAD
    // Parent objects are loaded only once (?? keeps an existing value).
    if (preload || loadParents) {
      loadedFields = loadedFields ?? [];
      if ((preloadFields == null ||
          loadParents ||
          preloadFields.contains('plDbEvent'))) {
        obj.plDbEvent =
            obj.plDbEvent ?? await obj.getDbEvent(loadParents: loadParents);
      }
      if ((preloadFields == null ||
          loadParents ||
          preloadFields.contains('plDbEventByLastRecordId'))) {
        obj.plDbEventByLastRecordId = obj.plDbEventByLastRecordId ??
            await obj.getDbEventByLastRecordId(loadParents: loadParents);
      }
      if ((preloadFields == null ||
          loadParents ||
          preloadFields.contains('plDbAthlete'))) {
        obj.plDbAthlete = obj.plDbAthlete ??
            await obj.getDbAthlete(loadParents: loadParents);
      }
      if ((preloadFields == null ||
          loadParents ||
          preloadFields.contains('plDbActivity'))) {
        obj.plDbActivity = obj.plDbActivity ??
            await obj.getDbActivity(loadParents: loadParents);
      }
    } // END RELATIONSHIPS PRELOAD
  }
  return obj;
}
|
|
|
|
/// Saves this [DbInterval]: inserts a new row when [id] is null/0 and
/// returns the new id; otherwise updates the existing row.
///
/// Set [ignoreBatch] to false when calling between batchStart and
/// batchCommit so the statement joins the open batch.
@override
Future<int?> save({bool ignoreBatch = true}) async {
  final isNewRecord = id == null || id == 0;
  if (isNewRecord) {
    id = await _mnDbInterval.insert(this, ignoreBatch);
  } else {
    await _mnDbInterval.update(this);
  }
  return id;
}
|
|
|
|
/// Saves this [DbInterval], throwing on failure: inserts a new row when
/// [id] is null/0 (and marks [isInsert] so [rollbackPk] can undo the id),
/// otherwise updates the existing row.
///
/// Set [ignoreBatch] to false when calling between batchStart and
/// batchCommit so the statement joins the open batch.
@override
Future<int?> saveOrThrow({bool ignoreBatch = true}) async {
  final isNewRecord = id == null || id == 0;
  if (isNewRecord) {
    id = await _mnDbInterval.insertOrThrow(this, ignoreBatch);
    isInsert = true;
  } else {
    // id= await _upsert(); // removed in sqfentity_gen 1.3.0+6
    await _mnDbInterval.updateOrThrow(this);
  }
  return id;
}
|
|
|
|
/// Saves a copy of this [DbInterval] as a brand-new row.
///
/// Clearing [id] forces [save] down its insert path; returns the new
/// primary key value.
@override
Future<int?> saveAs({bool ignoreBatch = true}) async {
  id = null;

  return save(ignoreBatch: ignoreBatch);
}
|
|
|
|
/// Saves the given List<DbInterval> as a bulk operation in one
/// transaction. Returns a List of commit results.
///
/// If no batch is already open, this call opens one, commits it at the
/// end, and backfills the newly generated row ids onto the saved objects
/// (batch results arrive in insertion order). When a batch is already
/// open, the commit — and the id backfill — is left to the outer owner.
static Future<List<dynamic>> saveAll(List<DbInterval> dbintervals,
    {bool? exclusive, bool? noResult, bool? continueOnError}) async {
  List<dynamic>? result = [];
  // If there is no open transaction, start one
  final isStartedBatch = await DbEncrateia().batchStart();
  for (final obj in dbintervals) {
    await obj.save(ignoreBatch: false);
  }
  if (!isStartedBatch) {
    result = await DbEncrateia().batchCommit(
        exclusive: exclusive,
        noResult: noResult,
        continueOnError: continueOnError);
    for (int i = 0; i < dbintervals.length; i++) {
      if (dbintervals[i].id == null) {
        dbintervals[i].id = result![i] as int;
      }
    }
  }
  return result!;
}
|
|
|
|
/// Inserts or replaces this row via `INSERT OR REPLACE` using the current
/// [id] (updates when the id already exists, inserts otherwise).
///
/// Sets [saveResult] to describe success or failure; returns [id], or
/// null when the statement threw. The VALUES order below must match
/// [toArgsWithIds].
@override
Future<int?> upsert({bool ignoreBatch = true}) async {
  try {
    final result = await _mnDbInterval.rawInsert(
        'INSERT OR REPLACE INTO intervals (id, timeStamp, duration, avgPower, minPower, maxPower, sdevPower, avgSpeed, avgSpeedByMeasurements, avgSpeedBySpeed, avgSpeedByDistance, minSpeed, maxSpeed, sdevSpeed, sdevPace, distance, avgHeartRate, minHeartRate, maxHeartRate, sdevHeartRate, avgCadence, minCadence, maxCadence, sdevCadence, avgStrydCadence, minStrydCadence, maxStrydCadence, sdevStrydCadence, avgGroundTime, minGroundTime, maxGroundTime, sdevGroundTime, avgVerticalOscillation, minVerticalOscillation, maxVerticalOscillation, sdevVerticalOscillation, avgFormPower, maxFormPower, minFormPower, sdevFormPower, avgLegSpringStiffness, maxLegSpringStiffness, minLegSpringStiffness, sdevLegSpringStiffness, totalAscent, totalDescent, cp, ftp, movingTime, firstRecordId, lastRecordId, athletesId, activitiesId) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)',
        [
          id,
          timeStamp != null ? timeStamp!.millisecondsSinceEpoch : null,
          duration,
          avgPower,
          minPower,
          maxPower,
          sdevPower,
          avgSpeed,
          avgSpeedByMeasurements,
          avgSpeedBySpeed,
          avgSpeedByDistance,
          minSpeed,
          maxSpeed,
          sdevSpeed,
          sdevPace,
          distance,
          avgHeartRate,
          minHeartRate,
          maxHeartRate,
          sdevHeartRate,
          avgCadence,
          minCadence,
          maxCadence,
          sdevCadence,
          avgStrydCadence,
          minStrydCadence,
          maxStrydCadence,
          sdevStrydCadence,
          avgGroundTime,
          minGroundTime,
          maxGroundTime,
          sdevGroundTime,
          avgVerticalOscillation,
          minVerticalOscillation,
          maxVerticalOscillation,
          sdevVerticalOscillation,
          avgFormPower,
          maxFormPower,
          minFormPower,
          sdevFormPower,
          avgLegSpringStiffness,
          maxLegSpringStiffness,
          minLegSpringStiffness,
          sdevLegSpringStiffness,
          totalAscent,
          totalDescent,
          cp,
          ftp,
          movingTime,
          firstRecordId,
          lastRecordId,
          athletesId,
          activitiesId
        ],
        ignoreBatch);
    if (result! > 0) {
      saveResult = BoolResult(
          success: true,
          successMessage: 'DbInterval id=$id updated successfully');
    } else {
      saveResult = BoolResult(
          success: false, errorMessage: 'DbInterval id=$id did not update');
    }
    return id;
  } catch (e) {
    saveResult = BoolResult(
        success: false,
        errorMessage: 'DbInterval Save failed. Error: ${e.toString()}');
    return null;
  }
}
|
|
|
|
/// Inserts or replaces the sent List<DbInterval> as a bulk operation in
/// one transaction. Returns a BoolCommitResult.
///
/// Faster than [saveAll]; use it when you are sure every primary key is
/// greater than zero. Argument values come from each object's
/// toArgsWithIds(), whose order must match the VALUES list below.
@override
Future<BoolCommitResult> upsertAll(List<DbInterval> dbintervals,
    {bool? exclusive, bool? noResult, bool? continueOnError}) async {
  final results = await _mnDbInterval.rawInsertAll(
      'INSERT OR REPLACE INTO intervals (id, timeStamp, duration, avgPower, minPower, maxPower, sdevPower, avgSpeed, avgSpeedByMeasurements, avgSpeedBySpeed, avgSpeedByDistance, minSpeed, maxSpeed, sdevSpeed, sdevPace, distance, avgHeartRate, minHeartRate, maxHeartRate, sdevHeartRate, avgCadence, minCadence, maxCadence, sdevCadence, avgStrydCadence, minStrydCadence, maxStrydCadence, sdevStrydCadence, avgGroundTime, minGroundTime, maxGroundTime, sdevGroundTime, avgVerticalOscillation, minVerticalOscillation, maxVerticalOscillation, sdevVerticalOscillation, avgFormPower, maxFormPower, minFormPower, sdevFormPower, avgLegSpringStiffness, maxLegSpringStiffness, minLegSpringStiffness, sdevLegSpringStiffness, totalAscent, totalDescent, cp, ftp, movingTime, firstRecordId, lastRecordId, athletesId, activitiesId) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)',
      dbintervals,
      exclusive: exclusive,
      noResult: noResult,
      continueOnError: continueOnError);
  return results;
}
|
|
|
|
/// Deletes this DbInterval, first cascading the delete to its child
/// DbIntervalTagging rows.
///
/// Returns BoolResult: res.success = true (deleted) / false (could not be
/// deleted — e.g. the child cascade failed, in which case the parent row
/// is left untouched).
@override
Future<BoolResult> delete([bool hardDelete = false]) async {
  debugPrint('SQFENTITIY: delete DbInterval invoked (id=$id)');
  var result = BoolResult(success: false);
  {
    // Cascade: remove tagging rows that reference this interval.
    result = await DbIntervalTagging()
        .select()
        .intervalsId
        .equals(id)
        .and
        .delete(hardDelete);
  }
  if (!result.success) {
    return result;
  }
  if (!_softDeleteActivated || hardDelete) {
    return _mnDbInterval
        .delete(QueryParams(whereString: 'id=?', whereArguments: [id]));
  } else {
    // Soft delete: flag the row instead of removing it.
    return _mnDbInterval.updateBatch(
        QueryParams(whereString: 'id=?', whereArguments: [id]),
        {'isDeleted': 1});
  }
}
|
|
|
|
/// Always throws [UnimplementedError]: soft-deleting is disabled for this
/// table (useSoftDeleting is false), so there is nothing to recover.
@override
Future<BoolResult> recover([bool recoverChilds = true]) {
  throw UnimplementedError(
      'set useSoftDeleting:true in the table definition of [DbInterval] to use this feature');
}
|
|
|
|
/// Starts a fluent SELECT query on the intervals table.
///
/// [columnsToSelect] limits the returned columns; [getIsDeleted] includes
/// soft-deleted rows when the feature is active.
@override
DbIntervalFilterBuilder select(
    {List<String>? columnsToSelect, bool? getIsDeleted}) {
  final builder = DbIntervalFilterBuilder(this, getIsDeleted);
  builder.qparams.selectColumns = columnsToSelect;
  return builder;
}
|
|
|
|
/// Starts a fluent SELECT DISTINCT query on the intervals table.
///
/// [columnsToSelect] limits the returned columns; [getIsDeleted] includes
/// soft-deleted rows when the feature is active.
@override
DbIntervalFilterBuilder distinct(
    {List<String>? columnsToSelect, bool? getIsDeleted}) {
  final builder = DbIntervalFilterBuilder(this, getIsDeleted);
  builder.qparams.selectColumns = columnsToSelect;
  builder.qparams.distinct = true;
  return builder;
}
|
|
|
|
/// Applies the declared column defaults to any relation-id field that is
/// still null (the generated schema defaults these foreign keys to 0).
void _setDefaultValues() {
  firstRecordId ??= 0;
  lastRecordId ??= 0;
  athletesId ??= 0;
  activitiesId ??= 0;
}
|
|
|
|
/// Rolls back the primary key after a failed batch commit: clears [id]
/// only when this object was inserted (not updated) during the batch,
/// so a retry will insert again instead of updating a phantom row.
@override
void rollbackPk() {
  if (isInsert == true) {
    id = null;
  }
}
|
|
|
|
// END METHODS
|
|
// BEGIN CUSTOM CODE
|
|
/*
|
|
you can define customCode property of your SqfEntityTable constant. For example:
|
|
const tablePerson = SqfEntityTable(
|
|
tableName: 'person',
|
|
primaryKeyName: 'id',
|
|
primaryKeyType: PrimaryKeyType.integer_auto_incremental,
|
|
fields: [
|
|
SqfEntityField('firstName', DbType.text),
|
|
SqfEntityField('lastName', DbType.text),
|
|
],
|
|
customCode: '''
|
|
String fullName()
|
|
{
|
|
return '$firstName $lastName';
|
|
}
|
|
''');
|
|
*/
|
|
// END CUSTOM CODE
|
|
}
|
|
// endregion dbinterval
|
|
|
|
// region DbIntervalField
|
|
/// Fluent filter field for [DbInterval] queries.
///
/// Each matcher delegates to the shared [FilterBase] implementation and
/// narrows the return type to [DbIntervalFilterBuilder] so call chains
/// keep their concrete builder type.
class DbIntervalField extends FilterBase {
  DbIntervalField(DbIntervalFilterBuilder dbintervalFB) : super(dbintervalFB);

  @override
  DbIntervalFilterBuilder equals(dynamic pValue) =>
      super.equals(pValue) as DbIntervalFilterBuilder;

  @override
  DbIntervalFilterBuilder equalsOrNull(dynamic pValue) =>
      super.equalsOrNull(pValue) as DbIntervalFilterBuilder;

  @override
  DbIntervalFilterBuilder isNull() =>
      super.isNull() as DbIntervalFilterBuilder;

  @override
  DbIntervalFilterBuilder contains(dynamic pValue) =>
      super.contains(pValue) as DbIntervalFilterBuilder;

  @override
  DbIntervalFilterBuilder startsWith(dynamic pValue) =>
      super.startsWith(pValue) as DbIntervalFilterBuilder;

  @override
  DbIntervalFilterBuilder endsWith(dynamic pValue) =>
      super.endsWith(pValue) as DbIntervalFilterBuilder;

  @override
  DbIntervalFilterBuilder between(dynamic pFirst, dynamic pLast) =>
      super.between(pFirst, pLast) as DbIntervalFilterBuilder;

  @override
  DbIntervalFilterBuilder greaterThan(dynamic pValue) =>
      super.greaterThan(pValue) as DbIntervalFilterBuilder;

  @override
  DbIntervalFilterBuilder lessThan(dynamic pValue) =>
      super.lessThan(pValue) as DbIntervalFilterBuilder;

  @override
  DbIntervalFilterBuilder greaterThanOrEquals(dynamic pValue) =>
      super.greaterThanOrEquals(pValue) as DbIntervalFilterBuilder;

  @override
  DbIntervalFilterBuilder lessThanOrEquals(dynamic pValue) =>
      super.lessThanOrEquals(pValue) as DbIntervalFilterBuilder;

  @override
  DbIntervalFilterBuilder inValues(dynamic pValue) =>
      super.inValues(pValue) as DbIntervalFilterBuilder;

  /// Negates the next comparison.
  @override
  DbIntervalField get not => super.not as DbIntervalField;
}
|
|
// endregion DbIntervalField
|
|
|
|
// region DbIntervalFilterBuilder
|
|
class DbIntervalFilterBuilder extends ConjunctionBase {
|
|
/// Binds this builder to [obj]'s table manager and soft-delete setting.
DbIntervalFilterBuilder(DbInterval obj, bool? getIsDeleted)
    : super(obj, getIsDeleted) {
  _mnDbInterval = obj._mnDbInterval;
  _softDeleteActivated = obj.softDeleteActivated;
}

// Whether soft-deleting is active for the bound table (copied from obj).
bool _softDeleteActivated = false;
// Table manager used to execute the built query.
DbIntervalManager? _mnDbInterval;
|
|
|
|
/// Appends the SQL keyword 'AND' to the filter expression.
@override
DbIntervalFilterBuilder get and {
  super.and;
  return this;
}
|
|
|
|
/// Appends the SQL keyword 'OR' to the filter expression.
@override
DbIntervalFilterBuilder get or {
  super.or;
  return this;
}
|
|
|
|
/// Opens a parenthesized group in the filter expression.
@override
DbIntervalFilterBuilder get startBlock {
  super.startBlock;
  return this;
}
|
|
|
|
/// Adds a raw WHERE criteria without the 'where' keyword, e.g.
/// 'field1 like 'test%' and field2 = 3'; [parameterValue] binds a value.
@override
DbIntervalFilterBuilder where(String? whereCriteria,
    {dynamic parameterValue}) {
  super.where(whereCriteria, parameterValue: parameterValue);
  return this;
}
|
|
|
|
/// Paginates results: [page] is the 1-based page number, [pagesize] the
/// number of rows per page.
@override
DbIntervalFilterBuilder page(int page, int pagesize) {
  super.page(page, pagesize);
  return this;
}
|
|
|
|
/// Limits the result set to [count] rows (SQL LIMIT).
@override
DbIntervalFilterBuilder top(int count) {
  super.top(count);
  return this;
}
|
|
|
|
/// Closes a parenthesized group opened with [startBlock].
@override
DbIntervalFilterBuilder get endBlock {
  super.endBlock;
  return this;
}
|
|
|
|
/// Sorts ascending by [argFields], which may be a String or List<String>.
/// Example 1: argFields='name, date'
/// Example 2: argFields = ['name', 'date']
@override
DbIntervalFilterBuilder orderBy(dynamic argFields) {
  super.orderBy(argFields);
  return this;
}
|
|
|
|
/// Sorts descending by [argFields], which may be a String or List<String>.
/// Example 1: argFields='field1, field2'
/// Example 2: argFields = ['field1', 'field2']
@override
DbIntervalFilterBuilder orderByDesc(dynamic argFields) {
  super.orderByDesc(argFields);
  return this;
}
|
|
|
|
/// Groups results by [argFields], which may be a String or List<String>.
/// Example 1: argFields='field1, field2'
/// Example 2: argFields = ['field1', 'field2']
@override
DbIntervalFilterBuilder groupBy(dynamic argFields) {
  super.groupBy(argFields);
  return this;
}
|
|
|
|
/// Adds a HAVING clause from [argFields], which may be a String or
/// List<String>.
/// Example 1: argFields='name, date'
/// Example 2: argFields = ['name', 'date']
@override
DbIntervalFilterBuilder having(dynamic argFields) {
  super.having(argFields);
  return this;
}
|
|
|
|
/// Returns a fresh filter object bound to column [colName] of type [dbtype],
/// chained to this builder (the [field] argument is unused by design —
/// each access creates a new filter so criteria are not shared).
DbIntervalField _setField(
        DbIntervalField? field, String colName, DbType dbtype) =>
    DbIntervalField(this)
      ..param = DbParameter(
          dbType: dbtype, columnName: colName, wStartBlock: openedBlock);

DbIntervalField? _id;
DbIntervalField get id => _id = _setField(_id, 'id', DbType.integer);

DbIntervalField? _timeStamp;
DbIntervalField get timeStamp =>
    _timeStamp = _setField(_timeStamp, 'timeStamp', DbType.datetime);

DbIntervalField? _duration;
DbIntervalField get duration =>
    _duration = _setField(_duration, 'duration', DbType.integer);

DbIntervalField? _avgPower;
DbIntervalField get avgPower =>
    _avgPower = _setField(_avgPower, 'avgPower', DbType.real);

DbIntervalField? _minPower;
DbIntervalField get minPower =>
    _minPower = _setField(_minPower, 'minPower', DbType.integer);

DbIntervalField? _maxPower;
DbIntervalField get maxPower =>
    _maxPower = _setField(_maxPower, 'maxPower', DbType.integer);

DbIntervalField? _sdevPower;
DbIntervalField get sdevPower =>
    _sdevPower = _setField(_sdevPower, 'sdevPower', DbType.real);

DbIntervalField? _avgSpeed;
DbIntervalField get avgSpeed =>
    _avgSpeed = _setField(_avgSpeed, 'avgSpeed', DbType.real);

DbIntervalField? _avgSpeedByMeasurements;
DbIntervalField get avgSpeedByMeasurements => _avgSpeedByMeasurements =
    _setField(_avgSpeedByMeasurements, 'avgSpeedByMeasurements', DbType.real);

DbIntervalField? _avgSpeedBySpeed;
DbIntervalField get avgSpeedBySpeed => _avgSpeedBySpeed =
    _setField(_avgSpeedBySpeed, 'avgSpeedBySpeed', DbType.real);

DbIntervalField? _avgSpeedByDistance;
DbIntervalField get avgSpeedByDistance => _avgSpeedByDistance =
    _setField(_avgSpeedByDistance, 'avgSpeedByDistance', DbType.real);

DbIntervalField? _minSpeed;
DbIntervalField get minSpeed =>
    _minSpeed = _setField(_minSpeed, 'minSpeed', DbType.real);

DbIntervalField? _maxSpeed;
DbIntervalField get maxSpeed =>
    _maxSpeed = _setField(_maxSpeed, 'maxSpeed', DbType.real);

DbIntervalField? _sdevSpeed;
DbIntervalField get sdevSpeed =>
    _sdevSpeed = _setField(_sdevSpeed, 'sdevSpeed', DbType.real);

DbIntervalField? _sdevPace;
DbIntervalField get sdevPace =>
    _sdevPace = _setField(_sdevPace, 'sdevPace', DbType.real);

DbIntervalField? _distance;
DbIntervalField get distance =>
    _distance = _setField(_distance, 'distance', DbType.integer);

DbIntervalField? _avgHeartRate;
DbIntervalField get avgHeartRate =>
    _avgHeartRate = _setField(_avgHeartRate, 'avgHeartRate', DbType.integer);

DbIntervalField? _minHeartRate;
DbIntervalField get minHeartRate =>
    _minHeartRate = _setField(_minHeartRate, 'minHeartRate', DbType.integer);

DbIntervalField? _maxHeartRate;
DbIntervalField get maxHeartRate =>
    _maxHeartRate = _setField(_maxHeartRate, 'maxHeartRate', DbType.integer);

DbIntervalField? _sdevHeartRate;
DbIntervalField get sdevHeartRate =>
    _sdevHeartRate = _setField(_sdevHeartRate, 'sdevHeartRate', DbType.real);

DbIntervalField? _avgCadence;
DbIntervalField get avgCadence =>
    _avgCadence = _setField(_avgCadence, 'avgCadence', DbType.real);

DbIntervalField? _minCadence;
DbIntervalField get minCadence =>
    _minCadence = _setField(_minCadence, 'minCadence', DbType.real);

DbIntervalField? _maxCadence;
DbIntervalField get maxCadence =>
    _maxCadence = _setField(_maxCadence, 'maxCadence', DbType.real);

DbIntervalField? _sdevCadence;
DbIntervalField get sdevCadence =>
    _sdevCadence = _setField(_sdevCadence, 'sdevCadence', DbType.real);

DbIntervalField? _avgStrydCadence;
DbIntervalField get avgStrydCadence => _avgStrydCadence =
    _setField(_avgStrydCadence, 'avgStrydCadence', DbType.real);

DbIntervalField? _minStrydCadence;
DbIntervalField get minStrydCadence => _minStrydCadence =
    _setField(_minStrydCadence, 'minStrydCadence', DbType.real);

DbIntervalField? _maxStrydCadence;
DbIntervalField get maxStrydCadence => _maxStrydCadence =
    _setField(_maxStrydCadence, 'maxStrydCadence', DbType.real);

DbIntervalField? _sdevStrydCadence;
DbIntervalField get sdevStrydCadence => _sdevStrydCadence =
    _setField(_sdevStrydCadence, 'sdevStrydCadence', DbType.real);

DbIntervalField? _avgGroundTime;
DbIntervalField get avgGroundTime =>
    _avgGroundTime = _setField(_avgGroundTime, 'avgGroundTime', DbType.real);

DbIntervalField? _minGroundTime;
DbIntervalField get minGroundTime =>
    _minGroundTime = _setField(_minGroundTime, 'minGroundTime', DbType.real);

DbIntervalField? _maxGroundTime;
DbIntervalField get maxGroundTime =>
    _maxGroundTime = _setField(_maxGroundTime, 'maxGroundTime', DbType.real);

DbIntervalField? _sdevGroundTime;
DbIntervalField get sdevGroundTime =>
    _sdevGroundTime = _setField(_sdevGroundTime, 'sdevGroundTime', DbType.real);

DbIntervalField? _avgVerticalOscillation;
DbIntervalField get avgVerticalOscillation => _avgVerticalOscillation =
    _setField(_avgVerticalOscillation, 'avgVerticalOscillation', DbType.real);

DbIntervalField? _minVerticalOscillation;
DbIntervalField get minVerticalOscillation => _minVerticalOscillation =
    _setField(_minVerticalOscillation, 'minVerticalOscillation', DbType.real);

DbIntervalField? _maxVerticalOscillation;
DbIntervalField get maxVerticalOscillation => _maxVerticalOscillation =
    _setField(_maxVerticalOscillation, 'maxVerticalOscillation', DbType.real);

DbIntervalField? _sdevVerticalOscillation;
DbIntervalField get sdevVerticalOscillation => _sdevVerticalOscillation =
    _setField(_sdevVerticalOscillation, 'sdevVerticalOscillation', DbType.real);

DbIntervalField? _avgFormPower;
DbIntervalField get avgFormPower =>
    _avgFormPower = _setField(_avgFormPower, 'avgFormPower', DbType.real);

DbIntervalField? _maxFormPower;
DbIntervalField get maxFormPower =>
    _maxFormPower = _setField(_maxFormPower, 'maxFormPower', DbType.integer);

DbIntervalField? _minFormPower;
DbIntervalField get minFormPower =>
    _minFormPower = _setField(_minFormPower, 'minFormPower', DbType.integer);

DbIntervalField? _sdevFormPower;
DbIntervalField get sdevFormPower =>
    _sdevFormPower = _setField(_sdevFormPower, 'sdevFormPower', DbType.real);

DbIntervalField? _avgLegSpringStiffness;
DbIntervalField get avgLegSpringStiffness => _avgLegSpringStiffness =
    _setField(_avgLegSpringStiffness, 'avgLegSpringStiffness', DbType.real);

DbIntervalField? _maxLegSpringStiffness;
DbIntervalField get maxLegSpringStiffness => _maxLegSpringStiffness =
    _setField(_maxLegSpringStiffness, 'maxLegSpringStiffness', DbType.real);

DbIntervalField? _minLegSpringStiffness;
DbIntervalField get minLegSpringStiffness => _minLegSpringStiffness =
    _setField(_minLegSpringStiffness, 'minLegSpringStiffness', DbType.real);

DbIntervalField? _sdevLegSpringStiffness;
DbIntervalField get sdevLegSpringStiffness => _sdevLegSpringStiffness =
    _setField(_sdevLegSpringStiffness, 'sdevLegSpringStiffness', DbType.real);

DbIntervalField? _totalAscent;
DbIntervalField get totalAscent =>
    _totalAscent = _setField(_totalAscent, 'totalAscent', DbType.integer);

DbIntervalField? _totalDescent;
DbIntervalField get totalDescent =>
    _totalDescent = _setField(_totalDescent, 'totalDescent', DbType.integer);

DbIntervalField? _cp;
DbIntervalField get cp => _cp = _setField(_cp, 'cp', DbType.real);

DbIntervalField? _ftp;
DbIntervalField get ftp => _ftp = _setField(_ftp, 'ftp', DbType.real);

DbIntervalField? _movingTime;
DbIntervalField get movingTime =>
    _movingTime = _setField(_movingTime, 'movingTime', DbType.integer);

DbIntervalField? _firstRecordId;
DbIntervalField get firstRecordId =>
    _firstRecordId = _setField(_firstRecordId, 'firstRecordId', DbType.integer);

DbIntervalField? _lastRecordId;
DbIntervalField get lastRecordId =>
    _lastRecordId = _setField(_lastRecordId, 'lastRecordId', DbType.integer);

DbIntervalField? _athletesId;
DbIntervalField get athletesId =>
    _athletesId = _setField(_athletesId, 'athletesId', DbType.integer);

DbIntervalField? _activitiesId;
DbIntervalField get activitiesId =>
    _activitiesId = _setField(_activitiesId, 'activitiesId', DbType.integer);
/// Deletes all [DbInterval] rows matching this query.
///
/// Child [DbIntervalTagging] rows referencing the matched intervals are
/// removed first (DeleteRule.CASCADE); when that step fails, its failed
/// result is returned and the intervals are left untouched.
///
/// Returns a [BoolResult]: res.success = true (deleted),
/// false (could not be deleted).
@override
Future<BoolResult> delete([bool hardDelete = false]) async {
  buildParameters();
  var result = BoolResult(success: false);

  // Cascade: delete DbIntervalTagging children whose intervalsId is among
  // the primary keys selected by this query.
  final pkQuery = toListPrimaryKeySQL(false);
  final childResult = await DbIntervalTagging()
      .select()
      .where('intervalsId IN (${pkQuery['sql']})',
          parameterValue: pkQuery['args'])
      .delete(hardDelete);
  if (!childResult.success) {
    return childResult;
  }

  if (_softDeleteActivated && !hardDelete) {
    // Soft delete: mark rows as deleted instead of removing them.
    result = await _mnDbInterval!.updateBatch(qparams, {'isDeleted': 1});
  } else {
    result = await _mnDbInterval!.delete(qparams);
  }
  return result;
}
/// Updates all rows matching this query in a single batch.
///
/// Usage: update({'fieldName': value}); keys must be column names,
/// values may be any supported type (int, bool, String, ...).
@override
Future<BoolResult> update(Map<String, dynamic> values) {
  buildParameters();
  // SQLite cannot combine UPDATE with LIMIT/OFFSET directly, so rewrite
  // the filter as an id-subquery when paging is in effect.
  if (qparams.limit! > 0 || qparams.offset! > 0) {
    final whereClause =
        qparams.whereString!.isNotEmpty ? 'WHERE ${qparams.whereString}' : '';
    final limitClause = qparams.limit! > 0 ? ' LIMIT ${qparams.limit}' : '';
    final offsetClause = qparams.offset! > 0 ? ' OFFSET ${qparams.offset}' : '';
    qparams.whereString =
        'id IN (SELECT id from intervals $whereClause$limitClause$offsetClause)';
  }
  return _mnDbInterval!.updateBatch(qparams, values);
}
/// Returns the first [DbInterval] matching this query, or null when none.
///
/// [preload]: when true, also loads related child objects.
/// [preloadFields]: restricts preloading to the listed relation names
/// (only honoured when [preload] is true).
/// [loadParents]: when true, loads parent objects recursively.
///
/// <returns> DbInterval?
@override
Future<DbInterval?> toSingle(
    {bool preload = false,
    List<String>? preloadFields,
    bool loadParents = false,
    List<String>? loadedFields}) async {
  buildParameters(pSize: 1);
  final rows = await _mnDbInterval!.toList(qparams);
  if (rows.isEmpty) {
    return null;
  }
  final obj = DbInterval.fromMap(rows[0] as Map<String, dynamic>);

  // RELATIONSHIPS PRELOAD CHILD
  if (preload) {
    loadedFields = loadedFields ?? [];
    if (preloadFields == null ||
        preloadFields.contains('plDbIntervalTaggings')) {
      obj.plDbIntervalTaggings = obj.plDbIntervalTaggings ??
          await obj.getDbIntervalTaggings()!.toList(
              preload: preload,
              preloadFields: preloadFields,
              loadParents: false);
    }
  } // END RELATIONSHIPS PRELOAD CHILD

  // RELATIONSHIPS PRELOAD (parents)
  if (preload || loadParents) {
    loadedFields = loadedFields ?? [];
    if (preloadFields == null ||
        loadParents ||
        preloadFields.contains('plDbEvent')) {
      obj.plDbEvent =
          obj.plDbEvent ?? await obj.getDbEvent(loadParents: loadParents);
    }
    if (preloadFields == null ||
        loadParents ||
        preloadFields.contains('plDbEventByLastRecordId')) {
      obj.plDbEventByLastRecordId = obj.plDbEventByLastRecordId ??
          await obj.getDbEventByLastRecordId(loadParents: loadParents);
    }
    if (preloadFields == null ||
        loadParents ||
        preloadFields.contains('plDbAthlete')) {
      obj.plDbAthlete =
          obj.plDbAthlete ?? await obj.getDbAthlete(loadParents: loadParents);
    }
    if (preloadFields == null ||
        loadParents ||
        preloadFields.contains('plDbActivity')) {
      obj.plDbActivity =
          obj.plDbActivity ?? await obj.getDbActivity(loadParents: loadParents);
    }
  } // END RELATIONSHIPS PRELOAD

  return obj;
}
/// Returns the first matching [DbInterval], or a freshly constructed
/// default [DbInterval] when no row matches.
///
/// See [toSingle] for the meaning of [preload], [preloadFields] and
/// [loadParents].
@override
Future<DbInterval> toSingleOrDefault(
    {bool preload = false,
    List<String>? preloadFields,
    bool loadParents = false,
    List<String>? loadedFields}) async {
  final single = await toSingle(
      preload: preload,
      preloadFields: preloadFields,
      loadParents: loadParents,
      loadedFields: loadedFields);
  return single ?? DbInterval();
}
/// Returns the number of rows matching this query.
///
/// When [dbintervalCount] is given, it is also invoked with the count.
@override
Future<int> toCount([VoidCallback Function(int c)? dbintervalCount]) async {
  buildParameters();
  qparams.selectColumns = ['COUNT(1) AS CNT'];
  final rows = await _mnDbInterval!.toList(qparams);
  final int count = rows[0]['CNT'] as int;
  dbintervalCount?.call(count);
  return count;
}
/// Returns every [DbInterval] matching this query.
///
/// See [toSingle] for the meaning of [preload], [preloadFields] and
/// [loadParents]. Default values are only applied when no explicit
/// column projection was requested (qparams.selectColumns == null).
@override
Future<List<DbInterval>> toList(
    {bool preload = false,
    List<String>? preloadFields,
    bool loadParents = false,
    List<String>? loadedFields}) async {
  final rows = await toMapList();
  return await DbInterval.fromMapList(rows,
      preload: preload,
      preloadFields: preloadFields,
      loadParents: loadParents,
      loadedFields: loadedFields,
      setDefaultValues: qparams.selectColumns == null);
}
/// Serialises the query result to a JSON array string (without children).
@override
Future<String> toJson() async {
  final rows = await toList();
  return json.encode(<dynamic>[
    for (final o in rows) o.toMap(forJson: true),
  ]);
}
/// Serialises the query result, including related child objects,
/// to a JSON array string.
@override
Future<String> toJsonWithChilds() async {
  final list = <dynamic>[];
  for (final o in await toList()) {
    list.add(await o.toMapWithChildren(false, true));
  }
  return json.encode(list);
}
/// Returns the raw query result as a list of row maps.
///
/// <returns>List<dynamic>
@override
Future<List<dynamic>> toMapList() async {
  buildParameters();
  return _mnDbInterval!.toList(qparams);
}
/// Builds a SELECT statement for the primary keys matching this query.
///
/// retVal['sql'] = SQL statement string,
/// retVal['args'] = whereArguments List<dynamic>.
@override
Map<String, dynamic> toListPrimaryKeySQL([bool buildParams = true]) {
  if (buildParams) {
    buildParameters();
  }
  return <String, dynamic>{
    'sql': 'SELECT `id` FROM intervals WHERE ${qparams.whereString}',
    'args': qparams.whereArguments,
  };
}
/// Returns the primary key ('id') of every row matching this query.
///
/// <returns>List<int>
@override
Future<List<int>> toListPrimaryKey([bool buildParams = true]) async {
  if (buildParams) {
    buildParameters();
  }
  qparams.selectColumns = ['id'];
  final rows = await _mnDbInterval!.toList(qparams);
  return <int>[for (final row in rows) row['id'] as int];
}
/// Returns the raw rows for the selected columns. Use this for 'groupBy'
/// with min/max/avg (see EXAMPLE 4.2 at
/// https://github.com/hhtokpinar/sqfEntity#group-by).
@override
Future<List<dynamic>> toListObject() async {
  buildParameters();
  final rows = await _mnDbInterval!.toList(qparams);
  // Copy into a fresh growable list, mirroring the original behaviour.
  return <dynamic>[...rows];
}
/// Returns the first selected column of each matching row as a String.
///
/// Sample usage:
/// await DbInterval.select(columnsToSelect: ['columnName']).toListString()
/// NOTE: qparams.selectColumns must be set (via columnsToSelect) or this
/// throws at runtime.
@override
Future<List<String>> toListString(
    [VoidCallback Function(List<String> o)? listString]) async {
  buildParameters();
  final rows = await _mnDbInterval!.toList(qparams);
  final column = qparams.selectColumns![0];
  final values = <String>[
    for (final row in rows) row[column].toString(),
  ];
  listString?.call(values);
  return values;
}
|
}
|
|
// endregion DbIntervalFilterBuilder
|
|
|
|
// region DbIntervalFields
|
|
/// Static [TableField] definitions for the columns of the 'intervals'
/// table, for use in strongly-typed queries.
///
/// Each getter lazily creates its [TableField] once and caches it.
class DbIntervalFields {
  static TableField? _fId;
  static TableField get id =>
      _fId ??= SqlSyntax.setField(_fId, 'id', DbType.integer);

  static TableField? _fTimeStamp;
  static TableField get timeStamp =>
      _fTimeStamp ??= SqlSyntax.setField(_fTimeStamp, 'timeStamp', DbType.datetime);

  static TableField? _fDuration;
  static TableField get duration =>
      _fDuration ??= SqlSyntax.setField(_fDuration, 'duration', DbType.integer);

  static TableField? _fAvgPower;
  static TableField get avgPower =>
      _fAvgPower ??= SqlSyntax.setField(_fAvgPower, 'avgPower', DbType.real);

  static TableField? _fMinPower;
  static TableField get minPower =>
      _fMinPower ??= SqlSyntax.setField(_fMinPower, 'minPower', DbType.integer);

  static TableField? _fMaxPower;
  static TableField get maxPower =>
      _fMaxPower ??= SqlSyntax.setField(_fMaxPower, 'maxPower', DbType.integer);

  static TableField? _fSdevPower;
  static TableField get sdevPower =>
      _fSdevPower ??= SqlSyntax.setField(_fSdevPower, 'sdevPower', DbType.real);

  static TableField? _fAvgSpeed;
  static TableField get avgSpeed =>
      _fAvgSpeed ??= SqlSyntax.setField(_fAvgSpeed, 'avgSpeed', DbType.real);

  static TableField? _fAvgSpeedByMeasurements;
  static TableField get avgSpeedByMeasurements =>
      _fAvgSpeedByMeasurements ??= SqlSyntax.setField(
          _fAvgSpeedByMeasurements, 'avgSpeedByMeasurements', DbType.real);

  static TableField? _fAvgSpeedBySpeed;
  static TableField get avgSpeedBySpeed => _fAvgSpeedBySpeed ??=
      SqlSyntax.setField(_fAvgSpeedBySpeed, 'avgSpeedBySpeed', DbType.real);

  static TableField? _fAvgSpeedByDistance;
  static TableField get avgSpeedByDistance =>
      _fAvgSpeedByDistance ??= SqlSyntax.setField(
          _fAvgSpeedByDistance, 'avgSpeedByDistance', DbType.real);

  static TableField? _fMinSpeed;
  static TableField get minSpeed =>
      _fMinSpeed ??= SqlSyntax.setField(_fMinSpeed, 'minSpeed', DbType.real);

  static TableField? _fMaxSpeed;
  static TableField get maxSpeed =>
      _fMaxSpeed ??= SqlSyntax.setField(_fMaxSpeed, 'maxSpeed', DbType.real);

  static TableField? _fSdevSpeed;
  static TableField get sdevSpeed =>
      _fSdevSpeed ??= SqlSyntax.setField(_fSdevSpeed, 'sdevSpeed', DbType.real);

  static TableField? _fSdevPace;
  static TableField get sdevPace =>
      _fSdevPace ??= SqlSyntax.setField(_fSdevPace, 'sdevPace', DbType.real);

  static TableField? _fDistance;
  static TableField get distance =>
      _fDistance ??= SqlSyntax.setField(_fDistance, 'distance', DbType.integer);

  static TableField? _fAvgHeartRate;
  static TableField get avgHeartRate => _fAvgHeartRate ??=
      SqlSyntax.setField(_fAvgHeartRate, 'avgHeartRate', DbType.integer);

  static TableField? _fMinHeartRate;
  static TableField get minHeartRate => _fMinHeartRate ??=
      SqlSyntax.setField(_fMinHeartRate, 'minHeartRate', DbType.integer);

  static TableField? _fMaxHeartRate;
  static TableField get maxHeartRate => _fMaxHeartRate ??=
      SqlSyntax.setField(_fMaxHeartRate, 'maxHeartRate', DbType.integer);

  static TableField? _fSdevHeartRate;
  static TableField get sdevHeartRate => _fSdevHeartRate ??=
      SqlSyntax.setField(_fSdevHeartRate, 'sdevHeartRate', DbType.real);

  static TableField? _fAvgCadence;
  static TableField get avgCadence => _fAvgCadence ??=
      SqlSyntax.setField(_fAvgCadence, 'avgCadence', DbType.real);

  static TableField? _fMinCadence;
  static TableField get minCadence => _fMinCadence ??=
      SqlSyntax.setField(_fMinCadence, 'minCadence', DbType.real);

  static TableField? _fMaxCadence;
  static TableField get maxCadence => _fMaxCadence ??=
      SqlSyntax.setField(_fMaxCadence, 'maxCadence', DbType.real);

  static TableField? _fSdevCadence;
  static TableField get sdevCadence => _fSdevCadence ??=
      SqlSyntax.setField(_fSdevCadence, 'sdevCadence', DbType.real);

  static TableField? _fAvgStrydCadence;
  static TableField get avgStrydCadence => _fAvgStrydCadence ??=
      SqlSyntax.setField(_fAvgStrydCadence, 'avgStrydCadence', DbType.real);

  static TableField? _fMinStrydCadence;
  static TableField get minStrydCadence => _fMinStrydCadence ??=
      SqlSyntax.setField(_fMinStrydCadence, 'minStrydCadence', DbType.real);

  static TableField? _fMaxStrydCadence;
  static TableField get maxStrydCadence => _fMaxStrydCadence ??=
      SqlSyntax.setField(_fMaxStrydCadence, 'maxStrydCadence', DbType.real);

  static TableField? _fSdevStrydCadence;
  static TableField get sdevStrydCadence => _fSdevStrydCadence ??=
      SqlSyntax.setField(_fSdevStrydCadence, 'sdevStrydCadence', DbType.real);

  static TableField? _fAvgGroundTime;
  static TableField get avgGroundTime => _fAvgGroundTime ??=
      SqlSyntax.setField(_fAvgGroundTime, 'avgGroundTime', DbType.real);

  static TableField? _fMinGroundTime;
  static TableField get minGroundTime => _fMinGroundTime ??=
      SqlSyntax.setField(_fMinGroundTime, 'minGroundTime', DbType.real);

  static TableField? _fMaxGroundTime;
  static TableField get maxGroundTime => _fMaxGroundTime ??=
      SqlSyntax.setField(_fMaxGroundTime, 'maxGroundTime', DbType.real);

  static TableField? _fSdevGroundTime;
  static TableField get sdevGroundTime => _fSdevGroundTime ??=
      SqlSyntax.setField(_fSdevGroundTime, 'sdevGroundTime', DbType.real);

  static TableField? _fAvgVerticalOscillation;
  static TableField get avgVerticalOscillation =>
      _fAvgVerticalOscillation ??= SqlSyntax.setField(
          _fAvgVerticalOscillation, 'avgVerticalOscillation', DbType.real);

  static TableField? _fMinVerticalOscillation;
  static TableField get minVerticalOscillation =>
      _fMinVerticalOscillation ??= SqlSyntax.setField(
          _fMinVerticalOscillation, 'minVerticalOscillation', DbType.real);

  static TableField? _fMaxVerticalOscillation;
  static TableField get maxVerticalOscillation =>
      _fMaxVerticalOscillation ??= SqlSyntax.setField(
          _fMaxVerticalOscillation, 'maxVerticalOscillation', DbType.real);

  static TableField? _fSdevVerticalOscillation;
  static TableField get sdevVerticalOscillation =>
      _fSdevVerticalOscillation ??= SqlSyntax.setField(
          _fSdevVerticalOscillation, 'sdevVerticalOscillation', DbType.real);

  static TableField? _fAvgFormPower;
  static TableField get avgFormPower => _fAvgFormPower ??=
      SqlSyntax.setField(_fAvgFormPower, 'avgFormPower', DbType.real);

  static TableField? _fMaxFormPower;
  static TableField get maxFormPower => _fMaxFormPower ??=
      SqlSyntax.setField(_fMaxFormPower, 'maxFormPower', DbType.integer);

  static TableField? _fMinFormPower;
  static TableField get minFormPower => _fMinFormPower ??=
      SqlSyntax.setField(_fMinFormPower, 'minFormPower', DbType.integer);

  static TableField? _fSdevFormPower;
  static TableField get sdevFormPower => _fSdevFormPower ??=
      SqlSyntax.setField(_fSdevFormPower, 'sdevFormPower', DbType.real);

  static TableField? _fAvgLegSpringStiffness;
  static TableField get avgLegSpringStiffness =>
      _fAvgLegSpringStiffness ??= SqlSyntax.setField(
          _fAvgLegSpringStiffness, 'avgLegSpringStiffness', DbType.real);

  static TableField? _fMaxLegSpringStiffness;
  static TableField get maxLegSpringStiffness =>
      _fMaxLegSpringStiffness ??= SqlSyntax.setField(
          _fMaxLegSpringStiffness, 'maxLegSpringStiffness', DbType.real);

  static TableField? _fMinLegSpringStiffness;
  static TableField get minLegSpringStiffness =>
      _fMinLegSpringStiffness ??= SqlSyntax.setField(
          _fMinLegSpringStiffness, 'minLegSpringStiffness', DbType.real);

  static TableField? _fSdevLegSpringStiffness;
  static TableField get sdevLegSpringStiffness =>
      _fSdevLegSpringStiffness ??= SqlSyntax.setField(
          _fSdevLegSpringStiffness, 'sdevLegSpringStiffness', DbType.real);

  static TableField? _fTotalAscent;
  static TableField get totalAscent => _fTotalAscent ??=
      SqlSyntax.setField(_fTotalAscent, 'totalAscent', DbType.integer);

  static TableField? _fTotalDescent;
  static TableField get totalDescent => _fTotalDescent ??=
      SqlSyntax.setField(_fTotalDescent, 'totalDescent', DbType.integer);

  static TableField? _fCp;
  static TableField get cp =>
      _fCp ??= SqlSyntax.setField(_fCp, 'cp', DbType.real);

  static TableField? _fFtp;
  static TableField get ftp =>
      _fFtp ??= SqlSyntax.setField(_fFtp, 'ftp', DbType.real);

  static TableField? _fMovingTime;
  static TableField get movingTime => _fMovingTime ??=
      SqlSyntax.setField(_fMovingTime, 'movingTime', DbType.integer);

  static TableField? _fFirstRecordId;
  static TableField get firstRecordId => _fFirstRecordId ??=
      SqlSyntax.setField(_fFirstRecordId, 'firstRecordId', DbType.integer);

  static TableField? _fLastRecordId;
  static TableField get lastRecordId => _fLastRecordId ??=
      SqlSyntax.setField(_fLastRecordId, 'lastRecordId', DbType.integer);

  static TableField? _fAthletesId;
  static TableField get athletesId => _fAthletesId ??=
      SqlSyntax.setField(_fAthletesId, 'athletesId', DbType.integer);

  static TableField? _fActivitiesId;
  static TableField get activitiesId => _fActivitiesId ??=
      SqlSyntax.setField(_fActivitiesId, 'activitiesId', DbType.integer);
}
|
|
// endregion DbIntervalFields
|
|
|
|
//region DbIntervalManager
|
|
/// Low-level data-access provider for the 'intervals' table.
class DbIntervalManager extends SqfEntityProvider {
  DbIntervalManager()
      : super(DbEncrateia(),
            tableName: _tableName,
            primaryKeyList: _primaryKeyList,
            whereStr: _whereStr);

  // Table metadata used by the SqfEntityProvider base class.
  static const String _tableName = 'intervals';
  static const List<String> _primaryKeyList = ['id'];
  static const String _whereStr = 'id=?';
}
|
|
|
|
//endregion DbIntervalManager
|
|
// region DbWeight
|
|
/// Table model for one row of the `weights` table: a body-weight measurement
/// ([value]) of the athlete referenced by [athletesId], taken on [date].
class DbWeight extends TableBase {
  DbWeight({this.id, this.date, this.value, this.athletesId}) {
    _setDefaultValues();
    softDeleteActivated = false;
  }

  DbWeight.withFields(this.date, this.value, this.athletesId) {
    _setDefaultValues();
  }

  DbWeight.withId(this.id, this.date, this.value, this.athletesId) {
    _setDefaultValues();
  }

  // fromMap v2.0
  /// Builds a [DbWeight] from a raw database row or decoded JSON map.
  ///
  /// `date` is accepted either as epoch milliseconds or as a date string
  /// parseable by [DateTime.tryParse]. When [setDefaultValues] is true,
  /// declared column defaults are applied first (see [_setDefaultValues]).
  DbWeight.fromMap(Map<String, dynamic> o, {bool setDefaultValues = true}) {
    if (setDefaultValues) {
      _setDefaultValues();
    }
    id = int.tryParse(o['id'].toString());
    if (o['date'] != null) {
      date = int.tryParse(o['date'].toString()) != null
          ? DateTime.fromMillisecondsSinceEpoch(
              int.tryParse(o['date'].toString())!)
          : DateTime.tryParse(o['date'].toString());
    }
    if (o['value'] != null) {
      value = double.tryParse(o['value'].toString());
    }
    athletesId = int.tryParse(o['athletesId'].toString());

    // RELATIONSHIPS FromMAP
    plDbAthlete = o['dbAthlete'] != null
        ? DbAthlete.fromMap(o['dbAthlete'] as Map<String, dynamic>)
        : null;
    // END RELATIONSHIPS FromMAP
  }
  // FIELDS (DbWeight)
  int? id;
  DateTime? date;
  double? value;
  int? athletesId;

  // end FIELDS (DbWeight)

  // RELATIONSHIPS (DbWeight)
  /// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbAthlete', 'plField2'..]) or so on..
  DbAthlete? plDbAthlete;

  /// get DbAthlete By AthletesId
  Future<DbAthlete?> getDbAthlete(
      {bool loadParents = false, List<String>? loadedFields}) async {
    final _obj = await DbAthlete().getById(athletesId,
        loadParents: loadParents, loadedFields: loadedFields);
    return _obj;
  }
  // END RELATIONSHIPS (DbWeight)

  static const bool _softDeleteActivated = false;
  DbWeightManager? __mnDbWeight;

  // Lazily-created manager doing the actual SQL work for this table.
  DbWeightManager get _mnDbWeight {
    return __mnDbWeight = __mnDbWeight ?? DbWeightManager();
  }

  // METHODS
  /// Serializes this row to a map.
  ///
  /// [forJson] formats [date] as 'y-m-d'; [forQuery] converts it to epoch
  /// milliseconds of midnight; [forView] swaps [athletesId] for the parent
  /// athlete's `state` when the parent is preloaded.
  @override
  Map<String, dynamic> toMap(
      {bool forQuery = false, bool forJson = false, bool forView = false}) {
    final map = <String, dynamic>{};
    map['id'] = id;
    if (date != null) {
      // BUGFIX: was '$date!.year-...', which interpolated the whole DateTime
      // and emitted the literal text '!.year' etc. Member access inside an
      // interpolation requires '${...}'.
      map['date'] = forJson
          ? '${date!.year}-${date!.month}-${date!.day}'
          : forQuery
              ? DateTime(date!.year, date!.month, date!.day)
                  .millisecondsSinceEpoch
              : date;
    } else if (date != null || !forView) {
      map['date'] = null;
    }
    if (value != null || !forView) {
      map['value'] = value;
    }
    if (athletesId != null) {
      map['athletesId'] = forView
          ? plDbAthlete == null
              ? athletesId
              : plDbAthlete!.state
          : athletesId;
    } else if (athletesId != null || !forView) {
      map['athletesId'] = null;
    }

    return map;
  }

  /// Same as [toMap]; kept async to satisfy the [TableBase] contract even
  /// though DbWeight has no child collections to await.
  @override
  Future<Map<String, dynamic>> toMapWithChildren(
      [bool forQuery = false,
      bool forJson = false,
      bool forView = false]) async {
    final map = <String, dynamic>{};
    map['id'] = id;
    if (date != null) {
      // BUGFIX: see toMap() — proper '${...}' component interpolation.
      map['date'] = forJson
          ? '${date!.year}-${date!.month}-${date!.day}'
          : forQuery
              ? DateTime(date!.year, date!.month, date!.day)
                  .millisecondsSinceEpoch
              : date;
    } else if (date != null || !forView) {
      map['date'] = null;
    }
    if (value != null || !forView) {
      map['value'] = value;
    }
    if (athletesId != null) {
      map['athletesId'] = forView
          ? plDbAthlete == null
              ? athletesId
              : plDbAthlete!.state
          : athletesId;
    } else if (athletesId != null || !forView) {
      map['athletesId'] = null;
    }

    return map;
  }

  /// This method returns Json String [DbWeight]
  @override
  String toJson() {
    return json.encode(toMap(forJson: true));
  }

  /// This method returns Json String [DbWeight]
  @override
  Future<String> toJsonWithChilds() async {
    return json.encode(await toMapWithChildren(false, true));
  }

  /// Positional SQL arguments for INSERT (without the primary key).
  @override
  List<dynamic> toArgs() {
    return [
      date != null ? date!.millisecondsSinceEpoch : null,
      value,
      athletesId
    ];
  }

  /// Positional SQL arguments including the primary key (for upsert).
  @override
  List<dynamic> toArgsWithIds() {
    return [
      id,
      date != null ? date!.millisecondsSinceEpoch : null,
      value,
      athletesId
    ];
  }

  /// Fetches [uri] and parses the response body as a JSON list of weights.
  /// Returns null (and logs) on any network/parse failure.
  static Future<List<DbWeight>?> fromWebUrl(Uri uri,
      {Map<String, String>? headers}) async {
    try {
      final response = await http.get(uri, headers: headers);
      return await fromJson(response.body);
    } catch (e) {
      debugPrint(
          'SQFENTITY ERROR DbWeight.fromWebUrl: ErrorMessage: ${e.toString()}');
      return null;
    }
  }

  /// POSTs this row as JSON to [uri].
  Future<http.Response> postUrl(Uri uri, {Map<String, String>? headers}) {
    return http.post(uri, headers: headers, body: toJson());
  }

  /// Decodes a JSON array into a list of [DbWeight]; logs and returns the
  /// partial/empty list on mapping errors.
  static Future<List<DbWeight>> fromJson(String jsonBody) async {
    final Iterable list = await json.decode(jsonBody) as Iterable;
    var objList = <DbWeight>[];
    try {
      objList = list
          .map((dbweight) => DbWeight.fromMap(dbweight as Map<String, dynamic>))
          .toList();
    } catch (e) {
      debugPrint(
          'SQFENTITY ERROR DbWeight.fromJson: ErrorMessage: ${e.toString()}');
    }
    return objList;
  }

  /// Maps raw query rows to [DbWeight] objects, optionally preloading the
  /// parent [DbAthlete] (see [getById] doc for the preload parameters).
  static Future<List<DbWeight>> fromMapList(List<dynamic> data,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields,
      bool setDefaultValues = true}) async {
    final List<DbWeight> objList = <DbWeight>[];
    loadedFields = loadedFields ?? [];
    for (final map in data) {
      final obj = DbWeight.fromMap(map as Map<String, dynamic>,
          setDefaultValues: setDefaultValues);

      // RELATIONSHIPS PRELOAD
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbAthlete'))) {
          obj.plDbAthlete = obj.plDbAthlete ??
              await obj.getDbAthlete(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD

      objList.add(obj);
    }
    return objList;
  }

  /// returns DbWeight by ID if exist, otherwise returns null
  /// Primary Keys: int? id
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: getById(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: getById(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns>returns [DbWeight] if exist, otherwise returns null
  Future<DbWeight?> getById(int? id,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    if (id == null) {
      return null;
    }
    DbWeight? obj;
    final data = await _mnDbWeight.getById([id]);
    if (data.isNotEmpty) {
      obj = DbWeight.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbAthlete'))) {
          obj.plDbAthlete = obj.plDbAthlete ??
              await obj.getDbAthlete(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }

  /// Saves the (DbWeight) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
  /// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
  /// <returns>Returns id
  @override
  Future<int?> save({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      id = await _mnDbWeight.insert(this, ignoreBatch);
    } else {
      await _mnDbWeight.update(this);
    }

    return id;
  }

  /// Saves the (DbWeight) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
  /// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
  /// <returns>Returns id
  @override
  Future<int?> saveOrThrow({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      id = await _mnDbWeight.insertOrThrow(this, ignoreBatch);

      isInsert = true;
    } else {
      // id= await _upsert(); // removed in sqfentity_gen 1.3.0+6
      await _mnDbWeight.updateOrThrow(this);
    }

    return id;
  }

  /// saveAs DbWeight. Returns a new Primary Key value of DbWeight

  /// <returns>Returns a new Primary Key value of DbWeight
  @override
  Future<int?> saveAs({bool ignoreBatch = true}) async {
    id = null;

    return save(ignoreBatch: ignoreBatch);
  }

  /// saveAll method saves the sent List<DbWeight> as a bulk in one transaction
  /// Returns a <List<BoolResult>>
  static Future<List<dynamic>> saveAll(List<DbWeight> dbweights,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    List<dynamic>? result = [];
    // If there is no open transaction, start one
    final isStartedBatch = await DbEncrateia().batchStart();
    for (final obj in dbweights) {
      await obj.save(ignoreBatch: false);
    }
    if (!isStartedBatch) {
      result = await DbEncrateia().batchCommit(
          exclusive: exclusive,
          noResult: noResult,
          continueOnError: continueOnError);
      // Copy the ids the commit produced back onto the freshly-inserted rows.
      for (int i = 0; i < dbweights.length; i++) {
        if (dbweights[i].id == null) {
          dbweights[i].id = result![i] as int;
        }
      }
    }
    return result!;
  }

  /// Updates if the record exists, otherwise adds a new row
  /// <returns>Returns id
  @override
  Future<int?> upsert({bool ignoreBatch = true}) async {
    try {
      final result = await _mnDbWeight.rawInsert(
          'INSERT OR REPLACE INTO weights (id, date, value, athletesId) VALUES (?,?,?,?)',
          [
            id,
            date != null ? date!.millisecondsSinceEpoch : null,
            value,
            athletesId
          ],
          ignoreBatch);
      if (result! > 0) {
        saveResult = BoolResult(
            success: true,
            successMessage: 'DbWeight id=$id updated successfully');
      } else {
        saveResult = BoolResult(
            success: false, errorMessage: 'DbWeight id=$id did not update');
      }
      return id;
    } catch (e) {
      saveResult = BoolResult(
          success: false,
          errorMessage: 'DbWeight Save failed. Error: ${e.toString()}');
      return null;
    }
  }

  /// inserts or replaces the sent List<<DbWeight>> as a bulk in one transaction.
  /// upsertAll() method is faster then saveAll() method. upsertAll() should be used when you are sure that the primary key is greater than zero
  /// Returns a BoolCommitResult
  @override
  Future<BoolCommitResult> upsertAll(List<DbWeight> dbweights,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    final results = await _mnDbWeight.rawInsertAll(
        'INSERT OR REPLACE INTO weights (id, date, value, athletesId) VALUES (?,?,?,?)',
        dbweights,
        exclusive: exclusive,
        noResult: noResult,
        continueOnError: continueOnError);
    return results;
  }

  /// Deletes DbWeight

  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    debugPrint('SQFENTITIY: delete DbWeight invoked (id=$id)');
    if (!_softDeleteActivated || hardDelete) {
      return _mnDbWeight
          .delete(QueryParams(whereString: 'id=?', whereArguments: [id]));
    } else {
      return _mnDbWeight.updateBatch(
          QueryParams(whereString: 'id=?', whereArguments: [id]),
          {'isDeleted': 1});
    }
  }

  @override
  Future<BoolResult> recover([bool recoverChilds = true]) {
    // not implemented because:
    final msg =
        'set useSoftDeleting:true in the table definition of [DbWeight] to use this feature';
    throw UnimplementedError(msg);
  }

  /// Starts a fluent SELECT query over the `weights` table.
  @override
  DbWeightFilterBuilder select(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    return DbWeightFilterBuilder(this, getIsDeleted)
      ..qparams.selectColumns = columnsToSelect;
  }

  /// Starts a fluent SELECT DISTINCT query over the `weights` table.
  @override
  DbWeightFilterBuilder distinct(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    return DbWeightFilterBuilder(this, getIsDeleted)
      ..qparams.selectColumns = columnsToSelect
      ..qparams.distinct = true;
  }

  // Applies declared column defaults to null fields.
  void _setDefaultValues() {
    athletesId = athletesId ?? 0;
  }

  /// Clears the primary key after a failed batch insert so the row can be
  /// re-saved as new.
  @override
  void rollbackPk() {
    if (isInsert == true) {
      id = null;
    }
  }

  // END METHODS
  // BEGIN CUSTOM CODE
  /*
      you can define customCode property of your SqfEntityTable constant. For example:
      const tablePerson = SqfEntityTable(
      tableName: 'person',
      primaryKeyName: 'id',
      primaryKeyType: PrimaryKeyType.integer_auto_incremental,
      fields: [
        SqfEntityField('firstName', DbType.text),
        SqfEntityField('lastName', DbType.text),
      ],
      customCode: '''
       String fullName()
       {
         return '$firstName $lastName';
       }
      ''');
     */
  // END CUSTOM CODE
}
|
|
// endregion DbWeight
|
|
|
|
// region DbWeightField
|
|
/// Typed filter field for [DbWeightFilterBuilder].
///
/// Each override simply forwards to [FilterBase] and narrows the returned
/// builder type so that fluent chains stay typed as DbWeight queries.
class DbWeightField extends FilterBase {
  DbWeightField(DbWeightFilterBuilder dbweightFB) : super(dbweightFB);

  @override
  DbWeightFilterBuilder equals(dynamic pValue) =>
      super.equals(pValue) as DbWeightFilterBuilder;

  @override
  DbWeightFilterBuilder equalsOrNull(dynamic pValue) =>
      super.equalsOrNull(pValue) as DbWeightFilterBuilder;

  @override
  DbWeightFilterBuilder isNull() => super.isNull() as DbWeightFilterBuilder;

  @override
  DbWeightFilterBuilder contains(dynamic pValue) =>
      super.contains(pValue) as DbWeightFilterBuilder;

  @override
  DbWeightFilterBuilder startsWith(dynamic pValue) =>
      super.startsWith(pValue) as DbWeightFilterBuilder;

  @override
  DbWeightFilterBuilder endsWith(dynamic pValue) =>
      super.endsWith(pValue) as DbWeightFilterBuilder;

  @override
  DbWeightFilterBuilder between(dynamic pFirst, dynamic pLast) =>
      super.between(pFirst, pLast) as DbWeightFilterBuilder;

  @override
  DbWeightFilterBuilder greaterThan(dynamic pValue) =>
      super.greaterThan(pValue) as DbWeightFilterBuilder;

  @override
  DbWeightFilterBuilder lessThan(dynamic pValue) =>
      super.lessThan(pValue) as DbWeightFilterBuilder;

  @override
  DbWeightFilterBuilder greaterThanOrEquals(dynamic pValue) =>
      super.greaterThanOrEquals(pValue) as DbWeightFilterBuilder;

  @override
  DbWeightFilterBuilder lessThanOrEquals(dynamic pValue) =>
      super.lessThanOrEquals(pValue) as DbWeightFilterBuilder;

  @override
  DbWeightFilterBuilder inValues(dynamic pValue) =>
      super.inValues(pValue) as DbWeightFilterBuilder;

  /// Negates the next comparison (sql NOT).
  @override
  DbWeightField get not => super.not as DbWeightField;
}
|
|
// endregion DbWeightField
|
|
|
|
// region DbWeightFilterBuilder
|
|
/// Fluent query builder for the `weights` table.
///
/// Accumulates filter/order/paging state in [ConjunctionBase] (each chained
/// getter/method mutates shared query state, so call order matters) and
/// finally materializes results via toList/toSingle/toCount/delete/update.
class DbWeightFilterBuilder extends ConjunctionBase {
  DbWeightFilterBuilder(DbWeight obj, bool? getIsDeleted)
      : super(obj, getIsDeleted) {
    _mnDbWeight = obj._mnDbWeight;
    _softDeleteActivated = obj.softDeleteActivated;
  }

  bool _softDeleteActivated = false;
  DbWeightManager? _mnDbWeight;

  /// put the sql keyword 'AND'
  @override
  DbWeightFilterBuilder get and {
    super.and;
    return this;
  }

  /// put the sql keyword 'OR'
  @override
  DbWeightFilterBuilder get or {
    super.or;
    return this;
  }

  /// open parentheses
  @override
  DbWeightFilterBuilder get startBlock {
    super.startBlock;
    return this;
  }

  /// String whereCriteria, write raw query without 'where' keyword. Like this: 'field1 like 'test%' and field2 = 3'
  @override
  DbWeightFilterBuilder where(String? whereCriteria, {dynamic parameterValue}) {
    super.where(whereCriteria, parameterValue: parameterValue);
    return this;
  }

  /// page = page number,
  /// pagesize = row(s) per page
  @override
  DbWeightFilterBuilder page(int page, int pagesize) {
    super.page(page, pagesize);
    return this;
  }

  /// int count = LIMIT
  @override
  DbWeightFilterBuilder top(int count) {
    super.top(count);
    return this;
  }

  /// close parentheses
  @override
  DbWeightFilterBuilder get endBlock {
    super.endBlock;
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbWeightFilterBuilder orderBy(dynamic argFields) {
    super.orderBy(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbWeightFilterBuilder orderByDesc(dynamic argFields) {
    super.orderByDesc(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbWeightFilterBuilder groupBy(dynamic argFields) {
    super.groupBy(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbWeightFilterBuilder having(dynamic argFields) {
    super.having(argFields);
    return this;
  }

  // Creates a fresh typed filter field bound to this builder; the cached
  // `field` argument is intentionally unused by the generator.
  DbWeightField _setField(DbWeightField? field, String colName, DbType dbtype) {
    return DbWeightField(this)
      ..param = DbParameter(
          dbType: dbtype, columnName: colName, wStartBlock: openedBlock);
  }

  DbWeightField? _id;

  /// Filter on the `id` column (INTEGER primary key).
  DbWeightField get id {
    return _id = _setField(_id, 'id', DbType.integer);
  }

  DbWeightField? _date;

  /// Filter on the `date` column (DATE).
  DbWeightField get date {
    return _date = _setField(_date, 'date', DbType.date);
  }

  DbWeightField? _value;

  /// Filter on the `value` column (REAL).
  DbWeightField get value {
    return _value = _setField(_value, 'value', DbType.real);
  }

  DbWeightField? _athletesId;

  /// Filter on the `athletesId` foreign-key column (INTEGER).
  DbWeightField get athletesId {
    return _athletesId = _setField(_athletesId, 'athletesId', DbType.integer);
  }

  /// Deletes List<DbWeight> bulk by query
  ///
  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    buildParameters();
    var r = BoolResult(success: false);

    if (_softDeleteActivated && !hardDelete) {
      r = await _mnDbWeight!.updateBatch(qparams, {'isDeleted': 1});
    } else {
      r = await _mnDbWeight!.delete(qparams);
    }
    return r;
  }

  /// using:
  /// update({'fieldName': Value})
  /// fieldName must be String. Value is dynamic, it can be any of the (int, bool, String.. )
  @override
  Future<BoolResult> update(Map<String, dynamic> values) {
    buildParameters();
    // SQLite UPDATE has no LIMIT/OFFSET, so paging is emulated through an
    // `id IN (SELECT ...)` subquery when either was requested.
    if (qparams.limit! > 0 || qparams.offset! > 0) {
      qparams.whereString =
          'id IN (SELECT id from weights ${qparams.whereString!.isNotEmpty ? 'WHERE ${qparams.whereString}' : ''}${qparams.limit! > 0 ? ' LIMIT ${qparams.limit}' : ''}${qparams.offset! > 0 ? ' OFFSET ${qparams.offset}' : ''})';
    }
    return _mnDbWeight!.updateBatch(qparams, values);
  }

  /// This method always returns [DbWeight] Obj if exist, otherwise returns null
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toSingle(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns> DbWeight?
  @override
  Future<DbWeight?> toSingle(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    buildParameters(pSize: 1);
    final objFuture = _mnDbWeight!.toList(qparams);
    final data = await objFuture;
    DbWeight? obj;
    if (data.isNotEmpty) {
      obj = DbWeight.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbAthlete'))) {
          obj.plDbAthlete = obj.plDbAthlete ??
              await obj.getDbAthlete(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }

  /// This method always returns [DbWeight]
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toSingle(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns> DbWeight?
  @override
  Future<DbWeight> toSingleOrDefault(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    return await toSingle(
            preload: preload,
            preloadFields: preloadFields,
            loadParents: loadParents,
            loadedFields: loadedFields) ??
        DbWeight();
  }

  /// This method returns int. [DbWeight]
  /// <returns>int
  @override
  Future<int> toCount([VoidCallback Function(int c)? dbweightCount]) async {
    buildParameters();
    qparams.selectColumns = ['COUNT(1) AS CNT'];
    final dbweightsFuture = await _mnDbWeight!.toList(qparams);
    final int count = dbweightsFuture[0]['CNT'] as int;
    if (dbweightCount != null) {
      dbweightCount(count);
    }
    return count;
  }

  /// This method returns List<DbWeight> [DbWeight]
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toList(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toList(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns>List<DbWeight>
  @override
  Future<List<DbWeight>> toList(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    final data = await toMapList();
    final List<DbWeight> dbweightsData = await DbWeight.fromMapList(data,
        preload: preload,
        preloadFields: preloadFields,
        loadParents: loadParents,
        loadedFields: loadedFields,
        setDefaultValues: qparams.selectColumns == null);
    return dbweightsData;
  }

  /// This method returns Json String [DbWeight]
  @override
  Future<String> toJson() async {
    final list = <dynamic>[];
    final data = await toList();
    for (var o in data) {
      list.add(o.toMap(forJson: true));
    }
    return json.encode(list);
  }

  /// This method returns Json String. [DbWeight]
  @override
  Future<String> toJsonWithChilds() async {
    final list = <dynamic>[];
    final data = await toList();
    for (var o in data) {
      list.add(await o.toMapWithChildren(false, true));
    }
    return json.encode(list);
  }

  /// This method returns List<dynamic>. [DbWeight]
  /// <returns>List<dynamic>
  @override
  Future<List<dynamic>> toMapList() async {
    buildParameters();
    return await _mnDbWeight!.toList(qparams);
  }

  /// This method returns Primary Key List SQL and Parameters retVal = Map<String,dynamic>. [DbWeight]
  /// retVal['sql'] = SQL statement string, retVal['args'] = whereArguments List<dynamic>;
  /// <returns>List<String>
  @override
  Map<String, dynamic> toListPrimaryKeySQL([bool buildParams = true]) {
    final Map<String, dynamic> _retVal = <String, dynamic>{};
    if (buildParams) {
      buildParameters();
    }
    _retVal['sql'] = 'SELECT `id` FROM weights WHERE ${qparams.whereString}';
    _retVal['args'] = qparams.whereArguments;
    return _retVal;
  }

  /// This method returns Primary Key List<int>.
  /// <returns>List<int>
  @override
  Future<List<int>> toListPrimaryKey([bool buildParams = true]) async {
    if (buildParams) {
      buildParameters();
    }
    final List<int> idData = <int>[];
    qparams.selectColumns = ['id'];
    final idFuture = await _mnDbWeight!.toList(qparams);

    final int count = idFuture.length;
    for (int i = 0; i < count; i++) {
      idData.add(idFuture[i]['id'] as int);
    }
    return idData;
  }

  /// Returns List<dynamic> for selected columns. Use this method for 'groupBy' with min,max,avg.. [DbWeight]
  /// Sample usage: (see EXAMPLE 4.2 at https://github.com/hhtokpinar/sqfEntity#group-by)
  @override
  Future<List<dynamic>> toListObject() async {
    buildParameters();

    final objectFuture = _mnDbWeight!.toList(qparams);

    final List<dynamic> objectsData = <dynamic>[];
    final data = await objectFuture;
    final int count = data.length;
    for (int i = 0; i < count; i++) {
      objectsData.add(data[i]);
    }
    return objectsData;
  }

  /// Returns List<String> for selected first column
  /// Sample usage: await DbWeight.select(columnsToSelect: ['columnName']).toListString()
  @override
  Future<List<String>> toListString(
      [VoidCallback Function(List<String> o)? listString]) async {
    buildParameters();

    final objectFuture = _mnDbWeight!.toList(qparams);

    final List<String> objectsData = <String>[];
    final data = await objectFuture;
    final int count = data.length;
    for (int i = 0; i < count; i++) {
      objectsData.add(data[i][qparams.selectColumns![0]].toString());
    }
    if (listString != null) {
      listString(objectsData);
    }
    return objectsData;
  }
}
|
|
// endregion DbWeightFilterBuilder
|
|
|
|
// region DbWeightFields
|
|
/// Static, lazily-cached [TableField] descriptors for the `weights` columns,
/// for use in raw field references outside a filter builder.
class DbWeightFields {
  static TableField? _fId;

  /// Descriptor for the `id` column (INTEGER).
  static TableField get id {
    _fId ??= SqlSyntax.setField(_fId, 'id', DbType.integer);
    return _fId!;
  }

  static TableField? _fDate;

  /// Descriptor for the `date` column (DATE).
  static TableField get date {
    _fDate ??= SqlSyntax.setField(_fDate, 'date', DbType.date);
    return _fDate!;
  }

  static TableField? _fValue;

  /// Descriptor for the `value` column (REAL).
  static TableField get value {
    _fValue ??= SqlSyntax.setField(_fValue, 'value', DbType.real);
    return _fValue!;
  }

  static TableField? _fAthletesId;

  /// Descriptor for the `athletesId` column (INTEGER).
  static TableField get athletesId {
    _fAthletesId ??=
        SqlSyntax.setField(_fAthletesId, 'athletesId', DbType.integer);
    return _fAthletesId!;
  }
}
|
|
// endregion DbWeightFields
|
|
|
|
//region DbWeightManager
|
|
/// Low-level data-access manager for the `weights` table.
///
/// Routes all reads/writes through [SqfEntityProvider] using the table's
/// single-column integer primary key (`id`).
class DbWeightManager extends SqfEntityProvider {
  static const String _tableName = 'weights';
  static const List<String> _primaryKeyList = <String>['id'];
  static const String _whereStr = 'id=?';

  DbWeightManager()
      : super(DbEncrateia(),
            tableName: _tableName,
            primaryKeyList: _primaryKeyList,
            whereStr: _whereStr);
}
|
|
|
|
//endregion DbWeightManager
|
|
// region DbHeartRateZoneSchema
|
|
class DbHeartRateZoneSchema extends TableBase {
|
|
DbHeartRateZoneSchema(
|
|
{this.id, this.date, this.name, this.base, this.athletesId}) {
|
|
_setDefaultValues();
|
|
softDeleteActivated = false;
|
|
}
|
|
DbHeartRateZoneSchema.withFields(
|
|
this.date, this.name, this.base, this.athletesId) {
|
|
_setDefaultValues();
|
|
}
|
|
DbHeartRateZoneSchema.withId(
|
|
this.id, this.date, this.name, this.base, this.athletesId) {
|
|
_setDefaultValues();
|
|
}
|
|
// fromMap v2.0
|
|
DbHeartRateZoneSchema.fromMap(Map<String, dynamic> o,
|
|
{bool setDefaultValues = true}) {
|
|
if (setDefaultValues) {
|
|
_setDefaultValues();
|
|
}
|
|
id = int.tryParse(o['id'].toString());
|
|
if (o['date'] != null) {
|
|
date = int.tryParse(o['date'].toString()) != null
|
|
? DateTime.fromMillisecondsSinceEpoch(
|
|
int.tryParse(o['date'].toString())!)
|
|
: DateTime.tryParse(o['date'].toString());
|
|
}
|
|
if (o['name'] != null) {
|
|
name = o['name'].toString();
|
|
}
|
|
if (o['base'] != null) {
|
|
base = int.tryParse(o['base'].toString());
|
|
}
|
|
athletesId = int.tryParse(o['athletesId'].toString());
|
|
|
|
// RELATIONSHIPS FromMAP
|
|
plDbAthlete = o['dbAthlete'] != null
|
|
? DbAthlete.fromMap(o['dbAthlete'] as Map<String, dynamic>)
|
|
: null;
|
|
// END RELATIONSHIPS FromMAP
|
|
}
|
|
// FIELDS (DbHeartRateZoneSchema)
|
|
int? id;
|
|
DateTime? date;
|
|
String? name;
|
|
int? base;
|
|
int? athletesId;
|
|
|
|
// end FIELDS (DbHeartRateZoneSchema)
|
|
|
|
// RELATIONSHIPS (DbHeartRateZoneSchema)
|
|
/// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbAthlete', 'plField2'..]) or so on..
|
|
DbAthlete? plDbAthlete;
|
|
|
|
/// get DbAthlete By AthletesId
|
|
Future<DbAthlete?> getDbAthlete(
|
|
{bool loadParents = false, List<String>? loadedFields}) async {
|
|
final _obj = await DbAthlete().getById(athletesId,
|
|
loadParents: loadParents, loadedFields: loadedFields);
|
|
return _obj;
|
|
}
|
|
// END RELATIONSHIPS (DbHeartRateZoneSchema)
|
|
|
|
// COLLECTIONS & VIRTUALS (DbHeartRateZoneSchema)
|
|
/// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbHeartRateZones', 'plField2'..]) or so on..
|
|
List<DbHeartRateZone>? plDbHeartRateZones;
|
|
|
|
/// get DbHeartRateZone(s) filtered by id=heartRateZoneSchemataId
|
|
DbHeartRateZoneFilterBuilder? getDbHeartRateZones(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
if (id == null) {
|
|
return null;
|
|
}
|
|
return DbHeartRateZone()
|
|
.select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
|
|
.heartRateZoneSchemataId
|
|
.equals(id)
|
|
.and;
|
|
}
|
|
|
|
// END COLLECTIONS & VIRTUALS (DbHeartRateZoneSchema)
|
|
|
|
static const bool _softDeleteActivated = false;
|
|
DbHeartRateZoneSchemaManager? __mnDbHeartRateZoneSchema;
|
|
|
|
DbHeartRateZoneSchemaManager get _mnDbHeartRateZoneSchema {
|
|
return __mnDbHeartRateZoneSchema =
|
|
__mnDbHeartRateZoneSchema ?? DbHeartRateZoneSchemaManager();
|
|
}
|
|
|
|
// METHODS
|
|
@override
|
|
Map<String, dynamic> toMap(
|
|
{bool forQuery = false, bool forJson = false, bool forView = false}) {
|
|
final map = <String, dynamic>{};
|
|
map['id'] = id;
|
|
if (date != null) {
|
|
map['date'] = forJson
|
|
? '$date!.year-$date!.month-$date!.day'
|
|
: forQuery
|
|
? DateTime(date!.year, date!.month, date!.day)
|
|
.millisecondsSinceEpoch
|
|
: date;
|
|
} else if (date != null || !forView) {
|
|
map['date'] = null;
|
|
}
|
|
if (name != null || !forView) {
|
|
map['name'] = name;
|
|
}
|
|
if (base != null || !forView) {
|
|
map['base'] = base;
|
|
}
|
|
if (athletesId != null) {
|
|
map['athletesId'] = forView
|
|
? plDbAthlete == null
|
|
? athletesId
|
|
: plDbAthlete!.state
|
|
: athletesId;
|
|
} else if (athletesId != null || !forView) {
|
|
map['athletesId'] = null;
|
|
}
|
|
|
|
return map;
|
|
}
|
|
|
|
/// Converts this [DbHeartRateZoneSchema] into a Map including its child
/// DbHeartRateZone collection (skipped when [forQuery] is true).
/// See [toMap] for the meaning of the flags.
@override
Future<Map<String, dynamic>> toMapWithChildren(
    [bool forQuery = false,
    bool forJson = false,
    bool forView = false]) async {
  final map = <String, dynamic>{};
  map['id'] = id;
  if (date != null) {
    map['date'] = forJson
        // BUGFIX: was '$date!.year-$date!.month-$date!.day', which
        // interpolated the whole DateTime plus literal '!.year' text;
        // build the y-m-d string from the individual components.
        ? '${date!.year}-${date!.month}-${date!.day}'
        : forQuery
            ? DateTime(date!.year, date!.month, date!.day)
                .millisecondsSinceEpoch
            : date;
  } else if (!forView) {
    // date is null here; the condition reduces to !forView.
    map['date'] = null;
  }
  if (name != null || !forView) {
    map['name'] = name;
  }
  if (base != null || !forView) {
    map['base'] = base;
  }
  if (athletesId != null) {
    map['athletesId'] = forView
        ? plDbAthlete == null
            ? athletesId
            : plDbAthlete!.state
        : athletesId;
  } else if (!forView) {
    map['athletesId'] = null;
  }

  // COLLECTIONS (DbHeartRateZoneSchema)
  if (!forQuery) {
    // NOTE(review): getDbHeartRateZones() returns null when id is null;
    // the '!' assumes this object has been saved — TODO confirm callers.
    map['DbHeartRateZones'] = await getDbHeartRateZones()!.toMapList();
  }
  // END COLLECTIONS (DbHeartRateZoneSchema)

  return map;
}
|
|
|
|
/// This method returns Json String [DbHeartRateZoneSchema]
@override
String toJson() => json.encode(toMap(forJson: true));
|
|
|
|
/// This method returns Json String [DbHeartRateZoneSchema]
@override
Future<String> toJsonWithChilds() async {
  final mapWithChildren = await toMapWithChildren(false, true);
  return json.encode(mapWithChildren);
}
|
|
|
|
/// Column values (excluding the primary key) in table column order,
/// for use as arguments of a parameterized INSERT.
@override
List<dynamic> toArgs() {
  return <dynamic>[
    date?.millisecondsSinceEpoch,
    name,
    base,
    athletesId,
  ];
}
|
|
|
|
/// Column values including the primary key, in table column order,
/// for parameterized statements that set the id explicitly.
@override
List<dynamic> toArgsWithIds() {
  return <dynamic>[
    id,
    date?.millisecondsSinceEpoch,
    name,
    base,
    athletesId,
  ];
}
|
|
|
|
/// Fetches a JSON array from [uri] and deserializes it into a list of
/// [DbHeartRateZoneSchema]. Returns null when the request or the parsing
/// fails (the error is logged via debugPrint).
static Future<List<DbHeartRateZoneSchema>?> fromWebUrl(Uri uri,
    {Map<String, String>? headers}) async {
  try {
    final http.Response response = await http.get(uri, headers: headers);
    final body = response.body;
    return await fromJson(body);
  } catch (e) {
    debugPrint(
        'SQFENTITY ERROR DbHeartRateZoneSchema.fromWebUrl: ErrorMessage: ${e.toString()}');
    return null;
  }
}
|
|
|
|
/// POSTs this object's JSON representation (see [toJson]) to [uri].
Future<http.Response> postUrl(Uri uri, {Map<String, String>? headers}) {
  return http.post(uri, headers: headers, body: toJson());
}
|
|
|
|
/// Decodes [jsonBody] (expected to be a JSON array) into a list of
/// [DbHeartRateZoneSchema]. On a mapping error the partial/empty list is
/// returned and the error is logged via debugPrint.
static Future<List<DbHeartRateZoneSchema>> fromJson(String jsonBody) async {
  final Iterable list = await json.decode(jsonBody) as Iterable;
  var objList = <DbHeartRateZoneSchema>[];
  try {
    objList = [
      for (final dbheartratezoneschema in list)
        DbHeartRateZoneSchema.fromMap(
            dbheartratezoneschema as Map<String, dynamic>)
    ];
  } catch (e) {
    debugPrint(
        'SQFENTITY ERROR DbHeartRateZoneSchema.fromJson: ErrorMessage: ${e.toString()}');
  }
  return objList;
}
|
|
|
|
/// Maps a list of raw database rows to [DbHeartRateZoneSchema] objects,
/// optionally preloading the DbHeartRateZone children and/or the parent
/// DbAthlete for each object.
///
/// [preload]          -> load child collections for each mapped object.
/// [preloadFields]    -> restrict preloading to the named pl* fields.
/// [loadParents]      -> load the parent DbAthlete chain.
/// [setDefaultValues] -> apply column defaults for null fields (disabled by
///                       callers that selected only a subset of columns).
static Future<List<DbHeartRateZoneSchema>> fromMapList(List<dynamic> data,
    {bool preload = false,
    List<String>? preloadFields,
    bool loadParents = false,
    List<String>? loadedFields,
    bool setDefaultValues = true}) async {
  final List<DbHeartRateZoneSchema> objList = <DbHeartRateZoneSchema>[];
  loadedFields = loadedFields ?? [];
  for (final map in data) {
    final obj = DbHeartRateZoneSchema.fromMap(map as Map<String, dynamic>,
        setDefaultValues: setDefaultValues);
    // final List<String> _loadedFields = List<String>.from(loadedFields);

    // RELATIONSHIPS PRELOAD CHILD
    if (preload) {
      loadedFields = loadedFields ?? [];
      // Child zones are loaded only when requested (or when no field
      // filter was given); '??' keeps an already-populated list.
      if (/*!_loadedfields!.contains('heartRateZoneSchemata.plDbHeartRateZones') && */ (preloadFields ==
              null ||
          preloadFields.contains('plDbHeartRateZones'))) {
        /*_loadedfields!.add('heartRateZoneSchemata.plDbHeartRateZones'); */ obj
            .plDbHeartRateZones =
            obj.plDbHeartRateZones ??
                await obj.getDbHeartRateZones()!.toList(
                    preload: preload,
                    preloadFields: preloadFields,
                    loadParents: false /*, loadedFields:_loadedFields*/);
      }
    } // END RELATIONSHIPS PRELOAD CHILD

    // RELATIONSHIPS PRELOAD
    if (preload || loadParents) {
      loadedFields = loadedFields ?? [];
      if ((preloadFields == null ||
          loadParents ||
          preloadFields.contains('plDbAthlete'))) {
        obj.plDbAthlete = obj.plDbAthlete ??
            await obj.getDbAthlete(loadParents: loadParents);
      }
    } // END RELATIONSHIPS PRELOAD

    objList.add(obj);
  }
  return objList;
}
|
|
|
|
/// returns DbHeartRateZoneSchema by ID if exist, otherwise returns null
/// Primary Keys: int? id
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
/// ex: getById(preload:true) -> Loads all related objects
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
/// ex: getById(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
/// bool loadParents: if true, loads all parent objects until the object has no parent

/// <returns>returns [DbHeartRateZoneSchema] if exist, otherwise returns null
Future<DbHeartRateZoneSchema?> getById(int? id,
    {bool preload = false,
    List<String>? preloadFields,
    bool loadParents = false,
    List<String>? loadedFields}) async {
  if (id == null) {
    return null;
  }
  DbHeartRateZoneSchema? obj;
  final data = await _mnDbHeartRateZoneSchema.getById([id]);
  if (data.length != 0) {
    obj = DbHeartRateZoneSchema.fromMap(data[0] as Map<String, dynamic>);

    // RELATIONSHIPS PRELOAD CHILD
    if (preload) {
      loadedFields = loadedFields ?? [];
      // Load child zones only when requested (or when no field filter was
      // given); '??' keeps an already-populated list.
      if (/*!_loadedfields!.contains('heartRateZoneSchemata.plDbHeartRateZones') && */ (preloadFields ==
              null ||
          preloadFields.contains('plDbHeartRateZones'))) {
        /*_loadedfields!.add('heartRateZoneSchemata.plDbHeartRateZones'); */ obj
            .plDbHeartRateZones =
            obj.plDbHeartRateZones ??
                await obj.getDbHeartRateZones()!.toList(
                    preload: preload,
                    preloadFields: preloadFields,
                    loadParents: false /*, loadedFields:_loadedFields*/);
      }
    } // END RELATIONSHIPS PRELOAD CHILD

    // RELATIONSHIPS PRELOAD
    if (preload || loadParents) {
      loadedFields = loadedFields ?? [];
      if ((preloadFields == null ||
          loadParents ||
          preloadFields.contains('plDbAthlete'))) {
        obj.plDbAthlete = obj.plDbAthlete ??
            await obj.getDbAthlete(loadParents: loadParents);
      }
    } // END RELATIONSHIPS PRELOAD
  } else {
    obj = null;
  }
  return obj;
}
|
|
|
|
/// Saves the (DbHeartRateZoneSchema) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
/// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
/// <returns>Returns id
@override
Future<int?> save({bool ignoreBatch = true}) async {
  final bool isNewRecord = id == null || id == 0;
  if (isNewRecord) {
    id = await _mnDbHeartRateZoneSchema.insert(this, ignoreBatch);
  } else {
    await _mnDbHeartRateZoneSchema.update(this);
  }
  return id;
}
|
|
|
|
/// Saves the (DbHeartRateZoneSchema) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
/// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
/// <returns>Returns id
@override
Future<int?> saveOrThrow({bool ignoreBatch = true}) async {
  final bool isNewRecord = id == null || id == 0;
  if (isNewRecord) {
    id = await _mnDbHeartRateZoneSchema.insertOrThrow(this, ignoreBatch);
    // Remember that this save was an insert so rollbackPk() can undo the id.
    isInsert = true;
  } else {
    // id= await _upsert(); // removed in sqfentity_gen 1.3.0+6
    await _mnDbHeartRateZoneSchema.updateOrThrow(this);
  }
  return id;
}
|
|
|
|
/// saveAs DbHeartRateZoneSchema. Returns a new Primary Key value of DbHeartRateZoneSchema

/// <returns>Returns a new Primary Key value of DbHeartRateZoneSchema
@override
Future<int?> saveAs({bool ignoreBatch = true}) async {
  // Clearing the id forces save() down the insert path, producing a copy.
  id = null;

  return save(ignoreBatch: ignoreBatch);
}
|
|
|
|
/// saveAll method saves the sent List<DbHeartRateZoneSchema> as a bulk in one transaction
/// Returns a <List<BoolResult>>
static Future<List<dynamic>> saveAll(
    List<DbHeartRateZoneSchema> dbheartratezoneschemas,
    {bool? exclusive,
    bool? noResult,
    bool? continueOnError}) async {
  List<dynamic>? result = [];
  // If there is no open transaction, start one
  final isStartedBatch = await DbEncrateia().batchStart();
  for (final obj in dbheartratezoneschemas) {
    await obj.save(ignoreBatch: false);
  }
  // Only commit here if the batch was opened by this call; otherwise the
  // outer transaction owner commits later.
  if (!isStartedBatch) {
    result = await DbEncrateia().batchCommit(
        exclusive: exclusive,
        noResult: noResult,
        continueOnError: continueOnError);
    // NOTE(review): assumes batchCommit returns one entry per saved object,
    // positionally aligned with the input list — verify against
    // SqfEntityProvider before relying on the assigned ids.
    for (int i = 0; i < dbheartratezoneschemas.length; i++) {
      if (dbheartratezoneschemas[i].id == null) {
        dbheartratezoneschemas[i].id = result![i] as int;
      }
    }
  }
  return result!;
}
|
|
|
|
/// Updates if the record exists, otherwise adds a new row
/// <returns>Returns id
@override
Future<int?> upsert({bool ignoreBatch = true}) async {
  try {
    // INSERT OR REPLACE keyed on 'id': replaces the row when the id
    // already exists, inserts otherwise.
    final result = await _mnDbHeartRateZoneSchema.rawInsert(
        'INSERT OR REPLACE INTO heartRateZoneSchemata (id, date, name, base, athletesId) VALUES (?,?,?,?,?)',
        [
          id,
          date != null ? date!.millisecondsSinceEpoch : null,
          name,
          base,
          athletesId
        ],
        ignoreBatch);
    // The outcome is recorded in [saveResult] for the caller to inspect.
    if (result! > 0) {
      saveResult = BoolResult(
          success: true,
          successMessage:
              'DbHeartRateZoneSchema id=$id updated successfully');
    } else {
      saveResult = BoolResult(
          success: false,
          errorMessage: 'DbHeartRateZoneSchema id=$id did not update');
    }
    return id;
  } catch (e) {
    saveResult = BoolResult(
        success: false,
        errorMessage:
            'DbHeartRateZoneSchema Save failed. Error: ${e.toString()}');
    return null;
  }
}
|
|
|
|
/// inserts or replaces the sent List<<DbHeartRateZoneSchema>> as a bulk in one transaction.
/// upsertAll() method is faster then saveAll() method. upsertAll() should be used when you are sure that the primary key is greater than zero
/// Returns a BoolCommitResult
@override
Future<BoolCommitResult> upsertAll(
    List<DbHeartRateZoneSchema> dbheartratezoneschemas,
    {bool? exclusive,
    bool? noResult,
    bool? continueOnError}) async {
  return _mnDbHeartRateZoneSchema.rawInsertAll(
      'INSERT OR REPLACE INTO heartRateZoneSchemata (id, date, name, base, athletesId) VALUES (?,?,?,?,?)',
      dbheartratezoneschemas,
      exclusive: exclusive,
      noResult: noResult,
      continueOnError: continueOnError);
}
|
|
|
|
/// Deletes DbHeartRateZoneSchema

/// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
@override
Future<BoolResult> delete([bool hardDelete = false]) async {
  debugPrint('SQFENTITIY: delete DbHeartRateZoneSchema invoked (id=$id)');
  var result = BoolResult(success: false);
  // Cascade: delete the child DbHeartRateZone rows first; abort if that
  // fails so the parent row is not orphan-deleted.
  {
    result = await DbHeartRateZone()
        .select()
        .heartRateZoneSchemataId
        .equals(id)
        .and
        .delete(hardDelete);
  }
  if (!result.success) {
    return result;
  }
  // _softDeleteActivated is const false for this table, so this always
  // takes the hard-delete branch.
  if (!_softDeleteActivated || hardDelete) {
    return _mnDbHeartRateZoneSchema
        .delete(QueryParams(whereString: 'id=?', whereArguments: [id]));
  } else {
    return _mnDbHeartRateZoneSchema.updateBatch(
        QueryParams(whereString: 'id=?', whereArguments: [id]),
        {'isDeleted': 1});
  }
}
|
|
|
|
/// Recovering a soft-deleted row is unsupported because soft delete is
/// disabled for this table; always throws [UnimplementedError].
@override
Future<BoolResult> recover([bool recoverChilds = true]) {
  // not implemented because:
  throw UnimplementedError(
      'set useSoftDeleting:true in the table definition of [DbHeartRateZoneSchema] to use this feature');
}
|
|
|
|
/// Starts a fluent query on this table, optionally restricted to
/// [columnsToSelect].
@override
DbHeartRateZoneSchemaFilterBuilder select(
    {List<String>? columnsToSelect, bool? getIsDeleted}) {
  final builder = DbHeartRateZoneSchemaFilterBuilder(this, getIsDeleted);
  builder.qparams.selectColumns = columnsToSelect;
  return builder;
}
|
|
|
|
/// Starts a fluent query with SELECT DISTINCT semantics, optionally
/// restricted to [columnsToSelect].
@override
DbHeartRateZoneSchemaFilterBuilder distinct(
    {List<String>? columnsToSelect, bool? getIsDeleted}) {
  final builder = DbHeartRateZoneSchemaFilterBuilder(this, getIsDeleted);
  builder.qparams.selectColumns = columnsToSelect;
  builder.qparams.distinct = true;
  return builder;
}
|
|
|
|
// Applies the column default values declared in the table definition to
// fields that are still null (athletesId defaults to 0).
void _setDefaultValues() {
  athletesId ??= 0;
}
|
|
|
|
/// Undoes the primary-key assignment made by a failed/rolled-back insert
/// (isInsert is set by saveOrThrow on the insert path).
@override
void rollbackPk() {
  if (isInsert != true) {
    return;
  }
  id = null;
}
|
|
|
|
// END METHODS
|
|
// BEGIN CUSTOM CODE
|
|
/*
|
|
you can define customCode property of your SqfEntityTable constant. For example:
|
|
const tablePerson = SqfEntityTable(
|
|
tableName: 'person',
|
|
primaryKeyName: 'id',
|
|
primaryKeyType: PrimaryKeyType.integer_auto_incremental,
|
|
fields: [
|
|
SqfEntityField('firstName', DbType.text),
|
|
SqfEntityField('lastName', DbType.text),
|
|
],
|
|
customCode: '''
|
|
String fullName()
|
|
{
|
|
return '$firstName $lastName';
|
|
}
|
|
''');
|
|
*/
|
|
// END CUSTOM CODE
|
|
}
|
|
// endregion dbheartratezoneschema
|
|
|
|
// region DbHeartRateZoneSchemaField
|
|
/// Filter-field helper for [DbHeartRateZoneSchema] queries.
///
/// Every operator simply forwards to [FilterBase] and narrows the returned
/// builder to [DbHeartRateZoneSchemaFilterBuilder] so chains stay typed.
class DbHeartRateZoneSchemaField extends FilterBase {
  DbHeartRateZoneSchemaField(
      DbHeartRateZoneSchemaFilterBuilder dbheartratezoneschemaFB)
      : super(dbheartratezoneschemaFB);

  @override
  DbHeartRateZoneSchemaFilterBuilder equals(dynamic pValue) =>
      super.equals(pValue) as DbHeartRateZoneSchemaFilterBuilder;

  @override
  DbHeartRateZoneSchemaFilterBuilder equalsOrNull(dynamic pValue) =>
      super.equalsOrNull(pValue) as DbHeartRateZoneSchemaFilterBuilder;

  @override
  DbHeartRateZoneSchemaFilterBuilder isNull() =>
      super.isNull() as DbHeartRateZoneSchemaFilterBuilder;

  @override
  DbHeartRateZoneSchemaFilterBuilder contains(dynamic pValue) =>
      super.contains(pValue) as DbHeartRateZoneSchemaFilterBuilder;

  @override
  DbHeartRateZoneSchemaFilterBuilder startsWith(dynamic pValue) =>
      super.startsWith(pValue) as DbHeartRateZoneSchemaFilterBuilder;

  @override
  DbHeartRateZoneSchemaFilterBuilder endsWith(dynamic pValue) =>
      super.endsWith(pValue) as DbHeartRateZoneSchemaFilterBuilder;

  @override
  DbHeartRateZoneSchemaFilterBuilder between(dynamic pFirst, dynamic pLast) =>
      super.between(pFirst, pLast) as DbHeartRateZoneSchemaFilterBuilder;

  @override
  DbHeartRateZoneSchemaFilterBuilder greaterThan(dynamic pValue) =>
      super.greaterThan(pValue) as DbHeartRateZoneSchemaFilterBuilder;

  @override
  DbHeartRateZoneSchemaFilterBuilder lessThan(dynamic pValue) =>
      super.lessThan(pValue) as DbHeartRateZoneSchemaFilterBuilder;

  @override
  DbHeartRateZoneSchemaFilterBuilder greaterThanOrEquals(dynamic pValue) =>
      super.greaterThanOrEquals(pValue) as DbHeartRateZoneSchemaFilterBuilder;

  @override
  DbHeartRateZoneSchemaFilterBuilder lessThanOrEquals(dynamic pValue) =>
      super.lessThanOrEquals(pValue) as DbHeartRateZoneSchemaFilterBuilder;

  @override
  DbHeartRateZoneSchemaFilterBuilder inValues(dynamic pValue) =>
      super.inValues(pValue) as DbHeartRateZoneSchemaFilterBuilder;

  @override
  DbHeartRateZoneSchemaField get not =>
      super.not as DbHeartRateZoneSchemaField;
}
|
|
// endregion DbHeartRateZoneSchemaField
|
|
|
|
// region DbHeartRateZoneSchemaFilterBuilder
|
|
/// Fluent query builder for the 'heartRateZoneSchemata' table.
///
/// Conjunction/ordering methods mutate [qparams] via the [ConjunctionBase]
/// superclass and return `this` for chaining; terminal methods (toList,
/// toSingle, delete, update, ...) build the parameters and execute.
class DbHeartRateZoneSchemaFilterBuilder extends ConjunctionBase {
  DbHeartRateZoneSchemaFilterBuilder(
      DbHeartRateZoneSchema obj, bool? getIsDeleted)
      : super(obj, getIsDeleted) {
    _mnDbHeartRateZoneSchema = obj._mnDbHeartRateZoneSchema;
    _softDeleteActivated = obj.softDeleteActivated;
  }

  // Copied from the source entity at construction time.
  bool _softDeleteActivated = false;
  DbHeartRateZoneSchemaManager? _mnDbHeartRateZoneSchema;

  /// put the sql keyword 'AND'
  @override
  DbHeartRateZoneSchemaFilterBuilder get and {
    super.and;
    return this;
  }

  /// put the sql keyword 'OR'
  @override
  DbHeartRateZoneSchemaFilterBuilder get or {
    super.or;
    return this;
  }

  /// open parentheses
  @override
  DbHeartRateZoneSchemaFilterBuilder get startBlock {
    super.startBlock;
    return this;
  }

  /// String whereCriteria, write raw query without 'where' keyword. Like this: 'field1 like 'test%' and field2 = 3'
  @override
  DbHeartRateZoneSchemaFilterBuilder where(String? whereCriteria,
      {dynamic parameterValue}) {
    super.where(whereCriteria, parameterValue: parameterValue);
    return this;
  }

  /// page = page number,
  /// pagesize = row(s) per page
  @override
  DbHeartRateZoneSchemaFilterBuilder page(int page, int pagesize) {
    super.page(page, pagesize);
    return this;
  }

  /// int count = LIMIT
  @override
  DbHeartRateZoneSchemaFilterBuilder top(int count) {
    super.top(count);
    return this;
  }

  /// close parentheses
  @override
  DbHeartRateZoneSchemaFilterBuilder get endBlock {
    super.endBlock;
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbHeartRateZoneSchemaFilterBuilder orderBy(dynamic argFields) {
    super.orderBy(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbHeartRateZoneSchemaFilterBuilder orderByDesc(dynamic argFields) {
    super.orderByDesc(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbHeartRateZoneSchemaFilterBuilder groupBy(dynamic argFields) {
    super.groupBy(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbHeartRateZoneSchemaFilterBuilder having(dynamic argFields) {
    super.having(argFields);
    return this;
  }

  // Builds a typed filter field bound to this builder for column [colName].
  DbHeartRateZoneSchemaField _setField(
      DbHeartRateZoneSchemaField? field, String colName, DbType dbtype) {
    return DbHeartRateZoneSchemaField(this)
      ..param = DbParameter(
          dbType: dbtype, columnName: colName, wStartBlock: openedBlock);
  }

  // Typed column accessors (a fresh field object is created per access).
  DbHeartRateZoneSchemaField? _id;
  DbHeartRateZoneSchemaField get id {
    return _id = _setField(_id, 'id', DbType.integer);
  }

  DbHeartRateZoneSchemaField? _date;
  DbHeartRateZoneSchemaField get date {
    return _date = _setField(_date, 'date', DbType.date);
  }

  DbHeartRateZoneSchemaField? _name;
  DbHeartRateZoneSchemaField get name {
    return _name = _setField(_name, 'name', DbType.text);
  }

  DbHeartRateZoneSchemaField? _base;
  DbHeartRateZoneSchemaField get base {
    return _base = _setField(_base, 'base', DbType.integer);
  }

  DbHeartRateZoneSchemaField? _athletesId;
  DbHeartRateZoneSchemaField get athletesId {
    return _athletesId = _setField(_athletesId, 'athletesId', DbType.integer);
  }

  /// Deletes List<DbHeartRateZoneSchema> bulk by query
  ///
  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    buildParameters();
    var r = BoolResult(success: false);
    // Delete sub records where in (DbHeartRateZone) according to DeleteRule.CASCADE
    final idListDbHeartRateZoneBYheartRateZoneSchemataId =
        toListPrimaryKeySQL(false);
    final resDbHeartRateZoneBYheartRateZoneSchemataId = await DbHeartRateZone()
        .select()
        .where(
            'heartRateZoneSchemataId IN (${idListDbHeartRateZoneBYheartRateZoneSchemataId['sql']})',
            parameterValue:
                idListDbHeartRateZoneBYheartRateZoneSchemataId['args'])
        .delete(hardDelete);
    if (!resDbHeartRateZoneBYheartRateZoneSchemataId.success) {
      return resDbHeartRateZoneBYheartRateZoneSchemataId;
    }

    // Then delete (or soft-delete) the matching schema rows themselves.
    if (_softDeleteActivated && !hardDelete) {
      r = await _mnDbHeartRateZoneSchema!
          .updateBatch(qparams, {'isDeleted': 1});
    } else {
      r = await _mnDbHeartRateZoneSchema!.delete(qparams);
    }
    return r;
  }

  /// using:
  /// update({'fieldName': Value})
  /// fieldName must be String. Value is dynamic, it can be any of the (int, bool, String.. )
  @override
  Future<BoolResult> update(Map<String, dynamic> values) {
    buildParameters();
    // SQLite UPDATE has no LIMIT/OFFSET; emulate it with an id-subquery.
    if (qparams.limit! > 0 || qparams.offset! > 0) {
      qparams.whereString =
          'id IN (SELECT id from heartRateZoneSchemata ${qparams.whereString!.isNotEmpty ? 'WHERE ${qparams.whereString}' : ''}${qparams.limit! > 0 ? ' LIMIT ${qparams.limit}' : ''}${qparams.offset! > 0 ? ' OFFSET ${qparams.offset}' : ''})';
    }
    return _mnDbHeartRateZoneSchema!.updateBatch(qparams, values);
  }

  /// This method always returns [DbHeartRateZoneSchema] Obj if exist, otherwise returns null
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toSingle(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns> DbHeartRateZoneSchema?
  @override
  Future<DbHeartRateZoneSchema?> toSingle(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    buildParameters(pSize: 1);
    final objFuture = _mnDbHeartRateZoneSchema!.toList(qparams);
    final data = await objFuture;
    DbHeartRateZoneSchema? obj;
    if (data.isNotEmpty) {
      obj = DbHeartRateZoneSchema.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD CHILD
      if (preload) {
        loadedFields = loadedFields ?? [];
        if (/*!_loadedfields!.contains('heartRateZoneSchemata.plDbHeartRateZones') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbHeartRateZones'))) {
          /*_loadedfields!.add('heartRateZoneSchemata.plDbHeartRateZones'); */ obj
              .plDbHeartRateZones =
              obj.plDbHeartRateZones ??
                  await obj.getDbHeartRateZones()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

      // RELATIONSHIPS PRELOAD
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbAthlete'))) {
          obj.plDbAthlete = obj.plDbAthlete ??
              await obj.getDbAthlete(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }

  /// This method always returns [DbHeartRateZoneSchema]
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toSingle(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns> DbHeartRateZoneSchema?
  @override
  Future<DbHeartRateZoneSchema> toSingleOrDefault(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    return await toSingle(
            preload: preload,
            preloadFields: preloadFields,
            loadParents: loadParents,
            loadedFields: loadedFields) ??
        DbHeartRateZoneSchema();
  }

  /// This method returns int. [DbHeartRateZoneSchema]
  /// <returns>int
  @override
  Future<int> toCount(
      [VoidCallback Function(int c)? dbheartratezoneschemaCount]) async {
    buildParameters();
    qparams.selectColumns = ['COUNT(1) AS CNT'];
    final dbheartratezoneschemasFuture =
        await _mnDbHeartRateZoneSchema!.toList(qparams);
    final int count = dbheartratezoneschemasFuture[0]['CNT'] as int;
    if (dbheartratezoneschemaCount != null) {
      dbheartratezoneschemaCount(count);
    }
    return count;
  }

  /// This method returns List<DbHeartRateZoneSchema> [DbHeartRateZoneSchema]
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toList(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toList(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns>List<DbHeartRateZoneSchema>
  @override
  Future<List<DbHeartRateZoneSchema>> toList(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    final data = await toMapList();
    final List<DbHeartRateZoneSchema> dbheartratezoneschemasData =
        await DbHeartRateZoneSchema.fromMapList(data,
            preload: preload,
            preloadFields: preloadFields,
            loadParents: loadParents,
            loadedFields: loadedFields,
            setDefaultValues: qparams.selectColumns == null);
    return dbheartratezoneschemasData;
  }

  /// This method returns Json String [DbHeartRateZoneSchema]
  @override
  Future<String> toJson() async {
    final list = <dynamic>[];
    final data = await toList();
    for (var o in data) {
      list.add(o.toMap(forJson: true));
    }
    return json.encode(list);
  }

  /// This method returns Json String. [DbHeartRateZoneSchema]
  @override
  Future<String> toJsonWithChilds() async {
    final list = <dynamic>[];
    final data = await toList();
    for (var o in data) {
      list.add(await o.toMapWithChildren(false, true));
    }
    return json.encode(list);
  }

  /// This method returns List<dynamic>. [DbHeartRateZoneSchema]
  /// <returns>List<dynamic>
  @override
  Future<List<dynamic>> toMapList() async {
    buildParameters();
    return await _mnDbHeartRateZoneSchema!.toList(qparams);
  }

  /// This method returns Primary Key List SQL and Parameters retVal = Map<String,dynamic>. [DbHeartRateZoneSchema]
  /// retVal['sql'] = SQL statement string, retVal['args'] = whereArguments List<dynamic>;
  /// <returns>List<String>
  @override
  Map<String, dynamic> toListPrimaryKeySQL([bool buildParams = true]) {
    final Map<String, dynamic> _retVal = <String, dynamic>{};
    if (buildParams) {
      buildParameters();
    }
    _retVal['sql'] =
        'SELECT `id` FROM heartRateZoneSchemata WHERE ${qparams.whereString}';
    _retVal['args'] = qparams.whereArguments;
    return _retVal;
  }

  /// This method returns Primary Key List<int>.
  /// <returns>List<int>
  @override
  Future<List<int>> toListPrimaryKey([bool buildParams = true]) async {
    if (buildParams) {
      buildParameters();
    }
    final List<int> idData = <int>[];
    qparams.selectColumns = ['id'];
    final idFuture = await _mnDbHeartRateZoneSchema!.toList(qparams);

    final int count = idFuture.length;
    for (int i = 0; i < count; i++) {
      idData.add(idFuture[i]['id'] as int);
    }
    return idData;
  }

  /// Returns List<dynamic> for selected columns. Use this method for 'groupBy' with min,max,avg.. [DbHeartRateZoneSchema]
  /// Sample usage: (see EXAMPLE 4.2 at https://github.com/hhtokpinar/sqfEntity#group-by)
  @override
  Future<List<dynamic>> toListObject() async {
    buildParameters();

    final objectFuture = _mnDbHeartRateZoneSchema!.toList(qparams);

    final List<dynamic> objectsData = <dynamic>[];
    final data = await objectFuture;
    final int count = data.length;
    for (int i = 0; i < count; i++) {
      objectsData.add(data[i]);
    }
    return objectsData;
  }

  /// Returns List<String> for selected first column
  /// Sample usage: await DbHeartRateZoneSchema.select(columnsToSelect: ['columnName']).toListString()
  @override
  Future<List<String>> toListString(
      [VoidCallback Function(List<String> o)? listString]) async {
    buildParameters();

    final objectFuture = _mnDbHeartRateZoneSchema!.toList(qparams);

    final List<String> objectsData = <String>[];
    final data = await objectFuture;
    final int count = data.length;
    // Reads only the first column named in selectColumns; callers must have
    // provided columnsToSelect for this to work.
    for (int i = 0; i < count; i++) {
      objectsData.add(data[i][qparams.selectColumns![0]].toString());
    }
    if (listString != null) {
      listString(objectsData);
    }
    return objectsData;
  }
}
|
|
// endregion DbHeartRateZoneSchemaFilterBuilder
|
|
|
|
// region DbHeartRateZoneSchemaFields
|
|
/// Static [TableField] accessors for the 'heartRateZoneSchemata' columns,
/// lazily created on first use (for raw-query / SqlSyntax helpers).
class DbHeartRateZoneSchemaFields {
  static TableField? _fId;
  static TableField get id =>
      _fId ??= SqlSyntax.setField(_fId, 'id', DbType.integer);

  static TableField? _fDate;
  static TableField get date =>
      _fDate ??= SqlSyntax.setField(_fDate, 'date', DbType.date);

  static TableField? _fName;
  static TableField get name =>
      _fName ??= SqlSyntax.setField(_fName, 'name', DbType.text);

  static TableField? _fBase;
  static TableField get base =>
      _fBase ??= SqlSyntax.setField(_fBase, 'base', DbType.integer);

  static TableField? _fAthletesId;
  static TableField get athletesId => _fAthletesId ??=
      SqlSyntax.setField(_fAthletesId, 'athletesId', DbType.integer);
}
|
|
// endregion DbHeartRateZoneSchemaFields
|
|
|
|
//region DbHeartRateZoneSchemaManager
|
|
/// Database access manager for the 'heartRateZoneSchemata' table; all CRUD
/// work is delegated to [SqfEntityProvider] using the constants below.
class DbHeartRateZoneSchemaManager extends SqfEntityProvider {
  DbHeartRateZoneSchemaManager()
      : super(DbEncrateia(),
            tableName: _tableName,
            primaryKeyList: _primaryKeyList,
            whereStr: _whereStr);
  // Table metadata forwarded to the provider.
  static const String _tableName = 'heartRateZoneSchemata';
  static const List<String> _primaryKeyList = ['id'];
  static const String _whereStr = 'id=?';
}
|
|
|
|
//endregion DbHeartRateZoneSchemaManager
|
|
// region DbHeartRateZone
|
|
class DbHeartRateZone extends TableBase {
|
|
DbHeartRateZone(
|
|
{this.id,
|
|
this.name,
|
|
this.lowerPercentage,
|
|
this.upperPercentage,
|
|
this.lowerLimit,
|
|
this.upperLimit,
|
|
this.color,
|
|
this.heartRateZoneSchemataId}) {
|
|
_setDefaultValues();
|
|
softDeleteActivated = false;
|
|
}
|
|
DbHeartRateZone.withFields(
|
|
this.name,
|
|
this.lowerPercentage,
|
|
this.upperPercentage,
|
|
this.lowerLimit,
|
|
this.upperLimit,
|
|
this.color,
|
|
this.heartRateZoneSchemataId) {
|
|
_setDefaultValues();
|
|
}
|
|
DbHeartRateZone.withId(
|
|
this.id,
|
|
this.name,
|
|
this.lowerPercentage,
|
|
this.upperPercentage,
|
|
this.lowerLimit,
|
|
this.upperLimit,
|
|
this.color,
|
|
this.heartRateZoneSchemataId) {
|
|
_setDefaultValues();
|
|
}
|
|
// fromMap v2.0
|
|
  /// Restores a [DbHeartRateZone] from a raw column map [o].
  ///
  /// Values are parsed defensively (`toString()` + `tryParse`), so both
  /// typed maps from the database layer and string-valued maps from JSON
  /// are accepted; unparsable values become null.
  /// When [setDefaultValues] is true, generated defaults are applied
  /// before the map values are read.
  DbHeartRateZone.fromMap(Map<String, dynamic> o,
      {bool setDefaultValues = true}) {
    if (setDefaultValues) {
      _setDefaultValues();
    }
    id = int.tryParse(o['id'].toString());
    if (o['name'] != null) {
      name = o['name'].toString();
    }
    if (o['lowerPercentage'] != null) {
      lowerPercentage = int.tryParse(o['lowerPercentage'].toString());
    }
    if (o['upperPercentage'] != null) {
      upperPercentage = int.tryParse(o['upperPercentage'].toString());
    }
    if (o['lowerLimit'] != null) {
      lowerLimit = int.tryParse(o['lowerLimit'].toString());
    }
    if (o['upperLimit'] != null) {
      upperLimit = int.tryParse(o['upperLimit'].toString());
    }
    if (o['color'] != null) {
      color = int.tryParse(o['color'].toString());
    }
    heartRateZoneSchemataId =
        int.tryParse(o['heartRateZoneSchemataId'].toString());

    // RELATIONSHIPS FromMAP
    // The parent schema may arrive embedded under 'dbHeartRateZoneSchema'
    // when the row was serialized together with its relations.
    plDbHeartRateZoneSchema = o['dbHeartRateZoneSchema'] != null
        ? DbHeartRateZoneSchema.fromMap(
            o['dbHeartRateZoneSchema'] as Map<String, dynamic>)
        : null;
    // END RELATIONSHIPS FromMAP
  }
|
|
// FIELDS (DbHeartRateZone)
|
|
  // Primary key (integer, auto-increment).
  int? id;

  // 'name' column.
  String? name;

  // 'lowerPercentage' column.
  int? lowerPercentage;

  // 'upperPercentage' column.
  int? upperPercentage;

  // 'lowerLimit' column.
  int? lowerLimit;

  // 'upperLimit' column.
  int? upperLimit;

  // 'color' column (stored as an integer).
  int? color;

  // Foreign key to heartRateZoneSchemata.id (see plDbHeartRateZoneSchema).
  int? heartRateZoneSchemataId;
|
|
|
|
// end FIELDS (DbHeartRateZone)
|
|
|
|
// RELATIONSHIPS (DbHeartRateZone)
|
|
/// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbHeartRateZoneSchema', 'plField2'..]) or so on..
|
|
DbHeartRateZoneSchema? plDbHeartRateZoneSchema;
|
|
|
|
/// get DbHeartRateZoneSchema By HeartRateZoneSchemataId
|
|
Future<DbHeartRateZoneSchema?> getDbHeartRateZoneSchema(
|
|
{bool loadParents = false, List<String>? loadedFields}) async {
|
|
final _obj = await DbHeartRateZoneSchema().getById(heartRateZoneSchemataId,
|
|
loadParents: loadParents, loadedFields: loadedFields);
|
|
return _obj;
|
|
}
|
|
// END RELATIONSHIPS (DbHeartRateZone)
|
|
|
|
static const bool _softDeleteActivated = false;
|
|
DbHeartRateZoneManager? __mnDbHeartRateZone;
|
|
|
|
DbHeartRateZoneManager get _mnDbHeartRateZone {
|
|
return __mnDbHeartRateZone =
|
|
__mnDbHeartRateZone ?? DbHeartRateZoneManager();
|
|
}
|
|
|
|
// METHODS
|
|
@override
|
|
Map<String, dynamic> toMap(
|
|
{bool forQuery = false, bool forJson = false, bool forView = false}) {
|
|
final map = <String, dynamic>{};
|
|
map['id'] = id;
|
|
if (name != null || !forView) {
|
|
map['name'] = name;
|
|
}
|
|
if (lowerPercentage != null || !forView) {
|
|
map['lowerPercentage'] = lowerPercentage;
|
|
}
|
|
if (upperPercentage != null || !forView) {
|
|
map['upperPercentage'] = upperPercentage;
|
|
}
|
|
if (lowerLimit != null || !forView) {
|
|
map['lowerLimit'] = lowerLimit;
|
|
}
|
|
if (upperLimit != null || !forView) {
|
|
map['upperLimit'] = upperLimit;
|
|
}
|
|
if (color != null || !forView) {
|
|
map['color'] = color;
|
|
}
|
|
if (heartRateZoneSchemataId != null) {
|
|
map['heartRateZoneSchemataId'] = forView
|
|
? plDbHeartRateZoneSchema == null
|
|
? heartRateZoneSchemataId
|
|
: plDbHeartRateZoneSchema!.name
|
|
: heartRateZoneSchemataId;
|
|
} else if (heartRateZoneSchemataId != null || !forView) {
|
|
map['heartRateZoneSchemataId'] = null;
|
|
}
|
|
|
|
return map;
|
|
}
|
|
|
|
@override
|
|
Future<Map<String, dynamic>> toMapWithChildren(
|
|
[bool forQuery = false,
|
|
bool forJson = false,
|
|
bool forView = false]) async {
|
|
final map = <String, dynamic>{};
|
|
map['id'] = id;
|
|
if (name != null || !forView) {
|
|
map['name'] = name;
|
|
}
|
|
if (lowerPercentage != null || !forView) {
|
|
map['lowerPercentage'] = lowerPercentage;
|
|
}
|
|
if (upperPercentage != null || !forView) {
|
|
map['upperPercentage'] = upperPercentage;
|
|
}
|
|
if (lowerLimit != null || !forView) {
|
|
map['lowerLimit'] = lowerLimit;
|
|
}
|
|
if (upperLimit != null || !forView) {
|
|
map['upperLimit'] = upperLimit;
|
|
}
|
|
if (color != null || !forView) {
|
|
map['color'] = color;
|
|
}
|
|
if (heartRateZoneSchemataId != null) {
|
|
map['heartRateZoneSchemataId'] = forView
|
|
? plDbHeartRateZoneSchema == null
|
|
? heartRateZoneSchemataId
|
|
: plDbHeartRateZoneSchema!.name
|
|
: heartRateZoneSchemataId;
|
|
} else if (heartRateZoneSchemataId != null || !forView) {
|
|
map['heartRateZoneSchemataId'] = null;
|
|
}
|
|
|
|
return map;
|
|
}
|
|
|
|
/// This method returns Json String [DbHeartRateZone]
|
|
@override
|
|
String toJson() {
|
|
return json.encode(toMap(forJson: true));
|
|
}
|
|
|
|
/// This method returns Json String [DbHeartRateZone]
|
|
@override
|
|
Future<String> toJsonWithChilds() async {
|
|
return json.encode(await toMapWithChildren(false, true));
|
|
}
|
|
|
|
  /// Positional SQL arguments for INSERT/UPDATE, excluding the primary key.
  ///
  /// NOTE: the order must stay in sync with the generated column order
  /// used by the manager's prepared statements.
  @override
  List<dynamic> toArgs() {
    return [
      name,
      lowerPercentage,
      upperPercentage,
      lowerLimit,
      upperLimit,
      color,
      heartRateZoneSchemataId
    ];
  }
|
|
|
|
  /// Positional SQL arguments including the primary key first, matching
  /// the `INSERT OR REPLACE` column list used by [upsert]/[upsertAll].
  @override
  List<dynamic> toArgsWithIds() {
    return [
      id,
      name,
      lowerPercentage,
      upperPercentage,
      lowerLimit,
      upperLimit,
      color,
      heartRateZoneSchemataId
    ];
  }
|
|
|
|
  /// Fetches [uri] via HTTP GET and decodes the response body as a JSON
  /// list of DbHeartRateZone rows.
  ///
  /// Returns null (after logging via debugPrint) when the request or the
  /// decode fails; it never throws.
  static Future<List<DbHeartRateZone>?> fromWebUrl(Uri uri,
      {Map<String, String>? headers}) async {
    try {
      final response = await http.get(uri, headers: headers);
      return await fromJson(response.body);
    } catch (e) {
      debugPrint(
          'SQFENTITY ERROR DbHeartRateZone.fromWebUrl: ErrorMessage: ${e.toString()}');
      return null;
    }
  }
|
|
|
|
Future<http.Response> postUrl(Uri uri, {Map<String, String>? headers}) {
|
|
return http.post(uri, headers: headers, body: toJson());
|
|
}
|
|
|
|
  /// Decodes [jsonBody] (expected to be a JSON array) into a list of
  /// [DbHeartRateZone] objects.
  ///
  /// On a decode/mapping failure the error is logged via debugPrint and
  /// whatever was built so far (possibly an empty list) is returned.
  static Future<List<DbHeartRateZone>> fromJson(String jsonBody) async {
    final Iterable list = await json.decode(jsonBody) as Iterable;
    var objList = <DbHeartRateZone>[];
    try {
      objList = list
          .map((dbheartratezone) =>
              DbHeartRateZone.fromMap(dbheartratezone as Map<String, dynamic>))
          .toList();
    } catch (e) {
      debugPrint(
          'SQFENTITY ERROR DbHeartRateZone.fromJson: ErrorMessage: ${e.toString()}');
    }
    return objList;
  }
|
|
|
|
static Future<List<DbHeartRateZone>> fromMapList(List<dynamic> data,
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields,
|
|
bool setDefaultValues = true}) async {
|
|
final List<DbHeartRateZone> objList = <DbHeartRateZone>[];
|
|
loadedFields = loadedFields ?? [];
|
|
for (final map in data) {
|
|
final obj = DbHeartRateZone.fromMap(map as Map<String, dynamic>,
|
|
setDefaultValues: setDefaultValues);
|
|
// final List<String> _loadedFields = List<String>.from(loadedFields);
|
|
|
|
// RELATIONSHIPS PRELOAD
|
|
if (preload || loadParents) {
|
|
loadedFields = loadedFields ?? [];
|
|
if ((preloadFields == null ||
|
|
loadParents ||
|
|
preloadFields.contains('plDbHeartRateZoneSchema'))) {
|
|
obj.plDbHeartRateZoneSchema = obj.plDbHeartRateZoneSchema ??
|
|
await obj.getDbHeartRateZoneSchema(loadParents: loadParents);
|
|
}
|
|
} // END RELATIONSHIPS PRELOAD
|
|
|
|
objList.add(obj);
|
|
}
|
|
return objList;
|
|
}
|
|
|
|
/// returns DbHeartRateZone by ID if exist, otherwise returns null
|
|
/// Primary Keys: int? id
|
|
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
|
|
/// ex: getById(preload:true) -> Loads all related objects
|
|
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
|
|
/// ex: getById(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
|
|
/// bool loadParents: if true, loads all parent objects until the object has no parent
|
|
|
|
/// <returns>returns [DbHeartRateZone] if exist, otherwise returns null
|
|
Future<DbHeartRateZone?> getById(int? id,
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields}) async {
|
|
if (id == null) {
|
|
return null;
|
|
}
|
|
DbHeartRateZone? obj;
|
|
final data = await _mnDbHeartRateZone.getById([id]);
|
|
if (data.length != 0) {
|
|
obj = DbHeartRateZone.fromMap(data[0] as Map<String, dynamic>);
|
|
|
|
// RELATIONSHIPS PRELOAD
|
|
if (preload || loadParents) {
|
|
loadedFields = loadedFields ?? [];
|
|
if ((preloadFields == null ||
|
|
loadParents ||
|
|
preloadFields.contains('plDbHeartRateZoneSchema'))) {
|
|
obj.plDbHeartRateZoneSchema = obj.plDbHeartRateZoneSchema ??
|
|
await obj.getDbHeartRateZoneSchema(loadParents: loadParents);
|
|
}
|
|
} // END RELATIONSHIPS PRELOAD
|
|
} else {
|
|
obj = null;
|
|
}
|
|
return obj;
|
|
}
|
|
|
|
  /// Saves this object: inserts when [id] is null or 0 (and stores the
  /// new primary key back into [id]), otherwise updates the existing row.
  ///
  /// Set [ignoreBatch] to false when calling between batchStart and
  /// batchCommit so this operation joins the open batch.
  ///
  /// <returns>Returns id
  @override
  Future<int?> save({bool ignoreBatch = true}) async {
    // id == 0 is treated the same as null: the row is not yet persisted.
    if (id == null || id == 0) {
      id = await _mnDbHeartRateZone.insert(this, ignoreBatch);
    } else {
      await _mnDbHeartRateZone.update(this);
    }

    return id;
  }
|
|
|
|
  /// Like [save], but lets database errors propagate instead of being
  /// absorbed by the manager.
  ///
  /// Also marks [isInsert] on the insert path so [rollbackPk] can undo
  /// the primary-key assignment if a batch later fails.
  ///
  /// <returns>Returns id
  @override
  Future<int?> saveOrThrow({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      id = await _mnDbHeartRateZone.insertOrThrow(this, ignoreBatch);

      isInsert = true;
    } else {
      // id= await _upsert(); // removed in sqfentity_gen 1.3.0+6
      await _mnDbHeartRateZone.updateOrThrow(this);
    }

    return id;
  }
|
|
|
|
/// saveAs DbHeartRateZone. Returns a new Primary Key value of DbHeartRateZone
|
|
|
|
/// <returns>Returns a new Primary Key value of DbHeartRateZone
|
|
@override
|
|
Future<int?> saveAs({bool ignoreBatch = true}) async {
|
|
id = null;
|
|
|
|
return save(ignoreBatch: ignoreBatch);
|
|
}
|
|
|
|
  /// Saves the sent List<DbHeartRateZone> as a bulk in one transaction.
  ///
  /// If no batch is already open, one is started and committed here, and
  /// the generated primary keys are written back into the objects that
  /// were inserted. If a batch was already open, the commit (and the PK
  /// back-fill) is left to the outer caller.
  /// Returns a <List<BoolResult>>
  static Future<List<dynamic>> saveAll(List<DbHeartRateZone> dbheartratezones,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    List<dynamic>? result = [];
    // If there is no open transaction, start one
    final isStartedBatch = await DbEncrateia().batchStart();
    for (final obj in dbheartratezones) {
      await obj.save(ignoreBatch: false);
    }
    if (!isStartedBatch) {
      result = await DbEncrateia().batchCommit(
          exclusive: exclusive,
          noResult: noResult,
          continueOnError: continueOnError);
      // Batch results come back in submission order; back-fill the ids of
      // the rows that were inserted (those that still have a null id).
      for (int i = 0; i < dbheartratezones.length; i++) {
        if (dbheartratezones[i].id == null) {
          dbheartratezones[i].id = result![i] as int;
        }
      }
    }
    return result!;
  }
|
|
|
|
  /// Updates the row if it exists, otherwise inserts it, using a single
  /// SQLite `INSERT OR REPLACE`.
  ///
  /// The outcome is recorded in [saveResult]; on failure null is returned
  /// instead of throwing.
  /// <returns>Returns id
  @override
  Future<int?> upsert({bool ignoreBatch = true}) async {
    try {
      // Argument order must match the column list in the SQL below.
      final result = await _mnDbHeartRateZone.rawInsert(
          'INSERT OR REPLACE INTO heartRateZone (id, name, lowerPercentage, upperPercentage, lowerLimit, upperLimit, color, heartRateZoneSchemataId) VALUES (?,?,?,?,?,?,?,?)',
          [
            id,
            name,
            lowerPercentage,
            upperPercentage,
            lowerLimit,
            upperLimit,
            color,
            heartRateZoneSchemataId
          ],
          ignoreBatch);
      if (result! > 0) {
        saveResult = BoolResult(
            success: true,
            successMessage: 'DbHeartRateZone id=$id updated successfully');
      } else {
        saveResult = BoolResult(
            success: false,
            errorMessage: 'DbHeartRateZone id=$id did not update');
      }
      return id;
    } catch (e) {
      saveResult = BoolResult(
          success: false,
          errorMessage: 'DbHeartRateZone Save failed. Error: ${e.toString()}');
      return null;
    }
  }
|
|
|
|
  /// Inserts or replaces the sent List<<DbHeartRateZone>> as a bulk in one
  /// transaction (single `INSERT OR REPLACE` statement per row).
  ///
  /// Faster than [saveAll]; use it when every object already carries a
  /// valid primary key (> 0).
  /// Returns a BoolCommitResult
  @override
  Future<BoolCommitResult> upsertAll(List<DbHeartRateZone> dbheartratezones,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    final results = await _mnDbHeartRateZone.rawInsertAll(
        'INSERT OR REPLACE INTO heartRateZone (id, name, lowerPercentage, upperPercentage, lowerLimit, upperLimit, color, heartRateZoneSchemataId) VALUES (?,?,?,?,?,?,?,?)',
        dbheartratezones,
        exclusive: exclusive,
        noResult: noResult,
        continueOnError: continueOnError);
    return results;
  }
|
|
|
|
  /// Deletes this DbHeartRateZone row.
  ///
  /// _softDeleteActivated is false for this table, so this normally
  /// issues a hard DELETE; the soft-delete branch (isDeleted=1) is kept
  /// for the generated contract.
  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    debugPrint('SQFENTITIY: delete DbHeartRateZone invoked (id=$id)');
    if (!_softDeleteActivated || hardDelete) {
      return _mnDbHeartRateZone
          .delete(QueryParams(whereString: 'id=?', whereArguments: [id]));
    } else {
      return _mnDbHeartRateZone.updateBatch(
          QueryParams(whereString: 'id=?', whereArguments: [id]),
          {'isDeleted': 1});
    }
  }
|
|
|
|
@override
|
|
Future<BoolResult> recover([bool recoverChilds = true]) {
|
|
// not implemented because:
|
|
final msg =
|
|
'set useSoftDeleting:true in the table definition of [DbHeartRateZone] to use this feature';
|
|
throw UnimplementedError(msg);
|
|
}
|
|
|
|
  /// Begins a fluent SELECT query over heartRateZone rows.
  ///
  /// [columnsToSelect] limits the projection; [getIsDeleted] is forwarded
  /// to the filter builder.
  @override
  DbHeartRateZoneFilterBuilder select(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    return DbHeartRateZoneFilterBuilder(this, getIsDeleted)
      ..qparams.selectColumns = columnsToSelect;
  }
|
|
|
|
  /// Like [select], but adds SELECT DISTINCT to the generated query.
  @override
  DbHeartRateZoneFilterBuilder distinct(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    return DbHeartRateZoneFilterBuilder(this, getIsDeleted)
      ..qparams.selectColumns = columnsToSelect
      ..qparams.distinct = true;
  }
|
|
|
|
void _setDefaultValues() {
|
|
heartRateZoneSchemataId = heartRateZoneSchemataId ?? 0;
|
|
}
|
|
|
|
  /// Clears the primary key assigned by a batched insert when the batch
  /// is rolled back ([isInsert] is set by [saveOrThrow]).
  @override
  void rollbackPk() {
    if (isInsert == true) {
      id = null;
    }
  }
|
|
|
|
// END METHODS
|
|
// BEGIN CUSTOM CODE
|
|
/*
|
|
you can define customCode property of your SqfEntityTable constant. For example:
|
|
const tablePerson = SqfEntityTable(
|
|
tableName: 'person',
|
|
primaryKeyName: 'id',
|
|
primaryKeyType: PrimaryKeyType.integer_auto_incremental,
|
|
fields: [
|
|
SqfEntityField('firstName', DbType.text),
|
|
SqfEntityField('lastName', DbType.text),
|
|
],
|
|
customCode: '''
|
|
String fullName()
|
|
{
|
|
return '$firstName $lastName';
|
|
}
|
|
''');
|
|
*/
|
|
// END CUSTOM CODE
|
|
}
|
|
// endregion dbheartratezone
|
|
|
|
// region DbHeartRateZoneField
|
|
/// Column wrapper used inside a fluent heartRateZone query.
///
/// Every override simply delegates to [FilterBase] and narrows the
/// return type to [DbHeartRateZoneFilterBuilder] so calls keep chaining.
class DbHeartRateZoneField extends FilterBase {
  DbHeartRateZoneField(DbHeartRateZoneFilterBuilder dbheartratezoneFB)
      : super(dbheartratezoneFB);

  @override
  DbHeartRateZoneFilterBuilder equals(dynamic pValue) =>
      super.equals(pValue) as DbHeartRateZoneFilterBuilder;

  @override
  DbHeartRateZoneFilterBuilder equalsOrNull(dynamic pValue) =>
      super.equalsOrNull(pValue) as DbHeartRateZoneFilterBuilder;

  @override
  DbHeartRateZoneFilterBuilder isNull() =>
      super.isNull() as DbHeartRateZoneFilterBuilder;

  @override
  DbHeartRateZoneFilterBuilder contains(dynamic pValue) =>
      super.contains(pValue) as DbHeartRateZoneFilterBuilder;

  @override
  DbHeartRateZoneFilterBuilder startsWith(dynamic pValue) =>
      super.startsWith(pValue) as DbHeartRateZoneFilterBuilder;

  @override
  DbHeartRateZoneFilterBuilder endsWith(dynamic pValue) =>
      super.endsWith(pValue) as DbHeartRateZoneFilterBuilder;

  @override
  DbHeartRateZoneFilterBuilder between(dynamic pFirst, dynamic pLast) =>
      super.between(pFirst, pLast) as DbHeartRateZoneFilterBuilder;

  @override
  DbHeartRateZoneFilterBuilder greaterThan(dynamic pValue) =>
      super.greaterThan(pValue) as DbHeartRateZoneFilterBuilder;

  @override
  DbHeartRateZoneFilterBuilder lessThan(dynamic pValue) =>
      super.lessThan(pValue) as DbHeartRateZoneFilterBuilder;

  @override
  DbHeartRateZoneFilterBuilder greaterThanOrEquals(dynamic pValue) =>
      super.greaterThanOrEquals(pValue) as DbHeartRateZoneFilterBuilder;

  @override
  DbHeartRateZoneFilterBuilder lessThanOrEquals(dynamic pValue) =>
      super.lessThanOrEquals(pValue) as DbHeartRateZoneFilterBuilder;

  @override
  DbHeartRateZoneFilterBuilder inValues(dynamic pValue) =>
      super.inValues(pValue) as DbHeartRateZoneFilterBuilder;

  /// Negates the next criterion (sql NOT).
  @override
  DbHeartRateZoneField get not => super.not as DbHeartRateZoneField;
}
|
|
// endregion DbHeartRateZoneField
|
|
|
|
// region DbHeartRateZoneFilterBuilder
|
|
class DbHeartRateZoneFilterBuilder extends ConjunctionBase {
|
|
DbHeartRateZoneFilterBuilder(DbHeartRateZone obj, bool? getIsDeleted)
|
|
: super(obj, getIsDeleted) {
|
|
_mnDbHeartRateZone = obj._mnDbHeartRateZone;
|
|
_softDeleteActivated = obj.softDeleteActivated;
|
|
}
|
|
|
|
bool _softDeleteActivated = false;
|
|
DbHeartRateZoneManager? _mnDbHeartRateZone;
|
|
|
|
/// put the sql keyword 'AND'
|
|
@override
|
|
DbHeartRateZoneFilterBuilder get and {
|
|
super.and;
|
|
return this;
|
|
}
|
|
|
|
/// put the sql keyword 'OR'
|
|
@override
|
|
DbHeartRateZoneFilterBuilder get or {
|
|
super.or;
|
|
return this;
|
|
}
|
|
|
|
/// open parentheses
|
|
@override
|
|
DbHeartRateZoneFilterBuilder get startBlock {
|
|
super.startBlock;
|
|
return this;
|
|
}
|
|
|
|
/// String whereCriteria, write raw query without 'where' keyword. Like this: 'field1 like 'test%' and field2 = 3'
|
|
@override
|
|
DbHeartRateZoneFilterBuilder where(String? whereCriteria,
|
|
{dynamic parameterValue}) {
|
|
super.where(whereCriteria, parameterValue: parameterValue);
|
|
return this;
|
|
}
|
|
|
|
/// page = page number,
|
|
/// pagesize = row(s) per page
|
|
@override
|
|
DbHeartRateZoneFilterBuilder page(int page, int pagesize) {
|
|
super.page(page, pagesize);
|
|
return this;
|
|
}
|
|
|
|
/// int count = LIMIT
|
|
@override
|
|
DbHeartRateZoneFilterBuilder top(int count) {
|
|
super.top(count);
|
|
return this;
|
|
}
|
|
|
|
/// close parentheses
|
|
@override
|
|
DbHeartRateZoneFilterBuilder get endBlock {
|
|
super.endBlock;
|
|
return this;
|
|
}
|
|
|
|
/// argFields might be String or List<String>.
|
|
/// Example 1: argFields='name, date'
|
|
/// Example 2: argFields = ['name', 'date']
|
|
@override
|
|
DbHeartRateZoneFilterBuilder orderBy(dynamic argFields) {
|
|
super.orderBy(argFields);
|
|
return this;
|
|
}
|
|
|
|
/// argFields might be String or List<String>.
|
|
/// Example 1: argFields='field1, field2'
|
|
/// Example 2: argFields = ['field1', 'field2']
|
|
@override
|
|
DbHeartRateZoneFilterBuilder orderByDesc(dynamic argFields) {
|
|
super.orderByDesc(argFields);
|
|
return this;
|
|
}
|
|
|
|
/// argFields might be String or List<String>.
|
|
/// Example 1: argFields='field1, field2'
|
|
/// Example 2: argFields = ['field1', 'field2']
|
|
@override
|
|
DbHeartRateZoneFilterBuilder groupBy(dynamic argFields) {
|
|
super.groupBy(argFields);
|
|
return this;
|
|
}
|
|
|
|
/// argFields might be String or List<String>.
|
|
/// Example 1: argFields='name, date'
|
|
/// Example 2: argFields = ['name', 'date']
|
|
@override
|
|
DbHeartRateZoneFilterBuilder having(dynamic argFields) {
|
|
super.having(argFields);
|
|
return this;
|
|
}
|
|
|
|
  /// Wraps column [colName] as a [DbHeartRateZoneField] ready for
  /// filtering.
  ///
  /// NOTE: the incoming [field] cache parameter is not read — a fresh
  /// field object is created on every call, so each use in a query gets
  /// its own DbParameter.
  DbHeartRateZoneField _setField(
      DbHeartRateZoneField? field, String colName, DbType dbtype) {
    return DbHeartRateZoneField(this)
      ..param = DbParameter(
          dbType: dbtype, columnName: colName, wStartBlock: openedBlock);
  }
|
|
|
|
DbHeartRateZoneField? _id;
|
|
DbHeartRateZoneField get id {
|
|
return _id = _setField(_id, 'id', DbType.integer);
|
|
}
|
|
|
|
DbHeartRateZoneField? _name;
|
|
DbHeartRateZoneField get name {
|
|
return _name = _setField(_name, 'name', DbType.text);
|
|
}
|
|
|
|
DbHeartRateZoneField? _lowerPercentage;
|
|
DbHeartRateZoneField get lowerPercentage {
|
|
return _lowerPercentage =
|
|
_setField(_lowerPercentage, 'lowerPercentage', DbType.integer);
|
|
}
|
|
|
|
DbHeartRateZoneField? _upperPercentage;
|
|
DbHeartRateZoneField get upperPercentage {
|
|
return _upperPercentage =
|
|
_setField(_upperPercentage, 'upperPercentage', DbType.integer);
|
|
}
|
|
|
|
DbHeartRateZoneField? _lowerLimit;
|
|
DbHeartRateZoneField get lowerLimit {
|
|
return _lowerLimit = _setField(_lowerLimit, 'lowerLimit', DbType.integer);
|
|
}
|
|
|
|
DbHeartRateZoneField? _upperLimit;
|
|
DbHeartRateZoneField get upperLimit {
|
|
return _upperLimit = _setField(_upperLimit, 'upperLimit', DbType.integer);
|
|
}
|
|
|
|
DbHeartRateZoneField? _color;
|
|
DbHeartRateZoneField get color {
|
|
return _color = _setField(_color, 'color', DbType.integer);
|
|
}
|
|
|
|
DbHeartRateZoneField? _heartRateZoneSchemataId;
|
|
DbHeartRateZoneField get heartRateZoneSchemataId {
|
|
return _heartRateZoneSchemataId = _setField(
|
|
_heartRateZoneSchemataId, 'heartRateZoneSchemataId', DbType.integer);
|
|
}
|
|
|
|
  /// Deletes all DbHeartRateZone rows matched by the current filter.
  ///
  /// The DbHeartRateZone constructor sets softDeleteActivated to false,
  /// so this normally issues a hard DELETE; the soft-delete branch is the
  /// generated contract for tables that enable it.
  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    buildParameters();
    var r = BoolResult(success: false);

    if (_softDeleteActivated && !hardDelete) {
      r = await _mnDbHeartRateZone!.updateBatch(qparams, {'isDeleted': 1});
    } else {
      r = await _mnDbHeartRateZone!.delete(qparams);
    }
    return r;
  }
|
|
|
|
  /// Bulk-updates the rows matched by the current filter.
  ///
  /// using: update({'fieldName': Value})
  /// fieldName must be String. Value is dynamic, it can be any of the (int, bool, String.. )
  ///
  /// When LIMIT/OFFSET are set, the restriction is rewritten as an
  /// `id IN (SELECT ...)` subquery so it still applies to the UPDATE.
  @override
  Future<BoolResult> update(Map<String, dynamic> values) {
    buildParameters();
    if (qparams.limit! > 0 || qparams.offset! > 0) {
      qparams.whereString =
          'id IN (SELECT id from heartRateZone ${qparams.whereString!.isNotEmpty ? 'WHERE ${qparams.whereString}' : ''}${qparams.limit! > 0 ? ' LIMIT ${qparams.limit}' : ''}${qparams.offset! > 0 ? ' OFFSET ${qparams.offset}' : ''})';
    }
    return _mnDbHeartRateZone!.updateBatch(qparams, values);
  }
|
|
|
|
  /// Runs the query limited to one row and returns it, or null when no
  /// row matches.
  ///
  /// [preload]: load related objects; [preloadFields]: restrict which
  /// relations are preloaded (e.g. ['plDbHeartRateZoneSchema']);
  /// [loadParents]: recursively load parent objects.
  /// <returns> DbHeartRateZone?
  @override
  Future<DbHeartRateZone?> toSingle(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    // pSize: 1 forces LIMIT 1 on the generated query.
    buildParameters(pSize: 1);
    final objFuture = _mnDbHeartRateZone!.toList(qparams);
    final data = await objFuture;
    DbHeartRateZone? obj;
    if (data.isNotEmpty) {
      obj = DbHeartRateZone.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbHeartRateZoneSchema'))) {
          obj.plDbHeartRateZoneSchema = obj.plDbHeartRateZoneSchema ??
              await obj.getDbHeartRateZoneSchema(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }
|
|
|
|
/// This method always returns [DbHeartRateZone]
|
|
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
|
|
/// ex: toSingle(preload:true) -> Loads all related objects
|
|
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
|
|
/// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
|
|
/// bool loadParents: if true, loads all parent objects until the object has no parent
|
|
|
|
/// <returns> DbHeartRateZone?
|
|
@override
|
|
Future<DbHeartRateZone> toSingleOrDefault(
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields}) async {
|
|
return await toSingle(
|
|
preload: preload,
|
|
preloadFields: preloadFields,
|
|
loadParents: loadParents,
|
|
loadedFields: loadedFields) ??
|
|
DbHeartRateZone();
|
|
}
|
|
|
|
/// This method returns int. [DbHeartRateZone]
|
|
/// <returns>int
|
|
@override
|
|
Future<int> toCount(
|
|
[VoidCallback Function(int c)? dbheartratezoneCount]) async {
|
|
buildParameters();
|
|
qparams.selectColumns = ['COUNT(1) AS CNT'];
|
|
final dbheartratezonesFuture = await _mnDbHeartRateZone!.toList(qparams);
|
|
final int count = dbheartratezonesFuture[0]['CNT'] as int;
|
|
if (dbheartratezoneCount != null) {
|
|
dbheartratezoneCount(count);
|
|
}
|
|
return count;
|
|
}
|
|
|
|
  /// Runs the query and maps every row to a [DbHeartRateZone].
  ///
  /// [preload]/[preloadFields]/[loadParents] are forwarded to
  /// [DbHeartRateZone.fromMapList].
  /// <returns>List<DbHeartRateZone>
  @override
  Future<List<DbHeartRateZone>> toList(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    final data = await toMapList();
    // Defaults are only applied on full-row selects; with an explicit
    // column projection, unselected columns stay null.
    final List<DbHeartRateZone> dbheartratezonesData =
        await DbHeartRateZone.fromMapList(data,
            preload: preload,
            preloadFields: preloadFields,
            loadParents: loadParents,
            loadedFields: loadedFields,
            setDefaultValues: qparams.selectColumns == null);
    return dbheartratezonesData;
  }
|
|
|
|
/// This method returns Json String [DbHeartRateZone]
|
|
@override
|
|
Future<String> toJson() async {
|
|
final list = <dynamic>[];
|
|
final data = await toList();
|
|
for (var o in data) {
|
|
list.add(o.toMap(forJson: true));
|
|
}
|
|
return json.encode(list);
|
|
}
|
|
|
|
/// This method returns Json String. [DbHeartRateZone]
|
|
@override
|
|
Future<String> toJsonWithChilds() async {
|
|
final list = <dynamic>[];
|
|
final data = await toList();
|
|
for (var o in data) {
|
|
list.add(await o.toMapWithChildren(false, true));
|
|
}
|
|
return json.encode(list);
|
|
}
|
|
|
|
  /// Runs the query and returns the raw row maps without object mapping.
  /// <returns>List<dynamic>
  @override
  Future<List<dynamic>> toMapList() async {
    buildParameters();
    return await _mnDbHeartRateZone!.toList(qparams);
  }
|
|
|
|
  /// Builds (without executing) the SQL + arguments that would select the
  /// primary keys of the matched rows.
  ///
  /// retVal['sql'] = SQL statement string, retVal['args'] = whereArguments List<dynamic>;
  /// Set [buildParams] to false when buildParameters() was already called
  /// on this builder.
  @override
  Map<String, dynamic> toListPrimaryKeySQL([bool buildParams = true]) {
    final Map<String, dynamic> _retVal = <String, dynamic>{};
    if (buildParams) {
      buildParameters();
    }
    _retVal['sql'] =
        'SELECT `id` FROM heartRateZone WHERE ${qparams.whereString}';
    _retVal['args'] = qparams.whereArguments;
    return _retVal;
  }
|
|
|
|
/// This method returns Primary Key List<int>.
|
|
/// <returns>List<int>
|
|
@override
|
|
Future<List<int>> toListPrimaryKey([bool buildParams = true]) async {
|
|
if (buildParams) {
|
|
buildParameters();
|
|
}
|
|
final List<int> idData = <int>[];
|
|
qparams.selectColumns = ['id'];
|
|
final idFuture = await _mnDbHeartRateZone!.toList(qparams);
|
|
|
|
final int count = idFuture.length;
|
|
for (int i = 0; i < count; i++) {
|
|
idData.add(idFuture[i]['id'] as int);
|
|
}
|
|
return idData;
|
|
}
|
|
|
|
/// Returns List<dynamic> for selected columns. Use this method for 'groupBy' with min,max,avg.. [DbHeartRateZone]
|
|
/// Sample usage: (see EXAMPLE 4.2 at https://github.com/hhtokpinar/sqfEntity#group-by)
|
|
@override
|
|
Future<List<dynamic>> toListObject() async {
|
|
buildParameters();
|
|
|
|
final objectFuture = _mnDbHeartRateZone!.toList(qparams);
|
|
|
|
final List<dynamic> objectsData = <dynamic>[];
|
|
final data = await objectFuture;
|
|
final int count = data.length;
|
|
for (int i = 0; i < count; i++) {
|
|
objectsData.add(data[i]);
|
|
}
|
|
return objectsData;
|
|
}
|
|
|
|
/// Returns List<String> for selected first column
|
|
/// Sample usage: await DbHeartRateZone.select(columnsToSelect: ['columnName']).toListString()
|
|
@override
|
|
Future<List<String>> toListString(
|
|
[VoidCallback Function(List<String> o)? listString]) async {
|
|
buildParameters();
|
|
|
|
final objectFuture = _mnDbHeartRateZone!.toList(qparams);
|
|
|
|
final List<String> objectsData = <String>[];
|
|
final data = await objectFuture;
|
|
final int count = data.length;
|
|
for (int i = 0; i < count; i++) {
|
|
objectsData.add(data[i][qparams.selectColumns![0]].toString());
|
|
}
|
|
if (listString != null) {
|
|
listString(objectsData);
|
|
}
|
|
return objectsData;
|
|
}
|
|
}
|
|
// endregion DbHeartRateZoneFilterBuilder
|
|
|
|
// region DbHeartRateZoneFields
|
|
/// Static [TableField] descriptors for the heartRateZone columns.
///
/// Each descriptor is built lazily on first access and cached in its
/// backing field afterwards.
class DbHeartRateZoneFields {
  static TableField? _fId;
  static TableField get id =>
      _fId ??= SqlSyntax.setField(_fId, 'id', DbType.integer);

  static TableField? _fName;
  static TableField get name =>
      _fName ??= SqlSyntax.setField(_fName, 'name', DbType.text);

  static TableField? _fLowerPercentage;
  static TableField get lowerPercentage => _fLowerPercentage ??=
      SqlSyntax.setField(_fLowerPercentage, 'lowerPercentage', DbType.integer);

  static TableField? _fUpperPercentage;
  static TableField get upperPercentage => _fUpperPercentage ??=
      SqlSyntax.setField(_fUpperPercentage, 'upperPercentage', DbType.integer);

  static TableField? _fLowerLimit;
  static TableField get lowerLimit => _fLowerLimit ??=
      SqlSyntax.setField(_fLowerLimit, 'lowerLimit', DbType.integer);

  static TableField? _fUpperLimit;
  static TableField get upperLimit => _fUpperLimit ??=
      SqlSyntax.setField(_fUpperLimit, 'upperLimit', DbType.integer);

  static TableField? _fColor;
  static TableField get color =>
      _fColor ??= SqlSyntax.setField(_fColor, 'color', DbType.integer);

  static TableField? _fHeartRateZoneSchemataId;
  static TableField get heartRateZoneSchemataId =>
      _fHeartRateZoneSchemataId ??= SqlSyntax.setField(
          _fHeartRateZoneSchemataId, 'heartRateZoneSchemataId',
          DbType.integer);
}
|
|
// endregion DbHeartRateZoneFields
|
|
|
|
//region DbHeartRateZoneManager
|
|
/// Data-access manager routing CRUD operations for the 'heartRateZone'
/// table through the [DbEncrateia] database instance.
class DbHeartRateZoneManager extends SqfEntityProvider {
  // Table metadata used by the base provider to build queries.
  static const String _tableName = 'heartRateZone';
  static const List<String> _primaryKeyList = ['id'];
  static const String _whereStr = 'id=?';

  DbHeartRateZoneManager()
      : super(DbEncrateia(),
            tableName: _tableName,
            primaryKeyList: _primaryKeyList,
            whereStr: _whereStr);
}
|
|
|
|
//endregion DbHeartRateZoneManager
|
|
// region DbPowerZoneSchema
|
|
class DbPowerZoneSchema extends TableBase {
|
|
  /// Creates a [DbPowerZoneSchema]; all fields are optional named parameters.
  /// Applies column defaults and disables soft delete for this table.
  DbPowerZoneSchema(
      {this.id, this.date, this.name, this.base, this.athletesId}) {
    _setDefaultValues();
    softDeleteActivated = false;
  }

  /// Positional constructor without a primary key (for new records).
  DbPowerZoneSchema.withFields(
      this.date, this.name, this.base, this.athletesId) {
    _setDefaultValues();
  }

  /// Positional constructor including the primary key (for existing records).
  DbPowerZoneSchema.withId(
      this.id, this.date, this.name, this.base, this.athletesId) {
    _setDefaultValues();
  }
|
|
  // fromMap v2.0
  /// Builds a [DbPowerZoneSchema] from a raw row / JSON map [o].
  ///
  /// When [setDefaultValues] is true, null fields receive their column
  /// defaults first (see [_setDefaultValues]).
  DbPowerZoneSchema.fromMap(Map<String, dynamic> o,
      {bool setDefaultValues = true}) {
    if (setDefaultValues) {
      _setDefaultValues();
    }
    id = int.tryParse(o['id'].toString());
    if (o['date'] != null) {
      // 'date' may arrive as epoch milliseconds (from SQLite) or as a
      // parseable date string (from JSON); try the numeric form first.
      date = int.tryParse(o['date'].toString()) != null
          ? DateTime.fromMillisecondsSinceEpoch(
              int.tryParse(o['date'].toString())!)
          : DateTime.tryParse(o['date'].toString());
    }
    if (o['name'] != null) {
      name = o['name'].toString();
    }
    if (o['base'] != null) {
      base = int.tryParse(o['base'].toString());
    }
    athletesId = int.tryParse(o['athletesId'].toString());

    // RELATIONSHIPS FromMAP
    // Hydrate the parent athlete when the map embeds it (preloaded joins).
    plDbAthlete = o['dbAthlete'] != null
        ? DbAthlete.fromMap(o['dbAthlete'] as Map<String, dynamic>)
        : null;
    // END RELATIONSHIPS FromMAP
  }
|
|
  // FIELDS (DbPowerZoneSchema)
  int? id; // primary key
  DateTime? date;
  String? name;
  int? base;
  int? athletesId; // foreign key -> athletes.id (defaults to 0)

  // end FIELDS (DbPowerZoneSchema)

  // RELATIONSHIPS (DbPowerZoneSchema)
  /// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbAthlete', 'plField2'..]) or so on..
  DbAthlete? plDbAthlete;

  /// get DbAthlete By AthletesId
  /// Loads the parent [DbAthlete] referenced by [athletesId];
  /// returns null when no such row exists.
  Future<DbAthlete?> getDbAthlete(
      {bool loadParents = false, List<String>? loadedFields}) async {
    final _obj = await DbAthlete().getById(athletesId,
        loadParents: loadParents, loadedFields: loadedFields);
    return _obj;
  }

  // END RELATIONSHIPS (DbPowerZoneSchema)

  // COLLECTIONS & VIRTUALS (DbPowerZoneSchema)
  /// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbPowerZones', 'plField2'..]) or so on..
  List<DbPowerZone>? plDbPowerZones;

  /// get DbPowerZone(s) filtered by id=powerZoneSchemataId
  /// Returns a query builder pre-filtered to this schema's child
  /// [DbPowerZone] rows, or null when this object has no id yet.
  DbPowerZoneFilterBuilder? getDbPowerZones(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    if (id == null) {
      return null;
    }
    return DbPowerZone()
        .select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
        .powerZoneSchemataId
        .equals(id)
        .and;
  }

  // END COLLECTIONS & VIRTUALS (DbPowerZoneSchema)
|
|
|
|
static const bool _softDeleteActivated = false;
|
|
DbPowerZoneSchemaManager? __mnDbPowerZoneSchema;
|
|
|
|
DbPowerZoneSchemaManager get _mnDbPowerZoneSchema {
|
|
return __mnDbPowerZoneSchema =
|
|
__mnDbPowerZoneSchema ?? DbPowerZoneSchemaManager();
|
|
}
|
|
|
|
// METHODS
|
|
@override
|
|
Map<String, dynamic> toMap(
|
|
{bool forQuery = false, bool forJson = false, bool forView = false}) {
|
|
final map = <String, dynamic>{};
|
|
map['id'] = id;
|
|
if (date != null) {
|
|
map['date'] = forJson
|
|
? '$date!.year-$date!.month-$date!.day'
|
|
: forQuery
|
|
? DateTime(date!.year, date!.month, date!.day)
|
|
.millisecondsSinceEpoch
|
|
: date;
|
|
} else if (date != null || !forView) {
|
|
map['date'] = null;
|
|
}
|
|
if (name != null || !forView) {
|
|
map['name'] = name;
|
|
}
|
|
if (base != null || !forView) {
|
|
map['base'] = base;
|
|
}
|
|
if (athletesId != null) {
|
|
map['athletesId'] = forView
|
|
? plDbAthlete == null
|
|
? athletesId
|
|
: plDbAthlete!.state
|
|
: athletesId;
|
|
} else if (athletesId != null || !forView) {
|
|
map['athletesId'] = null;
|
|
}
|
|
|
|
return map;
|
|
}
|
|
|
|
@override
|
|
Future<Map<String, dynamic>> toMapWithChildren(
|
|
[bool forQuery = false,
|
|
bool forJson = false,
|
|
bool forView = false]) async {
|
|
final map = <String, dynamic>{};
|
|
map['id'] = id;
|
|
if (date != null) {
|
|
map['date'] = forJson
|
|
? '$date!.year-$date!.month-$date!.day'
|
|
: forQuery
|
|
? DateTime(date!.year, date!.month, date!.day)
|
|
.millisecondsSinceEpoch
|
|
: date;
|
|
} else if (date != null || !forView) {
|
|
map['date'] = null;
|
|
}
|
|
if (name != null || !forView) {
|
|
map['name'] = name;
|
|
}
|
|
if (base != null || !forView) {
|
|
map['base'] = base;
|
|
}
|
|
if (athletesId != null) {
|
|
map['athletesId'] = forView
|
|
? plDbAthlete == null
|
|
? athletesId
|
|
: plDbAthlete!.state
|
|
: athletesId;
|
|
} else if (athletesId != null || !forView) {
|
|
map['athletesId'] = null;
|
|
}
|
|
|
|
// COLLECTIONS (DbPowerZoneSchema)
|
|
if (!forQuery) {
|
|
map['DbPowerZones'] = await getDbPowerZones()!.toMapList();
|
|
}
|
|
// END COLLECTIONS (DbPowerZoneSchema)
|
|
|
|
return map;
|
|
}
|
|
|
|
/// This method returns Json String [DbPowerZoneSchema]
|
|
@override
|
|
String toJson() {
|
|
return json.encode(toMap(forJson: true));
|
|
}
|
|
|
|
/// This method returns Json String [DbPowerZoneSchema]
|
|
@override
|
|
Future<String> toJsonWithChilds() async {
|
|
return json.encode(await toMapWithChildren(false, true));
|
|
}
|
|
|
|
@override
|
|
List<dynamic> toArgs() {
|
|
return [
|
|
date != null ? date!.millisecondsSinceEpoch : null,
|
|
name,
|
|
base,
|
|
athletesId
|
|
];
|
|
}
|
|
|
|
@override
|
|
List<dynamic> toArgsWithIds() {
|
|
return [
|
|
id,
|
|
date != null ? date!.millisecondsSinceEpoch : null,
|
|
name,
|
|
base,
|
|
athletesId
|
|
];
|
|
}
|
|
|
|
static Future<List<DbPowerZoneSchema>?> fromWebUrl(Uri uri,
|
|
{Map<String, String>? headers}) async {
|
|
try {
|
|
final response = await http.get(uri, headers: headers);
|
|
return await fromJson(response.body);
|
|
} catch (e) {
|
|
debugPrint(
|
|
'SQFENTITY ERROR DbPowerZoneSchema.fromWebUrl: ErrorMessage: ${e.toString()}');
|
|
return null;
|
|
}
|
|
}
|
|
|
|
Future<http.Response> postUrl(Uri uri, {Map<String, String>? headers}) {
|
|
return http.post(uri, headers: headers, body: toJson());
|
|
}
|
|
|
|
static Future<List<DbPowerZoneSchema>> fromJson(String jsonBody) async {
|
|
final Iterable list = await json.decode(jsonBody) as Iterable;
|
|
var objList = <DbPowerZoneSchema>[];
|
|
try {
|
|
objList = list
|
|
.map((dbpowerzoneschema) => DbPowerZoneSchema.fromMap(
|
|
dbpowerzoneschema as Map<String, dynamic>))
|
|
.toList();
|
|
} catch (e) {
|
|
debugPrint(
|
|
'SQFENTITY ERROR DbPowerZoneSchema.fromJson: ErrorMessage: ${e.toString()}');
|
|
}
|
|
return objList;
|
|
}
|
|
|
|
  /// Builds a list of [DbPowerZoneSchema] from raw row maps [data].
  ///
  /// [preload] also loads the plDbPowerZones child collection;
  /// [preloadFields] restricts which relations are loaded; [loadParents]
  /// walks up to parent objects; [setDefaultValues] applies column defaults
  /// to null fields during mapping.
  static Future<List<DbPowerZoneSchema>> fromMapList(List<dynamic> data,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields,
      bool setDefaultValues = true}) async {
    final List<DbPowerZoneSchema> objList = <DbPowerZoneSchema>[];
    loadedFields = loadedFields ?? [];
    for (final map in data) {
      final obj = DbPowerZoneSchema.fromMap(map as Map<String, dynamic>,
          setDefaultValues: setDefaultValues);
      // final List<String> _loadedFields = List<String>.from(loadedFields);

      // RELATIONSHIPS PRELOAD CHILD
      // Load the child DbPowerZone rows when requested.
      if (preload) {
        loadedFields = loadedFields ?? [];
        if (/*!_loadedfields!.contains('powerZoneSchemata.plDbPowerZones') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbPowerZones'))) {
          /*_loadedfields!.add('powerZoneSchemata.plDbPowerZones'); */ obj
                  .plDbPowerZones =
              obj.plDbPowerZones ??
                  await obj.getDbPowerZones()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

      // RELATIONSHIPS PRELOAD
      // Load the parent athlete when requested.
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbAthlete'))) {
          obj.plDbAthlete = obj.plDbAthlete ??
              await obj.getDbAthlete(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD

      objList.add(obj);
    }
    return objList;
  }
|
|
|
|
  /// returns DbPowerZoneSchema by ID if exist, otherwise returns null
  /// Primary Keys: int? id
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: getById(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: getById(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns>returns [DbPowerZoneSchema] if exist, otherwise returns null
  Future<DbPowerZoneSchema?> getById(int? id,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    if (id == null) {
      return null;
    }
    DbPowerZoneSchema? obj;
    final data = await _mnDbPowerZoneSchema.getById([id]);
    if (data.length != 0) {
      obj = DbPowerZoneSchema.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD CHILD
      // Load the child DbPowerZone rows when requested.
      if (preload) {
        loadedFields = loadedFields ?? [];
        if (/*!_loadedfields!.contains('powerZoneSchemata.plDbPowerZones') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbPowerZones'))) {
          /*_loadedfields!.add('powerZoneSchemata.plDbPowerZones'); */ obj
                  .plDbPowerZones =
              obj.plDbPowerZones ??
                  await obj.getDbPowerZones()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

      // RELATIONSHIPS PRELOAD
      // Load the parent athlete when requested.
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbAthlete'))) {
          obj.plDbAthlete = obj.plDbAthlete ??
              await obj.getDbAthlete(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }
|
|
|
|
  /// Saves the (DbPowerZoneSchema) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
  /// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
  /// <returns>Returns id
  @override
  Future<int?> save({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      // No primary key yet -> INSERT; the new id is assigned back.
      id = await _mnDbPowerZoneSchema.insert(this, ignoreBatch);
    } else {
      await _mnDbPowerZoneSchema.update(this);
    }

    return id;
  }

  /// Saves the (DbPowerZoneSchema) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
  /// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
  /// Unlike [save], database errors propagate to the caller.
  /// <returns>Returns id
  @override
  Future<int?> saveOrThrow({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      id = await _mnDbPowerZoneSchema.insertOrThrow(this, ignoreBatch);

      // Mark as inserted so rollbackPk() can undo the id on batch failure.
      isInsert = true;
    } else {
      // id= await _upsert(); // removed in sqfentity_gen 1.3.0+6
      await _mnDbPowerZoneSchema.updateOrThrow(this);
    }

    return id;
  }

  /// saveAs DbPowerZoneSchema. Returns a new Primary Key value of DbPowerZoneSchema
  /// Clears the current id so [save] performs an INSERT (duplicate row).
  /// <returns>Returns a new Primary Key value of DbPowerZoneSchema
  @override
  Future<int?> saveAs({bool ignoreBatch = true}) async {
    id = null;

    return save(ignoreBatch: ignoreBatch);
  }
|
|
|
|
  /// saveAll method saves the sent List<DbPowerZoneSchema> as a bulk in one transaction
  /// Returns a <List<BoolResult>>
  static Future<List<dynamic>> saveAll(
      List<DbPowerZoneSchema> dbpowerzoneschemas,
      {bool? exclusive,
      bool? noResult,
      bool? continueOnError}) async {
    List<dynamic>? result = [];
    // If there is no open transaction, start one
    final isStartedBatch = await DbEncrateia().batchStart();
    for (final obj in dbpowerzoneschemas) {
      await obj.save(ignoreBatch: false);
    }
    // Only commit here when this call opened the batch itself; otherwise the
    // outer batchCommit is responsible.
    if (!isStartedBatch) {
      result = await DbEncrateia().batchCommit(
          exclusive: exclusive,
          noResult: noResult,
          continueOnError: continueOnError);
      // Copy the ids produced by the batch back onto the new objects.
      for (int i = 0; i < dbpowerzoneschemas.length; i++) {
        if (dbpowerzoneschemas[i].id == null) {
          dbpowerzoneschemas[i].id = result![i] as int;
        }
      }
    }
    return result!;
  }

  /// Updates if the record exists, otherwise adds a new row
  /// Uses INSERT OR REPLACE, so all columns are overwritten; sets
  /// [saveResult] to describe the outcome.
  /// <returns>Returns id
  @override
  Future<int?> upsert({bool ignoreBatch = true}) async {
    try {
      final result = await _mnDbPowerZoneSchema.rawInsert(
          'INSERT OR REPLACE INTO powerZoneSchemata (id, date, name, base, athletesId) VALUES (?,?,?,?,?)',
          [
            id,
            date != null ? date!.millisecondsSinceEpoch : null,
            name,
            base,
            athletesId
          ],
          ignoreBatch);
      if (result! > 0) {
        saveResult = BoolResult(
            success: true,
            successMessage: 'DbPowerZoneSchema id=$id updated successfully');
      } else {
        saveResult = BoolResult(
            success: false,
            errorMessage: 'DbPowerZoneSchema id=$id did not update');
      }
      return id;
    } catch (e) {
      saveResult = BoolResult(
          success: false,
          errorMessage:
              'DbPowerZoneSchema Save failed. Error: ${e.toString()}');
      return null;
    }
  }

  /// inserts or replaces the sent List<<DbPowerZoneSchema>> as a bulk in one transaction.
  /// upsertAll() method is faster then saveAll() method. upsertAll() should be used when you are sure that the primary key is greater than zero
  /// Returns a BoolCommitResult
  @override
  Future<BoolCommitResult> upsertAll(List<DbPowerZoneSchema> dbpowerzoneschemas,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    final results = await _mnDbPowerZoneSchema.rawInsertAll(
        'INSERT OR REPLACE INTO powerZoneSchemata (id, date, name, base, athletesId) VALUES (?,?,?,?,?)',
        dbpowerzoneschemas,
        exclusive: exclusive,
        noResult: noResult,
        continueOnError: continueOnError);
    return results;
  }
|
|
|
|
  /// Deletes DbPowerZoneSchema

  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    debugPrint('SQFENTITIY: delete DbPowerZoneSchema invoked (id=$id)');
    var result = BoolResult(success: false);
    {
      // Cascade: delete child DbPowerZone rows first so no orphans remain.
      result = await DbPowerZone()
          .select()
          .powerZoneSchemataId
          .equals(id)
          .and
          .delete(hardDelete);
    }
    // Abort when the cascade failed; this row is left untouched.
    if (!result.success) {
      return result;
    }
    if (!_softDeleteActivated || hardDelete) {
      return _mnDbPowerZoneSchema
          .delete(QueryParams(whereString: 'id=?', whereArguments: [id]));
    } else {
      // Soft delete: flag the row instead of removing it.
      return _mnDbPowerZoneSchema.updateBatch(
          QueryParams(whereString: 'id=?', whereArguments: [id]),
          {'isDeleted': 1});
    }
  }

  /// Recovery is unavailable because this table was generated with
  /// useSoftDeleting:false; always throws [UnimplementedError].
  @override
  Future<BoolResult> recover([bool recoverChilds = true]) {
    // not implemented because:
    final msg =
        'set useSoftDeleting:true in the table definition of [DbPowerZoneSchema] to use this feature';
    throw UnimplementedError(msg);
  }
|
|
|
|
@override
|
|
DbPowerZoneSchemaFilterBuilder select(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
return DbPowerZoneSchemaFilterBuilder(this, getIsDeleted)
|
|
..qparams.selectColumns = columnsToSelect;
|
|
}
|
|
|
|
@override
|
|
DbPowerZoneSchemaFilterBuilder distinct(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
return DbPowerZoneSchemaFilterBuilder(this, getIsDeleted)
|
|
..qparams.selectColumns = columnsToSelect
|
|
..qparams.distinct = true;
|
|
}
|
|
|
|
void _setDefaultValues() {
|
|
athletesId = athletesId ?? 0;
|
|
}
|
|
|
|
@override
|
|
void rollbackPk() {
|
|
if (isInsert == true) {
|
|
id = null;
|
|
}
|
|
}
|
|
|
|
// END METHODS
|
|
// BEGIN CUSTOM CODE
|
|
/*
|
|
you can define customCode property of your SqfEntityTable constant. For example:
|
|
const tablePerson = SqfEntityTable(
|
|
tableName: 'person',
|
|
primaryKeyName: 'id',
|
|
primaryKeyType: PrimaryKeyType.integer_auto_incremental,
|
|
fields: [
|
|
SqfEntityField('firstName', DbType.text),
|
|
SqfEntityField('lastName', DbType.text),
|
|
],
|
|
customCode: '''
|
|
String fullName()
|
|
{
|
|
return '$firstName $lastName';
|
|
}
|
|
''');
|
|
*/
|
|
// END CUSTOM CODE
|
|
}
|
|
// endregion dbpowerzoneschema
|
|
|
|
// region DbPowerZoneSchemaField
|
|
/// A single filterable column in a [DbPowerZoneSchemaFilterBuilder] chain.
///
/// Each operator narrows the base [FilterBase] return type back to the
/// schema-specific builder so fluent chaining keeps its static type.
class DbPowerZoneSchemaField extends FilterBase {
  DbPowerZoneSchemaField(DbPowerZoneSchemaFilterBuilder dbpowerzoneschemaFB)
      : super(dbpowerzoneschemaFB);

  @override
  DbPowerZoneSchemaFilterBuilder equals(dynamic pValue) =>
      super.equals(pValue) as DbPowerZoneSchemaFilterBuilder;

  @override
  DbPowerZoneSchemaFilterBuilder equalsOrNull(dynamic pValue) =>
      super.equalsOrNull(pValue) as DbPowerZoneSchemaFilterBuilder;

  @override
  DbPowerZoneSchemaFilterBuilder isNull() =>
      super.isNull() as DbPowerZoneSchemaFilterBuilder;

  @override
  DbPowerZoneSchemaFilterBuilder contains(dynamic pValue) =>
      super.contains(pValue) as DbPowerZoneSchemaFilterBuilder;

  @override
  DbPowerZoneSchemaFilterBuilder startsWith(dynamic pValue) =>
      super.startsWith(pValue) as DbPowerZoneSchemaFilterBuilder;

  @override
  DbPowerZoneSchemaFilterBuilder endsWith(dynamic pValue) =>
      super.endsWith(pValue) as DbPowerZoneSchemaFilterBuilder;

  @override
  DbPowerZoneSchemaFilterBuilder between(dynamic pFirst, dynamic pLast) =>
      super.between(pFirst, pLast) as DbPowerZoneSchemaFilterBuilder;

  @override
  DbPowerZoneSchemaFilterBuilder greaterThan(dynamic pValue) =>
      super.greaterThan(pValue) as DbPowerZoneSchemaFilterBuilder;

  @override
  DbPowerZoneSchemaFilterBuilder lessThan(dynamic pValue) =>
      super.lessThan(pValue) as DbPowerZoneSchemaFilterBuilder;

  @override
  DbPowerZoneSchemaFilterBuilder greaterThanOrEquals(dynamic pValue) =>
      super.greaterThanOrEquals(pValue) as DbPowerZoneSchemaFilterBuilder;

  @override
  DbPowerZoneSchemaFilterBuilder lessThanOrEquals(dynamic pValue) =>
      super.lessThanOrEquals(pValue) as DbPowerZoneSchemaFilterBuilder;

  @override
  DbPowerZoneSchemaFilterBuilder inValues(dynamic pValue) =>
      super.inValues(pValue) as DbPowerZoneSchemaFilterBuilder;

  /// Negates the next comparison (SQL NOT).
  @override
  DbPowerZoneSchemaField get not => super.not as DbPowerZoneSchemaField;
}
|
|
// endregion DbPowerZoneSchemaField
|
|
|
|
// region DbPowerZoneSchemaFilterBuilder
|
|
class DbPowerZoneSchemaFilterBuilder extends ConjunctionBase {
|
|
  DbPowerZoneSchemaFilterBuilder(DbPowerZoneSchema obj, bool? getIsDeleted)
      : super(obj, getIsDeleted) {
    // Share the entity's manager and soft-delete setting with this builder.
    _mnDbPowerZoneSchema = obj._mnDbPowerZoneSchema;
    _softDeleteActivated = obj.softDeleteActivated;
  }

  bool _softDeleteActivated = false;
  DbPowerZoneSchemaManager? _mnDbPowerZoneSchema;

  /// put the sql keyword 'AND'
  @override
  DbPowerZoneSchemaFilterBuilder get and {
    super.and;
    return this;
  }

  /// put the sql keyword 'OR'
  @override
  DbPowerZoneSchemaFilterBuilder get or {
    super.or;
    return this;
  }

  /// open parentheses
  @override
  DbPowerZoneSchemaFilterBuilder get startBlock {
    super.startBlock;
    return this;
  }

  /// String whereCriteria, write raw query without 'where' keyword. Like this: 'field1 like 'test%' and field2 = 3'
  @override
  DbPowerZoneSchemaFilterBuilder where(String? whereCriteria,
      {dynamic parameterValue}) {
    super.where(whereCriteria, parameterValue: parameterValue);
    return this;
  }

  /// page = page number,
  /// pagesize = row(s) per page
  @override
  DbPowerZoneSchemaFilterBuilder page(int page, int pagesize) {
    super.page(page, pagesize);
    return this;
  }

  /// int count = LIMIT
  @override
  DbPowerZoneSchemaFilterBuilder top(int count) {
    super.top(count);
    return this;
  }

  /// close parentheses
  @override
  DbPowerZoneSchemaFilterBuilder get endBlock {
    super.endBlock;
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbPowerZoneSchemaFilterBuilder orderBy(dynamic argFields) {
    super.orderBy(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbPowerZoneSchemaFilterBuilder orderByDesc(dynamic argFields) {
    super.orderByDesc(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbPowerZoneSchemaFilterBuilder groupBy(dynamic argFields) {
    super.groupBy(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbPowerZoneSchemaFilterBuilder having(dynamic argFields) {
    super.having(argFields);
    return this;
  }
|
|
|
|
DbPowerZoneSchemaField _setField(
|
|
DbPowerZoneSchemaField? field, String colName, DbType dbtype) {
|
|
return DbPowerZoneSchemaField(this)
|
|
..param = DbParameter(
|
|
dbType: dbtype, columnName: colName, wStartBlock: openedBlock);
|
|
}
|
|
|
|
DbPowerZoneSchemaField? _id;
|
|
DbPowerZoneSchemaField get id {
|
|
return _id = _setField(_id, 'id', DbType.integer);
|
|
}
|
|
|
|
DbPowerZoneSchemaField? _date;
|
|
DbPowerZoneSchemaField get date {
|
|
return _date = _setField(_date, 'date', DbType.date);
|
|
}
|
|
|
|
DbPowerZoneSchemaField? _name;
|
|
DbPowerZoneSchemaField get name {
|
|
return _name = _setField(_name, 'name', DbType.text);
|
|
}
|
|
|
|
DbPowerZoneSchemaField? _base;
|
|
DbPowerZoneSchemaField get base {
|
|
return _base = _setField(_base, 'base', DbType.integer);
|
|
}
|
|
|
|
DbPowerZoneSchemaField? _athletesId;
|
|
DbPowerZoneSchemaField get athletesId {
|
|
return _athletesId = _setField(_athletesId, 'athletesId', DbType.integer);
|
|
}
|
|
|
|
  /// Deletes List<DbPowerZoneSchema> bulk by query
  ///
  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    buildParameters();
    var r = BoolResult(success: false);
    // Delete sub records where in (DbPowerZone) according to DeleteRule.CASCADE
    final idListDbPowerZoneBYpowerZoneSchemataId = toListPrimaryKeySQL(false);
    final resDbPowerZoneBYpowerZoneSchemataId = await DbPowerZone()
        .select()
        .where(
            'powerZoneSchemataId IN (${idListDbPowerZoneBYpowerZoneSchemataId['sql']})',
            parameterValue: idListDbPowerZoneBYpowerZoneSchemataId['args'])
        .delete(hardDelete);
    // Abort before touching parent rows when the cascade failed.
    if (!resDbPowerZoneBYpowerZoneSchemataId.success) {
      return resDbPowerZoneBYpowerZoneSchemataId;
    }

    if (_softDeleteActivated && !hardDelete) {
      r = await _mnDbPowerZoneSchema!.updateBatch(qparams, {'isDeleted': 1});
    } else {
      r = await _mnDbPowerZoneSchema!.delete(qparams);
    }
    return r;
  }

  /// using:
  /// update({'fieldName': Value})
  /// fieldName must be String. Value is dynamic, it can be any of the (int, bool, String.. )
  @override
  Future<BoolResult> update(Map<String, dynamic> values) {
    buildParameters();
    // When LIMIT/OFFSET are set, restrict the UPDATE to the paged id set via
    // an 'id IN (SELECT ...)' sub-query instead of a plain WHERE.
    if (qparams.limit! > 0 || qparams.offset! > 0) {
      qparams.whereString =
          'id IN (SELECT id from powerZoneSchemata ${qparams.whereString!.isNotEmpty ? 'WHERE ${qparams.whereString}' : ''}${qparams.limit! > 0 ? ' LIMIT ${qparams.limit}' : ''}${qparams.offset! > 0 ? ' OFFSET ${qparams.offset}' : ''})';
    }
    return _mnDbPowerZoneSchema!.updateBatch(qparams, values);
  }
|
|
|
|
  /// This method always returns [DbPowerZoneSchema] Obj if exist, otherwise returns null
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toSingle(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns> DbPowerZoneSchema?
  @override
  Future<DbPowerZoneSchema?> toSingle(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    // Limit the query to one row.
    buildParameters(pSize: 1);
    final objFuture = _mnDbPowerZoneSchema!.toList(qparams);
    final data = await objFuture;
    DbPowerZoneSchema? obj;
    if (data.isNotEmpty) {
      obj = DbPowerZoneSchema.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD CHILD
      // Load the child DbPowerZone rows when requested.
      if (preload) {
        loadedFields = loadedFields ?? [];
        if (/*!_loadedfields!.contains('powerZoneSchemata.plDbPowerZones') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbPowerZones'))) {
          /*_loadedfields!.add('powerZoneSchemata.plDbPowerZones'); */ obj
                  .plDbPowerZones =
              obj.plDbPowerZones ??
                  await obj.getDbPowerZones()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

      // RELATIONSHIPS PRELOAD
      // Load the parent athlete when requested.
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbAthlete'))) {
          obj.plDbAthlete = obj.plDbAthlete ??
              await obj.getDbAthlete(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }

  /// This method always returns [DbPowerZoneSchema]
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toSingle(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent
  /// Falls back to a fresh default instance when no row matches.

  /// <returns> DbPowerZoneSchema?
  @override
  Future<DbPowerZoneSchema> toSingleOrDefault(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    return await toSingle(
            preload: preload,
            preloadFields: preloadFields,
            loadParents: loadParents,
            loadedFields: loadedFields) ??
        DbPowerZoneSchema();
  }
|
|
|
|
/// This method returns int. [DbPowerZoneSchema]
|
|
/// <returns>int
|
|
@override
|
|
Future<int> toCount(
|
|
[VoidCallback Function(int c)? dbpowerzoneschemaCount]) async {
|
|
buildParameters();
|
|
qparams.selectColumns = ['COUNT(1) AS CNT'];
|
|
final dbpowerzoneschemasFuture =
|
|
await _mnDbPowerZoneSchema!.toList(qparams);
|
|
final int count = dbpowerzoneschemasFuture[0]['CNT'] as int;
|
|
if (dbpowerzoneschemaCount != null) {
|
|
dbpowerzoneschemaCount(count);
|
|
}
|
|
return count;
|
|
}
|
|
|
|
/// This method returns List<DbPowerZoneSchema> [DbPowerZoneSchema]
|
|
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
|
|
/// ex: toList(preload:true) -> Loads all related objects
|
|
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
|
|
/// ex: toList(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
|
|
/// bool loadParents: if true, loads all parent objects until the object has no parent
|
|
|
|
/// <returns>List<DbPowerZoneSchema>
|
|
@override
|
|
Future<List<DbPowerZoneSchema>> toList(
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields}) async {
|
|
final data = await toMapList();
|
|
final List<DbPowerZoneSchema> dbpowerzoneschemasData =
|
|
await DbPowerZoneSchema.fromMapList(data,
|
|
preload: preload,
|
|
preloadFields: preloadFields,
|
|
loadParents: loadParents,
|
|
loadedFields: loadedFields,
|
|
setDefaultValues: qparams.selectColumns == null);
|
|
return dbpowerzoneschemasData;
|
|
}
|
|
|
|
/// This method returns Json String [DbPowerZoneSchema]
|
|
@override
|
|
Future<String> toJson() async {
|
|
final list = <dynamic>[];
|
|
final data = await toList();
|
|
for (var o in data) {
|
|
list.add(o.toMap(forJson: true));
|
|
}
|
|
return json.encode(list);
|
|
}
|
|
|
|
/// This method returns Json String. [DbPowerZoneSchema]
|
|
@override
|
|
Future<String> toJsonWithChilds() async {
|
|
final list = <dynamic>[];
|
|
final data = await toList();
|
|
for (var o in data) {
|
|
list.add(await o.toMapWithChildren(false, true));
|
|
}
|
|
return json.encode(list);
|
|
}
|
|
|
|
/// This method returns List<dynamic>. [DbPowerZoneSchema]
|
|
/// <returns>List<dynamic>
|
|
@override
|
|
Future<List<dynamic>> toMapList() async {
|
|
buildParameters();
|
|
return await _mnDbPowerZoneSchema!.toList(qparams);
|
|
}
|
|
|
|
/// This method returns Primary Key List SQL and Parameters retVal = Map<String,dynamic>. [DbPowerZoneSchema]
|
|
/// retVal['sql'] = SQL statement string, retVal['args'] = whereArguments List<dynamic>;
|
|
/// <returns>List<String>
|
|
@override
|
|
Map<String, dynamic> toListPrimaryKeySQL([bool buildParams = true]) {
|
|
final Map<String, dynamic> _retVal = <String, dynamic>{};
|
|
if (buildParams) {
|
|
buildParameters();
|
|
}
|
|
_retVal['sql'] =
|
|
'SELECT `id` FROM powerZoneSchemata WHERE ${qparams.whereString}';
|
|
_retVal['args'] = qparams.whereArguments;
|
|
return _retVal;
|
|
}
|
|
|
|
/// This method returns Primary Key List<int>.
|
|
/// <returns>List<int>
|
|
@override
|
|
Future<List<int>> toListPrimaryKey([bool buildParams = true]) async {
|
|
if (buildParams) {
|
|
buildParameters();
|
|
}
|
|
final List<int> idData = <int>[];
|
|
qparams.selectColumns = ['id'];
|
|
final idFuture = await _mnDbPowerZoneSchema!.toList(qparams);
|
|
|
|
final int count = idFuture.length;
|
|
for (int i = 0; i < count; i++) {
|
|
idData.add(idFuture[i]['id'] as int);
|
|
}
|
|
return idData;
|
|
}
|
|
|
|
/// Returns List<dynamic> for selected columns. Use this method for 'groupBy' with min,max,avg.. [DbPowerZoneSchema]
|
|
/// Sample usage: (see EXAMPLE 4.2 at https://github.com/hhtokpinar/sqfEntity#group-by)
|
|
@override
|
|
Future<List<dynamic>> toListObject() async {
|
|
buildParameters();
|
|
|
|
final objectFuture = _mnDbPowerZoneSchema!.toList(qparams);
|
|
|
|
final List<dynamic> objectsData = <dynamic>[];
|
|
final data = await objectFuture;
|
|
final int count = data.length;
|
|
for (int i = 0; i < count; i++) {
|
|
objectsData.add(data[i]);
|
|
}
|
|
return objectsData;
|
|
}
|
|
|
|
/// Returns List<String> for selected first column
|
|
/// Sample usage: await DbPowerZoneSchema.select(columnsToSelect: ['columnName']).toListString()
|
|
@override
|
|
Future<List<String>> toListString(
|
|
[VoidCallback Function(List<String> o)? listString]) async {
|
|
buildParameters();
|
|
|
|
final objectFuture = _mnDbPowerZoneSchema!.toList(qparams);
|
|
|
|
final List<String> objectsData = <String>[];
|
|
final data = await objectFuture;
|
|
final int count = data.length;
|
|
for (int i = 0; i < count; i++) {
|
|
objectsData.add(data[i][qparams.selectColumns![0]].toString());
|
|
}
|
|
if (listString != null) {
|
|
listString(objectsData);
|
|
}
|
|
return objectsData;
|
|
}
|
|
}
|
|
// endregion DbPowerZoneSchemaFilterBuilder
|
|
|
|
// region DbPowerZoneSchemaFields
|
|
/// Static, lazily-cached [TableField] descriptors for the
/// `powerZoneSchemata` table columns.
class DbPowerZoneSchemaFields {
  static TableField? _fId;

  /// Field descriptor for the `id` column.
  static TableField get id =>
      _fId ??= SqlSyntax.setField(_fId, 'id', DbType.integer);

  static TableField? _fDate;

  /// Field descriptor for the `date` column.
  static TableField get date =>
      _fDate ??= SqlSyntax.setField(_fDate, 'date', DbType.date);

  static TableField? _fName;

  /// Field descriptor for the `name` column.
  static TableField get name =>
      _fName ??= SqlSyntax.setField(_fName, 'name', DbType.text);

  static TableField? _fBase;

  /// Field descriptor for the `base` column.
  static TableField get base =>
      _fBase ??= SqlSyntax.setField(_fBase, 'base', DbType.integer);

  static TableField? _fAthletesId;

  /// Field descriptor for the `athletesId` foreign-key column.
  static TableField get athletesId => _fAthletesId ??=
      SqlSyntax.setField(_fAthletesId, 'athletesId', DbType.integer);
}
|
|
// endregion DbPowerZoneSchemaFields
|
|
|
|
//region DbPowerZoneSchemaManager
|
|
/// Low-level CRUD provider for the `powerZoneSchemata` table.
///
/// Binds the table name and primary-key metadata to the shared
/// [DbEncrateia] database instance via [SqfEntityProvider].
class DbPowerZoneSchemaManager extends SqfEntityProvider {
  DbPowerZoneSchemaManager()
      : super(DbEncrateia(),
            tableName: _tableName,
            primaryKeyList: _primaryKeyList,
            whereStr: _whereStr);
  static const String _tableName = 'powerZoneSchemata';
  static const List<String> _primaryKeyList = ['id'];
  // Parameterized WHERE template used for primary-key lookups.
  static const String _whereStr = 'id=?';
}
|
|
|
|
//endregion DbPowerZoneSchemaManager
|
|
// region DbPowerZone
|
|
class DbPowerZone extends TableBase {
|
|
  /// Creates a [DbPowerZone]; all fields optional. Applies generated
  /// default values and disables soft delete for this table.
  DbPowerZone(
      {this.id,
      this.name,
      this.lowerPercentage,
      this.upperPercentage,
      this.lowerLimit,
      this.upperLimit,
      this.color,
      this.powerZoneSchemataId}) {
    _setDefaultValues();
    softDeleteActivated = false;
  }

  /// Creates a [DbPowerZone] from positional field values (no id).
  DbPowerZone.withFields(this.name, this.lowerPercentage, this.upperPercentage,
      this.lowerLimit, this.upperLimit, this.color, this.powerZoneSchemataId) {
    _setDefaultValues();
  }

  /// Creates a [DbPowerZone] from positional field values including the id.
  DbPowerZone.withId(
      this.id,
      this.name,
      this.lowerPercentage,
      this.upperPercentage,
      this.lowerLimit,
      this.upperLimit,
      this.color,
      this.powerZoneSchemataId) {
    _setDefaultValues();
  }
|
|
  // fromMap v2.0
  /// Builds a [DbPowerZone] from a raw database/JSON row [o].
  ///
  /// Numeric columns are parsed defensively via `int.tryParse` because the
  /// map values may arrive as strings (e.g. from JSON).
  DbPowerZone.fromMap(Map<String, dynamic> o, {bool setDefaultValues = true}) {
    if (setDefaultValues) {
      _setDefaultValues();
    }
    id = int.tryParse(o['id'].toString());
    if (o['name'] != null) {
      name = o['name'].toString();
    }
    if (o['lowerPercentage'] != null) {
      lowerPercentage = int.tryParse(o['lowerPercentage'].toString());
    }
    if (o['upperPercentage'] != null) {
      upperPercentage = int.tryParse(o['upperPercentage'].toString());
    }
    if (o['lowerLimit'] != null) {
      lowerLimit = int.tryParse(o['lowerLimit'].toString());
    }
    if (o['upperLimit'] != null) {
      upperLimit = int.tryParse(o['upperLimit'].toString());
    }
    if (o['color'] != null) {
      color = int.tryParse(o['color'].toString());
    }
    powerZoneSchemataId = int.tryParse(o['powerZoneSchemataId'].toString());

    // RELATIONSHIPS FromMAP
    // A nested parent map (if present) is hydrated into the preload field.
    plDbPowerZoneSchema = o['dbPowerZoneSchema'] != null
        ? DbPowerZoneSchema.fromMap(
            o['dbPowerZoneSchema'] as Map<String, dynamic>)
        : null;
    // END RELATIONSHIPS FromMAP
  }
|
|
  // FIELDS (DbPowerZone)
  // Primary key (auto-increment).
  int? id;
  // Display name of the zone.
  String? name;
  // Zone bounds as percentages of the schema base value.
  int? lowerPercentage;
  int? upperPercentage;
  // Absolute zone bounds (units not shown here; presumably watts — TODO confirm).
  int? lowerLimit;
  int? upperLimit;
  // Encoded color value for UI display.
  int? color;
  // Foreign key to the parent powerZoneSchemata row.
  int? powerZoneSchemataId;

  // end FIELDS (DbPowerZone)
|
|
|
|
  // RELATIONSHIPS (DbPowerZone)
  /// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbPowerZoneSchema', 'plField2'..]) or so on..
  DbPowerZoneSchema? plDbPowerZoneSchema;

  /// get DbPowerZoneSchema By PowerZoneSchemataId
  Future<DbPowerZoneSchema?> getDbPowerZoneSchema(
      {bool loadParents = false, List<String>? loadedFields}) async {
    final _obj = await DbPowerZoneSchema().getById(powerZoneSchemataId,
        loadParents: loadParents, loadedFields: loadedFields);
    return _obj;
  }
  // END RELATIONSHIPS (DbPowerZone)

  // Soft delete is disabled for this table (see table definition).
  static const bool _softDeleteActivated = false;
  DbPowerZoneManager? __mnDbPowerZone;

  // Lazily-created manager used for all database operations of this entity.
  DbPowerZoneManager get _mnDbPowerZone {
    return __mnDbPowerZone = __mnDbPowerZone ?? DbPowerZoneManager();
  }
|
|
|
|
// METHODS
|
|
@override
|
|
Map<String, dynamic> toMap(
|
|
{bool forQuery = false, bool forJson = false, bool forView = false}) {
|
|
final map = <String, dynamic>{};
|
|
map['id'] = id;
|
|
if (name != null || !forView) {
|
|
map['name'] = name;
|
|
}
|
|
if (lowerPercentage != null || !forView) {
|
|
map['lowerPercentage'] = lowerPercentage;
|
|
}
|
|
if (upperPercentage != null || !forView) {
|
|
map['upperPercentage'] = upperPercentage;
|
|
}
|
|
if (lowerLimit != null || !forView) {
|
|
map['lowerLimit'] = lowerLimit;
|
|
}
|
|
if (upperLimit != null || !forView) {
|
|
map['upperLimit'] = upperLimit;
|
|
}
|
|
if (color != null || !forView) {
|
|
map['color'] = color;
|
|
}
|
|
if (powerZoneSchemataId != null) {
|
|
map['powerZoneSchemataId'] = forView
|
|
? plDbPowerZoneSchema == null
|
|
? powerZoneSchemataId
|
|
: plDbPowerZoneSchema!.name
|
|
: powerZoneSchemataId;
|
|
} else if (powerZoneSchemataId != null || !forView) {
|
|
map['powerZoneSchemataId'] = null;
|
|
}
|
|
|
|
return map;
|
|
}
|
|
|
|
@override
|
|
Future<Map<String, dynamic>> toMapWithChildren(
|
|
[bool forQuery = false,
|
|
bool forJson = false,
|
|
bool forView = false]) async {
|
|
final map = <String, dynamic>{};
|
|
map['id'] = id;
|
|
if (name != null || !forView) {
|
|
map['name'] = name;
|
|
}
|
|
if (lowerPercentage != null || !forView) {
|
|
map['lowerPercentage'] = lowerPercentage;
|
|
}
|
|
if (upperPercentage != null || !forView) {
|
|
map['upperPercentage'] = upperPercentage;
|
|
}
|
|
if (lowerLimit != null || !forView) {
|
|
map['lowerLimit'] = lowerLimit;
|
|
}
|
|
if (upperLimit != null || !forView) {
|
|
map['upperLimit'] = upperLimit;
|
|
}
|
|
if (color != null || !forView) {
|
|
map['color'] = color;
|
|
}
|
|
if (powerZoneSchemataId != null) {
|
|
map['powerZoneSchemataId'] = forView
|
|
? plDbPowerZoneSchema == null
|
|
? powerZoneSchemataId
|
|
: plDbPowerZoneSchema!.name
|
|
: powerZoneSchemataId;
|
|
} else if (powerZoneSchemataId != null || !forView) {
|
|
map['powerZoneSchemataId'] = null;
|
|
}
|
|
|
|
return map;
|
|
}
|
|
|
|
  /// This method returns Json String [DbPowerZone]
  @override
  String toJson() {
    return json.encode(toMap(forJson: true));
  }

  /// This method returns Json String [DbPowerZone]
  /// (asynchronous variant that also serializes child relations).
  @override
  Future<String> toJsonWithChilds() async {
    return json.encode(await toMapWithChildren(false, true));
  }
|
|
|
|
@override
|
|
List<dynamic> toArgs() {
|
|
return [
|
|
name,
|
|
lowerPercentage,
|
|
upperPercentage,
|
|
lowerLimit,
|
|
upperLimit,
|
|
color,
|
|
powerZoneSchemataId
|
|
];
|
|
}
|
|
|
|
@override
|
|
List<dynamic> toArgsWithIds() {
|
|
return [
|
|
id,
|
|
name,
|
|
lowerPercentage,
|
|
upperPercentage,
|
|
lowerLimit,
|
|
upperLimit,
|
|
color,
|
|
powerZoneSchemataId
|
|
];
|
|
}
|
|
|
|
  /// Fetches a JSON array from [uri] and parses it into a list of
  /// [DbPowerZone], or returns null when the request or parsing fails.
  static Future<List<DbPowerZone>?> fromWebUrl(Uri uri,
      {Map<String, String>? headers}) async {
    try {
      final response = await http.get(uri, headers: headers);
      return await fromJson(response.body);
    } catch (e) {
      // Best-effort: network/parse errors are logged, not rethrown.
      debugPrint(
          'SQFENTITY ERROR DbPowerZone.fromWebUrl: ErrorMessage: ${e.toString()}');
      return null;
    }
  }

  /// POSTs this object's JSON representation to [uri].
  Future<http.Response> postUrl(Uri uri, {Map<String, String>? headers}) {
    return http.post(uri, headers: headers, body: toJson());
  }
|
|
|
|
  /// Decodes a JSON array string into a list of [DbPowerZone].
  ///
  /// Malformed entries abort the mapping; the error is logged and the
  /// (possibly empty) list built so far is returned.
  static Future<List<DbPowerZone>> fromJson(String jsonBody) async {
    final Iterable list = await json.decode(jsonBody) as Iterable;
    var objList = <DbPowerZone>[];
    try {
      objList = list
          .map((dbpowerzone) =>
              DbPowerZone.fromMap(dbpowerzone as Map<String, dynamic>))
          .toList();
    } catch (e) {
      debugPrint(
          'SQFENTITY ERROR DbPowerZone.fromJson: ErrorMessage: ${e.toString()}');
    }
    return objList;
  }
|
|
|
|
static Future<List<DbPowerZone>> fromMapList(List<dynamic> data,
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields,
|
|
bool setDefaultValues = true}) async {
|
|
final List<DbPowerZone> objList = <DbPowerZone>[];
|
|
loadedFields = loadedFields ?? [];
|
|
for (final map in data) {
|
|
final obj = DbPowerZone.fromMap(map as Map<String, dynamic>,
|
|
setDefaultValues: setDefaultValues);
|
|
// final List<String> _loadedFields = List<String>.from(loadedFields);
|
|
|
|
// RELATIONSHIPS PRELOAD
|
|
if (preload || loadParents) {
|
|
loadedFields = loadedFields ?? [];
|
|
if ((preloadFields == null ||
|
|
loadParents ||
|
|
preloadFields.contains('plDbPowerZoneSchema'))) {
|
|
obj.plDbPowerZoneSchema = obj.plDbPowerZoneSchema ??
|
|
await obj.getDbPowerZoneSchema(loadParents: loadParents);
|
|
}
|
|
} // END RELATIONSHIPS PRELOAD
|
|
|
|
objList.add(obj);
|
|
}
|
|
return objList;
|
|
}
|
|
|
|
/// returns DbPowerZone by ID if exist, otherwise returns null
|
|
/// Primary Keys: int? id
|
|
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
|
|
/// ex: getById(preload:true) -> Loads all related objects
|
|
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
|
|
/// ex: getById(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
|
|
/// bool loadParents: if true, loads all parent objects until the object has no parent
|
|
|
|
/// <returns>returns [DbPowerZone] if exist, otherwise returns null
|
|
Future<DbPowerZone?> getById(int? id,
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields}) async {
|
|
if (id == null) {
|
|
return null;
|
|
}
|
|
DbPowerZone? obj;
|
|
final data = await _mnDbPowerZone.getById([id]);
|
|
if (data.length != 0) {
|
|
obj = DbPowerZone.fromMap(data[0] as Map<String, dynamic>);
|
|
|
|
// RELATIONSHIPS PRELOAD
|
|
if (preload || loadParents) {
|
|
loadedFields = loadedFields ?? [];
|
|
if ((preloadFields == null ||
|
|
loadParents ||
|
|
preloadFields.contains('plDbPowerZoneSchema'))) {
|
|
obj.plDbPowerZoneSchema = obj.plDbPowerZoneSchema ??
|
|
await obj.getDbPowerZoneSchema(loadParents: loadParents);
|
|
}
|
|
} // END RELATIONSHIPS PRELOAD
|
|
} else {
|
|
obj = null;
|
|
}
|
|
return obj;
|
|
}
|
|
|
|
  /// Saves the (DbPowerZone) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
  /// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
  /// <returns>Returns id
  @override
  Future<int?> save({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      id = await _mnDbPowerZone.insert(this, ignoreBatch);
    } else {
      await _mnDbPowerZone.update(this);
    }

    return id;
  }

  /// Saves the (DbPowerZone) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
  /// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
  /// Unlike [save], database errors propagate to the caller.
  /// <returns>Returns id
  @override
  Future<int?> saveOrThrow({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      id = await _mnDbPowerZone.insertOrThrow(this, ignoreBatch);

      // Marks this instance so rollbackPk() can undo the assigned id.
      isInsert = true;
    } else {
      // id= await _upsert(); // removed in sqfentity_gen 1.3.0+6
      await _mnDbPowerZone.updateOrThrow(this);
    }

    return id;
  }
|
|
|
|
  /// saveAs DbPowerZone. Returns a new Primary Key value of DbPowerZone

  /// <returns>Returns a new Primary Key value of DbPowerZone
  @override
  Future<int?> saveAs({bool ignoreBatch = true}) async {
    // Clearing the id forces save() to insert a copy instead of updating.
    id = null;

    return save(ignoreBatch: ignoreBatch);
  }
|
|
|
|
  /// saveAll method saves the sent List<DbPowerZone> as a bulk in one transaction
  /// Returns a <List<BoolResult>>
  static Future<List<dynamic>> saveAll(List<DbPowerZone> dbpowerzones,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    List<dynamic>? result = [];
    // If there is no open transaction, start one
    final isStartedBatch = await DbEncrateia().batchStart();
    for (final obj in dbpowerzones) {
      await obj.save(ignoreBatch: false);
    }
    // Only commit here when this call opened the batch itself; otherwise the
    // outer batch owner is responsible for committing.
    if (!isStartedBatch) {
      result = await DbEncrateia().batchCommit(
          exclusive: exclusive,
          noResult: noResult,
          continueOnError: continueOnError);
      // Back-fill the ids generated by the batch insert.
      // NOTE(review): assumes commit results align 1:1 with the input list —
      // verify against SqfEntityProvider.batchCommit semantics.
      for (int i = 0; i < dbpowerzones.length; i++) {
        if (dbpowerzones[i].id == null) {
          dbpowerzones[i].id = result![i] as int;
        }
      }
    }
    return result!;
  }
|
|
|
|
  /// Updates if the record exists, otherwise adds a new row
  /// <returns>Returns id
  @override
  Future<int?> upsert({bool ignoreBatch = true}) async {
    try {
      // INSERT OR REPLACE relies on the primary key to decide insert vs update.
      final result = await _mnDbPowerZone.rawInsert(
          'INSERT OR REPLACE INTO powerZone (id, name, lowerPercentage, upperPercentage, lowerLimit, upperLimit, color, powerZoneSchemataId) VALUES (?,?,?,?,?,?,?,?)',
          [
            id,
            name,
            lowerPercentage,
            upperPercentage,
            lowerLimit,
            upperLimit,
            color,
            powerZoneSchemataId
          ],
          ignoreBatch);
      if (result! > 0) {
        saveResult = BoolResult(
            success: true,
            successMessage: 'DbPowerZone id=$id updated successfully');
      } else {
        saveResult = BoolResult(
            success: false, errorMessage: 'DbPowerZone id=$id did not update');
      }
      return id;
    } catch (e) {
      // Failure is reported through saveResult rather than rethrown.
      saveResult = BoolResult(
          success: false,
          errorMessage: 'DbPowerZone Save failed. Error: ${e.toString()}');
      return null;
    }
  }
|
|
|
|
  /// inserts or replaces the sent List<<DbPowerZone>> as a bulk in one transaction.
  /// upsertAll() method is faster then saveAll() method. upsertAll() should be used when you are sure that the primary key is greater than zero
  /// Returns a BoolCommitResult
  @override
  Future<BoolCommitResult> upsertAll(List<DbPowerZone> dbpowerzones,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    final results = await _mnDbPowerZone.rawInsertAll(
        'INSERT OR REPLACE INTO powerZone (id, name, lowerPercentage, upperPercentage, lowerLimit, upperLimit, color, powerZoneSchemataId) VALUES (?,?,?,?,?,?,?,?)',
        dbpowerzones,
        exclusive: exclusive,
        noResult: noResult,
        continueOnError: continueOnError);
    return results;
  }
|
|
|
|
  /// Deletes DbPowerZone

  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    debugPrint('SQFENTITIY: delete DbPowerZone invoked (id=$id)');
    // Soft delete is off for this table, so this always hard-deletes unless
    // the generated soft-delete branch is re-enabled in the table definition.
    if (!_softDeleteActivated || hardDelete) {
      return _mnDbPowerZone
          .delete(QueryParams(whereString: 'id=?', whereArguments: [id]));
    } else {
      return _mnDbPowerZone.updateBatch(
          QueryParams(whereString: 'id=?', whereArguments: [id]),
          {'isDeleted': 1});
    }
  }
|
|
|
|
@override
|
|
Future<BoolResult> recover([bool recoverChilds = true]) {
|
|
// not implemented because:
|
|
final msg =
|
|
'set useSoftDeleting:true in the table definition of [DbPowerZone] to use this feature';
|
|
throw UnimplementedError(msg);
|
|
}
|
|
|
|
  /// Starts a fluent query over the `powerZone` table; optionally restrict
  /// the projection with [columnsToSelect].
  @override
  DbPowerZoneFilterBuilder select(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    return DbPowerZoneFilterBuilder(this, getIsDeleted)
      ..qparams.selectColumns = columnsToSelect;
  }

  /// Same as [select] but adds SQL DISTINCT to the query.
  @override
  DbPowerZoneFilterBuilder distinct(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    return DbPowerZoneFilterBuilder(this, getIsDeleted)
      ..qparams.selectColumns = columnsToSelect
      ..qparams.distinct = true;
  }
|
|
|
|
void _setDefaultValues() {
|
|
powerZoneSchemataId = powerZoneSchemataId ?? 0;
|
|
}
|
|
|
|
@override
|
|
void rollbackPk() {
|
|
if (isInsert == true) {
|
|
id = null;
|
|
}
|
|
}
|
|
|
|
// END METHODS
|
|
// BEGIN CUSTOM CODE
|
|
/*
|
|
you can define customCode property of your SqfEntityTable constant. For example:
|
|
const tablePerson = SqfEntityTable(
|
|
tableName: 'person',
|
|
primaryKeyName: 'id',
|
|
primaryKeyType: PrimaryKeyType.integer_auto_incremental,
|
|
fields: [
|
|
SqfEntityField('firstName', DbType.text),
|
|
SqfEntityField('lastName', DbType.text),
|
|
],
|
|
customCode: '''
|
|
String fullName()
|
|
{
|
|
return '$firstName $lastName';
|
|
}
|
|
''');
|
|
*/
|
|
// END CUSTOM CODE
|
|
}
|
|
// endregion dbpowerzone
|
|
|
|
// region DbPowerZoneField
|
|
/// Typed filter field for [DbPowerZone] queries.
///
/// Each comparison delegates to [FilterBase] and narrows the fluent return
/// type back to [DbPowerZoneFilterBuilder] so chains stay strongly typed.
class DbPowerZoneField extends FilterBase {
  DbPowerZoneField(DbPowerZoneFilterBuilder dbpowerzoneFB)
      : super(dbpowerzoneFB);

  @override
  DbPowerZoneFilterBuilder equals(dynamic pValue) =>
      super.equals(pValue) as DbPowerZoneFilterBuilder;

  @override
  DbPowerZoneFilterBuilder equalsOrNull(dynamic pValue) =>
      super.equalsOrNull(pValue) as DbPowerZoneFilterBuilder;

  @override
  DbPowerZoneFilterBuilder isNull() =>
      super.isNull() as DbPowerZoneFilterBuilder;

  @override
  DbPowerZoneFilterBuilder contains(dynamic pValue) =>
      super.contains(pValue) as DbPowerZoneFilterBuilder;

  @override
  DbPowerZoneFilterBuilder startsWith(dynamic pValue) =>
      super.startsWith(pValue) as DbPowerZoneFilterBuilder;

  @override
  DbPowerZoneFilterBuilder endsWith(dynamic pValue) =>
      super.endsWith(pValue) as DbPowerZoneFilterBuilder;

  @override
  DbPowerZoneFilterBuilder between(dynamic pFirst, dynamic pLast) =>
      super.between(pFirst, pLast) as DbPowerZoneFilterBuilder;

  @override
  DbPowerZoneFilterBuilder greaterThan(dynamic pValue) =>
      super.greaterThan(pValue) as DbPowerZoneFilterBuilder;

  @override
  DbPowerZoneFilterBuilder lessThan(dynamic pValue) =>
      super.lessThan(pValue) as DbPowerZoneFilterBuilder;

  @override
  DbPowerZoneFilterBuilder greaterThanOrEquals(dynamic pValue) =>
      super.greaterThanOrEquals(pValue) as DbPowerZoneFilterBuilder;

  @override
  DbPowerZoneFilterBuilder lessThanOrEquals(dynamic pValue) =>
      super.lessThanOrEquals(pValue) as DbPowerZoneFilterBuilder;

  @override
  DbPowerZoneFilterBuilder inValues(dynamic pValue) =>
      super.inValues(pValue) as DbPowerZoneFilterBuilder;

  /// Negates the next comparison.
  @override
  DbPowerZoneField get not => super.not as DbPowerZoneField;
}
|
|
// endregion DbPowerZoneField
|
|
|
|
// region DbPowerZoneFilterBuilder
|
|
class DbPowerZoneFilterBuilder extends ConjunctionBase {
|
|
  /// Builds a fluent query over [obj]'s table, reusing the entity's
  /// lazily-created manager and soft-delete configuration.
  DbPowerZoneFilterBuilder(DbPowerZone obj, bool? getIsDeleted)
      : super(obj, getIsDeleted) {
    _mnDbPowerZone = obj._mnDbPowerZone;
    _softDeleteActivated = obj.softDeleteActivated;
  }

  // Copied from the source entity at construction time.
  bool _softDeleteActivated = false;
  DbPowerZoneManager? _mnDbPowerZone;
|
|
|
|
  /// put the sql keyword 'AND'
  @override
  DbPowerZoneFilterBuilder get and {
    super.and;
    return this;
  }

  /// put the sql keyword 'OR'
  @override
  DbPowerZoneFilterBuilder get or {
    super.or;
    return this;
  }

  /// open parentheses
  @override
  DbPowerZoneFilterBuilder get startBlock {
    super.startBlock;
    return this;
  }
|
|
|
|
  /// String whereCriteria, write raw query without 'where' keyword. Like this: 'field1 like 'test%' and field2 = 3'
  @override
  DbPowerZoneFilterBuilder where(String? whereCriteria,
      {dynamic parameterValue}) {
    super.where(whereCriteria, parameterValue: parameterValue);
    return this;
  }

  /// page = page number,
  /// pagesize = row(s) per page
  @override
  DbPowerZoneFilterBuilder page(int page, int pagesize) {
    super.page(page, pagesize);
    return this;
  }

  /// int count = LIMIT
  @override
  DbPowerZoneFilterBuilder top(int count) {
    super.top(count);
    return this;
  }

  /// close parentheses
  @override
  DbPowerZoneFilterBuilder get endBlock {
    super.endBlock;
    return this;
  }
|
|
|
|
  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbPowerZoneFilterBuilder orderBy(dynamic argFields) {
    super.orderBy(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbPowerZoneFilterBuilder orderByDesc(dynamic argFields) {
    super.orderByDesc(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbPowerZoneFilterBuilder groupBy(dynamic argFields) {
    super.groupBy(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbPowerZoneFilterBuilder having(dynamic argFields) {
    super.having(argFields);
    return this;
  }
|
|
|
|
  /// Creates a [DbPowerZoneField] bound to [colName] of type [dbtype],
  /// carrying the current parenthesis state of the fluent chain.
  DbPowerZoneField _setField(
      DbPowerZoneField? field, String colName, DbType dbtype) {
    return DbPowerZoneField(this)
      ..param = DbParameter(
          dbType: dbtype, columnName: colName, wStartBlock: openedBlock);
  }
|
|
|
|
DbPowerZoneField? _id;
|
|
DbPowerZoneField get id {
|
|
return _id = _setField(_id, 'id', DbType.integer);
|
|
}
|
|
|
|
DbPowerZoneField? _name;
|
|
DbPowerZoneField get name {
|
|
return _name = _setField(_name, 'name', DbType.text);
|
|
}
|
|
|
|
DbPowerZoneField? _lowerPercentage;
|
|
DbPowerZoneField get lowerPercentage {
|
|
return _lowerPercentage =
|
|
_setField(_lowerPercentage, 'lowerPercentage', DbType.integer);
|
|
}
|
|
|
|
DbPowerZoneField? _upperPercentage;
|
|
DbPowerZoneField get upperPercentage {
|
|
return _upperPercentage =
|
|
_setField(_upperPercentage, 'upperPercentage', DbType.integer);
|
|
}
|
|
|
|
DbPowerZoneField? _lowerLimit;
|
|
DbPowerZoneField get lowerLimit {
|
|
return _lowerLimit = _setField(_lowerLimit, 'lowerLimit', DbType.integer);
|
|
}
|
|
|
|
DbPowerZoneField? _upperLimit;
|
|
DbPowerZoneField get upperLimit {
|
|
return _upperLimit = _setField(_upperLimit, 'upperLimit', DbType.integer);
|
|
}
|
|
|
|
DbPowerZoneField? _color;
|
|
DbPowerZoneField get color {
|
|
return _color = _setField(_color, 'color', DbType.integer);
|
|
}
|
|
|
|
DbPowerZoneField? _powerZoneSchemataId;
|
|
DbPowerZoneField get powerZoneSchemataId {
|
|
return _powerZoneSchemataId =
|
|
_setField(_powerZoneSchemataId, 'powerZoneSchemataId', DbType.integer);
|
|
}
|
|
|
|
  /// Deletes List<DbPowerZone> bulk by query
  ///
  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    buildParameters();
    var r = BoolResult(success: false);

    // Soft delete is off for this table, so the batch delete path is taken
    // unless the table definition enables useSoftDeleting.
    if (_softDeleteActivated && !hardDelete) {
      r = await _mnDbPowerZone!.updateBatch(qparams, {'isDeleted': 1});
    } else {
      r = await _mnDbPowerZone!.delete(qparams);
    }
    return r;
  }
|
|
|
|
  /// using:
  /// update({'fieldName': Value})
  /// fieldName must be String. Value is dynamic, it can be any of the (int, bool, String.. )
  @override
  Future<BoolResult> update(Map<String, dynamic> values) {
    buildParameters();
    // SQLite UPDATE has no LIMIT/OFFSET, so paging filters are rewritten
    // into an `id IN (subquery)` clause.
    if (qparams.limit! > 0 || qparams.offset! > 0) {
      qparams.whereString =
          'id IN (SELECT id from powerZone ${qparams.whereString!.isNotEmpty ? 'WHERE ${qparams.whereString}' : ''}${qparams.limit! > 0 ? ' LIMIT ${qparams.limit}' : ''}${qparams.offset! > 0 ? ' OFFSET ${qparams.offset}' : ''})';
    }
    return _mnDbPowerZone!.updateBatch(qparams, values);
  }
|
|
|
|
  /// This method always returns [DbPowerZone] Obj if exist, otherwise returns null
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toSingle(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns> DbPowerZone?
  @override
  Future<DbPowerZone?> toSingle(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    // LIMIT 1: only the first matching row is fetched.
    buildParameters(pSize: 1);
    final objFuture = _mnDbPowerZone!.toList(qparams);
    final data = await objFuture;
    DbPowerZone? obj;
    if (data.isNotEmpty) {
      obj = DbPowerZone.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbPowerZoneSchema'))) {
          obj.plDbPowerZoneSchema = obj.plDbPowerZoneSchema ??
              await obj.getDbPowerZoneSchema(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }
|
|
|
|
/// This method always returns [DbPowerZone]
|
|
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
|
|
/// ex: toSingle(preload:true) -> Loads all related objects
|
|
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
|
|
/// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
|
|
/// bool loadParents: if true, loads all parent objects until the object has no parent
|
|
|
|
/// <returns> DbPowerZone?
|
|
@override
|
|
Future<DbPowerZone> toSingleOrDefault(
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields}) async {
|
|
return await toSingle(
|
|
preload: preload,
|
|
preloadFields: preloadFields,
|
|
loadParents: loadParents,
|
|
loadedFields: loadedFields) ??
|
|
DbPowerZone();
|
|
}
|
|
|
|
  /// This method returns int. [DbPowerZone]
  /// <returns>int
  @override
  Future<int> toCount([VoidCallback Function(int c)? dbpowerzoneCount]) async {
    buildParameters();
    // Replace the projection with a COUNT aggregate.
    qparams.selectColumns = ['COUNT(1) AS CNT'];
    final dbpowerzonesFuture = await _mnDbPowerZone!.toList(qparams);
    final int count = dbpowerzonesFuture[0]['CNT'] as int;
    if (dbpowerzoneCount != null) {
      dbpowerzoneCount(count);
    }
    return count;
  }
|
|
|
|
  /// This method returns List<DbPowerZone> [DbPowerZone]
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toList(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toList(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns>List<DbPowerZone>
  @override
  Future<List<DbPowerZone>> toList(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    final data = await toMapList();
    // Defaults are skipped when only selected columns were queried, so that
    // unselected fields stay null rather than taking table defaults.
    final List<DbPowerZone> dbpowerzonesData = await DbPowerZone.fromMapList(
        data,
        preload: preload,
        preloadFields: preloadFields,
        loadParents: loadParents,
        loadedFields: loadedFields,
        setDefaultValues: qparams.selectColumns == null);
    return dbpowerzonesData;
  }
|
|
|
|
/// This method returns Json String [DbPowerZone]
|
|
@override
|
|
Future<String> toJson() async {
|
|
final list = <dynamic>[];
|
|
final data = await toList();
|
|
for (var o in data) {
|
|
list.add(o.toMap(forJson: true));
|
|
}
|
|
return json.encode(list);
|
|
}
|
|
|
|
/// This method returns Json String. [DbPowerZone]
|
|
@override
|
|
Future<String> toJsonWithChilds() async {
|
|
final list = <dynamic>[];
|
|
final data = await toList();
|
|
for (var o in data) {
|
|
list.add(await o.toMapWithChildren(false, true));
|
|
}
|
|
return json.encode(list);
|
|
}
|
|
|
|
  /// This method returns List<dynamic>. [DbPowerZone]
  /// <returns>List<dynamic>
  @override
  Future<List<dynamic>> toMapList() async {
    // Materialize the fluent filter chain into qparams before querying.
    buildParameters();
    return await _mnDbPowerZone!.toList(qparams);
  }
|
|
|
|
  /// This method returns Primary Key List SQL and Parameters retVal = Map<String,dynamic>. [DbPowerZone]
  /// retVal['sql'] = SQL statement string, retVal['args'] = whereArguments List<dynamic>;
  /// <returns>List<String>
  @override
  Map<String, dynamic> toListPrimaryKeySQL([bool buildParams = true]) {
    final Map<String, dynamic> _retVal = <String, dynamic>{};
    if (buildParams) {
      buildParameters();
    }
    // The WHERE clause is parameterized; arguments are returned separately.
    _retVal['sql'] = 'SELECT `id` FROM powerZone WHERE ${qparams.whereString}';
    _retVal['args'] = qparams.whereArguments;
    return _retVal;
  }
|
|
|
|
/// This method returns Primary Key List<int>.
|
|
/// <returns>List<int>
|
|
@override
|
|
Future<List<int>> toListPrimaryKey([bool buildParams = true]) async {
|
|
if (buildParams) {
|
|
buildParameters();
|
|
}
|
|
final List<int> idData = <int>[];
|
|
qparams.selectColumns = ['id'];
|
|
final idFuture = await _mnDbPowerZone!.toList(qparams);
|
|
|
|
final int count = idFuture.length;
|
|
for (int i = 0; i < count; i++) {
|
|
idData.add(idFuture[i]['id'] as int);
|
|
}
|
|
return idData;
|
|
}
|
|
|
|
/// Returns List<dynamic> for selected columns. Use this method for 'groupBy' with min,max,avg.. [DbPowerZone]
|
|
/// Sample usage: (see EXAMPLE 4.2 at https://github.com/hhtokpinar/sqfEntity#group-by)
|
|
@override
|
|
Future<List<dynamic>> toListObject() async {
|
|
buildParameters();
|
|
|
|
final objectFuture = _mnDbPowerZone!.toList(qparams);
|
|
|
|
final List<dynamic> objectsData = <dynamic>[];
|
|
final data = await objectFuture;
|
|
final int count = data.length;
|
|
for (int i = 0; i < count; i++) {
|
|
objectsData.add(data[i]);
|
|
}
|
|
return objectsData;
|
|
}
|
|
|
|
/// Returns List<String> for selected first column
|
|
/// Sample usage: await DbPowerZone.select(columnsToSelect: ['columnName']).toListString()
|
|
@override
|
|
Future<List<String>> toListString(
|
|
[VoidCallback Function(List<String> o)? listString]) async {
|
|
buildParameters();
|
|
|
|
final objectFuture = _mnDbPowerZone!.toList(qparams);
|
|
|
|
final List<String> objectsData = <String>[];
|
|
final data = await objectFuture;
|
|
final int count = data.length;
|
|
for (int i = 0; i < count; i++) {
|
|
objectsData.add(data[i][qparams.selectColumns![0]].toString());
|
|
}
|
|
if (listString != null) {
|
|
listString(objectsData);
|
|
}
|
|
return objectsData;
|
|
}
|
|
}
|
|
// endregion DbPowerZoneFilterBuilder
|
|
|
|
// region DbPowerZoneFields
|
|
/// Static, lazily-cached [TableField] descriptors for the 'powerZone' table,
/// used to compose typed queries.
class DbPowerZoneFields {
  static TableField? _fId;
  static TableField get id =>
      _fId ??= SqlSyntax.setField(_fId, 'id', DbType.integer);

  static TableField? _fName;
  static TableField get name =>
      _fName ??= SqlSyntax.setField(_fName, 'name', DbType.text);

  static TableField? _fLowerPercentage;
  static TableField get lowerPercentage => _fLowerPercentage ??=
      SqlSyntax.setField(_fLowerPercentage, 'lowerPercentage', DbType.integer);

  static TableField? _fUpperPercentage;
  static TableField get upperPercentage => _fUpperPercentage ??=
      SqlSyntax.setField(_fUpperPercentage, 'upperPercentage', DbType.integer);

  static TableField? _fLowerLimit;
  static TableField get lowerLimit => _fLowerLimit ??=
      SqlSyntax.setField(_fLowerLimit, 'lowerLimit', DbType.integer);

  static TableField? _fUpperLimit;
  static TableField get upperLimit => _fUpperLimit ??=
      SqlSyntax.setField(_fUpperLimit, 'upperLimit', DbType.integer);

  static TableField? _fColor;
  static TableField get color =>
      _fColor ??= SqlSyntax.setField(_fColor, 'color', DbType.integer);

  static TableField? _fPowerZoneSchemataId;
  static TableField get powerZoneSchemataId =>
      _fPowerZoneSchemataId ??= SqlSyntax.setField(
          _fPowerZoneSchemataId, 'powerZoneSchemataId', DbType.integer);
}
|
|
// endregion DbPowerZoneFields
|
|
|
|
//region DbPowerZoneManager
|
|
/// Low-level CRUD provider for the 'powerZone' table, bound to the
/// [DbEncrateia] database connection.
class DbPowerZoneManager extends SqfEntityProvider {
  DbPowerZoneManager()
      : super(DbEncrateia(),
            tableName: _tableName,
            primaryKeyList: _primaryKeyList,
            whereStr: _whereStr);
  // Physical table name and primary-key metadata used by the base provider.
  static const String _tableName = 'powerZone';
  static const List<String> _primaryKeyList = ['id'];
  static const String _whereStr = 'id=?';
}
|
|
|
|
//endregion DbPowerZoneManager
|
|
// region DbTag
|
|
class DbTag extends TableBase {
|
|
  /// Default constructor. Applies field defaults via [_setDefaultValues]
  /// and disables soft delete for this instance.
  DbTag(
      {this.id,
      this.name,
      this.color,
      this.sortOrder,
      this.system,
      this.tagGroupsId}) {
    _setDefaultValues();
    softDeleteActivated = false;
  }

  /// Creates a [DbTag] from field values without a primary key (new row).
  DbTag.withFields(
      this.name, this.color, this.sortOrder, this.system, this.tagGroupsId) {
    _setDefaultValues();
  }

  /// Creates a [DbTag] with an explicit primary key [id] (existing row).
  DbTag.withId(this.id, this.name, this.color, this.sortOrder, this.system,
      this.tagGroupsId) {
    _setDefaultValues();
  }
|
|
  // fromMap v2.0
  /// Builds a [DbTag] from a row/JSON map [o].
  ///
  /// Values are parsed defensively via `toString()` + `tryParse`, so both
  /// numeric and string representations are accepted. `system` accepts the
  /// SQLite form '1' as well as the textual form 'true'.
  DbTag.fromMap(Map<String, dynamic> o, {bool setDefaultValues = true}) {
    if (setDefaultValues) {
      _setDefaultValues();
    }
    id = int.tryParse(o['id'].toString());
    if (o['name'] != null) {
      name = o['name'].toString();
    }
    if (o['color'] != null) {
      color = int.tryParse(o['color'].toString());
    }
    if (o['sortOrder'] != null) {
      sortOrder = int.tryParse(o['sortOrder'].toString());
    }
    if (o['system'] != null) {
      system =
          o['system'].toString() == '1' || o['system'].toString() == 'true';
    }
    tagGroupsId = int.tryParse(o['tagGroupsId'].toString());

    // RELATIONSHIPS FromMAP
    // Hydrates the parent tag group when the map embeds it (preload case).
    plDbTagGroup = o['dbTagGroup'] != null
        ? DbTagGroup.fromMap(o['dbTagGroup'] as Map<String, dynamic>)
        : null;
    // END RELATIONSHIPS FromMAP
  }
|
|
  // FIELDS (DbTag)
  int? id; // primary key (auto-increment)
  String? name;
  int? color; // int-encoded color value (presumably ARGB — confirm with UI code)
  int? sortOrder;
  bool? system; // whether this is a system-defined tag — TODO confirm semantics
  int? tagGroupsId; // references a DbTagGroup's id (see getDbTagGroup)

  // end FIELDS (DbTag)

  // RELATIONSHIPS (DbTag)
  /// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbTagGroup', 'plField2'..]) or so on..
  DbTagGroup? plDbTagGroup;

  /// get DbTagGroup By TagGroupsId
  /// Returns null when [tagGroupsId] does not resolve to a row.
  Future<DbTagGroup?> getDbTagGroup(
      {bool loadParents = false, List<String>? loadedFields}) async {
    final _obj = await DbTagGroup().getById(tagGroupsId,
        loadParents: loadParents, loadedFields: loadedFields);
    return _obj;
  }
  // END RELATIONSHIPS (DbTag)
|
|
|
|
// COLLECTIONS & VIRTUALS (DbTag)
|
|
/// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbLapTaggings', 'plField2'..]) or so on..
|
|
List<DbLapTagging>? plDbLapTaggings;
|
|
|
|
/// get DbLapTagging(s) filtered by id=tagsId
|
|
DbLapTaggingFilterBuilder? getDbLapTaggings(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
if (id == null) {
|
|
return null;
|
|
}
|
|
return DbLapTagging()
|
|
.select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
|
|
.tagsId
|
|
.equals(id)
|
|
.and;
|
|
}
|
|
|
|
/// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbActivityTaggings', 'plField2'..]) or so on..
|
|
List<DbActivityTagging>? plDbActivityTaggings;
|
|
|
|
/// get DbActivityTagging(s) filtered by id=tagsId
|
|
DbActivityTaggingFilterBuilder? getDbActivityTaggings(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
if (id == null) {
|
|
return null;
|
|
}
|
|
return DbActivityTagging()
|
|
.select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
|
|
.tagsId
|
|
.equals(id)
|
|
.and;
|
|
}
|
|
|
|
/// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
|
|
/// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbIntervalTaggings', 'plField2'..]) or so on..
|
|
List<DbIntervalTagging>? plDbIntervalTaggings;
|
|
|
|
/// get DbIntervalTagging(s) filtered by id=tagsId
|
|
DbIntervalTaggingFilterBuilder? getDbIntervalTaggings(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
if (id == null) {
|
|
return null;
|
|
}
|
|
return DbIntervalTagging()
|
|
.select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
|
|
.tagsId
|
|
.equals(id)
|
|
.and;
|
|
}
|
|
|
|
// END COLLECTIONS & VIRTUALS (DbTag)
|
|
|
|
  // Soft delete was generated as disabled for this table.
  static const bool _softDeleteActivated = false;
  // Backing field for the lazily-created table manager.
  DbTagManager? __mnDbTag;

  /// The table manager for low-level CRUD, created on first use.
  DbTagManager get _mnDbTag {
    return __mnDbTag = __mnDbTag ?? DbTagManager();
  }
|
|
|
|
// METHODS
|
|
@override
|
|
Map<String, dynamic> toMap(
|
|
{bool forQuery = false, bool forJson = false, bool forView = false}) {
|
|
final map = <String, dynamic>{};
|
|
map['id'] = id;
|
|
if (name != null || !forView) {
|
|
map['name'] = name;
|
|
}
|
|
if (color != null || !forView) {
|
|
map['color'] = color;
|
|
}
|
|
if (sortOrder != null || !forView) {
|
|
map['sortOrder'] = sortOrder;
|
|
}
|
|
if (system != null) {
|
|
map['system'] = forQuery ? (system! ? 1 : 0) : system;
|
|
} else if (system != null || !forView) {
|
|
map['system'] = null;
|
|
}
|
|
if (tagGroupsId != null) {
|
|
map['tagGroupsId'] = forView
|
|
? plDbTagGroup == null
|
|
? tagGroupsId
|
|
: plDbTagGroup!.name
|
|
: tagGroupsId;
|
|
} else if (tagGroupsId != null || !forView) {
|
|
map['tagGroupsId'] = null;
|
|
}
|
|
|
|
return map;
|
|
}
|
|
|
|
@override
|
|
Future<Map<String, dynamic>> toMapWithChildren(
|
|
[bool forQuery = false,
|
|
bool forJson = false,
|
|
bool forView = false]) async {
|
|
final map = <String, dynamic>{};
|
|
map['id'] = id;
|
|
if (name != null || !forView) {
|
|
map['name'] = name;
|
|
}
|
|
if (color != null || !forView) {
|
|
map['color'] = color;
|
|
}
|
|
if (sortOrder != null || !forView) {
|
|
map['sortOrder'] = sortOrder;
|
|
}
|
|
if (system != null) {
|
|
map['system'] = forQuery ? (system! ? 1 : 0) : system;
|
|
} else if (system != null || !forView) {
|
|
map['system'] = null;
|
|
}
|
|
if (tagGroupsId != null) {
|
|
map['tagGroupsId'] = forView
|
|
? plDbTagGroup == null
|
|
? tagGroupsId
|
|
: plDbTagGroup!.name
|
|
: tagGroupsId;
|
|
} else if (tagGroupsId != null || !forView) {
|
|
map['tagGroupsId'] = null;
|
|
}
|
|
|
|
// COLLECTIONS (DbTag)
|
|
if (!forQuery) {
|
|
map['DbLapTaggings'] = await getDbLapTaggings()!.toMapList();
|
|
}
|
|
if (!forQuery) {
|
|
map['DbActivityTaggings'] = await getDbActivityTaggings()!.toMapList();
|
|
}
|
|
if (!forQuery) {
|
|
map['DbIntervalTaggings'] = await getDbIntervalTaggings()!.toMapList();
|
|
}
|
|
// END COLLECTIONS (DbTag)
|
|
|
|
return map;
|
|
}
|
|
|
|
/// This method returns Json String [DbTag]
|
|
@override
|
|
String toJson() {
|
|
return json.encode(toMap(forJson: true));
|
|
}
|
|
|
|
/// This method returns Json String [DbTag]
|
|
@override
|
|
Future<String> toJsonWithChilds() async {
|
|
return json.encode(await toMapWithChildren(false, true));
|
|
}
|
|
|
|
@override
|
|
List<dynamic> toArgs() {
|
|
return [name, color, sortOrder, system, tagGroupsId];
|
|
}
|
|
|
|
@override
|
|
List<dynamic> toArgsWithIds() {
|
|
return [id, name, color, sortOrder, system, tagGroupsId];
|
|
}
|
|
|
|
  /// Fetches [uri] over HTTP and parses the response body into [DbTag]s.
  ///
  /// Returns null when the request or parsing throws (the error is logged).
  /// Note: `return await` is deliberate — a plain `return` would let a
  /// failing future escape this try/catch.
  static Future<List<DbTag>?> fromWebUrl(Uri uri,
      {Map<String, String>? headers}) async {
    try {
      final response = await http.get(uri, headers: headers);
      return await fromJson(response.body);
    } catch (e) {
      debugPrint(
          'SQFENTITY ERROR DbTag.fromWebUrl: ErrorMessage: ${e.toString()}');
      return null;
    }
  }
|
|
|
|
Future<http.Response> postUrl(Uri uri, {Map<String, String>? headers}) {
|
|
return http.post(uri, headers: headers, body: toJson());
|
|
}
|
|
|
|
static Future<List<DbTag>> fromJson(String jsonBody) async {
|
|
final Iterable list = await json.decode(jsonBody) as Iterable;
|
|
var objList = <DbTag>[];
|
|
try {
|
|
objList = list
|
|
.map((dbtag) => DbTag.fromMap(dbtag as Map<String, dynamic>))
|
|
.toList();
|
|
} catch (e) {
|
|
debugPrint(
|
|
'SQFENTITY ERROR DbTag.fromJson: ErrorMessage: ${e.toString()}');
|
|
}
|
|
return objList;
|
|
}
|
|
|
|
  /// Converts raw row maps in [data] into [DbTag] objects, optionally
  /// preloading the child tagging collections ([preload]) and/or the parent
  /// tag group ([loadParents]). [preloadFields] restricts which `pl*`
  /// collections are fetched when non-null.
  static Future<List<DbTag>> fromMapList(List<dynamic> data,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields,
      bool setDefaultValues = true}) async {
    final List<DbTag> objList = <DbTag>[];
    loadedFields = loadedFields ?? [];
    for (final map in data) {
      final obj = DbTag.fromMap(map as Map<String, dynamic>,
          setDefaultValues: setDefaultValues);
      // final List<String> _loadedFields = List<String>.from(loadedFields);

      // RELATIONSHIPS PRELOAD CHILD
      // Each child collection is fetched only when it is not already set and
      // is either unrestricted (preloadFields == null) or explicitly listed.
      if (preload) {
        loadedFields = loadedFields ?? [];
        if (/*!_loadedfields!.contains('tags.plDbLapTaggings') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbLapTaggings'))) {
          /*_loadedfields!.add('tags.plDbLapTaggings'); */ obj.plDbLapTaggings =
              obj.plDbLapTaggings ??
                  await obj.getDbLapTaggings()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('tags.plDbActivityTaggings') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbActivityTaggings'))) {
          /*_loadedfields!.add('tags.plDbActivityTaggings'); */ obj
                  .plDbActivityTaggings =
              obj.plDbActivityTaggings ??
                  await obj.getDbActivityTaggings()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('tags.plDbIntervalTaggings') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbIntervalTaggings'))) {
          /*_loadedfields!.add('tags.plDbIntervalTaggings'); */ obj
                  .plDbIntervalTaggings =
              obj.plDbIntervalTaggings ??
                  await obj.getDbIntervalTaggings()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

      // RELATIONSHIPS PRELOAD
      // Parent is loaded when preloading generally, when parents are forced
      // via loadParents, or when 'plDbTagGroup' is explicitly requested.
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbTagGroup'))) {
          obj.plDbTagGroup = obj.plDbTagGroup ??
              await obj.getDbTagGroup(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD

      objList.add(obj);
    }
    return objList;
  }
|
|
|
|
  /// returns DbTag by ID if exist, otherwise returns null
  /// Primary Keys: int? id
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: getById(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: getById(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns>returns [DbTag] if exist, otherwise returns null
  Future<DbTag?> getById(int? id,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    if (id == null) {
      return null;
    }
    DbTag? obj;
    final data = await _mnDbTag.getById([id]);
    if (data.length != 0) {
      obj = DbTag.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD CHILD
      // Same preload rules as [fromMapList]: fetch a child collection only
      // when unset and either unrestricted or explicitly requested.
      if (preload) {
        loadedFields = loadedFields ?? [];
        if (/*!_loadedfields!.contains('tags.plDbLapTaggings') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbLapTaggings'))) {
          /*_loadedfields!.add('tags.plDbLapTaggings'); */ obj.plDbLapTaggings =
              obj.plDbLapTaggings ??
                  await obj.getDbLapTaggings()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('tags.plDbActivityTaggings') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbActivityTaggings'))) {
          /*_loadedfields!.add('tags.plDbActivityTaggings'); */ obj
                  .plDbActivityTaggings =
              obj.plDbActivityTaggings ??
                  await obj.getDbActivityTaggings()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('tags.plDbIntervalTaggings') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbIntervalTaggings'))) {
          /*_loadedfields!.add('tags.plDbIntervalTaggings'); */ obj
                  .plDbIntervalTaggings =
              obj.plDbIntervalTaggings ??
                  await obj.getDbIntervalTaggings()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

      // RELATIONSHIPS PRELOAD
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbTagGroup'))) {
          obj.plDbTagGroup = obj.plDbTagGroup ??
              await obj.getDbTagGroup(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }
|
|
|
|
/// Saves the (DbTag) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
|
|
/// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
|
|
/// <returns>Returns id
|
|
@override
|
|
Future<int?> save({bool ignoreBatch = true}) async {
|
|
if (id == null || id == 0) {
|
|
id = await _mnDbTag.insert(this, ignoreBatch);
|
|
} else {
|
|
await _mnDbTag.update(this);
|
|
}
|
|
|
|
return id;
|
|
}
|
|
|
|
/// Saves the (DbTag) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
|
|
/// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
|
|
/// <returns>Returns id
|
|
@override
|
|
Future<int?> saveOrThrow({bool ignoreBatch = true}) async {
|
|
if (id == null || id == 0) {
|
|
id = await _mnDbTag.insertOrThrow(this, ignoreBatch);
|
|
|
|
isInsert = true;
|
|
} else {
|
|
// id= await _upsert(); // removed in sqfentity_gen 1.3.0+6
|
|
await _mnDbTag.updateOrThrow(this);
|
|
}
|
|
|
|
return id;
|
|
}
|
|
|
|
  /// saveAs DbTag. Returns a new Primary Key value of DbTag

  /// <returns>Returns a new Primary Key value of DbTag
  @override
  Future<int?> saveAs({bool ignoreBatch = true}) async {
    // Clearing the key forces [save] to take the insert path, creating a copy.
    id = null;

    return save(ignoreBatch: ignoreBatch);
  }
|
|
|
|
  /// saveAll method saves the sent List<DbTag> as a bulk in one transaction
  /// Returns a <List<BoolResult>>
  static Future<List<dynamic>> saveAll(List<DbTag> dbtags,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    List<dynamic>? result = [];
    // If there is no open transaction, start one
    final isStartedBatch = await DbEncrateia().batchStart();
    for (final obj in dbtags) {
      await obj.save(ignoreBatch: false);
    }
    // Only commit if the batch was started here; otherwise the outer
    // transaction owner is responsible for committing.
    if (!isStartedBatch) {
      result = await DbEncrateia().batchCommit(
          exclusive: exclusive,
          noResult: noResult,
          continueOnError: continueOnError);
      // Back-fill the ids the batch assigned to freshly inserted rows
      // (commit results are positional, matching [dbtags]).
      for (int i = 0; i < dbtags.length; i++) {
        if (dbtags[i].id == null) {
          dbtags[i].id = result![i] as int;
        }
      }
    }
    return result!;
  }
|
|
|
|
  /// Updates if the record exists, otherwise adds a new row
  /// Uses SQLite's INSERT OR REPLACE; the outcome is reported via [saveResult].
  /// <returns>Returns id
  @override
  Future<int?> upsert({bool ignoreBatch = true}) async {
    try {
      final result = await _mnDbTag.rawInsert(
          'INSERT OR REPLACE INTO tags (id, name, color, sortOrder, system, tagGroupsId) VALUES (?,?,?,?,?,?)',
          [id, name, color, sortOrder, system, tagGroupsId],
          ignoreBatch);
      if (result! > 0) {
        saveResult = BoolResult(
            success: true, successMessage: 'DbTag id=$id updated successfully');
      } else {
        saveResult = BoolResult(
            success: false, errorMessage: 'DbTag id=$id did not update');
      }
      return id;
    } catch (e) {
      // Failure is reported through saveResult rather than thrown.
      saveResult = BoolResult(
          success: false,
          errorMessage: 'DbTag Save failed. Error: ${e.toString()}');
      return null;
    }
  }
|
|
|
|
  /// inserts or replaces the sent List<<DbTag>> as a bulk in one transaction.
  /// upsertAll() method is faster then saveAll() method. upsertAll() should be used when you are sure that the primary key is greater than zero
  /// Returns a BoolCommitResult
  @override
  Future<BoolCommitResult> upsertAll(List<DbTag> dbtags,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    final results = await _mnDbTag.rawInsertAll(
        'INSERT OR REPLACE INTO tags (id, name, color, sortOrder, system, tagGroupsId) VALUES (?,?,?,?,?,?)',
        dbtags,
        exclusive: exclusive,
        noResult: noResult,
        continueOnError: continueOnError);
    return results;
  }
|
|
|
|
  /// Deletes DbTag
  /// Child taggings (lap, activity, interval) referencing this tag are
  /// deleted first; the tag row itself is removed only if all child
  /// deletions succeed.
  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    debugPrint('SQFENTITIY: delete DbTag invoked (id=$id)');
    var result = BoolResult(success: false);
    {
      result = await DbLapTagging()
          .select()
          .tagsId
          .equals(id)
          .and
          .delete(hardDelete);
    }
    if (!result.success) {
      return result;
    }
    {
      result = await DbActivityTagging()
          .select()
          .tagsId
          .equals(id)
          .and
          .delete(hardDelete);
    }
    if (!result.success) {
      return result;
    }
    {
      result = await DbIntervalTagging()
          .select()
          .tagsId
          .equals(id)
          .and
          .delete(hardDelete);
    }
    if (!result.success) {
      return result;
    }
    // Soft delete is disabled for this table (_softDeleteActivated == false),
    // so this normally performs a hard delete.
    if (!_softDeleteActivated || hardDelete) {
      return _mnDbTag
          .delete(QueryParams(whereString: 'id=?', whereArguments: [id]));
    } else {
      return _mnDbTag.updateBatch(
          QueryParams(whereString: 'id=?', whereArguments: [id]),
          {'isDeleted': 1});
    }
  }
|
|
|
|
@override
|
|
Future<BoolResult> recover([bool recoverChilds = true]) {
|
|
// not implemented because:
|
|
final msg =
|
|
'set useSoftDeleting:true in the table definition of [DbTag] to use this feature';
|
|
throw UnimplementedError(msg);
|
|
}
|
|
|
|
@override
|
|
DbTagFilterBuilder select(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
return DbTagFilterBuilder(this, getIsDeleted)
|
|
..qparams.selectColumns = columnsToSelect;
|
|
}
|
|
|
|
@override
|
|
DbTagFilterBuilder distinct(
|
|
{List<String>? columnsToSelect, bool? getIsDeleted}) {
|
|
return DbTagFilterBuilder(this, getIsDeleted)
|
|
..qparams.selectColumns = columnsToSelect
|
|
..qparams.distinct = true;
|
|
}
|
|
|
|
void _setDefaultValues() {
|
|
tagGroupsId = tagGroupsId ?? 0;
|
|
}
|
|
|
|
@override
|
|
void rollbackPk() {
|
|
if (isInsert == true) {
|
|
id = null;
|
|
}
|
|
}
|
|
|
|
// END METHODS
|
|
// BEGIN CUSTOM CODE
|
|
/*
|
|
you can define customCode property of your SqfEntityTable constant. For example:
|
|
const tablePerson = SqfEntityTable(
|
|
tableName: 'person',
|
|
primaryKeyName: 'id',
|
|
primaryKeyType: PrimaryKeyType.integer_auto_incremental,
|
|
fields: [
|
|
SqfEntityField('firstName', DbType.text),
|
|
SqfEntityField('lastName', DbType.text),
|
|
],
|
|
customCode: '''
|
|
String fullName()
|
|
{
|
|
return '$firstName $lastName';
|
|
}
|
|
''');
|
|
*/
|
|
// END CUSTOM CODE
|
|
}
|
|
// endregion dbtag
|
|
|
|
// region DbTagField
|
|
/// Typed filter column for [DbTag] queries. Every operator simply delegates
/// to [FilterBase] and narrows the return type to [DbTagFilterBuilder] so
/// the fluent chain stays tag-specific.
class DbTagField extends FilterBase {
  DbTagField(DbTagFilterBuilder dbtagFB) : super(dbtagFB);

  @override
  DbTagFilterBuilder equals(dynamic pValue) =>
      super.equals(pValue) as DbTagFilterBuilder;

  @override
  DbTagFilterBuilder equalsOrNull(dynamic pValue) =>
      super.equalsOrNull(pValue) as DbTagFilterBuilder;

  @override
  DbTagFilterBuilder isNull() => super.isNull() as DbTagFilterBuilder;

  @override
  DbTagFilterBuilder contains(dynamic pValue) =>
      super.contains(pValue) as DbTagFilterBuilder;

  @override
  DbTagFilterBuilder startsWith(dynamic pValue) =>
      super.startsWith(pValue) as DbTagFilterBuilder;

  @override
  DbTagFilterBuilder endsWith(dynamic pValue) =>
      super.endsWith(pValue) as DbTagFilterBuilder;

  @override
  DbTagFilterBuilder between(dynamic pFirst, dynamic pLast) =>
      super.between(pFirst, pLast) as DbTagFilterBuilder;

  @override
  DbTagFilterBuilder greaterThan(dynamic pValue) =>
      super.greaterThan(pValue) as DbTagFilterBuilder;

  @override
  DbTagFilterBuilder lessThan(dynamic pValue) =>
      super.lessThan(pValue) as DbTagFilterBuilder;

  @override
  DbTagFilterBuilder greaterThanOrEquals(dynamic pValue) =>
      super.greaterThanOrEquals(pValue) as DbTagFilterBuilder;

  @override
  DbTagFilterBuilder lessThanOrEquals(dynamic pValue) =>
      super.lessThanOrEquals(pValue) as DbTagFilterBuilder;

  @override
  DbTagFilterBuilder inValues(dynamic pValue) =>
      super.inValues(pValue) as DbTagFilterBuilder;

  /// Negates the next operator in the chain.
  @override
  DbTagField get not => super.not as DbTagField;
}
|
|
// endregion DbTagField
|
|
|
|
// region DbTagFilterBuilder
|
|
class DbTagFilterBuilder extends ConjunctionBase {
|
|
  /// Creates a filter builder bound to [obj]'s table manager and
  /// soft-delete setting.
  DbTagFilterBuilder(DbTag obj, bool? getIsDeleted) : super(obj, getIsDeleted) {
    _mnDbTag = obj._mnDbTag;
    _softDeleteActivated = obj.softDeleteActivated;
  }

  // Copied from the source object at construction time.
  bool _softDeleteActivated = false;
  DbTagManager? _mnDbTag;
|
|
|
|
  /// put the sql keyword 'AND'; mutates the builder and returns it for chaining
  @override
  DbTagFilterBuilder get and {
    super.and;
    return this;
  }

  /// put the sql keyword 'OR'; mutates the builder and returns it for chaining
  @override
  DbTagFilterBuilder get or {
    super.or;
    return this;
  }

  /// open parentheses
  @override
  DbTagFilterBuilder get startBlock {
    super.startBlock;
    return this;
  }

  /// String whereCriteria, write raw query without 'where' keyword. Like this: 'field1 like 'test%' and field2 = 3'
  @override
  DbTagFilterBuilder where(String? whereCriteria, {dynamic parameterValue}) {
    super.where(whereCriteria, parameterValue: parameterValue);
    return this;
  }

  /// page = page number (1-based),
  /// pagesize = row(s) per page
  @override
  DbTagFilterBuilder page(int page, int pagesize) {
    super.page(page, pagesize);
    return this;
  }

  /// int count = LIMIT on the number of returned rows
  @override
  DbTagFilterBuilder top(int count) {
    super.top(count);
    return this;
  }

  /// close parentheses
  @override
  DbTagFilterBuilder get endBlock {
    super.endBlock;
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbTagFilterBuilder orderBy(dynamic argFields) {
    super.orderBy(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbTagFilterBuilder orderByDesc(dynamic argFields) {
    super.orderByDesc(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbTagFilterBuilder groupBy(dynamic argFields) {
    super.groupBy(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbTagFilterBuilder having(dynamic argFields) {
    super.having(argFields);
    return this;
  }
|
|
|
|
DbTagField _setField(DbTagField? field, String colName, DbType dbtype) {
|
|
return DbTagField(this)
|
|
..param = DbParameter(
|
|
dbType: dbtype, columnName: colName, wStartBlock: openedBlock);
|
|
}
|
|
|
|
  // Filter entry points, one per column of the tags table. Each getter
  // builds a fresh DbTagField bound to this builder.
  DbTagField? _id;
  DbTagField get id {
    return _id = _setField(_id, 'id', DbType.integer);
  }

  DbTagField? _name;
  DbTagField get name {
    return _name = _setField(_name, 'name', DbType.text);
  }

  DbTagField? _color;
  DbTagField get color {
    return _color = _setField(_color, 'color', DbType.integer);
  }

  DbTagField? _sortOrder;
  DbTagField get sortOrder {
    return _sortOrder = _setField(_sortOrder, 'sortOrder', DbType.integer);
  }

  DbTagField? _system;
  DbTagField get system {
    return _system = _setField(_system, 'system', DbType.bool);
  }

  DbTagField? _tagGroupsId;
  DbTagField get tagGroupsId {
    return _tagGroupsId =
        _setField(_tagGroupsId, 'tagGroupsId', DbType.integer);
  }
|
|
|
|
  /// Deletes List<DbTag> bulk by query
  /// Child taggings whose tagsId matches any selected tag are deleted first
  /// (DeleteRule.CASCADE); the tags themselves are removed only if every
  /// child deletion succeeds.
  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    buildParameters();
    var r = BoolResult(success: false);
    // Delete sub records where in (DbLapTagging) according to DeleteRule.CASCADE
    final idListDbLapTaggingBYtagsId = toListPrimaryKeySQL(false);
    final resDbLapTaggingBYtagsId = await DbLapTagging()
        .select()
        .where('tagsId IN (${idListDbLapTaggingBYtagsId['sql']})',
            parameterValue: idListDbLapTaggingBYtagsId['args'])
        .delete(hardDelete);
    if (!resDbLapTaggingBYtagsId.success) {
      return resDbLapTaggingBYtagsId;
    }
    // Delete sub records where in (DbActivityTagging) according to DeleteRule.CASCADE
    final idListDbActivityTaggingBYtagsId = toListPrimaryKeySQL(false);
    final resDbActivityTaggingBYtagsId = await DbActivityTagging()
        .select()
        .where('tagsId IN (${idListDbActivityTaggingBYtagsId['sql']})',
            parameterValue: idListDbActivityTaggingBYtagsId['args'])
        .delete(hardDelete);
    if (!resDbActivityTaggingBYtagsId.success) {
      return resDbActivityTaggingBYtagsId;
    }
    // Delete sub records where in (DbIntervalTagging) according to DeleteRule.CASCADE
    final idListDbIntervalTaggingBYtagsId = toListPrimaryKeySQL(false);
    final resDbIntervalTaggingBYtagsId = await DbIntervalTagging()
        .select()
        .where('tagsId IN (${idListDbIntervalTaggingBYtagsId['sql']})',
            parameterValue: idListDbIntervalTaggingBYtagsId['args'])
        .delete(hardDelete);
    if (!resDbIntervalTaggingBYtagsId.success) {
      return resDbIntervalTaggingBYtagsId;
    }

    // Soft delete is disabled for this table, so this is normally a hard delete.
    if (_softDeleteActivated && !hardDelete) {
      r = await _mnDbTag!.updateBatch(qparams, {'isDeleted': 1});
    } else {
      r = await _mnDbTag!.delete(qparams);
    }
    return r;
  }
|
|
|
|
  /// using:
  /// update({'fieldName': Value})
  /// fieldName must be String. Value is dynamic, it can be any of the (int, bool, String.. )
  @override
  Future<BoolResult> update(Map<String, dynamic> values) {
    buildParameters();
    // SQL UPDATE has no LIMIT/OFFSET; when either is set, rewrite the WHERE
    // clause as a primary-key subquery that carries them instead.
    if (qparams.limit! > 0 || qparams.offset! > 0) {
      qparams.whereString =
          'id IN (SELECT id from tags ${qparams.whereString!.isNotEmpty ? 'WHERE ${qparams.whereString}' : ''}${qparams.limit! > 0 ? ' LIMIT ${qparams.limit}' : ''}${qparams.offset! > 0 ? ' OFFSET ${qparams.offset}' : ''})';
    }
    return _mnDbTag!.updateBatch(qparams, values);
  }
|
|
|
|
  /// This method always returns [DbTag] Obj if exist, otherwise returns null
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toSingle(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns> DbTag?
  @override
  Future<DbTag?> toSingle(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    // pSize: 1 limits the query to the first matching row.
    buildParameters(pSize: 1);
    final objFuture = _mnDbTag!.toList(qparams);
    final data = await objFuture;
    DbTag? obj;
    if (data.isNotEmpty) {
      obj = DbTag.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD CHILD
      // Each child collection is fetched only when preloading is requested
      // and either no field filter was given or the field is listed in it.
      if (preload) {
        loadedFields = loadedFields ?? [];
        if (/*!_loadedfields!.contains('tags.plDbLapTaggings') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbLapTaggings'))) {
          /*_loadedfields!.add('tags.plDbLapTaggings'); */ obj.plDbLapTaggings =
              obj.plDbLapTaggings ??
                  await obj.getDbLapTaggings()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('tags.plDbActivityTaggings') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbActivityTaggings'))) {
          /*_loadedfields!.add('tags.plDbActivityTaggings'); */ obj
                  .plDbActivityTaggings =
              obj.plDbActivityTaggings ??
                  await obj.getDbActivityTaggings()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
        if (/*!_loadedfields!.contains('tags.plDbIntervalTaggings') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbIntervalTaggings'))) {
          /*_loadedfields!.add('tags.plDbIntervalTaggings'); */ obj
                  .plDbIntervalTaggings =
              obj.plDbIntervalTaggings ??
                  await obj.getDbIntervalTaggings()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

      // RELATIONSHIPS PRELOAD
      // Parent objects are loaded when either preload or loadParents is set.
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbTagGroup'))) {
          obj.plDbTagGroup = obj.plDbTagGroup ??
              await obj.getDbTagGroup(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }
|
|
|
|
/// This method always returns [DbTag]
|
|
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
|
|
/// ex: toSingle(preload:true) -> Loads all related objects
|
|
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
|
|
/// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
|
|
/// bool loadParents: if true, loads all parent objects until the object has no parent
|
|
|
|
/// <returns> DbTag?
|
|
@override
|
|
Future<DbTag> toSingleOrDefault(
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields}) async {
|
|
return await toSingle(
|
|
preload: preload,
|
|
preloadFields: preloadFields,
|
|
loadParents: loadParents,
|
|
loadedFields: loadedFields) ??
|
|
DbTag();
|
|
}
|
|
|
|
  /// This method returns int. [DbTag]
  /// <returns>int
  @override
  Future<int> toCount([VoidCallback Function(int c)? dbtagCount]) async {
    buildParameters();
    // Replace any selected columns with a single COUNT(1) aggregate.
    qparams.selectColumns = ['COUNT(1) AS CNT'];
    final dbtagsFuture = await _mnDbTag!.toList(qparams);
    final int count = dbtagsFuture[0]['CNT'] as int;
    // Optional callback receives the count as well.
    if (dbtagCount != null) {
      dbtagCount(count);
    }
    return count;
  }
|
|
|
|
  /// This method returns List<DbTag> [DbTag]
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toList(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toList(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns>List<DbTag>
  @override
  Future<List<DbTag>> toList(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    final data = await toMapList();
    // Default values are applied only when every column was selected
    // (selectColumns == null); partial selects would otherwise overwrite
    // missing columns with defaults.
    final List<DbTag> dbtagsData = await DbTag.fromMapList(data,
        preload: preload,
        preloadFields: preloadFields,
        loadParents: loadParents,
        loadedFields: loadedFields,
        setDefaultValues: qparams.selectColumns == null);
    return dbtagsData;
  }
|
|
|
|
/// This method returns Json String [DbTag]
|
|
@override
|
|
Future<String> toJson() async {
|
|
final list = <dynamic>[];
|
|
final data = await toList();
|
|
for (var o in data) {
|
|
list.add(o.toMap(forJson: true));
|
|
}
|
|
return json.encode(list);
|
|
}
|
|
|
|
  /// This method returns Json String. [DbTag]
  @override
  Future<String> toJsonWithChilds() async {
    final list = <dynamic>[];
    final data = await toList();
    // Children are fetched sequentially per row (toMapWithChildren awaits
    // each row's related collections).
    for (var o in data) {
      list.add(await o.toMapWithChildren(false, true));
    }
    return json.encode(list);
  }
|
|
|
|
/// This method returns List<dynamic>. [DbTag]
|
|
/// <returns>List<dynamic>
|
|
@override
|
|
Future<List<dynamic>> toMapList() async {
|
|
buildParameters();
|
|
return await _mnDbTag!.toList(qparams);
|
|
}
|
|
|
|
  /// This method returns Primary Key List SQL and Parameters retVal = Map<String,dynamic>. [DbTag]
  /// retVal['sql'] = SQL statement string, retVal['args'] = whereArguments List<dynamic>;
  /// <returns>List<String>
  @override
  Map<String, dynamic> toListPrimaryKeySQL([bool buildParams = true]) {
    final Map<String, dynamic> _retVal = <String, dynamic>{};
    // buildParams=false lets callers reuse parameters they already built
    // (used by cascade-delete helpers).
    if (buildParams) {
      buildParameters();
    }
    _retVal['sql'] = 'SELECT `id` FROM tags WHERE ${qparams.whereString}';
    _retVal['args'] = qparams.whereArguments;
    return _retVal;
  }
|
|
|
|
/// This method returns Primary Key List<int>.
|
|
/// <returns>List<int>
|
|
@override
|
|
Future<List<int>> toListPrimaryKey([bool buildParams = true]) async {
|
|
if (buildParams) {
|
|
buildParameters();
|
|
}
|
|
final List<int> idData = <int>[];
|
|
qparams.selectColumns = ['id'];
|
|
final idFuture = await _mnDbTag!.toList(qparams);
|
|
|
|
final int count = idFuture.length;
|
|
for (int i = 0; i < count; i++) {
|
|
idData.add(idFuture[i]['id'] as int);
|
|
}
|
|
return idData;
|
|
}
|
|
|
|
/// Returns List<dynamic> for selected columns. Use this method for 'groupBy' with min,max,avg.. [DbTag]
|
|
/// Sample usage: (see EXAMPLE 4.2 at https://github.com/hhtokpinar/sqfEntity#group-by)
|
|
@override
|
|
Future<List<dynamic>> toListObject() async {
|
|
buildParameters();
|
|
|
|
final objectFuture = _mnDbTag!.toList(qparams);
|
|
|
|
final List<dynamic> objectsData = <dynamic>[];
|
|
final data = await objectFuture;
|
|
final int count = data.length;
|
|
for (int i = 0; i < count; i++) {
|
|
objectsData.add(data[i]);
|
|
}
|
|
return objectsData;
|
|
}
|
|
|
|
  /// Returns List<String> for selected first column
  /// Sample usage: await DbTag.select(columnsToSelect: ['columnName']).toListString()
  @override
  Future<List<String>> toListString(
      [VoidCallback Function(List<String> o)? listString]) async {
    buildParameters();

    final objectFuture = _mnDbTag!.toList(qparams);

    final List<String> objectsData = <String>[];
    final data = await objectFuture;
    final int count = data.length;
    // NOTE(review): requires select(columnsToSelect: [...]) to have been
    // used — `selectColumns![0]` throws if no column was selected.
    for (int i = 0; i < count; i++) {
      objectsData.add(data[i][qparams.selectColumns![0]].toString());
    }
    // Optional callback receives the collected strings as well.
    if (listString != null) {
      listString(objectsData);
    }
    return objectsData;
  }
|
|
}
|
|
// endregion DbTagFilterBuilder
|
|
|
|
// region DbTagFields
|
|
/// Static, lazily created [TableField] accessors for the columns of the
/// `tags` table; used to build type-safe filter expressions.
class DbTagFields {
  static TableField? _fId;

  /// Field descriptor for the `id` column.
  static TableField get id =>
      _fId ??= SqlSyntax.setField(_fId, 'id', DbType.integer);

  static TableField? _fName;

  /// Field descriptor for the `name` column.
  static TableField get name =>
      _fName ??= SqlSyntax.setField(_fName, 'name', DbType.text);

  static TableField? _fColor;

  /// Field descriptor for the `color` column.
  static TableField get color =>
      _fColor ??= SqlSyntax.setField(_fColor, 'color', DbType.integer);

  static TableField? _fSortOrder;

  /// Field descriptor for the `sortOrder` column.
  static TableField get sortOrder =>
      _fSortOrder ??= SqlSyntax.setField(_fSortOrder, 'sortOrder', DbType.integer);

  static TableField? _fSystem;

  /// Field descriptor for the `system` column.
  static TableField get system =>
      _fSystem ??= SqlSyntax.setField(_fSystem, 'system', DbType.bool);

  static TableField? _fTagGroupsId;

  /// Field descriptor for the `tagGroupsId` foreign-key column.
  static TableField get tagGroupsId =>
      _fTagGroupsId ??= SqlSyntax.setField(_fTagGroupsId, 'tagGroupsId', DbType.integer);
}
|
|
// endregion DbTagFields
|
|
|
|
//region DbTagManager
|
|
/// Data-access manager for the `tags` table; wires table metadata into the
/// generic [SqfEntityProvider] CRUD implementation.
class DbTagManager extends SqfEntityProvider {
  static const String _tableName = 'tags';
  static const List<String> _primaryKeyList = ['id'];
  static const String _whereStr = 'id=?';

  DbTagManager()
      : super(DbEncrateia(),
            tableName: _tableName,
            primaryKeyList: _primaryKeyList,
            whereStr: _whereStr);
}
|
|
|
|
//endregion DbTagManager
|
|
// region DbTagGroup
|
|
/// Entity class for rows of the `tagGroups` table.
///
/// Generated by SqfEntity. A tag group belongs to one [DbAthlete]
/// (`athletesId`) and owns a collection of [DbTag] children
/// (`tags.tagGroupsId`). Soft deleting is disabled for this table.
class DbTagGroup extends TableBase {
  DbTagGroup({this.id, this.name, this.color, this.system, this.athletesId}) {
    _setDefaultValues();
    softDeleteActivated = false;
  }
  DbTagGroup.withFields(this.name, this.color, this.system, this.athletesId) {
    _setDefaultValues();
  }
  DbTagGroup.withId(
      this.id, this.name, this.color, this.system, this.athletesId) {
    _setDefaultValues();
  }
  // fromMap v2.0
  // Builds an instance from a raw row map; values are parsed leniently via
  // toString()/tryParse, and `system` accepts either '1' or 'true'.
  DbTagGroup.fromMap(Map<String, dynamic> o, {bool setDefaultValues = true}) {
    if (setDefaultValues) {
      _setDefaultValues();
    }
    id = int.tryParse(o['id'].toString());
    if (o['name'] != null) {
      name = o['name'].toString();
    }
    if (o['color'] != null) {
      color = int.tryParse(o['color'].toString());
    }
    if (o['system'] != null) {
      system =
          o['system'].toString() == '1' || o['system'].toString() == 'true';
    }
    athletesId = int.tryParse(o['athletesId'].toString());

    // RELATIONSHIPS FromMAP
    plDbAthlete = o['dbAthlete'] != null
        ? DbAthlete.fromMap(o['dbAthlete'] as Map<String, dynamic>)
        : null;
    // END RELATIONSHIPS FromMAP
  }
  // FIELDS (DbTagGroup)
  int? id;
  String? name;
  int? color;
  bool? system;
  int? athletesId;

  // end FIELDS (DbTagGroup)

  // RELATIONSHIPS (DbTagGroup)
  /// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbAthlete', 'plField2'..]) or so on..
  DbAthlete? plDbAthlete;

  /// get DbAthlete By AthletesId
  Future<DbAthlete?> getDbAthlete(
      {bool loadParents = false, List<String>? loadedFields}) async {
    final _obj = await DbAthlete().getById(athletesId,
        loadParents: loadParents, loadedFields: loadedFields);
    return _obj;
  }
  // END RELATIONSHIPS (DbTagGroup)

  // COLLECTIONS & VIRTUALS (DbTagGroup)
  /// to load children of items to this field, use preload parameter. Ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbTags', 'plField2'..]) or so on..
  List<DbTag>? plDbTags;

  /// get DbTag(s) filtered by id=tagGroupsId
  /// Returns null when this group has no id yet (unsaved instance).
  DbTagFilterBuilder? getDbTags(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    if (id == null) {
      return null;
    }
    return DbTag()
        .select(columnsToSelect: columnsToSelect, getIsDeleted: getIsDeleted)
        .tagGroupsId
        .equals(id)
        .and;
  }

  // END COLLECTIONS & VIRTUALS (DbTagGroup)

  static const bool _softDeleteActivated = false;
  // Lazily created manager instance (see _mnDbTagGroup getter).
  DbTagGroupManager? __mnDbTagGroup;

  DbTagGroupManager get _mnDbTagGroup {
    return __mnDbTagGroup = __mnDbTagGroup ?? DbTagGroupManager();
  }

  // METHODS
  /// Converts this object into a plain map.
  /// forQuery: booleans become 0/1; forView: foreign keys may be replaced
  /// by a display value from the preloaded parent.
  @override
  Map<String, dynamic> toMap(
      {bool forQuery = false, bool forJson = false, bool forView = false}) {
    final map = <String, dynamic>{};
    map['id'] = id;
    if (name != null || !forView) {
      map['name'] = name;
    }
    if (color != null || !forView) {
      map['color'] = color;
    }
    if (system != null) {
      map['system'] = forQuery ? (system! ? 1 : 0) : system;
    } else if (system != null || !forView) {
      // NOTE(review): generator quirk — `system` is known null here, so this
      // branch simplifies to `!forView`.
      map['system'] = null;
    }
    if (athletesId != null) {
      map['athletesId'] = forView
          ? plDbAthlete == null
              ? athletesId
              : plDbAthlete!.state
          : athletesId;
    } else if (athletesId != null || !forView) {
      map['athletesId'] = null;
    }

    return map;
  }

  /// Same as [toMap], but additionally embeds the child DbTag rows
  /// under the 'DbTags' key (unless forQuery is set).
  @override
  Future<Map<String, dynamic>> toMapWithChildren(
      [bool forQuery = false,
      bool forJson = false,
      bool forView = false]) async {
    final map = <String, dynamic>{};
    map['id'] = id;
    if (name != null || !forView) {
      map['name'] = name;
    }
    if (color != null || !forView) {
      map['color'] = color;
    }
    if (system != null) {
      map['system'] = forQuery ? (system! ? 1 : 0) : system;
    } else if (system != null || !forView) {
      map['system'] = null;
    }
    if (athletesId != null) {
      map['athletesId'] = forView
          ? plDbAthlete == null
              ? athletesId
              : plDbAthlete!.state
          : athletesId;
    } else if (athletesId != null || !forView) {
      map['athletesId'] = null;
    }

    // COLLECTIONS (DbTagGroup)
    // NOTE(review): getDbTags() returns null when id == null, so the `!`
    // below throws for an unsaved instance.
    if (!forQuery) {
      map['DbTags'] = await getDbTags()!.toMapList();
    }
    // END COLLECTIONS (DbTagGroup)

    return map;
  }

  /// This method returns Json String [DbTagGroup]
  @override
  String toJson() {
    return json.encode(toMap(forJson: true));
  }

  /// This method returns Json String [DbTagGroup]
  @override
  Future<String> toJsonWithChilds() async {
    return json.encode(await toMapWithChildren(false, true));
  }

  @override
  List<dynamic> toArgs() {
    return [name, color, system, athletesId];
  }

  @override
  List<dynamic> toArgsWithIds() {
    return [id, name, color, system, athletesId];
  }

  /// Fetches a JSON array from [uri] and parses it into DbTagGroup objects.
  /// Returns null (after logging) on any network or parse failure.
  static Future<List<DbTagGroup>?> fromWebUrl(Uri uri,
      {Map<String, String>? headers}) async {
    try {
      final response = await http.get(uri, headers: headers);
      return await fromJson(response.body);
    } catch (e) {
      debugPrint(
          'SQFENTITY ERROR DbTagGroup.fromWebUrl: ErrorMessage: ${e.toString()}');
      return null;
    }
  }

  /// Posts this object's JSON representation to [uri].
  Future<http.Response> postUrl(Uri uri, {Map<String, String>? headers}) {
    return http.post(uri, headers: headers, body: toJson());
  }

  /// Parses a JSON array string; returns an empty list (after logging)
  /// if mapping fails.
  static Future<List<DbTagGroup>> fromJson(String jsonBody) async {
    final Iterable list = await json.decode(jsonBody) as Iterable;
    var objList = <DbTagGroup>[];
    try {
      objList = list
          .map((dbtaggroup) =>
              DbTagGroup.fromMap(dbtaggroup as Map<String, dynamic>))
          .toList();
    } catch (e) {
      debugPrint(
          'SQFENTITY ERROR DbTagGroup.fromJson: ErrorMessage: ${e.toString()}');
    }
    return objList;
  }

  /// Maps raw row maps to objects, optionally preloading the DbTag children
  /// and/or the parent DbAthlete for each row.
  static Future<List<DbTagGroup>> fromMapList(List<dynamic> data,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields,
      bool setDefaultValues = true}) async {
    final List<DbTagGroup> objList = <DbTagGroup>[];
    loadedFields = loadedFields ?? [];
    for (final map in data) {
      final obj = DbTagGroup.fromMap(map as Map<String, dynamic>,
          setDefaultValues: setDefaultValues);
      // final List<String> _loadedFields = List<String>.from(loadedFields);

      // RELATIONSHIPS PRELOAD CHILD
      if (preload) {
        loadedFields = loadedFields ?? [];
        if (/*!_loadedfields!.contains('tagGroups.plDbTags') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbTags'))) {
          /*_loadedfields!.add('tagGroups.plDbTags'); */ obj.plDbTags =
              obj.plDbTags ??
                  await obj.getDbTags()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

      // RELATIONSHIPS PRELOAD
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbAthlete'))) {
          obj.plDbAthlete = obj.plDbAthlete ??
              await obj.getDbAthlete(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD

      objList.add(obj);
    }
    return objList;
  }

  /// returns DbTagGroup by ID if exist, otherwise returns null
  /// Primary Keys: int? id
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: getById(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: getById(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns>returns [DbTagGroup] if exist, otherwise returns null
  Future<DbTagGroup?> getById(int? id,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    if (id == null) {
      return null;
    }
    DbTagGroup? obj;
    final data = await _mnDbTagGroup.getById([id]);
    if (data.length != 0) {
      obj = DbTagGroup.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD CHILD
      if (preload) {
        loadedFields = loadedFields ?? [];
        if (/*!_loadedfields!.contains('tagGroups.plDbTags') && */ (preloadFields ==
                null ||
            preloadFields.contains('plDbTags'))) {
          /*_loadedfields!.add('tagGroups.plDbTags'); */ obj.plDbTags =
              obj.plDbTags ??
                  await obj.getDbTags()!.toList(
                      preload: preload,
                      preloadFields: preloadFields,
                      loadParents: false /*, loadedFields:_loadedFields*/);
        }
      } // END RELATIONSHIPS PRELOAD CHILD

      // RELATIONSHIPS PRELOAD
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbAthlete'))) {
          obj.plDbAthlete = obj.plDbAthlete ??
              await obj.getDbAthlete(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }

  /// Saves the (DbTagGroup) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
  /// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
  /// <returns>Returns id
  @override
  Future<int?> save({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      id = await _mnDbTagGroup.insert(this, ignoreBatch);
    } else {
      await _mnDbTagGroup.update(this);
    }

    return id;
  }

  /// Saves the (DbTagGroup) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
  /// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
  /// <returns>Returns id
  @override
  Future<int?> saveOrThrow({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      id = await _mnDbTagGroup.insertOrThrow(this, ignoreBatch);

      // Marks the row as a fresh insert so rollbackPk() can undo the id.
      isInsert = true;
    } else {
      // id= await _upsert(); // removed in sqfentity_gen 1.3.0+6
      await _mnDbTagGroup.updateOrThrow(this);
    }

    return id;
  }

  /// saveAs DbTagGroup. Returns a new Primary Key value of DbTagGroup

  /// <returns>Returns a new Primary Key value of DbTagGroup
  @override
  Future<int?> saveAs({bool ignoreBatch = true}) async {
    // Clearing the id forces save() down the insert path.
    id = null;

    return save(ignoreBatch: ignoreBatch);
  }

  /// saveAll method saves the sent List<DbTagGroup> as a bulk in one transaction
  /// Returns a <List<BoolResult>>
  static Future<List<dynamic>> saveAll(List<DbTagGroup> dbtaggroups,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    List<dynamic>? result = [];
    // If there is no open transaction, start one
    final isStartedBatch = await DbEncrateia().batchStart();
    for (final obj in dbtaggroups) {
      await obj.save(ignoreBatch: false);
    }
    // Only commit (and back-fill new ids) if this call opened the batch.
    if (!isStartedBatch) {
      result = await DbEncrateia().batchCommit(
          exclusive: exclusive,
          noResult: noResult,
          continueOnError: continueOnError);
      for (int i = 0; i < dbtaggroups.length; i++) {
        if (dbtaggroups[i].id == null) {
          dbtaggroups[i].id = result![i] as int;
        }
      }
    }
    return result!;
  }

  /// Updates if the record exists, otherwise adds a new row
  /// <returns>Returns id
  @override
  Future<int?> upsert({bool ignoreBatch = true}) async {
    try {
      final result = await _mnDbTagGroup.rawInsert(
          'INSERT OR REPLACE INTO tagGroups (id, name, color, system, athletesId) VALUES (?,?,?,?,?)',
          [id, name, color, system, athletesId],
          ignoreBatch);
      if (result! > 0) {
        saveResult = BoolResult(
            success: true,
            successMessage: 'DbTagGroup id=$id updated successfully');
      } else {
        saveResult = BoolResult(
            success: false, errorMessage: 'DbTagGroup id=$id did not update');
      }
      return id;
    } catch (e) {
      saveResult = BoolResult(
          success: false,
          errorMessage: 'DbTagGroup Save failed. Error: ${e.toString()}');
      return null;
    }
  }

  /// inserts or replaces the sent List<<DbTagGroup>> as a bulk in one transaction.
  /// upsertAll() method is faster then saveAll() method. upsertAll() should be used when you are sure that the primary key is greater than zero
  /// Returns a BoolCommitResult
  @override
  Future<BoolCommitResult> upsertAll(List<DbTagGroup> dbtaggroups,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    final results = await _mnDbTagGroup.rawInsertAll(
        'INSERT OR REPLACE INTO tagGroups (id, name, color, system, athletesId) VALUES (?,?,?,?,?)',
        dbtaggroups,
        exclusive: exclusive,
        noResult: noResult,
        continueOnError: continueOnError);
    return results;
  }

  /// Deletes DbTagGroup

  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    debugPrint('SQFENTITIY: delete DbTagGroup invoked (id=$id)');
    var result = BoolResult(success: false);
    // Cascade: delete the child DbTag rows first; abort on failure.
    {
      result =
          await DbTag().select().tagGroupsId.equals(id).and.delete(hardDelete);
    }
    if (!result.success) {
      return result;
    }
    if (!_softDeleteActivated || hardDelete) {
      return _mnDbTagGroup
          .delete(QueryParams(whereString: 'id=?', whereArguments: [id]));
    } else {
      return _mnDbTagGroup.updateBatch(
          QueryParams(whereString: 'id=?', whereArguments: [id]),
          {'isDeleted': 1});
    }
  }

  @override
  Future<BoolResult> recover([bool recoverChilds = true]) {
    // not implemented because:
    final msg =
        'set useSoftDeleting:true in the table definition of [DbTagGroup] to use this feature';
    throw UnimplementedError(msg);
  }

  /// Begins a fluent query over the `tagGroups` table.
  @override
  DbTagGroupFilterBuilder select(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    return DbTagGroupFilterBuilder(this, getIsDeleted)
      ..qparams.selectColumns = columnsToSelect;
  }

  /// Begins a fluent SELECT DISTINCT query over the `tagGroups` table.
  @override
  DbTagGroupFilterBuilder distinct(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    return DbTagGroupFilterBuilder(this, getIsDeleted)
      ..qparams.selectColumns = columnsToSelect
      ..qparams.distinct = true;
  }

  // Applies column default values for fields left null by the constructor.
  void _setDefaultValues() {
    athletesId = athletesId ?? 0;
  }

  // Reverts the primary key assigned by a failed batched insert.
  @override
  void rollbackPk() {
    if (isInsert == true) {
      id = null;
    }
  }

  // END METHODS
  // BEGIN CUSTOM CODE
  /*
      you can define customCode property of your SqfEntityTable constant. For example:
      const tablePerson = SqfEntityTable(
      tableName: 'person',
      primaryKeyName: 'id',
      primaryKeyType: PrimaryKeyType.integer_auto_incremental,
      fields: [
        SqfEntityField('firstName', DbType.text),
        SqfEntityField('lastName', DbType.text),
      ],
      customCode: '''
       String fullName()
       {
         return '$firstName $lastName';
       }
      ''');
     */
  // END CUSTOM CODE
}
|
|
// endregion dbtaggroup
|
|
|
|
// region DbTagGroupField
|
|
/// Typed filter field for DbTagGroup queries.
///
/// Every override simply narrows the fluent return type of the base
/// [FilterBase] operator to [DbTagGroupFilterBuilder] so that chained
/// calls stay statically typed.
class DbTagGroupField extends FilterBase {
  DbTagGroupField(DbTagGroupFilterBuilder dbtaggroupFB) : super(dbtaggroupFB);

  @override
  DbTagGroupFilterBuilder equals(dynamic pValue) =>
      super.equals(pValue) as DbTagGroupFilterBuilder;

  @override
  DbTagGroupFilterBuilder equalsOrNull(dynamic pValue) =>
      super.equalsOrNull(pValue) as DbTagGroupFilterBuilder;

  @override
  DbTagGroupFilterBuilder isNull() =>
      super.isNull() as DbTagGroupFilterBuilder;

  @override
  DbTagGroupFilterBuilder contains(dynamic pValue) =>
      super.contains(pValue) as DbTagGroupFilterBuilder;

  @override
  DbTagGroupFilterBuilder startsWith(dynamic pValue) =>
      super.startsWith(pValue) as DbTagGroupFilterBuilder;

  @override
  DbTagGroupFilterBuilder endsWith(dynamic pValue) =>
      super.endsWith(pValue) as DbTagGroupFilterBuilder;

  @override
  DbTagGroupFilterBuilder between(dynamic pFirst, dynamic pLast) =>
      super.between(pFirst, pLast) as DbTagGroupFilterBuilder;

  @override
  DbTagGroupFilterBuilder greaterThan(dynamic pValue) =>
      super.greaterThan(pValue) as DbTagGroupFilterBuilder;

  @override
  DbTagGroupFilterBuilder lessThan(dynamic pValue) =>
      super.lessThan(pValue) as DbTagGroupFilterBuilder;

  @override
  DbTagGroupFilterBuilder greaterThanOrEquals(dynamic pValue) =>
      super.greaterThanOrEquals(pValue) as DbTagGroupFilterBuilder;

  @override
  DbTagGroupFilterBuilder lessThanOrEquals(dynamic pValue) =>
      super.lessThanOrEquals(pValue) as DbTagGroupFilterBuilder;

  @override
  DbTagGroupFilterBuilder inValues(dynamic pValue) =>
      super.inValues(pValue) as DbTagGroupFilterBuilder;

  /// Negates the next comparison.
  @override
  DbTagGroupField get not => super.not as DbTagGroupField;
}
|
|
// endregion DbTagGroupField
|
|
|
|
// region DbTagGroupFilterBuilder
|
|
class DbTagGroupFilterBuilder extends ConjunctionBase {
|
|
  // Copies the manager and soft-delete flag from the entity this builder
  // was created from (see DbTagGroup.select()/distinct()).
  DbTagGroupFilterBuilder(DbTagGroup obj, bool? getIsDeleted)
      : super(obj, getIsDeleted) {
    _mnDbTagGroup = obj._mnDbTagGroup;
    _softDeleteActivated = obj.softDeleteActivated;
  }

  bool _softDeleteActivated = false;
  // Manager used to execute the built query.
  DbTagGroupManager? _mnDbTagGroup;
|
|
|
|
  /// put the sql keyword 'AND'
  /// (delegates to the base builder, then returns the typed builder for chaining)
  @override
  DbTagGroupFilterBuilder get and {
    super.and;
    return this;
  }
|
|
|
|
  /// put the sql keyword 'OR'
  /// (delegates to the base builder, then returns the typed builder for chaining)
  @override
  DbTagGroupFilterBuilder get or {
    super.or;
    return this;
  }
|
|
|
|
  /// open parentheses
  /// (delegates to the base builder, then returns the typed builder for chaining)
  @override
  DbTagGroupFilterBuilder get startBlock {
    super.startBlock;
    return this;
  }
|
|
|
|
  /// String whereCriteria, write raw query without 'where' keyword. Like this: 'field1 like 'test%' and field2 = 3'
  /// parameterValue supplies bound arguments for '?' placeholders.
  @override
  DbTagGroupFilterBuilder where(String? whereCriteria,
      {dynamic parameterValue}) {
    super.where(whereCriteria, parameterValue: parameterValue);
    return this;
  }
|
|
|
|
  /// page = page number,
  /// pagesize = row(s) per page
  @override
  DbTagGroupFilterBuilder page(int page, int pagesize) {
    super.page(page, pagesize);
    return this;
  }
|
|
|
|
  /// int count = LIMIT
  @override
  DbTagGroupFilterBuilder top(int count) {
    super.top(count);
    return this;
  }
|
|
|
|
  /// close parentheses
  /// (delegates to the base builder, then returns the typed builder for chaining)
  @override
  DbTagGroupFilterBuilder get endBlock {
    super.endBlock;
    return this;
  }
|
|
|
|
  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbTagGroupFilterBuilder orderBy(dynamic argFields) {
    super.orderBy(argFields);
    return this;
  }
|
|
|
|
  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbTagGroupFilterBuilder orderByDesc(dynamic argFields) {
    super.orderByDesc(argFields);
    return this;
  }
|
|
|
|
  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbTagGroupFilterBuilder groupBy(dynamic argFields) {
    super.groupBy(argFields);
    return this;
  }
|
|
|
|
  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbTagGroupFilterBuilder having(dynamic argFields) {
    super.having(argFields);
    return this;
  }
|
|
|
|
  // Creates a filter field bound to [colName] with the given DbType.
  // NOTE(review): the [field] parameter is unused here; each column getter
  // assigns the fresh instance back to its cache slot.
  DbTagGroupField _setField(
      DbTagGroupField? field, String colName, DbType dbtype) {
    return DbTagGroupField(this)
      ..param = DbParameter(
          dbType: dbtype, columnName: colName, wStartBlock: openedBlock);
  }
|
|
|
|
  DbTagGroupField? _id;
  /// Filter field for the `id` column.
  DbTagGroupField get id {
    return _id = _setField(_id, 'id', DbType.integer);
  }
|
|
|
|
  DbTagGroupField? _name;
  /// Filter field for the `name` column.
  DbTagGroupField get name {
    return _name = _setField(_name, 'name', DbType.text);
  }
|
|
|
|
  DbTagGroupField? _color;
  /// Filter field for the `color` column.
  DbTagGroupField get color {
    return _color = _setField(_color, 'color', DbType.integer);
  }
|
|
|
|
  DbTagGroupField? _system;
  /// Filter field for the `system` column.
  DbTagGroupField get system {
    return _system = _setField(_system, 'system', DbType.bool);
  }
|
|
|
|
  DbTagGroupField? _athletesId;
  /// Filter field for the `athletesId` foreign-key column.
  DbTagGroupField get athletesId {
    return _athletesId = _setField(_athletesId, 'athletesId', DbType.integer);
  }
|
|
|
|
  /// Deletes List<DbTagGroup> bulk by query
  ///
  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    buildParameters();
    var r = BoolResult(success: false);
    // Delete sub records where in (DbTag) according to DeleteRule.CASCADE
    // The child delete reuses this query's WHERE clause as a subselect of
    // matching tagGroup ids; if it fails, the parent delete is aborted.
    final idListDbTagBYtagGroupsId = toListPrimaryKeySQL(false);
    final resDbTagBYtagGroupsId = await DbTag()
        .select()
        .where('tagGroupsId IN (${idListDbTagBYtagGroupsId['sql']})',
            parameterValue: idListDbTagBYtagGroupsId['args'])
        .delete(hardDelete);
    if (!resDbTagBYtagGroupsId.success) {
      return resDbTagBYtagGroupsId;
    }

    // Soft delete marks rows instead of removing them when activated.
    if (_softDeleteActivated && !hardDelete) {
      r = await _mnDbTagGroup!.updateBatch(qparams, {'isDeleted': 1});
    } else {
      r = await _mnDbTagGroup!.delete(qparams);
    }
    return r;
  }
|
|
|
|
  /// using:
  /// update({'fieldName': Value})
  /// fieldName must be String. Value is dynamic, it can be any of the (int, bool, String.. )
  @override
  Future<BoolResult> update(Map<String, dynamic> values) {
    buildParameters();
    // SQLite UPDATE does not support LIMIT/OFFSET directly, so when either is
    // set the WHERE clause is rewritten as `id IN (SELECT id ... LIMIT/OFFSET)`.
    if (qparams.limit! > 0 || qparams.offset! > 0) {
      qparams.whereString =
          'id IN (SELECT id from tagGroups ${qparams.whereString!.isNotEmpty ? 'WHERE ${qparams.whereString}' : ''}${qparams.limit! > 0 ? ' LIMIT ${qparams.limit}' : ''}${qparams.offset! > 0 ? ' OFFSET ${qparams.offset}' : ''})';
    }
    return _mnDbTagGroup!.updateBatch(qparams, values);
  }
|
|
|
|
/// This method always returns [DbTagGroup] Obj if exist, otherwise returns null
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
/// ex: toSingle(preload:true) -> Loads all related objects
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
/// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
/// bool loadParents: if true, loads all parent objects until the object has no parent

/// <returns> DbTagGroup?
@override
Future<DbTagGroup?> toSingle(
    {bool preload = false,
    List<String>? preloadFields,
    bool loadParents = false,
    List<String>? loadedFields}) async {
  // Page size 1: only the first matching row is materialized.
  buildParameters(pSize: 1);
  final objFuture = _mnDbTagGroup!.toList(qparams);
  final data = await objFuture;
  DbTagGroup? obj;
  if (data.isNotEmpty) {
    obj = DbTagGroup.fromMap(data[0] as Map<String, dynamic>);

    // RELATIONSHIPS PRELOAD CHILD
    // Optionally fetch the DbTag children that reference this group.
    if (preload) {
      loadedFields = loadedFields ?? [];
      if (/*!_loadedfields!.contains('tagGroups.plDbTags') && */ (preloadFields ==
              null ||
          preloadFields.contains('plDbTags'))) {
        /*_loadedfields!.add('tagGroups.plDbTags'); */ obj.plDbTags =
            obj.plDbTags ??
                await obj.getDbTags()!.toList(
                    preload: preload,
                    preloadFields: preloadFields,
                    loadParents: false /*, loadedFields:_loadedFields*/);
      }
    } // END RELATIONSHIPS PRELOAD CHILD

    // RELATIONSHIPS PRELOAD
    // Optionally fetch the parent DbAthlete for this group.
    if (preload || loadParents) {
      loadedFields = loadedFields ?? [];
      if ((preloadFields == null ||
          loadParents ||
          preloadFields.contains('plDbAthlete'))) {
        obj.plDbAthlete = obj.plDbAthlete ??
            await obj.getDbAthlete(loadParents: loadParents);
      }
    } // END RELATIONSHIPS PRELOAD
  } else {
    obj = null;
  }
  return obj;
}
|
|
|
|
/// Returns the first matching [DbTagGroup], or a freshly constructed
/// default instance when the query matches no rows.
///
/// The preload/loadParents flags are forwarded unchanged to [toSingle];
/// see that method for their semantics.
@override
Future<DbTagGroup> toSingleOrDefault(
    {bool preload = false,
    List<String>? preloadFields,
    bool loadParents = false,
    List<String>? loadedFields}) async {
  final found = await toSingle(
      preload: preload,
      preloadFields: preloadFields,
      loadParents: loadParents,
      loadedFields: loadedFields);
  return found ?? DbTagGroup();
}
|
|
|
|
/// Counts the rows matching the current filter. [DbTagGroup]
/// The optional callback also receives the computed count.
/// <returns>int
@override
Future<int> toCount([VoidCallback Function(int c)? dbtaggroupCount]) async {
  buildParameters();
  // Replace the column list with a COUNT aggregate; only one row comes back.
  qparams.selectColumns = ['COUNT(1) AS CNT'];
  final rows = await _mnDbTagGroup!.toList(qparams);
  final int cnt = rows[0]['CNT'] as int;
  dbtaggroupCount?.call(cnt);
  return cnt;
}
|
|
|
|
/// This method returns List<DbTagGroup> [DbTagGroup]
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
/// ex: toList(preload:true) -> Loads all related objects
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
/// ex: toList(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
/// bool loadParents: if true, loads all parent objects until the object has no parent

/// <returns>List<DbTagGroup>
@override
Future<List<DbTagGroup>> toList(
    {bool preload = false,
    List<String>? preloadFields,
    bool loadParents = false,
    List<String>? loadedFields}) async {
  final data = await toMapList();
  // Defaults are only applied for full-row selects; when a column subset
  // was requested (selectColumns != null) missing fields stay null.
  final List<DbTagGroup> dbtaggroupsData = await DbTagGroup.fromMapList(data,
      preload: preload,
      preloadFields: preloadFields,
      loadParents: loadParents,
      loadedFields: loadedFields,
      setDefaultValues: qparams.selectColumns == null);
  return dbtaggroupsData;
}
|
|
|
|
/// Serializes every row matching the filter into a JSON array string.
/// [DbTagGroup]
@override
Future<String> toJson() async {
  final rows = await toList();
  final encoded = <dynamic>[
    for (final row in rows) row.toMap(forJson: true)
  ];
  return json.encode(encoded);
}
|
|
|
|
/// Serializes every matching row, including its related objects,
/// into a JSON array string. [DbTagGroup]
@override
Future<String> toJsonWithChilds() async {
  final rows = await toList();
  final encoded = <dynamic>[
    for (final row in rows) await row.toMapWithChildren(false, true)
  ];
  return json.encode(encoded);
}
|
|
|
|
/// Runs the query and returns the raw row maps. [DbTagGroup]
/// <returns>List<dynamic>
@override
Future<List<dynamic>> toMapList() async {
  buildParameters();
  return _mnDbTagGroup!.toList(qparams);
}
|
|
|
|
/// Builds the SQL that selects the primary keys matching the current
/// filter. [DbTagGroup]
/// retVal['sql'] = SQL statement string, retVal['args'] = whereArguments List<dynamic>;
@override
Map<String, dynamic> toListPrimaryKeySQL([bool buildParams = true]) {
  if (buildParams) {
    buildParameters();
  }
  return <String, dynamic>{
    'sql': 'SELECT `id` FROM tagGroups WHERE ${qparams.whereString}',
    'args': qparams.whereArguments,
  };
}
|
|
|
|
/// Runs the query selecting only the `id` column and returns the
/// matching primary keys.
/// <returns>List<int>
@override
Future<List<int>> toListPrimaryKey([bool buildParams = true]) async {
  if (buildParams) {
    buildParameters();
  }
  qparams.selectColumns = ['id'];
  final rows = await _mnDbTagGroup!.toList(qparams);
  return <int>[for (final row in rows) row['id'] as int];
}
|
|
|
|
/// Returns List<dynamic> for selected columns. Use this method for
/// 'groupBy' with min,max,avg.. [DbTagGroup]
/// Sample usage: (see EXAMPLE 4.2 at https://github.com/hhtokpinar/sqfEntity#group-by)
@override
Future<List<dynamic>> toListObject() async {
  buildParameters();
  final rows = await _mnDbTagGroup!.toList(qparams);
  // Copy into a fresh growable list so callers never share the raw result.
  return List<dynamic>.from(rows);
}
|
|
|
|
/// Returns List<String> for the first selected column.
/// Sample usage: await DbTagGroup.select(columnsToSelect: ['columnName']).toListString()
@override
Future<List<String>> toListString(
    [VoidCallback Function(List<String> o)? listString]) async {
  buildParameters();
  final rows = await _mnDbTagGroup!.toList(qparams);
  // Only the first requested column is stringified.
  final col = qparams.selectColumns![0];
  final values = <String>[for (final row in rows) row[col].toString()];
  listString?.call(values);
  return values;
}
|
|
}
|
|
// endregion DbTagGroupFilterBuilder
|
|
|
|
// region DbTagGroupFields
|
|
/// Static, lazily-created [TableField] accessors for the columns of the
/// `tagGroups` table, used when composing typed queries.
class DbTagGroupFields {
  static TableField? _fId;

  /// The `id` primary-key column.
  static TableField get id =>
      _fId ??= SqlSyntax.setField(_fId, 'id', DbType.integer);

  static TableField? _fName;

  /// The `name` text column.
  static TableField get name =>
      _fName ??= SqlSyntax.setField(_fName, 'name', DbType.text);

  static TableField? _fColor;

  /// The `color` integer column.
  static TableField get color =>
      _fColor ??= SqlSyntax.setField(_fColor, 'color', DbType.integer);

  static TableField? _fSystem;

  /// The `system` boolean column.
  static TableField get system =>
      _fSystem ??= SqlSyntax.setField(_fSystem, 'system', DbType.bool);

  static TableField? _fAthletesId;

  /// The `athletesId` foreign-key column.
  static TableField get athletesId =>
      _fAthletesId ??=
          SqlSyntax.setField(_fAthletesId, 'athletesId', DbType.integer);
}
|
|
// endregion DbTagGroupFields
|
|
|
|
//region DbTagGroupManager
|
|
/// Low-level data-access manager for the `tagGroups` table.
///
/// Wires the table name, primary-key column list and the single-row
/// where clause into the shared [SqfEntityProvider] on [DbEncrateia].
class DbTagGroupManager extends SqfEntityProvider {
  DbTagGroupManager()
      : super(DbEncrateia(),
            tableName: _tableName,
            primaryKeyList: _primaryKeyList,
            whereStr: _whereStr);
  static const String _tableName = 'tagGroups';
  static const List<String> _primaryKeyList = ['id'];
  // Template used by getById/update/delete to address one row.
  static const String _whereStr = 'id=?';
}
|
|
|
|
//endregion DbTagGroupManager
|
|
// region DbLapTagging
|
|
/// Row model for the `lapTaggings` join table, linking a [DbTag] to a
/// [DbLap]. Generated by SqfEntity; behavior changes belong in the
/// generator configuration, not here.
class DbLapTagging extends TableBase {
  DbLapTagging({this.id, this.system, this.tagsId, this.lapsId}) {
    _setDefaultValues();
    softDeleteActivated = false;
  }
  DbLapTagging.withFields(this.system, this.tagsId, this.lapsId) {
    _setDefaultValues();
  }
  DbLapTagging.withId(this.id, this.system, this.tagsId, this.lapsId) {
    _setDefaultValues();
  }
  // fromMap v2.0
  DbLapTagging.fromMap(Map<String, dynamic> o, {bool setDefaultValues = true}) {
    if (setDefaultValues) {
      _setDefaultValues();
    }
    id = int.tryParse(o['id'].toString());
    if (o['system'] != null) {
      // SQLite stores booleans as 0/1; JSON sources may carry true/false.
      system =
          o['system'].toString() == '1' || o['system'].toString() == 'true';
    }
    tagsId = int.tryParse(o['tagsId'].toString());

    lapsId = int.tryParse(o['lapsId'].toString());

    // RELATIONSHIPS FromMAP
    // Nested parent maps (when present) are hydrated eagerly.
    plDbTag = o['dbTag'] != null
        ? DbTag.fromMap(o['dbTag'] as Map<String, dynamic>)
        : null;
    plDbLap = o['dbLap'] != null
        ? DbLap.fromMap(o['dbLap'] as Map<String, dynamic>)
        : null;
    // END RELATIONSHIPS FromMAP
  }
  // FIELDS (DbLapTagging)
  int? id;
  bool? system;
  int? tagsId;
  int? lapsId;

  // end FIELDS (DbLapTagging)

  // RELATIONSHIPS (DbLapTagging)
  /// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbTag', 'plField2'..]) or so on..
  DbTag? plDbTag;

  /// get DbTag By TagsId
  Future<DbTag?> getDbTag(
      {bool loadParents = false, List<String>? loadedFields}) async {
    final _obj = await DbTag()
        .getById(tagsId, loadParents: loadParents, loadedFields: loadedFields);
    return _obj;
  }

  /// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbLap', 'plField2'..]) or so on..
  DbLap? plDbLap;

  /// get DbLap By LapsId
  Future<DbLap?> getDbLap(
      {bool loadParents = false, List<String>? loadedFields}) async {
    final _obj = await DbLap()
        .getById(lapsId, loadParents: loadParents, loadedFields: loadedFields);
    return _obj;
  }
  // END RELATIONSHIPS (DbLapTagging)

  static const bool _softDeleteActivated = false;
  DbLapTaggingManager? __mnDbLapTagging;

  // Lazily-created manager shared by all data operations on this object.
  DbLapTaggingManager get _mnDbLapTagging {
    return __mnDbLapTagging = __mnDbLapTagging ?? DbLapTaggingManager();
  }

  // METHODS
  /// Maps this row to column/value pairs.
  /// forQuery: booleans become 0/1. forView: foreign-key columns are
  /// replaced by a display value from the preloaded parent when available.
  @override
  Map<String, dynamic> toMap(
      {bool forQuery = false, bool forJson = false, bool forView = false}) {
    final map = <String, dynamic>{};
    map['id'] = id;
    if (system != null) {
      map['system'] = forQuery ? (system! ? 1 : 0) : system;
    } else if (system != null || !forView) {
      map['system'] = null;
    }
    if (tagsId != null) {
      map['tagsId'] = forView
          ? plDbTag == null
              ? tagsId
              : plDbTag!.name
          : tagsId;
    } else if (tagsId != null || !forView) {
      map['tagsId'] = null;
    }
    if (lapsId != null) {
      map['lapsId'] = forView
          ? plDbLap == null
              ? lapsId
              : plDbLap!.event
          : lapsId;
    } else if (lapsId != null || !forView) {
      map['lapsId'] = null;
    }

    return map;
  }

  /// Async variant of [toMap]; identical output here because this table
  /// has no child collections to append.
  @override
  Future<Map<String, dynamic>> toMapWithChildren(
      [bool forQuery = false,
      bool forJson = false,
      bool forView = false]) async {
    final map = <String, dynamic>{};
    map['id'] = id;
    if (system != null) {
      map['system'] = forQuery ? (system! ? 1 : 0) : system;
    } else if (system != null || !forView) {
      map['system'] = null;
    }
    if (tagsId != null) {
      map['tagsId'] = forView
          ? plDbTag == null
              ? tagsId
              : plDbTag!.name
          : tagsId;
    } else if (tagsId != null || !forView) {
      map['tagsId'] = null;
    }
    if (lapsId != null) {
      map['lapsId'] = forView
          ? plDbLap == null
              ? lapsId
              : plDbLap!.event
          : lapsId;
    } else if (lapsId != null || !forView) {
      map['lapsId'] = null;
    }

    return map;
  }

  /// This method returns Json String [DbLapTagging]
  @override
  String toJson() {
    return json.encode(toMap(forJson: true));
  }

  /// This method returns Json String [DbLapTagging]
  @override
  Future<String> toJsonWithChilds() async {
    return json.encode(await toMapWithChildren(false, true));
  }

  // Positional argument list for INSERT (primary key omitted).
  @override
  List<dynamic> toArgs() {
    return [system, tagsId, lapsId];
  }

  // Positional argument list including the primary key (for upserts).
  @override
  List<dynamic> toArgsWithIds() {
    return [id, system, tagsId, lapsId];
  }

  /// Fetches a JSON array from [uri] and maps it to DbLapTagging rows.
  /// Returns null (after logging) on any network or decode failure.
  static Future<List<DbLapTagging>?> fromWebUrl(Uri uri,
      {Map<String, String>? headers}) async {
    try {
      final response = await http.get(uri, headers: headers);
      return await fromJson(response.body);
    } catch (e) {
      debugPrint(
          'SQFENTITY ERROR DbLapTagging.fromWebUrl: ErrorMessage: ${e.toString()}');
      return null;
    }
  }

  /// POSTs this row as JSON to [uri].
  Future<http.Response> postUrl(Uri uri, {Map<String, String>? headers}) {
    return http.post(uri, headers: headers, body: toJson());
  }

  /// Decodes a JSON array into DbLapTagging rows; mapping errors are
  /// logged and yield an empty list rather than throwing.
  static Future<List<DbLapTagging>> fromJson(String jsonBody) async {
    final Iterable list = await json.decode(jsonBody) as Iterable;
    var objList = <DbLapTagging>[];
    try {
      objList = list
          .map((dblaptagging) =>
              DbLapTagging.fromMap(dblaptagging as Map<String, dynamic>))
          .toList();
    } catch (e) {
      debugPrint(
          'SQFENTITY ERROR DbLapTagging.fromJson: ErrorMessage: ${e.toString()}');
    }
    return objList;
  }

  /// Maps raw query rows to objects, optionally preloading the DbTag /
  /// DbLap parents per row (see [getById] for flag semantics).
  static Future<List<DbLapTagging>> fromMapList(List<dynamic> data,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields,
      bool setDefaultValues = true}) async {
    final List<DbLapTagging> objList = <DbLapTagging>[];
    loadedFields = loadedFields ?? [];
    for (final map in data) {
      final obj = DbLapTagging.fromMap(map as Map<String, dynamic>,
          setDefaultValues: setDefaultValues);
      // final List<String> _loadedFields = List<String>.from(loadedFields);

      // RELATIONSHIPS PRELOAD
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbTag'))) {
          obj.plDbTag =
              obj.plDbTag ?? await obj.getDbTag(loadParents: loadParents);
        }
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbLap'))) {
          obj.plDbLap =
              obj.plDbLap ?? await obj.getDbLap(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD

      objList.add(obj);
    }
    return objList;
  }

  /// returns DbLapTagging by ID if exist, otherwise returns null
  /// Primary Keys: int? id
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: getById(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: getById(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns>returns [DbLapTagging] if exist, otherwise returns null
  Future<DbLapTagging?> getById(int? id,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    if (id == null) {
      return null;
    }
    DbLapTagging? obj;
    final data = await _mnDbLapTagging.getById([id]);
    if (data.length != 0) {
      obj = DbLapTagging.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbTag'))) {
          obj.plDbTag =
              obj.plDbTag ?? await obj.getDbTag(loadParents: loadParents);
        }
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbLap'))) {
          obj.plDbLap =
              obj.plDbLap ?? await obj.getDbLap(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }

  /// Saves the (DbLapTagging) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
  /// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
  /// <returns>Returns id
  @override
  Future<int?> save({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      id = await _mnDbLapTagging.insert(this, ignoreBatch);
    } else {
      await _mnDbLapTagging.update(this);
    }

    return id;
  }

  /// Saves the (DbLapTagging) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
  /// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
  /// Unlike [save], database errors propagate to the caller.
  /// <returns>Returns id
  @override
  Future<int?> saveOrThrow({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      id = await _mnDbLapTagging.insertOrThrow(this, ignoreBatch);

      // Remember this was an insert so rollbackPk() can undo the new id.
      isInsert = true;
    } else {
      // id= await _upsert(); // removed in sqfentity_gen 1.3.0+6
      await _mnDbLapTagging.updateOrThrow(this);
    }

    return id;
  }

  /// saveAs DbLapTagging. Returns a new Primary Key value of DbLapTagging

  /// <returns>Returns a new Primary Key value of DbLapTagging
  @override
  Future<int?> saveAs({bool ignoreBatch = true}) async {
    // Clearing the id forces save() down the insert path.
    id = null;

    return save(ignoreBatch: ignoreBatch);
  }

  /// saveAll method saves the sent List<DbLapTagging> as a bulk in one transaction
  /// Returns a <List<BoolResult>>
  static Future<List<dynamic>> saveAll(List<DbLapTagging> dblaptaggings,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    List<dynamic>? result = [];
    // If there is no open transaction, start one
    final isStartedBatch = await DbEncrateia().batchStart();
    for (final obj in dblaptaggings) {
      await obj.save(ignoreBatch: false);
    }
    if (!isStartedBatch) {
      result = await DbEncrateia().batchCommit(
          exclusive: exclusive,
          noResult: noResult,
          continueOnError: continueOnError);
      // Back-fill the generated ids returned by the committed batch.
      for (int i = 0; i < dblaptaggings.length; i++) {
        if (dblaptaggings[i].id == null) {
          dblaptaggings[i].id = result![i] as int;
        }
      }
    }
    return result!;
  }

  /// Updates if the record exists, otherwise adds a new row
  /// <returns>Returns id
  @override
  Future<int?> upsert({bool ignoreBatch = true}) async {
    try {
      final result = await _mnDbLapTagging.rawInsert(
          'INSERT OR REPLACE INTO lapTaggings (id, system, tagsId, lapsId) VALUES (?,?,?,?)',
          [id, system, tagsId, lapsId],
          ignoreBatch);
      if (result! > 0) {
        saveResult = BoolResult(
            success: true,
            successMessage: 'DbLapTagging id=$id updated successfully');
      } else {
        saveResult = BoolResult(
            success: false, errorMessage: 'DbLapTagging id=$id did not update');
      }
      return id;
    } catch (e) {
      saveResult = BoolResult(
          success: false,
          errorMessage: 'DbLapTagging Save failed. Error: ${e.toString()}');
      return null;
    }
  }

  /// inserts or replaces the sent List<<DbLapTagging>> as a bulk in one transaction.
  /// upsertAll() method is faster then saveAll() method. upsertAll() should be used when you are sure that the primary key is greater than zero
  /// Returns a BoolCommitResult
  @override
  Future<BoolCommitResult> upsertAll(List<DbLapTagging> dblaptaggings,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    final results = await _mnDbLapTagging.rawInsertAll(
        'INSERT OR REPLACE INTO lapTaggings (id, system, tagsId, lapsId) VALUES (?,?,?,?)',
        dblaptaggings,
        exclusive: exclusive,
        noResult: noResult,
        continueOnError: continueOnError);
    return results;
  }

  /// Deletes DbLapTagging

  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    debugPrint('SQFENTITIY: delete DbLapTagging invoked (id=$id)');
    // Soft deleting is disabled for this table, so this always hard-deletes
    // unless the generator config changes.
    if (!_softDeleteActivated || hardDelete) {
      return _mnDbLapTagging
          .delete(QueryParams(whereString: 'id=?', whereArguments: [id]));
    } else {
      return _mnDbLapTagging.updateBatch(
          QueryParams(whereString: 'id=?', whereArguments: [id]),
          {'isDeleted': 1});
    }
  }

  @override
  Future<BoolResult> recover([bool recoverChilds = true]) {
    // not implemented because:
    final msg =
        'set useSoftDeleting:true in the table definition of [DbLapTagging] to use this feature';
    throw UnimplementedError(msg);
  }

  /// Starts a fluent query over lapTaggings, optionally restricted to
  /// [columnsToSelect].
  @override
  DbLapTaggingFilterBuilder select(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    return DbLapTaggingFilterBuilder(this, getIsDeleted)
      ..qparams.selectColumns = columnsToSelect;
  }

  /// Same as [select] but adds SELECT DISTINCT.
  @override
  DbLapTaggingFilterBuilder distinct(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    return DbLapTaggingFilterBuilder(this, getIsDeleted)
      ..qparams.selectColumns = columnsToSelect
      ..qparams.distinct = true;
  }

  // Foreign keys default to 0 (per the generated table definition);
  // `system` intentionally stays null when unset.
  void _setDefaultValues() {
    tagsId = tagsId ?? 0;
    lapsId = lapsId ?? 0;
  }

  @override
  void rollbackPk() {
    // Undo the id assigned by a failed batched insert.
    if (isInsert == true) {
      id = null;
    }
  }

  // END METHODS
  // BEGIN CUSTOM CODE
  /*
      you can define customCode property of your SqfEntityTable constant. For example:
      const tablePerson = SqfEntityTable(
      tableName: 'person',
      primaryKeyName: 'id',
      primaryKeyType: PrimaryKeyType.integer_auto_incremental,
      fields: [
        SqfEntityField('firstName', DbType.text),
        SqfEntityField('lastName', DbType.text),
      ],
      customCode: '''
       String fullName()
       {
         return '$firstName $lastName';
       }
      ''');
     */
  // END CUSTOM CODE
}
|
|
// endregion dblaptagging
|
|
|
|
// region DbLapTaggingField
|
|
/// Typed filter field for [DbLapTagging] queries. Every comparison
/// simply delegates to [FilterBase] and narrows the return type so the
/// fluent chain keeps producing [DbLapTaggingFilterBuilder].
class DbLapTaggingField extends FilterBase {
  DbLapTaggingField(DbLapTaggingFilterBuilder dblaptaggingFB)
      : super(dblaptaggingFB);

  @override
  DbLapTaggingFilterBuilder equals(dynamic pValue) =>
      super.equals(pValue) as DbLapTaggingFilterBuilder;

  @override
  DbLapTaggingFilterBuilder equalsOrNull(dynamic pValue) =>
      super.equalsOrNull(pValue) as DbLapTaggingFilterBuilder;

  @override
  DbLapTaggingFilterBuilder isNull() =>
      super.isNull() as DbLapTaggingFilterBuilder;

  @override
  DbLapTaggingFilterBuilder contains(dynamic pValue) =>
      super.contains(pValue) as DbLapTaggingFilterBuilder;

  @override
  DbLapTaggingFilterBuilder startsWith(dynamic pValue) =>
      super.startsWith(pValue) as DbLapTaggingFilterBuilder;

  @override
  DbLapTaggingFilterBuilder endsWith(dynamic pValue) =>
      super.endsWith(pValue) as DbLapTaggingFilterBuilder;

  @override
  DbLapTaggingFilterBuilder between(dynamic pFirst, dynamic pLast) =>
      super.between(pFirst, pLast) as DbLapTaggingFilterBuilder;

  @override
  DbLapTaggingFilterBuilder greaterThan(dynamic pValue) =>
      super.greaterThan(pValue) as DbLapTaggingFilterBuilder;

  @override
  DbLapTaggingFilterBuilder lessThan(dynamic pValue) =>
      super.lessThan(pValue) as DbLapTaggingFilterBuilder;

  @override
  DbLapTaggingFilterBuilder greaterThanOrEquals(dynamic pValue) =>
      super.greaterThanOrEquals(pValue) as DbLapTaggingFilterBuilder;

  @override
  DbLapTaggingFilterBuilder lessThanOrEquals(dynamic pValue) =>
      super.lessThanOrEquals(pValue) as DbLapTaggingFilterBuilder;

  @override
  DbLapTaggingFilterBuilder inValues(dynamic pValue) =>
      super.inValues(pValue) as DbLapTaggingFilterBuilder;

  /// Negates the next comparison in the chain.
  @override
  DbLapTaggingField get not => super.not as DbLapTaggingField;
}
|
|
// endregion DbLapTaggingField
|
|
|
|
// region DbLapTaggingFilterBuilder
|
|
class DbLapTaggingFilterBuilder extends ConjunctionBase {
|
|
DbLapTaggingFilterBuilder(DbLapTagging obj, bool? getIsDeleted)
    : super(obj, getIsDeleted) {
  // Reuse the entity's manager and soft-delete setting for this query.
  _mnDbLapTagging = obj._mnDbLapTagging;
  _softDeleteActivated = obj.softDeleteActivated;
}

// Copied from the source entity at construction time.
bool _softDeleteActivated = false;
DbLapTaggingManager? _mnDbLapTagging;
|
|
|
|
/// put the sql keyword 'AND'; returns this builder for fluent chaining
@override
DbLapTaggingFilterBuilder get and {
  super.and;
  return this;
}
|
|
|
|
/// put the sql keyword 'OR'; returns this builder for fluent chaining
@override
DbLapTaggingFilterBuilder get or {
  super.or;
  return this;
}
|
|
|
|
/// open parentheses (pair with [endBlock]); returns this builder
@override
DbLapTaggingFilterBuilder get startBlock {
  super.startBlock;
  return this;
}
|
|
|
|
/// String whereCriteria, write raw query without 'where' keyword. Like this: 'field1 like 'test%' and field2 = 3'
/// Use '?' placeholders with [parameterValue] to keep the query parameterized.
@override
DbLapTaggingFilterBuilder where(String? whereCriteria,
    {dynamic parameterValue}) {
  super.where(whereCriteria, parameterValue: parameterValue);
  return this;
}
|
|
|
|
/// page = page number (1-based),
/// pagesize = row(s) per page
@override
DbLapTaggingFilterBuilder page(int page, int pagesize) {
  super.page(page, pagesize);
  return this;
}
|
|
|
|
/// int count = LIMIT applied to the query
@override
DbLapTaggingFilterBuilder top(int count) {
  super.top(count);
  return this;
}
|
|
|
|
/// close parentheses (pair with [startBlock]); returns this builder
@override
DbLapTaggingFilterBuilder get endBlock {
  super.endBlock;
  return this;
}
|
|
|
|
/// argFields might be String or List<String>.
/// Example 1: argFields='name, date'
/// Example 2: argFields = ['name', 'date']
@override
DbLapTaggingFilterBuilder orderBy(dynamic argFields) {
  super.orderBy(argFields);
  return this;
}
|
|
|
|
/// Descending ORDER BY. argFields might be String or List<String>.
/// Example 1: argFields='field1, field2'
/// Example 2: argFields = ['field1', 'field2']
@override
DbLapTaggingFilterBuilder orderByDesc(dynamic argFields) {
  super.orderByDesc(argFields);
  return this;
}
|
|
|
|
/// GROUP BY clause. argFields might be String or List<String>.
/// Example 1: argFields='field1, field2'
/// Example 2: argFields = ['field1', 'field2']
@override
DbLapTaggingFilterBuilder groupBy(dynamic argFields) {
  super.groupBy(argFields);
  return this;
}
|
|
|
|
/// HAVING clause (use with [groupBy]). argFields might be String or List<String>.
/// Example 1: argFields='name, date'
/// Example 2: argFields = ['name', 'date']
@override
DbLapTaggingFilterBuilder having(dynamic argFields) {
  super.having(argFields);
  return this;
}
|
|
|
|
/// Creates a fresh typed filter field bound to [colName] / [dbtype];
/// a new field is produced on every access so each comparison starts clean.
DbLapTaggingField _setField(
    DbLapTaggingField? field, String colName, DbType dbtype) {
  final created = DbLapTaggingField(this);
  created.param = DbParameter(
      dbType: dbtype, columnName: colName, wStartBlock: openedBlock);
  return created;
}
|
|
|
|
DbLapTaggingField? _id;

/// Filter field bound to the `id` primary-key column.
DbLapTaggingField get id => _id = _setField(_id, 'id', DbType.integer);
|
|
|
|
DbLapTaggingField? _system;

/// Filter field bound to the boolean `system` column.
DbLapTaggingField get system =>
    _system = _setField(_system, 'system', DbType.bool);
|
|
|
|
DbLapTaggingField? _tagsId;

/// Filter field bound to the `tagsId` foreign-key column.
DbLapTaggingField get tagsId =>
    _tagsId = _setField(_tagsId, 'tagsId', DbType.integer);
|
|
|
|
DbLapTaggingField? _lapsId;

/// Filter field bound to the `lapsId` foreign-key column.
DbLapTaggingField get lapsId =>
    _lapsId = _setField(_lapsId, 'lapsId', DbType.integer);
|
|
|
|
/// Deletes List<DbLapTagging> bulk by query
///
/// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
@override
Future<BoolResult> delete([bool hardDelete = false]) async {
  buildParameters();
  var r = BoolResult(success: false);

  if (_softDeleteActivated && !hardDelete) {
    // Soft delete: only flag the rows as deleted.
    r = await _mnDbLapTagging!.updateBatch(qparams, {'isDeleted': 1});
  } else {
    r = await _mnDbLapTagging!.delete(qparams);
  }
  return r;
}
|
|
|
|
/// using:
/// update({'fieldName': Value})
/// fieldName must be String. Value is dynamic, it can be any of the (int, bool, String.. )
@override
Future<BoolResult> update(Map<String, dynamic> values) {
  buildParameters();
  if (qparams.limit! > 0 || qparams.offset! > 0) {
    // SQLite UPDATE has no LIMIT/OFFSET clause, so the page restriction is
    // applied through an `id IN (SELECT ...)` subquery instead.
    qparams.whereString =
        'id IN (SELECT id from lapTaggings ${qparams.whereString!.isNotEmpty ? 'WHERE ${qparams.whereString}' : ''}${qparams.limit! > 0 ? ' LIMIT ${qparams.limit}' : ''}${qparams.offset! > 0 ? ' OFFSET ${qparams.offset}' : ''})';
  }
  return _mnDbLapTagging!.updateBatch(qparams, values);
}
|
|
|
|
/// This method always returns [DbLapTagging] Obj if exist, otherwise returns null
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
/// ex: toSingle(preload:true) -> Loads all related objects
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
/// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
/// bool loadParents: if true, loads all parent objects until the object has no parent

/// <returns> DbLapTagging?
@override
Future<DbLapTagging?> toSingle(
    {bool preload = false,
    List<String>? preloadFields,
    bool loadParents = false,
    List<String>? loadedFields}) async {
  // Page size 1: only the first matching row is materialized.
  buildParameters(pSize: 1);
  final objFuture = _mnDbLapTagging!.toList(qparams);
  final data = await objFuture;
  DbLapTagging? obj;
  if (data.isNotEmpty) {
    obj = DbLapTagging.fromMap(data[0] as Map<String, dynamic>);

    // RELATIONSHIPS PRELOAD
    // Optionally fetch the parent DbTag / DbLap for this row.
    if (preload || loadParents) {
      loadedFields = loadedFields ?? [];
      if ((preloadFields == null ||
          loadParents ||
          preloadFields.contains('plDbTag'))) {
        obj.plDbTag =
            obj.plDbTag ?? await obj.getDbTag(loadParents: loadParents);
      }
      if ((preloadFields == null ||
          loadParents ||
          preloadFields.contains('plDbLap'))) {
        obj.plDbLap =
            obj.plDbLap ?? await obj.getDbLap(loadParents: loadParents);
      }
    } // END RELATIONSHIPS PRELOAD
  } else {
    obj = null;
  }
  return obj;
}
|
|
|
|
/// This method always returns [DbLapTagging]
|
|
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
|
|
/// ex: toSingle(preload:true) -> Loads all related objects
|
|
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
|
|
/// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
|
|
/// bool loadParents: if true, loads all parent objects until the object has no parent
|
|
|
|
/// <returns> DbLapTagging?
|
|
@override
|
|
Future<DbLapTagging> toSingleOrDefault(
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields}) async {
|
|
return await toSingle(
|
|
preload: preload,
|
|
preloadFields: preloadFields,
|
|
loadParents: loadParents,
|
|
loadedFields: loadedFields) ??
|
|
DbLapTagging();
|
|
}
|
|
|
|
/// This method returns int. [DbLapTagging]
|
|
/// <returns>int
|
|
@override
|
|
Future<int> toCount([VoidCallback Function(int c)? dblaptaggingCount]) async {
|
|
buildParameters();
|
|
qparams.selectColumns = ['COUNT(1) AS CNT'];
|
|
final dblaptaggingsFuture = await _mnDbLapTagging!.toList(qparams);
|
|
final int count = dblaptaggingsFuture[0]['CNT'] as int;
|
|
if (dblaptaggingCount != null) {
|
|
dblaptaggingCount(count);
|
|
}
|
|
return count;
|
|
}
|
|
|
|
/// This method returns List<DbLapTagging> [DbLapTagging]
|
|
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
|
|
/// ex: toList(preload:true) -> Loads all related objects
|
|
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
|
|
/// ex: toList(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
|
|
/// bool loadParents: if true, loads all parent objects until the object has no parent
|
|
|
|
/// <returns>List<DbLapTagging>
|
|
@override
|
|
Future<List<DbLapTagging>> toList(
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields}) async {
|
|
final data = await toMapList();
|
|
final List<DbLapTagging> dblaptaggingsData = await DbLapTagging.fromMapList(
|
|
data,
|
|
preload: preload,
|
|
preloadFields: preloadFields,
|
|
loadParents: loadParents,
|
|
loadedFields: loadedFields,
|
|
setDefaultValues: qparams.selectColumns == null);
|
|
return dblaptaggingsData;
|
|
}
|
|
|
|
/// This method returns Json String [DbLapTagging]
|
|
@override
|
|
Future<String> toJson() async {
|
|
final list = <dynamic>[];
|
|
final data = await toList();
|
|
for (var o in data) {
|
|
list.add(o.toMap(forJson: true));
|
|
}
|
|
return json.encode(list);
|
|
}
|
|
|
|
/// This method returns Json String. [DbLapTagging]
|
|
@override
|
|
Future<String> toJsonWithChilds() async {
|
|
final list = <dynamic>[];
|
|
final data = await toList();
|
|
for (var o in data) {
|
|
list.add(await o.toMapWithChildren(false, true));
|
|
}
|
|
return json.encode(list);
|
|
}
|
|
|
|
/// This method returns List<dynamic>. [DbLapTagging]
|
|
/// <returns>List<dynamic>
|
|
@override
|
|
Future<List<dynamic>> toMapList() async {
|
|
buildParameters();
|
|
return await _mnDbLapTagging!.toList(qparams);
|
|
}
|
|
|
|
/// This method returns Primary Key List SQL and Parameters retVal = Map<String,dynamic>. [DbLapTagging]
|
|
/// retVal['sql'] = SQL statement string, retVal['args'] = whereArguments List<dynamic>;
|
|
/// <returns>List<String>
|
|
@override
|
|
Map<String, dynamic> toListPrimaryKeySQL([bool buildParams = true]) {
|
|
final Map<String, dynamic> _retVal = <String, dynamic>{};
|
|
if (buildParams) {
|
|
buildParameters();
|
|
}
|
|
_retVal['sql'] =
|
|
'SELECT `id` FROM lapTaggings WHERE ${qparams.whereString}';
|
|
_retVal['args'] = qparams.whereArguments;
|
|
return _retVal;
|
|
}
|
|
|
|
/// This method returns Primary Key List<int>.
|
|
/// <returns>List<int>
|
|
@override
|
|
Future<List<int>> toListPrimaryKey([bool buildParams = true]) async {
|
|
if (buildParams) {
|
|
buildParameters();
|
|
}
|
|
final List<int> idData = <int>[];
|
|
qparams.selectColumns = ['id'];
|
|
final idFuture = await _mnDbLapTagging!.toList(qparams);
|
|
|
|
final int count = idFuture.length;
|
|
for (int i = 0; i < count; i++) {
|
|
idData.add(idFuture[i]['id'] as int);
|
|
}
|
|
return idData;
|
|
}
|
|
|
|
/// Returns List<dynamic> for selected columns. Use this method for 'groupBy' with min,max,avg.. [DbLapTagging]
|
|
/// Sample usage: (see EXAMPLE 4.2 at https://github.com/hhtokpinar/sqfEntity#group-by)
|
|
@override
|
|
Future<List<dynamic>> toListObject() async {
|
|
buildParameters();
|
|
|
|
final objectFuture = _mnDbLapTagging!.toList(qparams);
|
|
|
|
final List<dynamic> objectsData = <dynamic>[];
|
|
final data = await objectFuture;
|
|
final int count = data.length;
|
|
for (int i = 0; i < count; i++) {
|
|
objectsData.add(data[i]);
|
|
}
|
|
return objectsData;
|
|
}
|
|
|
|
/// Returns List<String> for selected first column
|
|
/// Sample usage: await DbLapTagging.select(columnsToSelect: ['columnName']).toListString()
|
|
@override
|
|
Future<List<String>> toListString(
|
|
[VoidCallback Function(List<String> o)? listString]) async {
|
|
buildParameters();
|
|
|
|
final objectFuture = _mnDbLapTagging!.toList(qparams);
|
|
|
|
final List<String> objectsData = <String>[];
|
|
final data = await objectFuture;
|
|
final int count = data.length;
|
|
for (int i = 0; i < count; i++) {
|
|
objectsData.add(data[i][qparams.selectColumns![0]].toString());
|
|
}
|
|
if (listString != null) {
|
|
listString(objectsData);
|
|
}
|
|
return objectsData;
|
|
}
|
|
}
|
|
// endregion DbLapTaggingFilterBuilder
|
|
|
|
// region DbLapTaggingFields
|
|
/// Lazily-created static [TableField] descriptors for the columns of the
/// `lapTaggings` table, for use when composing raw queries.
class DbLapTaggingFields {
  static TableField? _fId;
  static TableField get id {
    _fId ??= SqlSyntax.setField(_fId, 'id', DbType.integer);
    return _fId!;
  }

  static TableField? _fSystem;
  static TableField get system {
    _fSystem ??= SqlSyntax.setField(_fSystem, 'system', DbType.bool);
    return _fSystem!;
  }

  static TableField? _fTagsId;
  static TableField get tagsId {
    _fTagsId ??= SqlSyntax.setField(_fTagsId, 'tagsId', DbType.integer);
    return _fTagsId!;
  }

  static TableField? _fLapsId;
  static TableField get lapsId {
    _fLapsId ??= SqlSyntax.setField(_fLapsId, 'lapsId', DbType.integer);
    return _fLapsId!;
  }
}
|
|
// endregion DbLapTaggingFields
|
|
|
|
//region DbLapTaggingManager
|
|
/// Low-level data-access manager for the `lapTaggings` table, bound to the
/// [DbEncrateia] database connection.
class DbLapTaggingManager extends SqfEntityProvider {
  static const String _tableName = 'lapTaggings';
  static const List<String> _primaryKeyList = ['id'];
  static const String _whereStr = 'id=?';

  DbLapTaggingManager()
      : super(DbEncrateia(),
            tableName: _tableName,
            primaryKeyList: _primaryKeyList,
            whereStr: _whereStr);
}
|
|
|
|
//endregion DbLapTaggingManager
|
|
// region DbActivityTagging
|
|
/// Generated row class for the `activityTaggings` join table, linking a
/// [DbTag] (via [tagsId]) to a [DbActivity] (via [activitiesId]).
///
/// Review fixes over the generator output: idiomatic `isNotEmpty` in
/// [getById], dead sub-conditions removed from [toMap]/[toMapWithChildren],
/// a needless `await` removed from [fromJson], and a duplicated
/// normalization removed from [fromMapList]. Behavior is unchanged.
class DbActivityTagging extends TableBase {
  DbActivityTagging({this.id, this.system, this.tagsId, this.activitiesId}) {
    _setDefaultValues();
    softDeleteActivated = false;
  }
  DbActivityTagging.withFields(this.system, this.tagsId, this.activitiesId) {
    _setDefaultValues();
  }
  DbActivityTagging.withId(
      this.id, this.system, this.tagsId, this.activitiesId) {
    _setDefaultValues();
  }
  // fromMap v2.0
  DbActivityTagging.fromMap(Map<String, dynamic> o,
      {bool setDefaultValues = true}) {
    if (setDefaultValues) {
      _setDefaultValues();
    }
    id = int.tryParse(o['id'].toString());
    if (o['system'] != null) {
      // SQLite stores booleans as 0/1; JSON sources may carry true/false.
      system =
          o['system'].toString() == '1' || o['system'].toString() == 'true';
    }
    tagsId = int.tryParse(o['tagsId'].toString());

    activitiesId = int.tryParse(o['activitiesId'].toString());

    // RELATIONSHIPS FromMAP
    plDbTag = o['dbTag'] != null
        ? DbTag.fromMap(o['dbTag'] as Map<String, dynamic>)
        : null;
    plDbActivity = o['dbActivity'] != null
        ? DbActivity.fromMap(o['dbActivity'] as Map<String, dynamic>)
        : null;
    // END RELATIONSHIPS FromMAP
  }
  // FIELDS (DbActivityTagging)
  int? id;
  bool? system;
  int? tagsId;
  int? activitiesId;

  // end FIELDS (DbActivityTagging)

  // RELATIONSHIPS (DbActivityTagging)
  /// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbTag', 'plField2'..]) or so on..
  DbTag? plDbTag;

  /// get DbTag By TagsId
  Future<DbTag?> getDbTag(
      {bool loadParents = false, List<String>? loadedFields}) async {
    final _obj = await DbTag()
        .getById(tagsId, loadParents: loadParents, loadedFields: loadedFields);
    return _obj;
  }

  /// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbActivity', 'plField2'..]) or so on..
  DbActivity? plDbActivity;

  /// get DbActivity By ActivitiesId
  Future<DbActivity?> getDbActivity(
      {bool loadParents = false, List<String>? loadedFields}) async {
    final _obj = await DbActivity().getById(activitiesId,
        loadParents: loadParents, loadedFields: loadedFields);
    return _obj;
  }
  // END RELATIONSHIPS (DbActivityTagging)

  static const bool _softDeleteActivated = false;
  DbActivityTaggingManager? __mnDbActivityTagging;

  // Lazily-created manager shared by all data-access calls on this instance.
  DbActivityTaggingManager get _mnDbActivityTagging {
    return __mnDbActivityTagging =
        __mnDbActivityTagging ?? DbActivityTaggingManager();
  }

  // METHODS
  @override
  Map<String, dynamic> toMap(
      {bool forQuery = false, bool forJson = false, bool forView = false}) {
    final map = <String, dynamic>{};
    map['id'] = id;
    if (system != null) {
      map['system'] = forQuery ? (system! ? 1 : 0) : system;
    } else if (!forView) {
      // `system` is null here, so the generated `system != null || !forView`
      // condition reduces to `!forView`: emit explicit nulls except in views.
      map['system'] = null;
    }
    if (tagsId != null) {
      // In view mode show the parent tag's name when it was preloaded.
      map['tagsId'] = forView
          ? plDbTag == null
              ? tagsId
              : plDbTag!.name
          : tagsId;
    } else if (!forView) {
      map['tagsId'] = null;
    }
    if (activitiesId != null) {
      // In view mode show the parent activity's state when it was preloaded.
      map['activitiesId'] = forView
          ? plDbActivity == null
              ? activitiesId
              : plDbActivity!.state
          : activitiesId;
    } else if (!forView) {
      map['activitiesId'] = null;
    }

    return map;
  }

  @override
  Future<Map<String, dynamic>> toMapWithChildren(
      [bool forQuery = false,
      bool forJson = false,
      bool forView = false]) async {
    // Same mapping as [toMap]; kept async for interface compatibility
    // (this table exposes no child collections to serialize).
    final map = <String, dynamic>{};
    map['id'] = id;
    if (system != null) {
      map['system'] = forQuery ? (system! ? 1 : 0) : system;
    } else if (!forView) {
      map['system'] = null;
    }
    if (tagsId != null) {
      map['tagsId'] = forView
          ? plDbTag == null
              ? tagsId
              : plDbTag!.name
          : tagsId;
    } else if (!forView) {
      map['tagsId'] = null;
    }
    if (activitiesId != null) {
      map['activitiesId'] = forView
          ? plDbActivity == null
              ? activitiesId
              : plDbActivity!.state
          : activitiesId;
    } else if (!forView) {
      map['activitiesId'] = null;
    }

    return map;
  }

  /// This method returns Json String [DbActivityTagging]
  @override
  String toJson() {
    return json.encode(toMap(forJson: true));
  }

  /// This method returns Json String [DbActivityTagging]
  @override
  Future<String> toJsonWithChilds() async {
    return json.encode(await toMapWithChildren(false, true));
  }

  @override
  List<dynamic> toArgs() {
    return [system, tagsId, activitiesId];
  }

  @override
  List<dynamic> toArgsWithIds() {
    return [id, system, tagsId, activitiesId];
  }

  /// Fetches a JSON array from [uri] and decodes it into a list of
  /// [DbActivityTagging]; returns null on any transport or parse error.
  static Future<List<DbActivityTagging>?> fromWebUrl(Uri uri,
      {Map<String, String>? headers}) async {
    try {
      final response = await http.get(uri, headers: headers);
      return await fromJson(response.body);
    } catch (e) {
      debugPrint(
          'SQFENTITY ERROR DbActivityTagging.fromWebUrl: ErrorMessage: ${e.toString()}');
      return null;
    }
  }

  /// POSTs this row's JSON representation to [uri].
  Future<http.Response> postUrl(Uri uri, {Map<String, String>? headers}) {
    return http.post(uri, headers: headers, body: toJson());
  }

  /// Decodes a JSON array into [DbActivityTagging] objects; malformed input
  /// is logged and yields an empty list.
  static Future<List<DbActivityTagging>> fromJson(String jsonBody) async {
    // json.decode is synchronous; the generator's needless `await` is removed.
    final Iterable list = json.decode(jsonBody) as Iterable;
    var objList = <DbActivityTagging>[];
    try {
      objList = list
          .map((dbactivitytagging) => DbActivityTagging.fromMap(
              dbactivitytagging as Map<String, dynamic>))
          .toList();
    } catch (e) {
      debugPrint(
          'SQFENTITY ERROR DbActivityTagging.fromJson: ErrorMessage: ${e.toString()}');
    }
    return objList;
  }

  /// Converts raw row maps into [DbActivityTagging] objects, optionally
  /// preloading the parent [DbTag]/[DbActivity] for each row.
  static Future<List<DbActivityTagging>> fromMapList(List<dynamic> data,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields,
      bool setDefaultValues = true}) async {
    final List<DbActivityTagging> objList = <DbActivityTagging>[];
    loadedFields = loadedFields ?? [];
    for (final map in data) {
      final obj = DbActivityTagging.fromMap(map as Map<String, dynamic>,
          setDefaultValues: setDefaultValues);

      // RELATIONSHIPS PRELOAD
      // (the generator's duplicate `loadedFields = loadedFields ?? []` inside
      // the loop was removed; it is guaranteed non-null above)
      if (preload || loadParents) {
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbTag'))) {
          obj.plDbTag =
              obj.plDbTag ?? await obj.getDbTag(loadParents: loadParents);
        }
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbActivity'))) {
          obj.plDbActivity = obj.plDbActivity ??
              await obj.getDbActivity(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD

      objList.add(obj);
    }
    return objList;
  }

  /// returns DbActivityTagging by ID if exist, otherwise returns null
  /// Primary Keys: int? id
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: getById(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: getById(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns>returns [DbActivityTagging] if exist, otherwise returns null
  Future<DbActivityTagging?> getById(int? id,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    if (id == null) {
      return null;
    }
    DbActivityTagging? obj;
    final data = await _mnDbActivityTagging.getById([id]);
    // `isNotEmpty` replaces the generator's non-idiomatic `data.length != 0`.
    if (data.isNotEmpty) {
      obj = DbActivityTagging.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbTag'))) {
          obj.plDbTag =
              obj.plDbTag ?? await obj.getDbTag(loadParents: loadParents);
        }
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbActivity'))) {
          obj.plDbActivity = obj.plDbActivity ??
              await obj.getDbActivity(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }

  /// Saves the (DbActivityTagging) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
  /// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
  /// <returns>Returns id
  @override
  Future<int?> save({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      id = await _mnDbActivityTagging.insert(this, ignoreBatch);
    } else {
      await _mnDbActivityTagging.update(this);
    }

    return id;
  }

  /// Saves the (DbActivityTagging) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
  /// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
  /// <returns>Returns id
  @override
  Future<int?> saveOrThrow({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      id = await _mnDbActivityTagging.insertOrThrow(this, ignoreBatch);

      isInsert = true;
    } else {
      // id= await _upsert(); // removed in sqfentity_gen 1.3.0+6
      await _mnDbActivityTagging.updateOrThrow(this);
    }

    return id;
  }

  /// saveAs DbActivityTagging. Returns a new Primary Key value of DbActivityTagging

  /// <returns>Returns a new Primary Key value of DbActivityTagging
  @override
  Future<int?> saveAs({bool ignoreBatch = true}) async {
    id = null;

    return save(ignoreBatch: ignoreBatch);
  }

  /// saveAll method saves the sent List<DbActivityTagging> as a bulk in one transaction
  /// Returns a <List<BoolResult>>
  static Future<List<dynamic>> saveAll(
      List<DbActivityTagging> dbactivitytaggings,
      {bool? exclusive,
      bool? noResult,
      bool? continueOnError}) async {
    List<dynamic>? result = [];
    // If there is no open transaction, start one
    final isStartedBatch = await DbEncrateia().batchStart();
    for (final obj in dbactivitytaggings) {
      await obj.save(ignoreBatch: false);
    }
    if (!isStartedBatch) {
      result = await DbEncrateia().batchCommit(
          exclusive: exclusive,
          noResult: noResult,
          continueOnError: continueOnError);
      // NOTE(review): assumes the commit result aligns index-for-index with
      // the saved objects; if noResult==true the result list is empty and
      // this loop would throw for new (id==null) rows — verify callers never
      // combine noResult with unsaved rows.
      for (int i = 0; i < dbactivitytaggings.length; i++) {
        if (dbactivitytaggings[i].id == null) {
          dbactivitytaggings[i].id = result![i] as int;
        }
      }
    }
    return result!;
  }

  /// Updates if the record exists, otherwise adds a new row
  /// <returns>Returns id
  @override
  Future<int?> upsert({bool ignoreBatch = true}) async {
    try {
      final result = await _mnDbActivityTagging.rawInsert(
          'INSERT OR REPLACE INTO activityTaggings (id, system, tagsId, activitiesId) VALUES (?,?,?,?)',
          [id, system, tagsId, activitiesId],
          ignoreBatch);
      if (result! > 0) {
        saveResult = BoolResult(
            success: true,
            successMessage: 'DbActivityTagging id=$id updated successfully');
      } else {
        saveResult = BoolResult(
            success: false,
            errorMessage: 'DbActivityTagging id=$id did not update');
      }
      return id;
    } catch (e) {
      saveResult = BoolResult(
          success: false,
          errorMessage:
              'DbActivityTagging Save failed. Error: ${e.toString()}');
      return null;
    }
  }

  /// inserts or replaces the sent List<<DbActivityTagging>> as a bulk in one transaction.
  /// upsertAll() method is faster then saveAll() method. upsertAll() should be used when you are sure that the primary key is greater than zero
  /// Returns a BoolCommitResult
  @override
  Future<BoolCommitResult> upsertAll(List<DbActivityTagging> dbactivitytaggings,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    final results = await _mnDbActivityTagging.rawInsertAll(
        'INSERT OR REPLACE INTO activityTaggings (id, system, tagsId, activitiesId) VALUES (?,?,?,?)',
        dbactivitytaggings,
        exclusive: exclusive,
        noResult: noResult,
        continueOnError: continueOnError);
    return results;
  }

  /// Deletes DbActivityTagging

  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    debugPrint('SQFENTITIY: delete DbActivityTagging invoked (id=$id)');
    // Soft deleting is disabled for this table, so this always hard-deletes.
    if (!_softDeleteActivated || hardDelete) {
      return _mnDbActivityTagging
          .delete(QueryParams(whereString: 'id=?', whereArguments: [id]));
    } else {
      return _mnDbActivityTagging.updateBatch(
          QueryParams(whereString: 'id=?', whereArguments: [id]),
          {'isDeleted': 1});
    }
  }

  @override
  Future<BoolResult> recover([bool recoverChilds = true]) {
    // not implemented because:
    final msg =
        'set useSoftDeleting:true in the table definition of [DbActivityTagging] to use this feature';
    throw UnimplementedError(msg);
  }

  @override
  DbActivityTaggingFilterBuilder select(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    return DbActivityTaggingFilterBuilder(this, getIsDeleted)
      ..qparams.selectColumns = columnsToSelect;
  }

  @override
  DbActivityTaggingFilterBuilder distinct(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    return DbActivityTaggingFilterBuilder(this, getIsDeleted)
      ..qparams.selectColumns = columnsToSelect
      ..qparams.distinct = true;
  }

  // Foreign keys default to 0 (not null) so raw inserts never violate NOT NULL.
  void _setDefaultValues() {
    tagsId = tagsId ?? 0;
    activitiesId = activitiesId ?? 0;
  }

  @override
  void rollbackPk() {
    if (isInsert == true) {
      id = null;
    }
  }

  // END METHODS
  // BEGIN CUSTOM CODE
  /*
      you can define customCode property of your SqfEntityTable constant. For example:
      const tablePerson = SqfEntityTable(
      tableName: 'person',
      primaryKeyName: 'id',
      primaryKeyType: PrimaryKeyType.integer_auto_incremental,
      fields: [
        SqfEntityField('firstName', DbType.text),
        SqfEntityField('lastName', DbType.text),
      ],
      customCode: '''
       String fullName()
       {
         return '$firstName $lastName';
       }
      ''');
     */
  // END CUSTOM CODE
}
|
|
// endregion DbActivityTagging
|
|
|
|
// region DbActivityTaggingField
|
|
/// Typed column wrapper used by [DbActivityTaggingFilterBuilder]; every
/// filter operator simply delegates to [FilterBase] and re-types the result
/// so that fluent chains stay strongly typed.
class DbActivityTaggingField extends FilterBase {
  DbActivityTaggingField(DbActivityTaggingFilterBuilder dbactivitytaggingFB)
      : super(dbactivitytaggingFB);

  @override
  DbActivityTaggingFilterBuilder equals(dynamic pValue) =>
      super.equals(pValue) as DbActivityTaggingFilterBuilder;

  @override
  DbActivityTaggingFilterBuilder equalsOrNull(dynamic pValue) =>
      super.equalsOrNull(pValue) as DbActivityTaggingFilterBuilder;

  @override
  DbActivityTaggingFilterBuilder isNull() =>
      super.isNull() as DbActivityTaggingFilterBuilder;

  @override
  DbActivityTaggingFilterBuilder contains(dynamic pValue) =>
      super.contains(pValue) as DbActivityTaggingFilterBuilder;

  @override
  DbActivityTaggingFilterBuilder startsWith(dynamic pValue) =>
      super.startsWith(pValue) as DbActivityTaggingFilterBuilder;

  @override
  DbActivityTaggingFilterBuilder endsWith(dynamic pValue) =>
      super.endsWith(pValue) as DbActivityTaggingFilterBuilder;

  @override
  DbActivityTaggingFilterBuilder between(dynamic pFirst, dynamic pLast) =>
      super.between(pFirst, pLast) as DbActivityTaggingFilterBuilder;

  @override
  DbActivityTaggingFilterBuilder greaterThan(dynamic pValue) =>
      super.greaterThan(pValue) as DbActivityTaggingFilterBuilder;

  @override
  DbActivityTaggingFilterBuilder lessThan(dynamic pValue) =>
      super.lessThan(pValue) as DbActivityTaggingFilterBuilder;

  @override
  DbActivityTaggingFilterBuilder greaterThanOrEquals(dynamic pValue) =>
      super.greaterThanOrEquals(pValue) as DbActivityTaggingFilterBuilder;

  @override
  DbActivityTaggingFilterBuilder lessThanOrEquals(dynamic pValue) =>
      super.lessThanOrEquals(pValue) as DbActivityTaggingFilterBuilder;

  @override
  DbActivityTaggingFilterBuilder inValues(dynamic pValue) =>
      super.inValues(pValue) as DbActivityTaggingFilterBuilder;

  /// Negates the next filter operator.
  @override
  DbActivityTaggingField get not => super.not as DbActivityTaggingField;
}
|
|
// endregion DbActivityTaggingField
|
|
|
|
// region DbActivityTaggingFilterBuilder
|
|
class DbActivityTaggingFilterBuilder extends ConjunctionBase {
|
|
  // Builder constructed from an entity instance (see DbActivityTagging.select).
  DbActivityTaggingFilterBuilder(DbActivityTagging obj, bool? getIsDeleted)
      : super(obj, getIsDeleted) {
    // Reuse the manager instance of the entity that created this builder.
    _mnDbActivityTagging = obj._mnDbActivityTagging;
    _softDeleteActivated = obj.softDeleteActivated;
  }

  // Soft-delete flag copied from the owning entity (false for this table).
  bool _softDeleteActivated = false;
  DbActivityTaggingManager? _mnDbActivityTagging;
|
|
|
|
  /// put the sql keyword 'AND'
  // Delegates to [ConjunctionBase] and returns `this` to keep the chain typed.
  @override
  DbActivityTaggingFilterBuilder get and {
    super.and;
    return this;
  }
|
|
|
|
  /// put the sql keyword 'OR'
  // Delegates to [ConjunctionBase] and returns `this` to keep the chain typed.
  @override
  DbActivityTaggingFilterBuilder get or {
    super.or;
    return this;
  }
|
|
|
|
  /// open parentheses
  // Delegates to [ConjunctionBase] and returns `this` to keep the chain typed.
  @override
  DbActivityTaggingFilterBuilder get startBlock {
    super.startBlock;
    return this;
  }
|
|
|
|
  /// String whereCriteria, write raw query without 'where' keyword. Like this: 'field1 like 'test%' and field2 = 3'
  // Raw criteria pass straight through to the underlying query builder.
  @override
  DbActivityTaggingFilterBuilder where(String? whereCriteria,
      {dynamic parameterValue}) {
    super.where(whereCriteria, parameterValue: parameterValue);
    return this;
  }
|
|
|
|
  /// page = page number,
  /// pagesize = row(s) per page
  // Translates paging into LIMIT/OFFSET on the query parameters.
  @override
  DbActivityTaggingFilterBuilder page(int page, int pagesize) {
    super.page(page, pagesize);
    return this;
  }
|
|
|
|
  /// int count = LIMIT
  // Caps the result set via SQL LIMIT.
  @override
  DbActivityTaggingFilterBuilder top(int count) {
    super.top(count);
    return this;
  }
|
|
|
|
  /// close parentheses
  // Delegates to [ConjunctionBase] and returns `this` to keep the chain typed.
  @override
  DbActivityTaggingFilterBuilder get endBlock {
    super.endBlock;
    return this;
  }
|
|
|
|
  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  // Appends an ascending ORDER BY clause.
  @override
  DbActivityTaggingFilterBuilder orderBy(dynamic argFields) {
    super.orderBy(argFields);
    return this;
  }
|
|
|
|
  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  // Appends a descending ORDER BY clause.
  @override
  DbActivityTaggingFilterBuilder orderByDesc(dynamic argFields) {
    super.orderByDesc(argFields);
    return this;
  }
|
|
|
|
  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  // Appends a GROUP BY clause.
  @override
  DbActivityTaggingFilterBuilder groupBy(dynamic argFields) {
    super.groupBy(argFields);
    return this;
  }
|
|
|
|
  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  // Appends a HAVING clause (pairs with groupBy).
  @override
  DbActivityTaggingFilterBuilder having(dynamic argFields) {
    super.having(argFields);
    return this;
  }
|
|
|
|
DbActivityTaggingField _setField(
|
|
DbActivityTaggingField? field, String colName, DbType dbtype) {
|
|
return DbActivityTaggingField(this)
|
|
..param = DbParameter(
|
|
dbType: dbtype, columnName: colName, wStartBlock: openedBlock);
|
|
}
|
|
|
|
DbActivityTaggingField? _id;
|
|
DbActivityTaggingField get id {
|
|
return _id = _setField(_id, 'id', DbType.integer);
|
|
}
|
|
|
|
DbActivityTaggingField? _system;
|
|
DbActivityTaggingField get system {
|
|
return _system = _setField(_system, 'system', DbType.bool);
|
|
}
|
|
|
|
DbActivityTaggingField? _tagsId;
|
|
DbActivityTaggingField get tagsId {
|
|
return _tagsId = _setField(_tagsId, 'tagsId', DbType.integer);
|
|
}
|
|
|
|
DbActivityTaggingField? _activitiesId;
|
|
DbActivityTaggingField get activitiesId {
|
|
return _activitiesId =
|
|
_setField(_activitiesId, 'activitiesId', DbType.integer);
|
|
}
|
|
|
|
/// Deletes List<DbActivityTagging> bulk by query
|
|
///
|
|
/// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
|
|
@override
|
|
Future<BoolResult> delete([bool hardDelete = false]) async {
|
|
buildParameters();
|
|
var r = BoolResult(success: false);
|
|
|
|
if (_softDeleteActivated && !hardDelete) {
|
|
r = await _mnDbActivityTagging!.updateBatch(qparams, {'isDeleted': 1});
|
|
} else {
|
|
r = await _mnDbActivityTagging!.delete(qparams);
|
|
}
|
|
return r;
|
|
}
|
|
|
|
  /// using:
  /// update({'fieldName': Value})
  /// fieldName must be String. Value is dynamic, it can be any of the (int, bool, String.. )
  @override
  Future<BoolResult> update(Map<String, dynamic> values) {
    buildParameters();
    // SQLite UPDATE has no LIMIT/OFFSET, so when paging is set, restrict the
    // update to the ids chosen by an equivalent sub-select instead.
    if (qparams.limit! > 0 || qparams.offset! > 0) {
      qparams.whereString =
          'id IN (SELECT id from activityTaggings ${qparams.whereString!.isNotEmpty ? 'WHERE ${qparams.whereString}' : ''}${qparams.limit! > 0 ? ' LIMIT ${qparams.limit}' : ''}${qparams.offset! > 0 ? ' OFFSET ${qparams.offset}' : ''})';
    }
    return _mnDbActivityTagging!.updateBatch(qparams, values);
  }
|
|
|
|
  /// This method always returns [DbActivityTagging] Obj if exist, otherwise returns null
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toSingle(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns> DbActivityTagging?
  @override
  Future<DbActivityTagging?> toSingle(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    // Limit the generated query to a single row before running it.
    buildParameters(pSize: 1);
    final objFuture = _mnDbActivityTagging!.toList(qparams);
    final data = await objFuture;
    DbActivityTagging? obj;
    if (data.isNotEmpty) {
      obj = DbActivityTagging.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD
      // Parent objects are fetched only when requested; the `??` guards keep
      // any relation that fromMap already populated from being refetched.
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbTag'))) {
          obj.plDbTag =
              obj.plDbTag ?? await obj.getDbTag(loadParents: loadParents);
        }
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbActivity'))) {
          obj.plDbActivity = obj.plDbActivity ??
              await obj.getDbActivity(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }
|
|
|
|
/// This method always returns [DbActivityTagging]
|
|
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
|
|
/// ex: toSingle(preload:true) -> Loads all related objects
|
|
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
|
|
/// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
|
|
/// bool loadParents: if true, loads all parent objects until the object has no parent
|
|
|
|
/// <returns> DbActivityTagging?
|
|
@override
|
|
Future<DbActivityTagging> toSingleOrDefault(
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields}) async {
|
|
return await toSingle(
|
|
preload: preload,
|
|
preloadFields: preloadFields,
|
|
loadParents: loadParents,
|
|
loadedFields: loadedFields) ??
|
|
DbActivityTagging();
|
|
}
|
|
|
|
/// This method returns int. [DbActivityTagging]
|
|
/// <returns>int
|
|
@override
|
|
Future<int> toCount(
|
|
[VoidCallback Function(int c)? dbactivitytaggingCount]) async {
|
|
buildParameters();
|
|
qparams.selectColumns = ['COUNT(1) AS CNT'];
|
|
final dbactivitytaggingsFuture =
|
|
await _mnDbActivityTagging!.toList(qparams);
|
|
final int count = dbactivitytaggingsFuture[0]['CNT'] as int;
|
|
if (dbactivitytaggingCount != null) {
|
|
dbactivitytaggingCount(count);
|
|
}
|
|
return count;
|
|
}
|
|
|
|
/// This method returns List<DbActivityTagging> [DbActivityTagging]
|
|
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
|
|
/// ex: toList(preload:true) -> Loads all related objects
|
|
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
|
|
/// ex: toList(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
|
|
/// bool loadParents: if true, loads all parent objects until the object has no parent
|
|
|
|
/// <returns>List<DbActivityTagging>
|
|
@override
|
|
Future<List<DbActivityTagging>> toList(
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields}) async {
|
|
final data = await toMapList();
|
|
final List<DbActivityTagging> dbactivitytaggingsData =
|
|
await DbActivityTagging.fromMapList(data,
|
|
preload: preload,
|
|
preloadFields: preloadFields,
|
|
loadParents: loadParents,
|
|
loadedFields: loadedFields,
|
|
setDefaultValues: qparams.selectColumns == null);
|
|
return dbactivitytaggingsData;
|
|
}
|
|
|
|
/// This method returns Json String [DbActivityTagging]
|
|
@override
|
|
Future<String> toJson() async {
|
|
final list = <dynamic>[];
|
|
final data = await toList();
|
|
for (var o in data) {
|
|
list.add(o.toMap(forJson: true));
|
|
}
|
|
return json.encode(list);
|
|
}
|
|
|
|
/// This method returns Json String. [DbActivityTagging]
|
|
@override
|
|
Future<String> toJsonWithChilds() async {
|
|
final list = <dynamic>[];
|
|
final data = await toList();
|
|
for (var o in data) {
|
|
list.add(await o.toMapWithChildren(false, true));
|
|
}
|
|
return json.encode(list);
|
|
}
|
|
|
|
/// This method returns List<dynamic>. [DbActivityTagging]
|
|
/// <returns>List<dynamic>
|
|
@override
|
|
Future<List<dynamic>> toMapList() async {
|
|
buildParameters();
|
|
return await _mnDbActivityTagging!.toList(qparams);
|
|
}
|
|
|
|
/// This method returns Primary Key List SQL and Parameters retVal = Map<String,dynamic>. [DbActivityTagging]
|
|
/// retVal['sql'] = SQL statement string, retVal['args'] = whereArguments List<dynamic>;
|
|
/// <returns>List<String>
|
|
@override
|
|
Map<String, dynamic> toListPrimaryKeySQL([bool buildParams = true]) {
|
|
final Map<String, dynamic> _retVal = <String, dynamic>{};
|
|
if (buildParams) {
|
|
buildParameters();
|
|
}
|
|
_retVal['sql'] =
|
|
'SELECT `id` FROM activityTaggings WHERE ${qparams.whereString}';
|
|
_retVal['args'] = qparams.whereArguments;
|
|
return _retVal;
|
|
}
|
|
|
|
/// This method returns Primary Key List<int>.
|
|
/// <returns>List<int>
|
|
@override
|
|
Future<List<int>> toListPrimaryKey([bool buildParams = true]) async {
|
|
if (buildParams) {
|
|
buildParameters();
|
|
}
|
|
final List<int> idData = <int>[];
|
|
qparams.selectColumns = ['id'];
|
|
final idFuture = await _mnDbActivityTagging!.toList(qparams);
|
|
|
|
final int count = idFuture.length;
|
|
for (int i = 0; i < count; i++) {
|
|
idData.add(idFuture[i]['id'] as int);
|
|
}
|
|
return idData;
|
|
}
|
|
|
|
/// Returns List<dynamic> for selected columns. Use this method for 'groupBy' with min,max,avg.. [DbActivityTagging]
|
|
/// Sample usage: (see EXAMPLE 4.2 at https://github.com/hhtokpinar/sqfEntity#group-by)
|
|
@override
|
|
Future<List<dynamic>> toListObject() async {
|
|
buildParameters();
|
|
|
|
final objectFuture = _mnDbActivityTagging!.toList(qparams);
|
|
|
|
final List<dynamic> objectsData = <dynamic>[];
|
|
final data = await objectFuture;
|
|
final int count = data.length;
|
|
for (int i = 0; i < count; i++) {
|
|
objectsData.add(data[i]);
|
|
}
|
|
return objectsData;
|
|
}
|
|
|
|
/// Returns List<String> for selected first column
|
|
/// Sample usage: await DbActivityTagging.select(columnsToSelect: ['columnName']).toListString()
|
|
@override
|
|
Future<List<String>> toListString(
|
|
[VoidCallback Function(List<String> o)? listString]) async {
|
|
buildParameters();
|
|
|
|
final objectFuture = _mnDbActivityTagging!.toList(qparams);
|
|
|
|
final List<String> objectsData = <String>[];
|
|
final data = await objectFuture;
|
|
final int count = data.length;
|
|
for (int i = 0; i < count; i++) {
|
|
objectsData.add(data[i][qparams.selectColumns![0]].toString());
|
|
}
|
|
if (listString != null) {
|
|
listString(objectsData);
|
|
}
|
|
return objectsData;
|
|
}
|
|
}
|
|
// endregion DbActivityTaggingFilterBuilder
|
|
|
|
// region DbActivityTaggingFields
|
|
/// Static filter-field accessors for the activityTaggings table.
/// Each getter lazily creates and caches its [TableField] on first use.
class DbActivityTaggingFields {
  static TableField? _fId;
  static TableField get id =>
      _fId ??= SqlSyntax.setField(_fId, 'id', DbType.integer);

  static TableField? _fSystem;
  static TableField get system =>
      _fSystem ??= SqlSyntax.setField(_fSystem, 'system', DbType.bool);

  static TableField? _fTagsId;
  static TableField get tagsId =>
      _fTagsId ??= SqlSyntax.setField(_fTagsId, 'tagsId', DbType.integer);

  static TableField? _fActivitiesId;
  static TableField get activitiesId => _fActivitiesId ??=
      SqlSyntax.setField(_fActivitiesId, 'activitiesId', DbType.integer);
}
|
|
// endregion DbActivityTaggingFields
|
|
|
|
//region DbActivityTaggingManager
|
|
/// Persistence manager for the activityTaggings table, wired to the
/// [DbEncrateia] database with its table name and primary-key metadata.
class DbActivityTaggingManager extends SqfEntityProvider {
  static const String _tableName = 'activityTaggings';
  static const List<String> _primaryKeyList = ['id'];
  static const String _whereStr = 'id=?';

  DbActivityTaggingManager()
      : super(DbEncrateia(),
            tableName: _tableName,
            primaryKeyList: _primaryKeyList,
            whereStr: _whereStr);
}
|
|
|
|
//endregion DbActivityTaggingManager
|
|
// region DbIntervalTagging
|
|
/// Join entity that attaches a [DbTag] to a [DbInterval]
/// (table: intervalTaggings). Generated by SqfEntity; fixes applied here
/// should also be mirrored in the model so regeneration does not lose them.
class DbIntervalTagging extends TableBase {
  DbIntervalTagging({this.id, this.system, this.tagsId, this.intervalsId}) {
    _setDefaultValues();
    softDeleteActivated = false;
  }
  DbIntervalTagging.withFields(this.system, this.tagsId, this.intervalsId) {
    _setDefaultValues();
  }
  DbIntervalTagging.withId(
      this.id, this.system, this.tagsId, this.intervalsId) {
    _setDefaultValues();
  }

  // fromMap v2.0
  DbIntervalTagging.fromMap(Map<String, dynamic> o,
      {bool setDefaultValues = true}) {
    if (setDefaultValues) {
      _setDefaultValues();
    }
    id = int.tryParse(o['id'].toString());
    if (o['system'] != null) {
      // Accept both SQLite's 0/1 encoding and JSON's true/false strings.
      system =
          o['system'].toString() == '1' || o['system'].toString() == 'true';
    }
    tagsId = int.tryParse(o['tagsId'].toString());

    intervalsId = int.tryParse(o['intervalsId'].toString());

    // RELATIONSHIPS FromMAP
    plDbTag = o['dbTag'] != null
        ? DbTag.fromMap(o['dbTag'] as Map<String, dynamic>)
        : null;
    plDbInterval = o['dbInterval'] != null
        ? DbInterval.fromMap(o['dbInterval'] as Map<String, dynamic>)
        : null;
    // END RELATIONSHIPS FromMAP
  }

  // FIELDS (DbIntervalTagging)
  int? id;
  bool? system;
  int? tagsId;
  int? intervalsId;
  // end FIELDS (DbIntervalTagging)

  // RELATIONSHIPS (DbIntervalTagging)
  /// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbTag', 'plField2'..]) or so on..
  DbTag? plDbTag;

  /// get DbTag By TagsId
  Future<DbTag?> getDbTag(
      {bool loadParents = false, List<String>? loadedFields}) async {
    final _obj = await DbTag()
        .getById(tagsId, loadParents: loadParents, loadedFields: loadedFields);
    return _obj;
  }

  /// to load parent of items to this field, use preload parameter ex: toList(preload:true) or toSingle(preload:true) or getById(preload:true)
  /// You can also specify this object into certain preload fields!. Ex: toList(preload:true, preloadFields:['plDbInterval', 'plField2'..]) or so on..
  DbInterval? plDbInterval;

  /// get DbInterval By IntervalsId
  Future<DbInterval?> getDbInterval(
      {bool loadParents = false, List<String>? loadedFields}) async {
    final _obj = await DbInterval().getById(intervalsId,
        loadParents: loadParents, loadedFields: loadedFields);
    return _obj;
  }
  // END RELATIONSHIPS (DbIntervalTagging)

  static const bool _softDeleteActivated = false;
  DbIntervalTaggingManager? __mnDbIntervalTagging;

  /// Lazily-instantiated manager performing all persistence operations.
  DbIntervalTaggingManager get _mnDbIntervalTagging {
    return __mnDbIntervalTagging =
        __mnDbIntervalTagging ?? DbIntervalTaggingManager();
  }

  // METHODS

  /// Maps this object's fields to column values.
  ///
  /// forQuery: booleans encoded as 0/1 for SQL. forView: parent display
  /// values are substituted (tag name / interval id) and null fields omitted.
  @override
  Map<String, dynamic> toMap(
      {bool forQuery = false, bool forJson = false, bool forView = false}) {
    final map = <String, dynamic>{};
    map['id'] = id;
    if (system != null) {
      map['system'] = forQuery ? (system! ? 1 : 0) : system;
    } else if (!forView) {
      // Field is null in this branch, so only !forView decides inclusion
      // (the generated `system != null ||` subexpression was always false).
      map['system'] = null;
    }
    if (tagsId != null) {
      map['tagsId'] = forView
          ? plDbTag == null
              ? tagsId
              : plDbTag!.name
          : tagsId;
    } else if (!forView) {
      map['tagsId'] = null;
    }
    if (intervalsId != null) {
      map['intervalsId'] = forView
          ? plDbInterval == null
              ? intervalsId
              : plDbInterval!.id
          : intervalsId;
    } else if (!forView) {
      map['intervalsId'] = null;
    }

    return map;
  }

  /// Async variant of [toMap]. This entity has no child collections, so the
  /// result is identical to [toMap]; delegate instead of duplicating the
  /// per-field logic.
  @override
  Future<Map<String, dynamic>> toMapWithChildren(
      [bool forQuery = false,
      bool forJson = false,
      bool forView = false]) async {
    return toMap(forQuery: forQuery, forJson: forJson, forView: forView);
  }

  /// This method returns Json String [DbIntervalTagging]
  @override
  String toJson() {
    return json.encode(toMap(forJson: true));
  }

  /// This method returns Json String [DbIntervalTagging]
  @override
  Future<String> toJsonWithChilds() async {
    return json.encode(await toMapWithChildren(false, true));
  }

  /// Column values without the primary key, in table-declaration order.
  @override
  List<dynamic> toArgs() {
    return [system, tagsId, intervalsId];
  }

  /// Column values including the primary key (for INSERT OR REPLACE).
  @override
  List<dynamic> toArgsWithIds() {
    return [id, system, tagsId, intervalsId];
  }

  /// Fetches a JSON array from [uri] and decodes it into objects.
  /// Returns null (after logging) on any network or decode failure.
  static Future<List<DbIntervalTagging>?> fromWebUrl(Uri uri,
      {Map<String, String>? headers}) async {
    try {
      final response = await http.get(uri, headers: headers);
      return await fromJson(response.body);
    } catch (e) {
      debugPrint(
          'SQFENTITY ERROR DbIntervalTagging.fromWebUrl: ErrorMessage: ${e.toString()}');
      return null;
    }
  }

  /// POSTs this object as a JSON body to [uri].
  Future<http.Response> postUrl(Uri uri, {Map<String, String>? headers}) {
    return http.post(uri, headers: headers, body: toJson());
  }

  /// Decodes a JSON array into a list of [DbIntervalTagging].
  /// Malformed elements are logged and yield an empty list.
  static Future<List<DbIntervalTagging>> fromJson(String jsonBody) async {
    final Iterable list = await json.decode(jsonBody) as Iterable;
    var objList = <DbIntervalTagging>[];
    try {
      objList = list
          .map((dbintervaltagging) => DbIntervalTagging.fromMap(
              dbintervaltagging as Map<String, dynamic>))
          .toList();
    } catch (e) {
      debugPrint(
          'SQFENTITY ERROR DbIntervalTagging.fromJson: ErrorMessage: ${e.toString()}');
    }
    return objList;
  }

  /// Converts raw query rows into objects, optionally preloading the
  /// [plDbTag]/[plDbInterval] parents for each row.
  static Future<List<DbIntervalTagging>> fromMapList(List<dynamic> data,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields,
      bool setDefaultValues = true}) async {
    final List<DbIntervalTagging> objList = <DbIntervalTagging>[];
    loadedFields = loadedFields ?? [];
    for (final map in data) {
      final obj = DbIntervalTagging.fromMap(map as Map<String, dynamic>,
          setDefaultValues: setDefaultValues);

      // RELATIONSHIPS PRELOAD
      // (the duplicate `loadedFields = loadedFields ?? []` inside the loop
      // was removed; it is already normalized above.)
      if (preload || loadParents) {
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbTag'))) {
          obj.plDbTag =
              obj.plDbTag ?? await obj.getDbTag(loadParents: loadParents);
        }
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbInterval'))) {
          obj.plDbInterval = obj.plDbInterval ??
              await obj.getDbInterval(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD

      objList.add(obj);
    }
    return objList;
  }

  /// returns DbIntervalTagging by ID if exist, otherwise returns null
  /// Primary Keys: int? id
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// bool loadParents: if true, loads all parent objects until the object has no parent
  /// <returns>returns [DbIntervalTagging] if exist, otherwise returns null
  Future<DbIntervalTagging?> getById(int? id,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    if (id == null) {
      return null;
    }
    DbIntervalTagging? obj;
    final data = await _mnDbIntervalTagging.getById([id]);
    if (data.isNotEmpty) {
      obj = DbIntervalTagging.fromMap(data[0] as Map<String, dynamic>);

      // RELATIONSHIPS PRELOAD
      if (preload || loadParents) {
        loadedFields = loadedFields ?? [];
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbTag'))) {
          obj.plDbTag =
              obj.plDbTag ?? await obj.getDbTag(loadParents: loadParents);
        }
        if ((preloadFields == null ||
            loadParents ||
            preloadFields.contains('plDbInterval'))) {
          obj.plDbInterval = obj.plDbInterval ??
              await obj.getDbInterval(loadParents: loadParents);
        }
      } // END RELATIONSHIPS PRELOAD
    } else {
      obj = null;
    }
    return obj;
  }

  /// Saves the object: inserts (returning the new id) when id is null or 0,
  /// otherwise updates the existing row.
  /// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
  @override
  Future<int?> save({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      id = await _mnDbIntervalTagging.insert(this, ignoreBatch);
    } else {
      await _mnDbIntervalTagging.update(this);
    }

    return id;
  }

  /// Like [save] but propagates database errors instead of capturing them.
  /// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
  @override
  Future<int?> saveOrThrow({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      id = await _mnDbIntervalTagging.insertOrThrow(this, ignoreBatch);

      // Remember this was an insert so rollbackPk() can undo the new id.
      isInsert = true;
    } else {
      // id= await _upsert(); // removed in sqfentity_gen 1.3.0+6
      await _mnDbIntervalTagging.updateOrThrow(this);
    }

    return id;
  }

  /// saveAs DbIntervalTagging: clears the id and saves as a new record.
  /// <returns>Returns a new Primary Key value of DbIntervalTagging
  @override
  Future<int?> saveAs({bool ignoreBatch = true}) async {
    id = null;

    return save(ignoreBatch: ignoreBatch);
  }

  /// saveAll method saves the sent List<DbIntervalTagging> as a bulk in one transaction
  /// Returns a <List<BoolResult>>
  static Future<List<dynamic>> saveAll(
      List<DbIntervalTagging> dbintervaltaggings,
      {bool? exclusive,
      bool? noResult,
      bool? continueOnError}) async {
    List<dynamic>? result = [];
    // If there is no open transaction, start one
    final isStartedBatch = await DbEncrateia().batchStart();
    for (final obj in dbintervaltaggings) {
      await obj.save(ignoreBatch: false);
    }
    if (!isStartedBatch) {
      result = await DbEncrateia().batchCommit(
          exclusive: exclusive,
          noResult: noResult,
          continueOnError: continueOnError);
      // Back-fill the primary keys assigned by the database during the batch.
      for (int i = 0; i < dbintervaltaggings.length; i++) {
        if (dbintervaltaggings[i].id == null) {
          dbintervaltaggings[i].id = result![i] as int;
        }
      }
    }
    return result!;
  }

  /// Updates if the record exists, otherwise adds a new row
  /// <returns>Returns id
  @override
  Future<int?> upsert({bool ignoreBatch = true}) async {
    try {
      final result = await _mnDbIntervalTagging.rawInsert(
          'INSERT OR REPLACE INTO intervalTaggings (id, system, tagsId, intervalsId) VALUES (?,?,?,?)',
          [id, system, tagsId, intervalsId],
          ignoreBatch);
      if (result! > 0) {
        saveResult = BoolResult(
            success: true,
            successMessage: 'DbIntervalTagging id=$id updated successfully');
      } else {
        saveResult = BoolResult(
            success: false,
            errorMessage: 'DbIntervalTagging id=$id did not update');
      }
      return id;
    } catch (e) {
      saveResult = BoolResult(
          success: false,
          errorMessage:
              'DbIntervalTagging Save failed. Error: ${e.toString()}');
      return null;
    }
  }

  /// inserts or replaces the sent List<<DbIntervalTagging>> as a bulk in one transaction.
  /// upsertAll() method is faster then saveAll() method. upsertAll() should be used when you are sure that the primary key is greater than zero
  /// Returns a BoolCommitResult
  @override
  Future<BoolCommitResult> upsertAll(List<DbIntervalTagging> dbintervaltaggings,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    final results = await _mnDbIntervalTagging.rawInsertAll(
        'INSERT OR REPLACE INTO intervalTaggings (id, system, tagsId, intervalsId) VALUES (?,?,?,?)',
        dbintervaltaggings,
        exclusive: exclusive,
        noResult: noResult,
        continueOnError: continueOnError);
    return results;
  }

  /// Deletes DbIntervalTagging (always a hard delete here, since soft
  /// deleting is disabled for this table).
  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    debugPrint('SQFENTITIY: delete DbIntervalTagging invoked (id=$id)');
    if (!_softDeleteActivated || hardDelete) {
      return _mnDbIntervalTagging
          .delete(QueryParams(whereString: 'id=?', whereArguments: [id]));
    } else {
      return _mnDbIntervalTagging.updateBatch(
          QueryParams(whereString: 'id=?', whereArguments: [id]),
          {'isDeleted': 1});
    }
  }

  @override
  Future<BoolResult> recover([bool recoverChilds = true]) {
    // not implemented because:
    final msg =
        'set useSoftDeleting:true in the table definition of [DbIntervalTagging] to use this feature';
    throw UnimplementedError(msg);
  }

  @override
  DbIntervalTaggingFilterBuilder select(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    return DbIntervalTaggingFilterBuilder(this, getIsDeleted)
      ..qparams.selectColumns = columnsToSelect;
  }

  @override
  DbIntervalTaggingFilterBuilder distinct(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    return DbIntervalTaggingFilterBuilder(this, getIsDeleted)
      ..qparams.selectColumns = columnsToSelect
      ..qparams.distinct = true;
  }

  /// Foreign-key fields default to 0 when unset (generator contract).
  void _setDefaultValues() {
    tagsId = tagsId ?? 0;
    intervalsId = intervalsId ?? 0;
  }

  /// Clears a freshly-assigned primary key after a failed batched insert.
  @override
  void rollbackPk() {
    if (isInsert == true) {
      id = null;
    }
  }

  // END METHODS
  // BEGIN CUSTOM CODE
  /*
      you can define customCode property of your SqfEntityTable constant. For example:
      const tablePerson = SqfEntityTable(
      tableName: 'person',
      primaryKeyName: 'id',
      primaryKeyType: PrimaryKeyType.integer_auto_incremental,
      fields: [
        SqfEntityField('firstName', DbType.text),
        SqfEntityField('lastName', DbType.text),
      ],
      customCode: '''
       String fullName()
       {
         return '$firstName $lastName';
       }
      ''');
  */
  // END CUSTOM CODE
}
|
|
// endregion dbintervaltagging
|
|
|
|
// region DbIntervalTaggingField
|
|
/// Typed filter field for [DbIntervalTaggingFilterBuilder].
///
/// Every operator simply narrows the [FilterBase] result back to the
/// concrete builder type so fluent filter chains keep their static type.
class DbIntervalTaggingField extends FilterBase {
  DbIntervalTaggingField(DbIntervalTaggingFilterBuilder dbintervaltaggingFB)
      : super(dbintervaltaggingFB);

  @override
  DbIntervalTaggingFilterBuilder equals(dynamic pValue) =>
      super.equals(pValue) as DbIntervalTaggingFilterBuilder;

  @override
  DbIntervalTaggingFilterBuilder equalsOrNull(dynamic pValue) =>
      super.equalsOrNull(pValue) as DbIntervalTaggingFilterBuilder;

  @override
  DbIntervalTaggingFilterBuilder isNull() =>
      super.isNull() as DbIntervalTaggingFilterBuilder;

  @override
  DbIntervalTaggingFilterBuilder contains(dynamic pValue) =>
      super.contains(pValue) as DbIntervalTaggingFilterBuilder;

  @override
  DbIntervalTaggingFilterBuilder startsWith(dynamic pValue) =>
      super.startsWith(pValue) as DbIntervalTaggingFilterBuilder;

  @override
  DbIntervalTaggingFilterBuilder endsWith(dynamic pValue) =>
      super.endsWith(pValue) as DbIntervalTaggingFilterBuilder;

  @override
  DbIntervalTaggingFilterBuilder between(dynamic pFirst, dynamic pLast) =>
      super.between(pFirst, pLast) as DbIntervalTaggingFilterBuilder;

  @override
  DbIntervalTaggingFilterBuilder greaterThan(dynamic pValue) =>
      super.greaterThan(pValue) as DbIntervalTaggingFilterBuilder;

  @override
  DbIntervalTaggingFilterBuilder lessThan(dynamic pValue) =>
      super.lessThan(pValue) as DbIntervalTaggingFilterBuilder;

  @override
  DbIntervalTaggingFilterBuilder greaterThanOrEquals(dynamic pValue) =>
      super.greaterThanOrEquals(pValue) as DbIntervalTaggingFilterBuilder;

  @override
  DbIntervalTaggingFilterBuilder lessThanOrEquals(dynamic pValue) =>
      super.lessThanOrEquals(pValue) as DbIntervalTaggingFilterBuilder;

  @override
  DbIntervalTaggingFilterBuilder inValues(dynamic pValue) =>
      super.inValues(pValue) as DbIntervalTaggingFilterBuilder;

  @override
  DbIntervalTaggingField get not => super.not as DbIntervalTaggingField;
}
|
|
// endregion DbIntervalTaggingField
|
|
|
|
// region DbIntervalTaggingFilterBuilder
|
|
class DbIntervalTaggingFilterBuilder extends ConjunctionBase {
|
|
DbIntervalTaggingFilterBuilder(DbIntervalTagging obj, bool? getIsDeleted)
|
|
: super(obj, getIsDeleted) {
|
|
_mnDbIntervalTagging = obj._mnDbIntervalTagging;
|
|
_softDeleteActivated = obj.softDeleteActivated;
|
|
}
|
|
|
|
bool _softDeleteActivated = false;
|
|
DbIntervalTaggingManager? _mnDbIntervalTagging;
|
|
|
|
  /// Puts the SQL keyword 'AND' between filter expressions and returns this
  /// builder so calls can be chained fluently.
  @override
  DbIntervalTaggingFilterBuilder get and {
    super.and;
    return this;
  }

  /// Puts the SQL keyword 'OR' between filter expressions and returns this
  /// builder so calls can be chained fluently.
  @override
  DbIntervalTaggingFilterBuilder get or {
    super.or;
    return this;
  }

  /// Opens parentheses in the generated WHERE clause.
  @override
  DbIntervalTaggingFilterBuilder get startBlock {
    super.startBlock;
    return this;
  }

  /// String whereCriteria, write raw query without 'where' keyword. Like this: 'field1 like 'test%' and field2 = 3'
  @override
  DbIntervalTaggingFilterBuilder where(String? whereCriteria,
      {dynamic parameterValue}) {
    super.where(whereCriteria, parameterValue: parameterValue);
    return this;
  }

  /// page = page number,
  /// pagesize = row(s) per page
  @override
  DbIntervalTaggingFilterBuilder page(int page, int pagesize) {
    super.page(page, pagesize);
    return this;
  }

  /// int count = LIMIT
  @override
  DbIntervalTaggingFilterBuilder top(int count) {
    super.top(count);
    return this;
  }

  /// Closes parentheses in the generated WHERE clause.
  @override
  DbIntervalTaggingFilterBuilder get endBlock {
    super.endBlock;
    return this;
  }

  /// Sorts ascending. argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbIntervalTaggingFilterBuilder orderBy(dynamic argFields) {
    super.orderBy(argFields);
    return this;
  }

  /// Sorts descending. argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbIntervalTaggingFilterBuilder orderByDesc(dynamic argFields) {
    super.orderByDesc(argFields);
    return this;
  }

  /// Groups rows. argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbIntervalTaggingFilterBuilder groupBy(dynamic argFields) {
    super.groupBy(argFields);
    return this;
  }

  /// Adds a HAVING clause. argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbIntervalTaggingFilterBuilder having(dynamic argFields) {
    super.having(argFields);
    return this;
  }
|
|
|
|
DbIntervalTaggingField _setField(
|
|
DbIntervalTaggingField? field, String colName, DbType dbtype) {
|
|
return DbIntervalTaggingField(this)
|
|
..param = DbParameter(
|
|
dbType: dbtype, columnName: colName, wStartBlock: openedBlock);
|
|
}
|
|
|
|
DbIntervalTaggingField? _id;
|
|
DbIntervalTaggingField get id {
|
|
return _id = _setField(_id, 'id', DbType.integer);
|
|
}
|
|
|
|
DbIntervalTaggingField? _system;
|
|
DbIntervalTaggingField get system {
|
|
return _system = _setField(_system, 'system', DbType.bool);
|
|
}
|
|
|
|
DbIntervalTaggingField? _tagsId;
|
|
DbIntervalTaggingField get tagsId {
|
|
return _tagsId = _setField(_tagsId, 'tagsId', DbType.integer);
|
|
}
|
|
|
|
DbIntervalTaggingField? _intervalsId;
|
|
DbIntervalTaggingField get intervalsId {
|
|
return _intervalsId =
|
|
_setField(_intervalsId, 'intervalsId', DbType.integer);
|
|
}
|
|
|
|
/// Deletes List<DbIntervalTagging> bulk by query
|
|
///
|
|
/// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
|
|
@override
|
|
Future<BoolResult> delete([bool hardDelete = false]) async {
|
|
buildParameters();
|
|
var r = BoolResult(success: false);
|
|
|
|
if (_softDeleteActivated && !hardDelete) {
|
|
r = await _mnDbIntervalTagging!.updateBatch(qparams, {'isDeleted': 1});
|
|
} else {
|
|
r = await _mnDbIntervalTagging!.delete(qparams);
|
|
}
|
|
return r;
|
|
}
|
|
|
|
/// using:
|
|
/// update({'fieldName': Value})
|
|
/// fieldName must be String. Value is dynamic, it can be any of the (int, bool, String.. )
|
|
  @override
  Future<BoolResult> update(Map<String, dynamic> values) {
    buildParameters();
    // When LIMIT/OFFSET are part of the filter, the WHERE clause is rewritten
    // as `id IN (subquery)` so that the row limit still constrains the UPDATE
    // (the limit/offset are applied inside the SELECT subquery).
    if (qparams.limit! > 0 || qparams.offset! > 0) {
      qparams.whereString =
          'id IN (SELECT id from intervalTaggings ${qparams.whereString!.isNotEmpty ? 'WHERE ${qparams.whereString}' : ''}${qparams.limit! > 0 ? ' LIMIT ${qparams.limit}' : ''}${qparams.offset! > 0 ? ' OFFSET ${qparams.offset}' : ''})';
    }
    return _mnDbIntervalTagging!.updateBatch(qparams, values);
  }
|
|
|
|
/// This method always returns [DbIntervalTagging] Obj if exist, otherwise returns null
|
|
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
|
|
/// ex: toSingle(preload:true) -> Loads all related objects
|
|
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
|
|
/// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
|
|
/// bool loadParents: if true, loads all parent objects until the object has no parent
|
|
|
|
/// <returns> DbIntervalTagging?
|
|
@override
|
|
Future<DbIntervalTagging?> toSingle(
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields}) async {
|
|
buildParameters(pSize: 1);
|
|
final objFuture = _mnDbIntervalTagging!.toList(qparams);
|
|
final data = await objFuture;
|
|
DbIntervalTagging? obj;
|
|
if (data.isNotEmpty) {
|
|
obj = DbIntervalTagging.fromMap(data[0] as Map<String, dynamic>);
|
|
|
|
// RELATIONSHIPS PRELOAD
|
|
if (preload || loadParents) {
|
|
loadedFields = loadedFields ?? [];
|
|
if ((preloadFields == null ||
|
|
loadParents ||
|
|
preloadFields.contains('plDbTag'))) {
|
|
obj.plDbTag =
|
|
obj.plDbTag ?? await obj.getDbTag(loadParents: loadParents);
|
|
}
|
|
if ((preloadFields == null ||
|
|
loadParents ||
|
|
preloadFields.contains('plDbInterval'))) {
|
|
obj.plDbInterval = obj.plDbInterval ??
|
|
await obj.getDbInterval(loadParents: loadParents);
|
|
}
|
|
} // END RELATIONSHIPS PRELOAD
|
|
} else {
|
|
obj = null;
|
|
}
|
|
return obj;
|
|
}
|
|
|
|
/// This method always returns [DbIntervalTagging]
|
|
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
|
|
/// ex: toSingle(preload:true) -> Loads all related objects
|
|
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
|
|
/// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
|
|
/// bool loadParents: if true, loads all parent objects until the object has no parent
|
|
|
|
/// <returns> DbIntervalTagging?
|
|
@override
|
|
Future<DbIntervalTagging> toSingleOrDefault(
|
|
{bool preload = false,
|
|
List<String>? preloadFields,
|
|
bool loadParents = false,
|
|
List<String>? loadedFields}) async {
|
|
return await toSingle(
|
|
preload: preload,
|
|
preloadFields: preloadFields,
|
|
loadParents: loadParents,
|
|
loadedFields: loadedFields) ??
|
|
DbIntervalTagging();
|
|
}
|
|
|
|
/// This method returns int. [DbIntervalTagging]
|
|
/// <returns>int
|
|
@override
|
|
Future<int> toCount(
|
|
[VoidCallback Function(int c)? dbintervaltaggingCount]) async {
|
|
buildParameters();
|
|
qparams.selectColumns = ['COUNT(1) AS CNT'];
|
|
final dbintervaltaggingsFuture =
|
|
await _mnDbIntervalTagging!.toList(qparams);
|
|
final int count = dbintervaltaggingsFuture[0]['CNT'] as int;
|
|
if (dbintervaltaggingCount != null) {
|
|
dbintervaltaggingCount(count);
|
|
}
|
|
return count;
|
|
}
|
|
|
|
/// Returns the filtered rows as a List<DbIntervalTagging>. [DbIntervalTagging]
/// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
/// ex: toList(preload:true) -> Loads all related objects
/// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
/// ex: toList(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
/// bool loadParents: if true, loads all parent objects until the object has no parent

/// <returns>List<DbIntervalTagging>
@override
Future<List<DbIntervalTagging>> toList(
    {bool preload = false,
    List<String>? preloadFields,
    bool loadParents = false,
    List<String>? loadedFields}) async {
  final rows = await toMapList();
  // Defaults are only applied when the full column set was selected.
  return DbIntervalTagging.fromMapList(rows,
      preload: preload,
      preloadFields: preloadFields,
      loadParents: loadParents,
      loadedFields: loadedFields,
      setDefaultValues: qparams.selectColumns == null);
}
|
|
|
|
/// Serializes the filtered rows to a JSON array string. [DbIntervalTagging]
@override
Future<String> toJson() async {
  final rows = await toList();
  return json.encode([for (final o in rows) o.toMap(forJson: true)]);
}
|
|
|
|
/// Serializes the filtered rows (including child relations) to a JSON array
/// string. [DbIntervalTagging]
@override
Future<String> toJsonWithChilds() async {
  final rows = await toList();
  final mapped = <dynamic>[
    for (final o in rows) await o.toMapWithChildren(false, true)
  ];
  return json.encode(mapped);
}
|
|
|
|
/// Executes the built query and returns raw row maps. [DbIntervalTagging]
/// <returns>List<dynamic>
@override
Future<List<dynamic>> toMapList() async {
  buildParameters();
  return _mnDbIntervalTagging!.toList(qparams);
}
|
|
|
|
/// Builds the primary-key SELECT statement and its bind arguments.
/// retVal['sql'] = SQL statement string, retVal['args'] = whereArguments List<dynamic>;
/// <returns>Map<String, dynamic>
@override
Map<String, dynamic> toListPrimaryKeySQL([bool buildParams = true]) {
  if (buildParams) {
    buildParameters();
  }
  return <String, dynamic>{
    'sql': 'SELECT `id` FROM intervalTaggings WHERE ${qparams.whereString}',
    'args': qparams.whereArguments,
  };
}
|
|
|
|
/// Returns the primary keys of all rows matching the current filter.
/// <returns>List<int>
@override
Future<List<int>> toListPrimaryKey([bool buildParams = true]) async {
  if (buildParams) {
    buildParameters();
  }
  // Restrict the projection to the key column before executing.
  qparams.selectColumns = ['id'];
  final rows = await _mnDbIntervalTagging!.toList(qparams);
  return <int>[for (final row in rows) row['id'] as int];
}
|
|
|
|
/// Returns List<dynamic> for selected columns. Use this method for 'groupBy' with min,max,avg.. [DbIntervalTagging]
/// Sample usage: (see EXAMPLE 4.2 at https://github.com/hhtokpinar/sqfEntity#group-by)
@override
Future<List<dynamic>> toListObject() async {
  buildParameters();
  final rows = await _mnDbIntervalTagging!.toList(qparams);
  // Return a fresh list so callers cannot mutate the provider's result.
  return <dynamic>[...rows];
}
|
|
|
|
/// Returns List<String> for selected first column.
/// Sample usage: await DbIntervalTagging.select(columnsToSelect: ['columnName']).toListString()
@override
Future<List<String>> toListString(
    [VoidCallback Function(List<String> o)? listString]) async {
  buildParameters();
  final rows = await _mnDbIntervalTagging!.toList(qparams);
  // First entry of selectColumns is the column whose values are stringified.
  final firstColumn = qparams.selectColumns![0];
  final values = <String>[
    for (final row in rows) row[firstColumn].toString()
  ];
  listString?.call(values);
  return values;
}
|
|
}
|
|
// endregion DbIntervalTaggingFilterBuilder
|
|
|
|
// region DbIntervalTaggingFields
|
|
/// Static, lazily created [TableField] descriptors for the
/// `intervalTaggings` table, used when composing filter expressions.
class DbIntervalTaggingFields {
  static TableField? _fId;
  static TableField get id {
    _fId ??= SqlSyntax.setField(_fId, 'id', DbType.integer);
    return _fId!;
  }

  static TableField? _fSystem;
  static TableField get system {
    _fSystem ??= SqlSyntax.setField(_fSystem, 'system', DbType.bool);
    return _fSystem!;
  }

  static TableField? _fTagsId;
  static TableField get tagsId {
    _fTagsId ??= SqlSyntax.setField(_fTagsId, 'tagsId', DbType.integer);
    return _fTagsId!;
  }

  static TableField? _fIntervalsId;
  static TableField get intervalsId {
    _fIntervalsId ??=
        SqlSyntax.setField(_fIntervalsId, 'intervalsId', DbType.integer);
    return _fIntervalsId!;
  }
}
|
|
// endregion DbIntervalTaggingFields
|
|
|
|
//region DbIntervalTaggingManager
|
|
/// Data-access provider for the `intervalTaggings` table.
///
/// Thin [SqfEntityProvider] subclass that binds the shared [DbEncrateia]
/// database instance to this table's name and single-column primary key.
class DbIntervalTaggingManager extends SqfEntityProvider {
  DbIntervalTaggingManager()
      : super(DbEncrateia(),
            tableName: _tableName,
            primaryKeyList: _primaryKeyList,
            whereStr: _whereStr);
  // Physical table name in the SQLite database.
  static const String _tableName = 'intervalTaggings';
  static const List<String> _primaryKeyList = ['id'];
  // WHERE clause template used for primary-key lookups.
  static const String _whereStr = 'id=?';
}
|
|
|
|
//endregion DbIntervalTaggingManager
|
|
// region DbLog
|
|
/// Entity class for rows of the `logs` table.
///
/// Generated by SqfEntity. Provides construction from maps/JSON, map/JSON
/// serialization, CRUD (save/upsert/delete) through [DbLogManager], and
/// fluent query entry points ([select], [distinct]).
class DbLog extends TableBase {
  DbLog(
      {this.id,
      this.dateTime,
      this.message,
      this.method,
      this.comment,
      this.stackTrace}) {
    _setDefaultValues();
    softDeleteActivated = false;
  }
  DbLog.withFields(
      this.dateTime, this.message, this.method, this.comment, this.stackTrace) {
    _setDefaultValues();
  }
  DbLog.withId(this.id, this.dateTime, this.message, this.method, this.comment,
      this.stackTrace) {
    _setDefaultValues();
  }
  // fromMap v2.0
  // Builds a DbLog from a raw row map. 'dateTime' is decoded as epoch
  // milliseconds when the stored value parses as an int, otherwise as an
  // ISO-8601 string via DateTime.tryParse.
  DbLog.fromMap(Map<String, dynamic> o, {bool setDefaultValues = true}) {
    if (setDefaultValues) {
      _setDefaultValues();
    }
    id = int.tryParse(o['id'].toString());
    if (o['dateTime'] != null) {
      dateTime = int.tryParse(o['dateTime'].toString()) != null
          ? DateTime.fromMillisecondsSinceEpoch(
              int.tryParse(o['dateTime'].toString())!)
          : DateTime.tryParse(o['dateTime'].toString());
    }
    if (o['message'] != null) {
      message = o['message'].toString();
    }
    if (o['method'] != null) {
      method = o['method'].toString();
    }
    if (o['comment'] != null) {
      comment = o['comment'].toString();
    }
    if (o['stackTrace'] != null) {
      stackTrace = o['stackTrace'].toString();
    }
  }
  // FIELDS (DbLog)
  int? id;
  DateTime? dateTime;
  String? message;
  String? method;
  String? comment;
  String? stackTrace;

  // end FIELDS (DbLog)

  static const bool _softDeleteActivated = false;
  DbLogManager? __mnDbLog;

  // Lazily created data-access manager for this entity.
  DbLogManager get _mnDbLog {
    return __mnDbLog = __mnDbLog ?? DbLogManager();
  }

  // METHODS
  // Converts this entity to a map. forJson stringifies dateTime; forQuery
  // stores it as epoch milliseconds; forView omits null fields.
  @override
  Map<String, dynamic> toMap(
      {bool forQuery = false, bool forJson = false, bool forView = false}) {
    final map = <String, dynamic>{};
    map['id'] = id;
    if (dateTime != null) {
      map['dateTime'] = forJson
          ? dateTime!.toString()
          : forQuery
              ? dateTime!.millisecondsSinceEpoch
              : dateTime;
      // NOTE(review): 'dateTime != null' below is always false in this else
      // branch; the condition effectively reduces to '!forView'.
    } else if (dateTime != null || !forView) {
      map['dateTime'] = null;
    }
    if (message != null || !forView) {
      map['message'] = message;
    }
    if (method != null || !forView) {
      map['method'] = method;
    }
    if (comment != null || !forView) {
      map['comment'] = comment;
    }
    if (stackTrace != null || !forView) {
      map['stackTrace'] = stackTrace;
    }

    return map;
  }

  // Same as toMap; DbLog has no child relations, so no extra keys are added.
  @override
  Future<Map<String, dynamic>> toMapWithChildren(
      [bool forQuery = false,
      bool forJson = false,
      bool forView = false]) async {
    final map = <String, dynamic>{};
    map['id'] = id;
    if (dateTime != null) {
      map['dateTime'] = forJson
          ? dateTime!.toString()
          : forQuery
              ? dateTime!.millisecondsSinceEpoch
              : dateTime;
      // NOTE(review): same dead 'dateTime != null' condition as in toMap.
    } else if (dateTime != null || !forView) {
      map['dateTime'] = null;
    }
    if (message != null || !forView) {
      map['message'] = message;
    }
    if (method != null || !forView) {
      map['method'] = method;
    }
    if (comment != null || !forView) {
      map['comment'] = comment;
    }
    if (stackTrace != null || !forView) {
      map['stackTrace'] = stackTrace;
    }

    return map;
  }

  /// This method returns Json String [DbLog]
  @override
  String toJson() {
    return json.encode(toMap(forJson: true));
  }

  /// This method returns Json String [DbLog]
  @override
  Future<String> toJsonWithChilds() async {
    return json.encode(await toMapWithChildren(false, true));
  }

  // Bind arguments for INSERT (without the primary key), in column order.
  @override
  List<dynamic> toArgs() {
    return [
      dateTime != null ? dateTime!.millisecondsSinceEpoch : null,
      message,
      method,
      comment,
      stackTrace
    ];
  }

  // Bind arguments including the primary key (used by upsert/replace).
  @override
  List<dynamic> toArgsWithIds() {
    return [
      id,
      dateTime != null ? dateTime!.millisecondsSinceEpoch : null,
      message,
      method,
      comment,
      stackTrace
    ];
  }

  // Fetches a JSON array from [uri] and decodes it into DbLog objects.
  // Returns null (after logging) on any network/decoding error.
  static Future<List<DbLog>?> fromWebUrl(Uri uri,
      {Map<String, String>? headers}) async {
    try {
      final response = await http.get(uri, headers: headers);
      return await fromJson(response.body);
    } catch (e) {
      debugPrint(
          'SQFENTITY ERROR DbLog.fromWebUrl: ErrorMessage: ${e.toString()}');
      return null;
    }
  }

  // POSTs this entity as JSON to [uri].
  Future<http.Response> postUrl(Uri uri, {Map<String, String>? headers}) {
    return http.post(uri, headers: headers, body: toJson());
  }

  // Decodes a JSON array string into DbLog objects; decoding errors are
  // logged and an empty (or partial) list is returned.
  static Future<List<DbLog>> fromJson(String jsonBody) async {
    final Iterable list = await json.decode(jsonBody) as Iterable;
    var objList = <DbLog>[];
    try {
      objList = list
          .map((dblog) => DbLog.fromMap(dblog as Map<String, dynamic>))
          .toList();
    } catch (e) {
      debugPrint(
          'SQFENTITY ERROR DbLog.fromJson: ErrorMessage: ${e.toString()}');
    }
    return objList;
  }

  // Converts raw row maps into DbLog objects. The preload/loadParents
  // parameters are accepted for API symmetry with other entities but are
  // not used here (DbLog has no relations in the visible code).
  static Future<List<DbLog>> fromMapList(List<dynamic> data,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields,
      bool setDefaultValues = true}) async {
    final List<DbLog> objList = <DbLog>[];
    loadedFields = loadedFields ?? [];
    for (final map in data) {
      final obj = DbLog.fromMap(map as Map<String, dynamic>,
          setDefaultValues: setDefaultValues);

      objList.add(obj);
    }
    return objList;
  }

  /// returns DbLog by ID if exist, otherwise returns null
  /// Primary Keys: int? id
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: getById(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: getById(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns>returns [DbLog] if exist, otherwise returns null
  // NOTE(review): the preload/preloadFields/loadParents/loadedFields
  // parameters are ignored in this generated body.
  Future<DbLog?> getById(int? id,
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    if (id == null) {
      return null;
    }
    DbLog? obj;
    final data = await _mnDbLog.getById([id]);
    if (data.length != 0) {
      obj = DbLog.fromMap(data[0] as Map<String, dynamic>);
    } else {
      obj = null;
    }
    return obj;
  }

  /// Saves the (DbLog) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
  /// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
  /// <returns>Returns id
  @override
  Future<int?> save({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      id = await _mnDbLog.insert(this, ignoreBatch);
    } else {
      await _mnDbLog.update(this);
    }

    return id;
  }

  /// Saves the (DbLog) object. If the id field is null, saves as a new record and returns new id, if id is not null then updates record
  /// ignoreBatch = true as a default. Set ignoreBatch to false if you run more than one save() operation those are between batchStart and batchCommit
  /// Unlike [save], database errors propagate to the caller.
  /// <returns>Returns id
  @override
  Future<int?> saveOrThrow({bool ignoreBatch = true}) async {
    if (id == null || id == 0) {
      id = await _mnDbLog.insertOrThrow(this, ignoreBatch);

      // Remember this was an insert so rollbackPk can undo the assigned id.
      isInsert = true;
    } else {
      // id= await _upsert(); // removed in sqfentity_gen 1.3.0+6
      await _mnDbLog.updateOrThrow(this);
    }

    return id;
  }

  /// saveAs DbLog. Returns a new Primary Key value of DbLog

  /// <returns>Returns a new Primary Key value of DbLog
  @override
  Future<int?> saveAs({bool ignoreBatch = true}) async {
    // Clearing the id forces save() down the insert path.
    id = null;

    return save(ignoreBatch: ignoreBatch);
  }

  /// saveAll method saves the sent List<DbLog> as a bulk in one transaction
  /// Returns a <List<BoolResult>>
  static Future<List<dynamic>> saveAll(List<DbLog> dblogs,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    List<dynamic>? result = [];
    // If there is no open transaction, start one
    final isStartedBatch = await DbEncrateia().batchStart();
    for (final obj in dblogs) {
      await obj.save(ignoreBatch: false);
    }
    if (!isStartedBatch) {
      result = await DbEncrateia().batchCommit(
          exclusive: exclusive,
          noResult: noResult,
          continueOnError: continueOnError);
      // NOTE(review): assumes batchCommit returns one int per saved object,
      // index-aligned with dblogs — TODO confirm when noResult is true.
      for (int i = 0; i < dblogs.length; i++) {
        if (dblogs[i].id == null) {
          dblogs[i].id = result![i] as int;
        }
      }
    }
    return result!;
  }

  /// Updates if the record exists, otherwise adds a new row
  /// <returns>Returns id
  @override
  Future<int?> upsert({bool ignoreBatch = true}) async {
    try {
      // NOTE(review): 'result!' assumes rawInsert never returns null on the
      // non-exception path — verify against SqfEntityProvider.rawInsert.
      final result = await _mnDbLog.rawInsert(
          'INSERT OR REPLACE INTO logs (id, dateTime, message, method, comment, stackTrace)  VALUES (?,?,?,?,?,?)',
          [
            id,
            dateTime != null ? dateTime!.millisecondsSinceEpoch : null,
            message,
            method,
            comment,
            stackTrace
          ],
          ignoreBatch);
      if (result! > 0) {
        saveResult = BoolResult(
            success: true, successMessage: 'DbLog id=$id updated successfully');
      } else {
        saveResult = BoolResult(
            success: false, errorMessage: 'DbLog id=$id did not update');
      }
      return id;
    } catch (e) {
      saveResult = BoolResult(
          success: false,
          errorMessage: 'DbLog Save failed. Error: ${e.toString()}');
      return null;
    }
  }

  /// inserts or replaces the sent List<<DbLog>> as a bulk in one transaction.
  /// upsertAll() method is faster then saveAll() method. upsertAll() should be used when you are sure that the primary key is greater than zero
  /// Returns a BoolCommitResult
  @override
  Future<BoolCommitResult> upsertAll(List<DbLog> dblogs,
      {bool? exclusive, bool? noResult, bool? continueOnError}) async {
    final results = await _mnDbLog.rawInsertAll(
        'INSERT OR REPLACE INTO logs (id, dateTime, message, method, comment, stackTrace)  VALUES (?,?,?,?,?,?)',
        dblogs,
        exclusive: exclusive,
        noResult: noResult,
        continueOnError: continueOnError);
    return results;
  }

  /// Deletes DbLog

  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  // Soft delete is disabled for this table (_softDeleteActivated is false),
  // so the hard-delete branch is always taken; the else branch is kept by
  // the generator for symmetry.
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    debugPrint('SQFENTITIY: delete DbLog invoked (id=$id)');
    if (!_softDeleteActivated || hardDelete) {
      return _mnDbLog
          .delete(QueryParams(whereString: 'id=?', whereArguments: [id]));
    } else {
      return _mnDbLog.updateBatch(
          QueryParams(whereString: 'id=?', whereArguments: [id]),
          {'isDeleted': 1});
    }
  }

  // Recovery from soft delete is unsupported because useSoftDeleting is
  // false for this table; always throws.
  @override
  Future<BoolResult> recover([bool recoverChilds = true]) {
    // not implemented because:
    final msg =
        'set useSoftDeleting:true in the table definition of [DbLog] to use this feature';
    throw UnimplementedError(msg);
  }

  // Entry point of the fluent query builder for this table.
  @override
  DbLogFilterBuilder select(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    return DbLogFilterBuilder(this, getIsDeleted)
      ..qparams.selectColumns = columnsToSelect;
  }

  // Like [select], but adds SELECT DISTINCT to the generated query.
  @override
  DbLogFilterBuilder distinct(
      {List<String>? columnsToSelect, bool? getIsDeleted}) {
    return DbLogFilterBuilder(this, getIsDeleted)
      ..qparams.selectColumns = columnsToSelect
      ..qparams.distinct = true;
  }

  // No column defaults are declared for this table.
  void _setDefaultValues() {}

  // Undoes the primary key assigned by an insert when a batch is rolled back.
  @override
  void rollbackPk() {
    if (isInsert == true) {
      id = null;
    }
  }

  // END METHODS
  // BEGIN CUSTOM CODE
  /*
      you can define customCode property of your SqfEntityTable constant. For example:
      const tablePerson = SqfEntityTable(
      tableName: 'person',
      primaryKeyName: 'id',
      primaryKeyType: PrimaryKeyType.integer_auto_incremental,
      fields: [
        SqfEntityField('firstName', DbType.text),
        SqfEntityField('lastName', DbType.text),
      ],
      customCode: '''
       String fullName()
       { 
         return '$firstName $lastName';
       }
      ''');
     */
  // END CUSTOM CODE
}
|
|
// endregion dblog
|
|
|
|
// region DbLogField
|
|
/// Typed filter field for [DbLog] queries.
///
/// Each operator forwards to [FilterBase] and narrows the returned builder
/// to [DbLogFilterBuilder] so fluent chains stay strongly typed.
class DbLogField extends FilterBase {
  DbLogField(DbLogFilterBuilder dblogFB) : super(dblogFB);

  @override
  DbLogFilterBuilder equals(dynamic pValue) =>
      super.equals(pValue) as DbLogFilterBuilder;

  @override
  DbLogFilterBuilder equalsOrNull(dynamic pValue) =>
      super.equalsOrNull(pValue) as DbLogFilterBuilder;

  @override
  DbLogFilterBuilder isNull() => super.isNull() as DbLogFilterBuilder;

  @override
  DbLogFilterBuilder contains(dynamic pValue) =>
      super.contains(pValue) as DbLogFilterBuilder;

  @override
  DbLogFilterBuilder startsWith(dynamic pValue) =>
      super.startsWith(pValue) as DbLogFilterBuilder;

  @override
  DbLogFilterBuilder endsWith(dynamic pValue) =>
      super.endsWith(pValue) as DbLogFilterBuilder;

  @override
  DbLogFilterBuilder between(dynamic pFirst, dynamic pLast) =>
      super.between(pFirst, pLast) as DbLogFilterBuilder;

  @override
  DbLogFilterBuilder greaterThan(dynamic pValue) =>
      super.greaterThan(pValue) as DbLogFilterBuilder;

  @override
  DbLogFilterBuilder lessThan(dynamic pValue) =>
      super.lessThan(pValue) as DbLogFilterBuilder;

  @override
  DbLogFilterBuilder greaterThanOrEquals(dynamic pValue) =>
      super.greaterThanOrEquals(pValue) as DbLogFilterBuilder;

  @override
  DbLogFilterBuilder lessThanOrEquals(dynamic pValue) =>
      super.lessThanOrEquals(pValue) as DbLogFilterBuilder;

  @override
  DbLogFilterBuilder inValues(dynamic pValue) =>
      super.inValues(pValue) as DbLogFilterBuilder;

  /// Negates the next operator in the chain.
  @override
  DbLogField get not => super.not as DbLogField;
}
|
|
// endregion DbLogField
|
|
|
|
// region DbLogFilterBuilder
|
|
/// Fluent query builder for the `logs` table.
///
/// Accumulates WHERE/ORDER BY/GROUP BY/paging state in the shared `qparams`
/// object inherited from [ConjunctionBase], then materializes results via
/// the terminal methods (toList, toSingle, toCount, delete, update, ...).
class DbLogFilterBuilder extends ConjunctionBase {
  DbLogFilterBuilder(DbLog obj, bool? getIsDeleted) : super(obj, getIsDeleted) {
    _mnDbLog = obj._mnDbLog;
    _softDeleteActivated = obj.softDeleteActivated;
  }

  bool _softDeleteActivated = false;
  DbLogManager? _mnDbLog;

  /// put the sql keyword 'AND'
  @override
  DbLogFilterBuilder get and {
    super.and;
    return this;
  }

  /// put the sql keyword 'OR'
  @override
  DbLogFilterBuilder get or {
    super.or;
    return this;
  }

  /// open parentheses
  @override
  DbLogFilterBuilder get startBlock {
    super.startBlock;
    return this;
  }

  /// String whereCriteria, write raw query without 'where' keyword. Like this: 'field1 like 'test%' and field2 = 3'
  @override
  DbLogFilterBuilder where(String? whereCriteria, {dynamic parameterValue}) {
    super.where(whereCriteria, parameterValue: parameterValue);
    return this;
  }

  /// page = page number,
  /// pagesize = row(s) per page
  @override
  DbLogFilterBuilder page(int page, int pagesize) {
    super.page(page, pagesize);
    return this;
  }

  /// int count = LIMIT
  @override
  DbLogFilterBuilder top(int count) {
    super.top(count);
    return this;
  }

  /// close parentheses
  @override
  DbLogFilterBuilder get endBlock {
    super.endBlock;
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbLogFilterBuilder orderBy(dynamic argFields) {
    super.orderBy(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbLogFilterBuilder orderByDesc(dynamic argFields) {
    super.orderByDesc(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='field1, field2'
  /// Example 2: argFields = ['field1', 'field2']
  @override
  DbLogFilterBuilder groupBy(dynamic argFields) {
    super.groupBy(argFields);
    return this;
  }

  /// argFields might be String or List<String>.
  /// Example 1: argFields='name, date'
  /// Example 2: argFields = ['name', 'date']
  @override
  DbLogFilterBuilder having(dynamic argFields) {
    super.having(argFields);
    return this;
  }

  // Creates a field expression bound to this builder. The 'field' parameter
  // is unused by the generated body; a fresh DbLogField is returned each call.
  DbLogField _setField(DbLogField? field, String colName, DbType dbtype) {
    return DbLogField(this)
      ..param = DbParameter(
          dbType: dbtype, columnName: colName, wStartBlock: openedBlock);
  }

  DbLogField? _id;
  DbLogField get id {
    return _id = _setField(_id, 'id', DbType.integer);
  }

  DbLogField? _dateTime;
  DbLogField get dateTime {
    return _dateTime = _setField(_dateTime, 'dateTime', DbType.datetime);
  }

  DbLogField? _message;
  DbLogField get message {
    return _message = _setField(_message, 'message', DbType.text);
  }

  DbLogField? _method;
  DbLogField get method {
    return _method = _setField(_method, 'method', DbType.text);
  }

  DbLogField? _comment;
  DbLogField get comment {
    return _comment = _setField(_comment, 'comment', DbType.text);
  }

  DbLogField? _stackTrace;
  DbLogField get stackTrace {
    return _stackTrace = _setField(_stackTrace, 'stackTrace', DbType.text);
  }

  /// Deletes List<DbLog> bulk by query
  ///
  /// <returns>BoolResult res.success= true (Deleted), false (Could not be deleted)
  // _softDeleteActivated is copied from the entity (false for logs), so the
  // hard-delete branch is the one normally taken.
  @override
  Future<BoolResult> delete([bool hardDelete = false]) async {
    buildParameters();
    var r = BoolResult(success: false);

    if (_softDeleteActivated && !hardDelete) {
      r = await _mnDbLog!.updateBatch(qparams, {'isDeleted': 1});
    } else {
      r = await _mnDbLog!.delete(qparams);
    }
    return r;
  }

  /// using:
  /// update({'fieldName': Value})
  /// fieldName must be String. Value is dynamic, it can be any of the (int, bool, String.. )
  // When LIMIT/OFFSET is set, the WHERE clause is rewritten as an id-subquery
  // because SQLite UPDATE does not support LIMIT directly.
  @override
  Future<BoolResult> update(Map<String, dynamic> values) {
    buildParameters();
    if (qparams.limit! > 0 || qparams.offset! > 0) {
      qparams.whereString =
          'id IN (SELECT id from logs ${qparams.whereString!.isNotEmpty ? 'WHERE ${qparams.whereString}' : ''}${qparams.limit! > 0 ? ' LIMIT ${qparams.limit}' : ''}${qparams.offset! > 0 ? ' OFFSET ${qparams.offset}' : ''})';
    }
    return _mnDbLog!.updateBatch(qparams, values);
  }

  /// This method always returns [DbLog] Obj if exist, otherwise returns null
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toSingle(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns> DbLog?
  @override
  Future<DbLog?> toSingle(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    // pSize: 1 limits the query to a single row.
    buildParameters(pSize: 1);
    final objFuture = _mnDbLog!.toList(qparams);
    final data = await objFuture;
    DbLog? obj;
    if (data.isNotEmpty) {
      obj = DbLog.fromMap(data[0] as Map<String, dynamic>);
    } else {
      obj = null;
    }
    return obj;
  }

  /// This method always returns a [DbLog] instance (an empty DbLog when no
  /// row matches).
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toSingle(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toSingle(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns> DbLog
  @override
  Future<DbLog> toSingleOrDefault(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    return await toSingle(
            preload: preload,
            preloadFields: preloadFields,
            loadParents: loadParents,
            loadedFields: loadedFields) ??
        DbLog();
  }

  /// This method returns int. [DbLog]
  /// <returns>int
  @override
  Future<int> toCount([VoidCallback Function(int c)? dblogCount]) async {
    buildParameters();
    qparams.selectColumns = ['COUNT(1) AS CNT'];
    final dblogsFuture = await _mnDbLog!.toList(qparams);
    final int count = dblogsFuture[0]['CNT'] as int;
    if (dblogCount != null) {
      dblogCount(count);
    }
    return count;
  }

  /// This method returns List<DbLog> [DbLog]
  /// bool preload: if true, loads all related child objects (Set preload to true if you want to load all fields related to child or parent)
  /// ex: toList(preload:true) -> Loads all related objects
  /// List<String> preloadFields: specify the fields you want to preload (preload parameter's value should also be "true")
  /// ex: toList(preload:true, preloadFields:['plField1','plField2'... etc]) -> Loads only certain fields what you specified
  /// bool loadParents: if true, loads all parent objects until the object has no parent

  /// <returns>List<DbLog>
  @override
  Future<List<DbLog>> toList(
      {bool preload = false,
      List<String>? preloadFields,
      bool loadParents = false,
      List<String>? loadedFields}) async {
    final data = await toMapList();
    final List<DbLog> dblogsData = await DbLog.fromMapList(data,
        preload: preload,
        preloadFields: preloadFields,
        loadParents: loadParents,
        loadedFields: loadedFields,
        setDefaultValues: qparams.selectColumns == null);
    return dblogsData;
  }

  /// This method returns Json String [DbLog]
  @override
  Future<String> toJson() async {
    final list = <dynamic>[];
    final data = await toList();
    for (var o in data) {
      list.add(o.toMap(forJson: true));
    }
    return json.encode(list);
  }

  /// This method returns Json String. [DbLog]
  @override
  Future<String> toJsonWithChilds() async {
    final list = <dynamic>[];
    final data = await toList();
    for (var o in data) {
      list.add(await o.toMapWithChildren(false, true));
    }
    return json.encode(list);
  }

  /// This method returns List<dynamic>. [DbLog]
  /// <returns>List<dynamic>
  @override
  Future<List<dynamic>> toMapList() async {
    buildParameters();
    return await _mnDbLog!.toList(qparams);
  }

  /// This method returns Primary Key List SQL and Parameters retVal = Map<String,dynamic>. [DbLog]
  /// retVal['sql'] = SQL statement string, retVal['args'] = whereArguments List<dynamic>;
  /// <returns>List<String>
  @override
  Map<String, dynamic> toListPrimaryKeySQL([bool buildParams = true]) {
    final Map<String, dynamic> _retVal = <String, dynamic>{};
    if (buildParams) {
      buildParameters();
    }
    _retVal['sql'] = 'SELECT `id` FROM logs WHERE ${qparams.whereString}';
    _retVal['args'] = qparams.whereArguments;
    return _retVal;
  }

  /// This method returns Primary Key List<int>.
  /// <returns>List<int>
  @override
  Future<List<int>> toListPrimaryKey([bool buildParams = true]) async {
    if (buildParams) {
      buildParameters();
    }
    final List<int> idData = <int>[];
    qparams.selectColumns = ['id'];
    final idFuture = await _mnDbLog!.toList(qparams);

    final int count = idFuture.length;
    for (int i = 0; i < count; i++) {
      idData.add(idFuture[i]['id'] as int);
    }
    return idData;
  }

  /// Returns List<dynamic> for selected columns. Use this method for 'groupBy' with min,max,avg.. [DbLog]
  /// Sample usage: (see EXAMPLE 4.2 at https://github.com/hhtokpinar/sqfEntity#group-by)
  @override
  Future<List<dynamic>> toListObject() async {
    buildParameters();

    final objectFuture = _mnDbLog!.toList(qparams);

    final List<dynamic> objectsData = <dynamic>[];
    final data = await objectFuture;
    final int count = data.length;
    for (int i = 0; i < count; i++) {
      objectsData.add(data[i]);
    }
    return objectsData;
  }

  /// Returns List<String> for selected first column
  /// Sample usage: await DbLog.select(columnsToSelect: ['columnName']).toListString()
  @override
  Future<List<String>> toListString(
      [VoidCallback Function(List<String> o)? listString]) async {
    buildParameters();

    final objectFuture = _mnDbLog!.toList(qparams);

    final List<String> objectsData = <String>[];
    final data = await objectFuture;
    final int count = data.length;
    for (int i = 0; i < count; i++) {
      objectsData.add(data[i][qparams.selectColumns![0]].toString());
    }
    if (listString != null) {
      listString(objectsData);
    }
    return objectsData;
  }
}
|
|
// endregion DbLogFilterBuilder
|
|
|
|
// region DbLogFields
|
|
/// Static, lazily created [TableField] descriptors for the `logs` table,
/// used when composing filter expressions.
class DbLogFields {
  static TableField? _fId;
  static TableField get id {
    _fId ??= SqlSyntax.setField(_fId, 'id', DbType.integer);
    return _fId!;
  }

  static TableField? _fDateTime;
  static TableField get dateTime {
    _fDateTime ??= SqlSyntax.setField(_fDateTime, 'dateTime', DbType.datetime);
    return _fDateTime!;
  }

  static TableField? _fMessage;
  static TableField get message {
    _fMessage ??= SqlSyntax.setField(_fMessage, 'message', DbType.text);
    return _fMessage!;
  }

  static TableField? _fMethod;
  static TableField get method {
    _fMethod ??= SqlSyntax.setField(_fMethod, 'method', DbType.text);
    return _fMethod!;
  }

  static TableField? _fComment;
  static TableField get comment {
    _fComment ??= SqlSyntax.setField(_fComment, 'comment', DbType.text);
    return _fComment!;
  }

  static TableField? _fStackTrace;
  static TableField get stackTrace {
    _fStackTrace ??=
        SqlSyntax.setField(_fStackTrace, 'stackTrace', DbType.text);
    return _fStackTrace!;
  }
}
|
|
// endregion DbLogFields
|
|
|
|
//region DbLogManager
|
|
/// Data-access provider for the `logs` table.
///
/// Thin [SqfEntityProvider] subclass that binds the shared [DbEncrateia]
/// database instance to this table's name and single-column primary key.
class DbLogManager extends SqfEntityProvider {
  DbLogManager()
      : super(DbEncrateia(),
            tableName: _tableName,
            primaryKeyList: _primaryKeyList,
            whereStr: _whereStr);
  // Physical table name in the SQLite database.
  static const String _tableName = 'logs';
  static const List<String> _primaryKeyList = ['id'];
  // WHERE clause template used for primary-key lookups.
  static const String _whereStr = 'id=?';
}
|
|
|
|
//endregion DbLogManager
|
|
/// Provider used for sequence operations on the [DbEncrateia] database.
///
/// No table binding: it forwards only the database connection to
/// [SqfEntityProvider].
class DbEncrateiaSequenceManager extends SqfEntityProvider {
  DbEncrateiaSequenceManager() : super(DbEncrateia());
}
|
|
// END OF ENTITIES
|