# generated by fastapi-codegen:
# filename: openapi.yaml
# timestamp: 2025-06-29T02:09:40+00:00
from __future__ import annotations
from enum import Enum
from typing import List, Optional
from pydantic import BaseModel, Field, RootModel
class Type(Enum):
unknown = 'unknown'
time = 'time'
session = 'session'
activityType = 'activityType'
activitySegment = 'activitySegment'
class AggregateBy(BaseModel):
dataSourceId: Optional[str] = Field(
None,
description='A data source ID to aggregate. Only data from the specified data source ID will be included in the aggregation. If specified, this data source must exist; the OAuth scopes in the supplied credentials must grant read access to this data type. The dataset in the response will have the same data source ID. Note: Data can be aggregated by either the dataTypeName or the dataSourceId, not both.',
)
dataTypeName: Optional[str] = Field(
None,
description='The data type to aggregate. All data sources providing this data type will contribute data to the aggregation. The response will contain a single dataset for this data type name. The dataset will have a data source ID of derived::com.google.android.gms:aggregated. If the user has no data for this data type, an empty data set will be returned. Note: Data can be aggregated by either the dataTypeName or the dataSourceId, not both.',
)
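# Hedged usage sketch (not emitted by the generator): per the field
# descriptions above, an AggregateBy should set exactly one of dataTypeName
# or dataSourceId, never both. The step-count identifiers below are the
# standard Fitness API names, used here purely as examples.
def _example_aggregate_by() -> List[AggregateBy]:
    return [
        AggregateBy(dataTypeName='com.google.step_count.delta'),
        # Equivalent targeting by a concrete source rather than by type:
        AggregateBy(
            dataSourceId='derived:com.google.step_count.delta:'
            'com.google.android.gms:estimated_steps'
        ),
    ]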
class FilteredDataQualityStandardEnum(Enum):
dataQualityUnknown = 'dataQualityUnknown'
dataQualityBloodPressureEsh2002 = 'dataQualityBloodPressureEsh2002'
dataQualityBloodPressureEsh2010 = 'dataQualityBloodPressureEsh2010'
dataQualityBloodPressureAami = 'dataQualityBloodPressureAami'
dataQualityBloodPressureBhsAA = 'dataQualityBloodPressureBhsAA'
dataQualityBloodPressureBhsAB = 'dataQualityBloodPressureBhsAB'
dataQualityBloodPressureBhsBA = 'dataQualityBloodPressureBhsBA'
dataQualityBloodPressureBhsBB = 'dataQualityBloodPressureBhsBB'
dataQualityBloodGlucoseIso151972003 = 'dataQualityBloodGlucoseIso151972003'
dataQualityBloodGlucoseIso151972013 = 'dataQualityBloodGlucoseIso151972013'
class Application(BaseModel):
detailsUrl: Optional[str] = Field(
None,
description='An optional URI that can be used to link back to the application.',
)
name: Optional[str] = Field(
None,
        description='The name of this application. This is required for REST clients, but we do not enforce uniqueness of this name. It is provided as a matter of convenience for other developers who would like to identify which REST client created an Application or Data Source.',
)
packageName: Optional[str] = Field(
None,
description='Package name for this application. This is used as a unique identifier when created by Android applications, but cannot be specified by REST clients. REST clients will have their developer project number reflected into the Data Source data stream IDs, instead of the packageName.',
)
version: Optional[str] = Field(
None,
description='Version of the application. You should update this field whenever the application changes in a way that affects the computation of the data.',
)
class BucketByActivity(BaseModel):
activityDataSourceId: Optional[str] = Field(
None,
description='The default activity stream will be used if a specific activityDataSourceId is not specified.',
)
minDurationMillis: Optional[str] = Field(
None,
description='Specifies that only activity segments of duration longer than minDurationMillis are considered and used as a container for aggregated data.',
)
class BucketBySession(BaseModel):
minDurationMillis: Optional[str] = Field(
None,
description='Specifies that only sessions of duration longer than minDurationMillis are considered and used as a container for aggregated data.',
)
class Type1(Enum):
day = 'day'
week = 'week'
month = 'month'
class BucketByTimePeriod(BaseModel):
timeZoneId: Optional[str] = Field(
None, description='org.joda.timezone.DateTimeZone'
)
type: Optional[Type1] = None
value: Optional[int] = None
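# Hedged sketch (not generated code): a one-week period bucket. timeZoneId
# follows Joda-Time zone naming (e.g. 'Europe/London'), per the field note
# above; the zone chosen here is an arbitrary example.
def _example_period() -> BucketByTimePeriod:
    return BucketByTimePeriod(type=Type1.week, value=1, timeZoneId='Europe/London')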
class DataQualityStandardEnum(Enum):
dataQualityUnknown = 'dataQualityUnknown'
dataQualityBloodPressureEsh2002 = 'dataQualityBloodPressureEsh2002'
dataQualityBloodPressureEsh2010 = 'dataQualityBloodPressureEsh2010'
dataQualityBloodPressureAami = 'dataQualityBloodPressureAami'
dataQualityBloodPressureBhsAA = 'dataQualityBloodPressureBhsAA'
dataQualityBloodPressureBhsAB = 'dataQualityBloodPressureBhsAB'
dataQualityBloodPressureBhsBA = 'dataQualityBloodPressureBhsBA'
dataQualityBloodPressureBhsBB = 'dataQualityBloodPressureBhsBB'
dataQualityBloodGlucoseIso151972003 = 'dataQualityBloodGlucoseIso151972003'
dataQualityBloodGlucoseIso151972013 = 'dataQualityBloodGlucoseIso151972013'
class Type2(Enum):
raw = 'raw'
derived = 'derived'
class Format(Enum):
integer = 'integer'
floatPoint = 'floatPoint'
string = 'string'
map = 'map'
integerList = 'integerList'
floatList = 'floatList'
blob = 'blob'
class DataTypeField(BaseModel):
format: Optional[Format] = Field(
None,
description='The different supported formats for each field in a data type.',
)
name: Optional[str] = Field(
None,
description='Defines the name and format of data. Unlike data type names, field names are not namespaced, and only need to be unique within the data type.',
)
optional: Optional[bool] = None
class Type3(Enum):
unknown = 'unknown'
phone = 'phone'
tablet = 'tablet'
watch = 'watch'
chestStrap = 'chestStrap'
scale = 'scale'
headMounted = 'headMounted'
smartDisplay = 'smartDisplay'
class Device(BaseModel):
manufacturer: Optional[str] = Field(
None, description='Manufacturer of the product/hardware.'
)
model: Optional[str] = Field(
None, description='End-user visible model name for the device.'
)
type: Optional[Type3] = Field(
None, description='A constant representing the type of the device.'
)
uid: Optional[str] = Field(
None,
description='The serial number or other unique ID for the hardware. This field is obfuscated when read by any REST or Android client that did not create the data source. Only the data source creator will see the uid field in clear and normal form. The obfuscation preserves equality; that is, given two IDs, if id1 == id2, obfuscated(id1) == obfuscated(id2).',
)
version: Optional[str] = Field(
None, description='Version string for the device hardware/software.'
)
class MapValue(BaseModel):
fpVal: Optional[float] = Field(None, description='Floating point value.')
class Session(BaseModel):
activeTimeMillis: Optional[str] = Field(
None,
        description='Session active time. While start_time_millis and end_time_millis define the full session time, the active time can be shorter and specified by active_time_millis. If the inactive time during the session is known, it should also be inserted via a com.google.activity.segment data point with a STILL activity value.',
)
activityType: Optional[int] = Field(
None, description='The type of activity this session represents.'
)
application: Optional[Application] = Field(
None, description='The application that created the session.'
)
description: Optional[str] = Field(
None, description='A description for this session.'
)
endTimeMillis: Optional[str] = Field(
None, description='An end time, in milliseconds since epoch, inclusive.'
)
id: Optional[str] = Field(
None,
description='A client-generated identifier that is unique across all sessions owned by this particular user.',
)
modifiedTimeMillis: Optional[str] = Field(
None,
description='A timestamp that indicates when the session was last modified.',
)
name: Optional[str] = Field(
None, description='A human readable name of the session.'
)
startTimeMillis: Optional[str] = Field(
None, description='A start time, in milliseconds since epoch, inclusive.'
)
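# Hedged sketch (not generated code): a minimal Session. Timestamps are
# millisecond-epoch strings, as the field descriptions above note; the id
# and times are arbitrary examples, and 8 is the Fitness API activity code
# for running. activeTimeMillis is shorter than the full session window.
def _example_session() -> Session:
    return Session(
        id='example-session-1',
        name='Morning run',
        activityType=8,
        startTimeMillis='1714521600000',
        endTimeMillis='1714525200000',   # one hour after the start
        activeTimeMillis='3300000',      # 55 minutes of active time
    )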
class ValueMapValEntry(BaseModel):
key: Optional[str] = None
value: Optional[MapValue] = None
class FieldXgafv(Enum):
field_1 = '1'
field_2 = '2'
class Alt(Enum):
json = 'json'
media = 'media'
proto = 'proto'
class DataTypeName(RootModel[List[str]]):
root: List[str]
class ActivityType(RootModel[List[int]]):
root: List[int]
class BucketByTime(BaseModel):
durationMillis: Optional[str] = Field(
None,
description='Specifies that result buckets aggregate data by exactly durationMillis time frames. Time frames that contain no data will be included in the response with an empty dataset.',
)
period: Optional[BucketByTimePeriod] = None
class DataType(BaseModel):
field: Optional[List[DataTypeField]] = Field(
None, description='A field represents one dimension of a data type.'
)
name: Optional[str] = Field(
None,
description='Each data type has a unique, namespaced, name. All data types in the com.google namespace are shared as part of the platform.',
)
class ListSessionsResponse(BaseModel):
deletedSession: Optional[List[Session]] = Field(
None,
description='If includeDeleted is set to true in the request, and startTime and endTime are omitted, this will include sessions which were deleted since the last sync.',
)
hasMoreData: Optional[bool] = Field(
None,
        description='Flag to indicate that the server has more data to transfer. DO NOT USE THIS FIELD. It is never populated in responses from the server.',
)
nextPageToken: Optional[str] = Field(
None,
description='The sync token which is used to sync further changes. This will only be provided if both startTime and endTime are omitted from the request.',
)
session: Optional[List[Session]] = Field(
None,
description='Sessions with an end time that is between startTime and endTime of the request.',
)
class Value(BaseModel):
fpVal: Optional[float] = Field(
None,
description='Floating point value. When this is set, other values must not be set.',
)
intVal: Optional[int] = Field(
None,
description='Integer value. When this is set, other values must not be set.',
)
mapVal: Optional[List[ValueMapValEntry]] = Field(
None,
description='Map value. The valid key space and units for the corresponding value of each entry should be documented as part of the data type definition. Keys should be kept small whenever possible. Data streams with large keys and high data frequency may be down sampled.',
)
stringVal: Optional[str] = Field(
None,
description='String value. When this is set, other values must not be set. Strings should be kept small whenever possible. Data streams with large string values and high data frequency may be down sampled.',
)
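# Hedged sketch (not generated code): the descriptions above make Value a
# one-of, but nothing here enforces it, so callers must set exactly one of
# fpVal, intVal, mapVal, or stringVal per instance. The sample values are
# arbitrary.
def _example_values() -> List[Value]:
    return [
        Value(intVal=120),   # e.g. a step count
        Value(fpVal=72.5),   # e.g. a weight in kilograms
        Value(mapVal=[ValueMapValEntry(key='average', value=MapValue(fpVal=68.0))]),
    ]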
class AggregateRequest(BaseModel):
aggregateBy: Optional[List[AggregateBy]] = Field(
None,
description='The specification of data to be aggregated. At least one aggregateBy spec must be provided. All data that is specified will be aggregated using the same bucketing criteria. There will be one dataset in the response for every aggregateBy spec.',
)
bucketByActivitySegment: Optional[BucketByActivity] = Field(
None,
        description='Specifies that data be aggregated for each activity segment recorded for a user. Similar to bucketByActivityType, but bucketing is done for each activity segment rather than all segments of the same type. Mutually exclusive of other bucketing specifications.',
)
bucketByActivityType: Optional[BucketByActivity] = Field(
None,
        description='Specifies that data be aggregated by the type of activity being performed when the data was recorded. All data that was recorded during a certain activity type (for the given time range) will be aggregated into the same bucket. Data that was recorded while the user was not active will not be included in the response. Mutually exclusive of other bucketing specifications.',
)
bucketBySession: Optional[BucketBySession] = Field(
None,
description='Specifies that data be aggregated by user sessions. Data that does not fall within the time range of a session will not be included in the response. Mutually exclusive of other bucketing specifications.',
)
bucketByTime: Optional[BucketByTime] = Field(
None,
description='Specifies that data be aggregated by a single time interval. Mutually exclusive of other bucketing specifications.',
)
endTimeMillis: Optional[str] = Field(
None,
        description='The end of a window of time. Data that intersects with this time window will be aggregated. The time is in milliseconds since epoch, inclusive. The maximum allowed difference between start_time_millis and end_time_millis is 7776000000 (roughly 90 days).',
)
filteredDataQualityStandard: Optional[List[FilteredDataQualityStandardEnum]] = (
Field(None, description='DO NOT POPULATE THIS FIELD. It is ignored.')
)
startTimeMillis: Optional[str] = Field(
None,
description='The start of a window of time. Data that intersects with this time window will be aggregated. The time is in milliseconds since epoch, inclusive.',
)
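# Hedged sketch (not generated code): a daily step-count aggregation over a
# fixed window. Exactly one bucketBy* field is set, matching the "mutually
# exclusive" notes above; the epoch values are arbitrary examples spanning
# 31 days, well inside the documented 90-day maximum.
def _example_aggregate_request() -> AggregateRequest:
    return AggregateRequest(
        aggregateBy=[AggregateBy(dataTypeName='com.google.step_count.delta')],
        bucketByTime=BucketByTime(durationMillis='86400000'),  # one day
        startTimeMillis='1714521600000',
        endTimeMillis='1717200000000',
    )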
class DataPoint(BaseModel):
computationTimeMillis: Optional[str] = Field(
None, description='DO NOT USE THIS FIELD. It is ignored, and not stored.'
)
dataTypeName: Optional[str] = Field(
None,
description='The data type defining the format of the values in this data point.',
)
endTimeNanos: Optional[str] = Field(
None,
description='The end time of the interval represented by this data point, in nanoseconds since epoch.',
)
modifiedTimeMillis: Optional[str] = Field(
None,
description='Indicates the last time this data point was modified. Useful only in contexts where we are listing the data changes, rather than representing the current state of the data.',
)
originDataSourceId: Optional[str] = Field(
None,
description='If the data point is contained in a dataset for a derived data source, this field will be populated with the data source stream ID that created the data point originally. WARNING: do not rely on this field for anything other than debugging. The value of this field, if it is set at all, is an implementation detail and is not guaranteed to remain consistent.',
)
rawTimestampNanos: Optional[str] = Field(
None, description='The raw timestamp from the original SensorEvent.'
)
startTimeNanos: Optional[str] = Field(
None,
description='The start time of the interval represented by this data point, in nanoseconds since epoch.',
)
value: Optional[List[Value]] = Field(
None,
description="Values of each data type field for the data point. It is expected that each value corresponding to a data type field will occur in the same order that the field is listed with in the data type specified in a data source. Only one of integer and floating point fields will be populated, depending on the format enum value within data source's type field.",
)
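# Hedged sketch (not generated code): per the value description above, the
# entries in `value` follow the field order of the point's data type. For
# com.google.step_count.delta, which has a single integer field, that is a
# single intVal entry. The nanosecond-epoch strings are arbitrary examples.
def _example_data_point() -> DataPoint:
    return DataPoint(
        dataTypeName='com.google.step_count.delta',
        startTimeNanos='1714521600000000000',
        endTimeNanos='1714521660000000000',  # a one-minute interval
        value=[Value(intVal=120)],
    )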
class DataSource(BaseModel):
application: Optional[Application] = Field(
None,
description='Information about an application which feeds sensor data into the platform.',
)
dataQualityStandard: Optional[List[DataQualityStandardEnum]] = Field(
None,
description='DO NOT POPULATE THIS FIELD. It is never populated in responses from the platform, and is ignored in queries. It will be removed in a future version entirely.',
)
dataStreamId: Optional[str] = Field(
None,
description="A unique identifier for the data stream produced by this data source. The identifier includes: - The physical device's manufacturer, model, and serial number (UID). - The application's package name or name. Package name is used when the data source was created by an Android application. The developer project number is used when the data source was created by a REST client. - The data source's type. - The data source's stream name. Note that not all attributes of the data source are used as part of the stream identifier. In particular, the version of the hardware/the application isn't used. This allows us to preserve the same stream through version updates. This also means that two DataSource objects may represent the same data stream even if they're not equal. The exact format of the data stream ID created by an Android application is: type:dataType.name:application.packageName:device.manufacturer:device.model:device.uid:dataStreamName The exact format of the data stream ID created by a REST client is: type:dataType.name:developer project number:device.manufacturer:device.model:device.uid:dataStreamName When any of the optional fields that make up the data stream ID are absent, they will be omitted from the data stream ID. The minimum viable data stream ID would be: type:dataType.name:developer project number Finally, the developer project number and device UID are obfuscated when read by any REST or Android client that did not create the data source. Only the data source creator will see the developer project number in clear and normal form. This means a client will see a different set of data_stream_ids than another client with different credentials.",
)
dataStreamName: Optional[str] = Field(
None,
description='The stream name uniquely identifies this particular data source among other data sources of the same type from the same underlying producer. Setting the stream name is optional, but should be done whenever an application exposes two streams for the same data type, or when a device has two equivalent sensors.',
)
dataType: Optional[DataType] = Field(
None,
description='The data type defines the schema for a stream of data being collected by, inserted into, or queried from the Fitness API.',
)
device: Optional[Device] = Field(
None,
description='Representation of an integrated device (such as a phone or a wearable) that can hold sensors.',
)
name: Optional[str] = Field(
None, description='An end-user visible name for this data source.'
)
type: Optional[Type2] = Field(
None,
description='A constant describing the type of this data source. Indicates whether this data source produces raw or derived data.',
)
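# Hedged sketch (not generated code): a raw step-count source backed by a
# phone sensor. Per the dataStreamId description above, the server derives
# the stream ID from these attributes (for an Android app, roughly
# 'raw:com.google.step_count.delta:com.example.app:ExampleCo:PhoneX:1000001:example-stream');
# all names, the package, and the uid here are invented for illustration.
def _example_data_source() -> DataSource:
    return DataSource(
        type=Type2.raw,
        dataType=DataType(
            name='com.google.step_count.delta',
            field=[DataTypeField(name='steps', format=Format.integer)],
        ),
        application=Application(packageName='com.example.app'),
        device=Device(
            manufacturer='ExampleCo',
            model='PhoneX',
            type=Type3.phone,
            uid='1000001',
        ),
        dataStreamName='example-stream',
    )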
class Dataset(BaseModel):
dataSourceId: Optional[str] = Field(
None,
description='The data stream ID of the data source that created the points in this dataset.',
)
maxEndTimeNs: Optional[str] = Field(
None,
description='The largest end time of all data points in this possibly partial representation of the dataset. Time is in nanoseconds from epoch. This should also match the second part of the dataset identifier.',
)
minStartTimeNs: Optional[str] = Field(
None,
description='The smallest start time of all data points in this possibly partial representation of the dataset. Time is in nanoseconds from epoch. This should also match the first part of the dataset identifier.',
)
nextPageToken: Optional[str] = Field(
None,
description='This token will be set when a dataset is received in response to a GET request and the dataset is too large to be included in a single response. Provide this value in a subsequent GET request to return the next page of data points within this dataset.',
)
point: Optional[List[DataPoint]] = Field(
None,
description='A partial list of data points contained in the dataset, ordered by endTimeNanos. This list is considered complete when retrieving a small dataset and partial when patching a dataset or retrieving a dataset that is too large to include in a single response.',
)
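# Hedged pagination sketch (not generated code): per the nextPageToken
# description above, the token from one response is supplied on the next
# GET for the same dataset. `fetch_page` is an assumed stand-in for that
# HTTP call; these models define no transport.
from typing import Callable, Iterator

def _iter_dataset_points(
    fetch_page: Callable[[Optional[str]], Dataset],
) -> Iterator[DataPoint]:
    token: Optional[str] = None
    while True:
        page = fetch_page(token)
        yield from page.point or []
        token = page.nextPageToken
        if not token:  # an absent token means the final page was returned
            break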
class ListDataPointChangesResponse(BaseModel):
dataSourceId: Optional[str] = Field(
None,
description='The data stream ID of the data source with data point changes.',
)
deletedDataPoint: Optional[List[DataPoint]] = Field(
None,
description='Deleted data points for the user. Note, for modifications this should be parsed before handling insertions.',
)
insertedDataPoint: Optional[List[DataPoint]] = Field(
None, description='Inserted data points for the user.'
)
nextPageToken: Optional[str] = Field(
None,
description='The continuation token, which is used to page through large result sets. Provide this value in a subsequent request to return the next page of results.',
)
class ListDataSourcesResponse(BaseModel):
dataSource: Optional[List[DataSource]] = Field(
None, description='A previously created data source.'
)
class AggregateBucket(BaseModel):
activity: Optional[int] = Field(
None,
description='Available for Bucket.Type.ACTIVITY_TYPE, Bucket.Type.ACTIVITY_SEGMENT',
)
dataset: Optional[List[Dataset]] = Field(
None, description='There will be one dataset per AggregateBy in the request.'
)
endTimeMillis: Optional[str] = Field(
None,
description='The end time for the aggregated data, in milliseconds since epoch, inclusive.',
)
session: Optional[Session] = Field(
None, description='Available for Bucket.Type.SESSION'
)
startTimeMillis: Optional[str] = Field(
None,
description='The start time for the aggregated data, in milliseconds since epoch, inclusive.',
)
type: Optional[Type] = Field(
None,
description='The type of a bucket signifies how the data aggregation is performed in the bucket.',
)
class AggregateResponse(BaseModel):
bucket: Optional[List[AggregateBucket]] = Field(
None, description='A list of buckets containing the aggregated data.'
)
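# Hedged traversal sketch (not generated code): summing every integer value
# in an AggregateResponse. With the request sketched earlier, each bucket
# covers one day and carries one dataset per aggregateBy entry.
def _total_int_values(response: AggregateResponse) -> int:
    total = 0
    for bucket in response.bucket or []:
        for dataset in bucket.dataset or []:
            for point in dataset.point or []:
                for value in point.value or []:
                    if value.intVal is not None:
                        total += value.intVal
    return total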