Major update: support Timeline.json semanticSegments with auto-detection
Some checks failed
pre-commit / pre-commit (push) Has been cancelled
tests / Detect unreleased dependencies (push) Has been cancelled
tests / test with OCB (push) Has been cancelled
tests / test with Odoo (push) Has been cancelled

This commit is contained in:
2026-03-14 02:11:21 +00:00
parent b57b2e75ab
commit 37b3540eb8

View File

@@ -2,33 +2,23 @@ from odoo import models, fields, api, _
from odoo.exceptions import UserError
import json
import base64
import bisect
import re
from datetime import datetime, timedelta
from math import radians, sin, cos, sqrt, atan2
from collections import Counter
VEHICLE_ACTIVITIES = {'IN_VEHICLE', 'IN_ROAD_VEHICLE', 'IN_RAIL_VEHICLE', 'IN_TWO_WHEELER_VEHICLE', 'IN_PASSENGER_VEHICLE'}
WALKING_ACTIVITIES = {'WALKING', 'ON_FOOT', 'RUNNING'}
CYCLING_ACTIVITIES = {'ON_BICYCLE'}
VEHICLE_ACTIVITIES = {'IN_VEHICLE', 'IN_ROAD_VEHICLE', 'IN_RAIL_VEHICLE', 'IN_TWO_WHEELER_VEHICLE'}
WALKING_ACTIVITIES = {'WALKING', 'ON_FOOT', 'RUNNING', 'ON_BICYCLE'}
CATEGORY_LABELS = {
'IN_PASSENGER_VEHICLE': 'In Vehicle',
'IN_VEHICLE': 'In Vehicle',
'IN_ROAD_VEHICLE': 'In Vehicle',
'IN_RAIL_VEHICLE': 'Rail / Transit',
'IN_TWO_WHEELER_VEHICLE': 'Motorcycle / Scooter',
'WALKING': 'Walking',
'ON_FOOT': 'Walking',
'RUNNING': 'Running',
'ON_BICYCLE': 'Cycling',
'STILL': 'Stationary',
'UNKNOWN': 'Unknown',
'EXITING_VEHICLE': 'In Vehicle',
'TILTING': 'Unknown',
# Maps Timeline.json visit ``semanticType`` values to the category label
# stored on the location log; an empty string means "no category".
SEMANTIC_TYPE_CATEGORY = {
    'HOME': 'Home',
    'INFERRED_HOME': 'Home',  # inferred variants collapse into the base label
    'WORK': 'Work',
    'INFERRED_WORK': 'Work',
    'SEARCHED_ADDRESS': 'Searched Address',
    'UNKNOWN': '',
}
# Default clustering radius in meters for raw-signal proximity grouping;
# mirrors the wizard's proximity_meters field default of 200.
PROXIMITY_METERS = 200
def _haversine_miles(lat1, lon1, lat2, lon2):
R = 3958.8
@@ -47,18 +37,25 @@ def _get_travel_mode(activity_type):
return 'driving'
if activity_type in WALKING_ACTIVITIES:
return 'walking'
if activity_type in CYCLING_ACTIVITIES:
return 'cycling'
return 'unknown'
def _dominant_activity(activities, start_ts, end_ts):
"""Get dominant activity type between two timestamps."""
window = [a for a in activities if start_ts <= a['ts'] <= end_ts]
if not window:
return 'UNKNOWN'
counts = Counter(a['type'] for a in window)
return counts.most_common(1)[0][0]
def _parse_latlng(latlng_str):
"""Parse coordinate string like '30.0381046 deg, -95.5899101 deg' handling encoding issues."""
nums = re.findall(r'-?\d+\.\d+', latlng_str)
if len(nums) >= 2:
return float(nums[0]), float(nums[1])
return None, None
def _parse_ts(ts_str):
"""Parse ISO 8601 timestamp to naive datetime."""
if not ts_str:
return None
try:
return datetime.fromisoformat(ts_str).replace(tzinfo=None)
except Exception:
return None
class WtImportTimelineWizard(models.TransientModel):
@@ -75,7 +72,7 @@ class WtImportTimelineWizard(models.TransientModel):
proximity_meters = fields.Integer(
string='Location Proximity (meters)',
default=200,
help='GPS positions within this distance are grouped as the same location'
help='For raw signal files only: GPS positions within this distance are grouped as one location'
)
geocode = fields.Boolean(
string='Resolve Addresses via OpenStreetMap',
@@ -90,34 +87,45 @@ class WtImportTimelineWizard(models.TransientModel):
except Exception as e:
raise UserError(_('Invalid JSON file: %s') % str(e))
stops = self._parse_timeline(data, self.proximity_meters)
# Auto-detect format
if 'semanticSegments' in data:
stops = self._parse_semantic_timeline(data)
elif 'timelineEdits' in data:
stops = self._parse_raw_timeline(data, self.proximity_meters)
else:
raise UserError(_('Unrecognized format. Expected Timeline.json (semanticSegments) or Timeline Edits.json (timelineEdits).'))
if not stops:
raise UserError(_('No location stops found in the uploaded file.'))
# Filter by minimum stop duration
min_secs = self.min_stop_minutes * 60
stops = [s for s in stops
if (s['departed_at'] - s['arrived_at']).total_seconds() >= min_secs]
if s.get('arrived_at') and s.get('departed_at')
and (s['departed_at'] - s['arrived_at']).total_seconds() >= min_secs]
if not stops:
raise UserError(_('No stops found matching the minimum duration filter.'))
stops.sort(key=lambda s: s['arrived_at'])
# Compute distances and travel times between consecutive stops
for i, stop in enumerate(stops):
if i > 0:
prev = stops[i - 1]
stop['distance_from_previous'] = _haversine_miles(
prev['lat'], prev['lng'], stop['lat'], stop['lng']
)
if prev.get('lat') and stop.get('lat'):
stop['distance_from_previous'] = _haversine_miles(
prev['lat'], prev['lng'], stop['lat'], stop['lng']
)
else:
stop['distance_from_previous'] = 0.0
travel_delta = stop['arrived_at'] - prev['departed_at']
stop['travel_time_from_previous'] = max(
travel_delta.total_seconds() / 3600, 0.0
)
stop['travel_time_from_previous'] = max(travel_delta.total_seconds() / 3600, 0.0)
else:
stop['distance_from_previous'] = 0.0
stop['travel_time_from_previous'] = 0.0
# Skip duplicates
# Skip existing records
LocationLog = self.env['wt.location.log']
existing = set(
r.arrived_at.strftime('%Y-%m-%d %H:%M:%S')
@@ -128,24 +136,22 @@ class WtImportTimelineWizard(models.TransientModel):
created_ids = []
skipped = 0
for stop in stops:
arrived = stop['arrived_at'].replace(tzinfo=None)
departed = stop['departed_at'].replace(tzinfo=None)
arrived_str = arrived.strftime('%Y-%m-%d %H:%M:%S')
arrived_str = stop['arrived_at'].strftime('%Y-%m-%d %H:%M:%S')
if arrived_str in existing:
skipped += 1
continue
log = LocationLog.create({
'date': arrived.date(),
'arrived_at': arrived,
'departed_at': departed,
'latitude': stop['lat'],
'longitude': stop['lng'],
'travel_mode': stop.get('travel_mode', 'unknown'),
'date': stop['arrived_at'].date(),
'arrived_at': stop['arrived_at'],
'departed_at': stop['departed_at'],
'latitude': stop.get('lat') or 0.0,
'longitude': stop.get('lng') or 0.0,
'place_name': stop.get('place_name', ''),
'category': stop.get('category', ''),
'distance_from_previous': stop['distance_from_previous'],
'travel_time_from_previous': stop['travel_time_from_previous'],
'travel_mode': stop.get('travel_mode', 'unknown'),
'distance_from_previous': stop.get('distance_from_previous', 0.0),
'travel_time_from_previous': stop.get('travel_time_from_previous', 0.0),
'source': 'google_timeline',
})
created_ids.append(log.id)
@@ -155,7 +161,7 @@ class WtImportTimelineWizard(models.TransientModel):
raise UserError(_('All %d stops already exist. Nothing new to import.') % skipped)
created = LocationLog.browse(created_ids)
if self.geocode:
if self.geocode and created:
created.action_geocode()
return {
@@ -167,18 +173,51 @@ class WtImportTimelineWizard(models.TransientModel):
'target': 'current',
}
def _parse_timeline(self, data, proximity_meters=200):
def _parse_semantic_timeline(self, data):
    """
    Convert Timeline.json ``semanticSegments`` into stop dicts.

    Only ``visit`` segments become location stops; travel segments
    (``timelinePath``) are skipped, since inter-stop distance is derived
    later from the stop coordinates themselves.
    """
    stops = []
    for segment in data.get('semanticSegments', []):
        visit = segment.get('visit')
        if not visit:
            # Not a visit (e.g. a timelinePath travel segment) — skip.
            continue
        arrived = _parse_ts(segment.get('startTime'))
        departed = _parse_ts(segment.get('endTime'))
        if arrived is None or departed is None:
            continue
        top = visit.get('topCandidate', {})
        sem_type = top.get('semanticType', '')
        raw_latlng = top.get('placeLocation', {}).get('latLng', '')
        if raw_latlng:
            lat, lng = _parse_latlng(raw_latlng)
        else:
            lat, lng = None, None
        # Known semantic types map to curated labels; anything else falls
        # back to a human-readable form ('FOO_BAR' -> 'Foo Bar').
        category = SEMANTIC_TYPE_CATEGORY.get(sem_type, '')
        if not category and sem_type:
            category = sem_type.replace('_', ' ').title()
        stops.append({
            'arrived_at': arrived,
            'departed_at': departed,
            'lat': lat,
            'lng': lng,
            'place_name': '',
            'category': category,
            'travel_mode': 'unknown',
        })
    return stops
def _parse_raw_timeline(self, data, proximity_meters=200):
"""Parse Timeline Edits.json raw signal format using proximity clustering."""
positions = []
activities = []
for entry in data.get('timelineEdits', []):
raw = entry.get('rawSignal', {}).get('signal', {})
if 'position' in raw:
pos = raw['position']
point = pos.get('point', {})
@@ -188,7 +227,6 @@ class WtImportTimelineWizard(models.TransientModel):
if ts_str and lat and lng:
ts = datetime.fromisoformat(ts_str.replace('Z', '+00:00'))
positions.append({'ts': ts, 'lat': lat, 'lng': lng})
elif 'activityRecord' in raw:
ar = raw['activityRecord']
ts_str = ar.get('timestamp', '')
@@ -204,51 +242,42 @@ class WtImportTimelineWizard(models.TransientModel):
positions.sort(key=lambda x: x['ts'])
activities.sort(key=lambda x: x['ts'])
# Cluster consecutive positions within proximity_meters
def dominant_mode(start_ts, end_ts):
window = [a for a in activities if start_ts <= a['ts'] <= end_ts]
if not window:
return 'unknown'
counts = Counter(a['type'] for a in window)
return _get_travel_mode(counts.most_common(1)[0][0])
stops = []
current_cluster = [positions[0]]
for pos in positions[1:]:
prev = current_cluster[-1]
dist = _distance_meters(prev['lat'], prev['lng'], pos['lat'], pos['lng'])
if dist <= proximity_meters:
if _distance_meters(prev['lat'], prev['lng'], pos['lat'], pos['lng']) <= proximity_meters:
current_cluster.append(pos)
else:
avg_lat = sum(p['lat'] for p in current_cluster) / len(current_cluster)
avg_lng = sum(p['lng'] for p in current_cluster) / len(current_cluster)
# Activity between this stop and next = travel mode
act_type = _dominant_activity(
activities, current_cluster[-1]['ts'], pos['ts']
)
travel_mode = _get_travel_mode(act_type)
category = CATEGORY_LABELS.get(act_type, act_type.replace('_', ' ').title())
stops.append({
'arrived_at': current_cluster[0]['ts'],
'departed_at': current_cluster[-1]['ts'],
'lat': avg_lat,
'lng': avg_lng,
'travel_mode': travel_mode,
'category': category,
'arrived_at': current_cluster[0]['ts'].replace(tzinfo=None),
'departed_at': current_cluster[-1]['ts'].replace(tzinfo=None),
'lat': avg_lat, 'lng': avg_lng,
'travel_mode': dominant_mode(current_cluster[-1]['ts'], pos['ts']),
'category': '', 'place_name': '',
})
current_cluster = [pos]
# Last cluster
if current_cluster:
avg_lat = sum(p['lat'] for p in current_cluster) / len(current_cluster)
avg_lng = sum(p['lng'] for p in current_cluster) / len(current_cluster)
stops.append({
'arrived_at': current_cluster[0]['ts'],
'departed_at': current_cluster[-1]['ts'],
'lat': avg_lat,
'lng': avg_lng,
'travel_mode': 'unknown',
'category': '',
'arrived_at': current_cluster[0]['ts'].replace(tzinfo=None),
'departed_at': current_cluster[-1]['ts'].replace(tzinfo=None),
'lat': avg_lat, 'lng': avg_lng,
'travel_mode': 'unknown', 'category': '', 'place_name': '',
})
# Estimate duration for single-position stops
for i, stop in enumerate(stops):
if stop['arrived_at'] == stop['departed_at'] and i + 1 < len(stops):
gap = (stops[i + 1]['arrived_at'] - stop['arrived_at']).total_seconds()