Final Semester
I am in my final semester of university. Between the capstone project, an internship, and coursework, every day is busy. I used to imagine that by my last semester I would be lounging on a picnic mat at Ttukseom and coasting through school, but this is turning out to be the busiest stretch of my entire time here. Still, I am grateful to have goals and things to accomplish, and I hope to wrap everything up well by the end of the term.
Implementation Overview
I wrote a Lambda function that loads the stored Fitbit data from the RDS MySQL DB and generates visualization graphs and a report.
A brief summary of this week's work:
- Create a MySQL DB with the AWS RDS service.
- Create the database and tables with SQL and insert mock data.
- Load the data from the DB with the pymysql library.
- Visualize the data as graphs with the matplotlib library.
- Save the generated graphs to AWS S3 (the graphs directory of the fitbit-graph-s3-bucket bucket).
- Call the GPT API with the openai library to generate a text analysis of the data.
- Generate a PDF containing the graph images and analysis text with the fpdf2 library.
- Save the generated health report to AWS S3 (the healthreport directory of the fitbit-report-s3-bucket bucket).
Changes from the Original Plan
I had previously written test code that generated the PDF inside the Flutter application. As development progressed, however, it became clear that bouncing back and forth with the Flutter app for PDF generation and S3 storage was inefficient. The Lambda function can generate the report and save it straight to S3, and the Flutter app only needs to connect to S3 to provide a viewer and a download feature.
From here on I will document the implementation in detail.
I may need to tear down what I have built and reproduce it later during development, so I am recording everything carefully.
AWS RDS - Creating the MySQL DB
Create a MySQL database with the RDS service.
Enter the RDS information in MySQL Workbench. Put the RDS endpoint in the Hostname field, run Test Connection, and click OK. When connecting to the DB afterwards, enter the password specified during RDS creation.
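It can also be worth verifying that the instance is reachable from Python before wiring anything into Lambda. A minimal sketch with pymysql; the endpoint, user, and password are placeholders for the values chosen when the RDS instance was created.
import pymysql

# Placeholder connection values; replace with the actual RDS endpoint and credentials
conn = pymysql.connect(
    host="your-rds-endpoint.ap-northeast-2.rds.amazonaws.com",
    user="admin",
    password="your-password",
    connect_timeout=5,
)
try:
    with conn.cursor() as cursor:
        cursor.execute("SELECT VERSION()")
        print(cursor.fetchone())  # prints the MySQL server version if the connection works
finally:
    conn.close()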
DB and Table Creation & Mock Data Insertion Code
After connecting, create the database and tables with the SQL below.
-- 1. Create and select the database
CREATE DATABASE IF NOT EXISTS ELDERLY_HEALTHCARE CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
USE ELDERLY_HEALTHCARE;
-- 2. users
CREATE TABLE users (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
username VARCHAR(10) NOT NULL,
birth_date DATE NOT NULL,
gender ENUM('male', 'female') NOT NULL,
address VARCHAR(127),
phone_number VARCHAR(15),
height FLOAT,
weight FLOAT,
profile_image_url VARCHAR(255),
profile_image_key VARCHAR(255),
breakfast_time TIME,
lunch_time TIME,
dinner_time TIME,
kakao_user_id VARCHAR(50),
role ENUM('senior', 'guardian', 'doctor') NOT NULL,
login_provider ENUM('fitbit', 'kakao') NOT NULL,
is_profile_complete BOOLEAN DEFAULT FALSE,
last_login TIMESTAMP,
guardian_code VARCHAR(18) UNIQUE,
doctor_code VARCHAR(18) UNIQUE,
status ENUM('active', 'deleted', 'inactive') DEFAULT 'active',
deleted_at TIMESTAMP NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
);
-- 3. user_alerts
CREATE TABLE user_alerts (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
user_id BIGINT NOT NULL,
alert_type ENUM('meal_reminder', 'report_ready', 'morning_alert', 'anomaly_alert', 'advice_alert', 'device_connect', 'device_battery'),
is_enabled BOOLEAN DEFAULT TRUE,
send_via_kakao BOOLEAN DEFAULT FALSE,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users(id)
);
-- 4. guardian_doctor_links
CREATE TABLE guardian_doctor_links (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
user_id BIGINT NOT NULL,
senior_id BIGINT NOT NULL,
role ENUM('guardian', 'doctor') NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users(id),
FOREIGN KEY (senior_id) REFERENCES users(id)
);
-- 5. fitbit_device
CREATE TABLE fitbit_device (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
user_id BIGINT NOT NULL,
device_id BIGINT NOT NULL,
device_version VARCHAR(50),
battery_level VARCHAR(50),
battery_status VARCHAR(50),
last_sync_time TIMESTAMP,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users(id)
);
-- 6. daily_missions
CREATE TABLE daily_missions (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
user_id BIGINT NOT NULL,
date DATE NOT NULL,
mission_quiz BOOLEAN DEFAULT FALSE,
mission_report BOOLEAN DEFAULT FALSE,
mission_exercise BOOLEAN DEFAULT FALSE,
mission_sleep BOOLEAN DEFAULT FALSE,
completed_count INT DEFAULT 0,
marker_type ENUM('none', 'seed', 'sprout', 'leaf', 'flowerbud', 'flower') DEFAULT 'none',
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
UNIQUE(user_id, date),
FOREIGN KEY (user_id) REFERENCES users(id)
);
-- 7. health_checkup_summary
CREATE TABLE health_checkup_summary (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
user_id BIGINT NOT NULL,
checkup_date DATE,
height FLOAT,
weight FLOAT,
bmi FLOAT,
blood_pressure VARCHAR(10),
fasting_blood_sugar FLOAT,
total_cholesterol FLOAT,
hdl_cholesterol FLOAT,
ldl_cholesterol FLOAT,
triglyceride FLOAT,
gfr FLOAT,
ast FLOAT,
alt FLOAT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users(id)
);
-- 8. health_checkup_records
CREATE TABLE health_checkup_records (
user_id BIGINT NOT NULL,
checkup_data JSON,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users(id)
);
-- 9. notifications
CREATE TABLE notifications (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
user_id BIGINT NOT NULL,
notification_type VARCHAR(50),
message TEXT,
triggered_by VARCHAR(50),
sent_at TIMESTAMP,
is_read BOOLEAN DEFAULT FALSE,
is_push_sent BOOLEAN DEFAULT FALSE,
is_kakao_sent BOOLEAN DEFAULT FALSE,
FOREIGN KEY (user_id) REFERENCES users(id)
);
-- 10. health_analyzer
CREATE TABLE health_analyzer (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
user_id BIGINT NOT NULL,
analysis_type ENUM('gpt', 'threshold'),
triggered_alert BOOLEAN DEFAULT FALSE,
analysis_timestamp TIMESTAMP,
trigger_details TEXT,
notification_id BIGINT,
FOREIGN KEY (user_id) REFERENCES users(id),
FOREIGN KEY (notification_id) REFERENCES notifications(id)
);
-- 11. chat_logs
CREATE TABLE chat_logs (
user_id BIGINT NOT NULL,
timestamp TIMESTAMP NOT NULL,
date DATE NOT NULL,
question TEXT NOT NULL,
multiple_choice_response TEXT NOT NULL,
text_response TEXT,
gpt_positive_analysis TEXT NOT NULL,
gpt_negative_analysis TEXT NOT NULL,
gpt_solution_motivation TEXT NOT NULL,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY(user_id, date),
FOREIGN KEY (user_id) REFERENCES users(id)
);
-- 12. daily_health_reports
CREATE TABLE daily_health_reports (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
user_id BIGINT NOT NULL,
report_date DATE NOT NULL,
report_status ENUM('pending', 'completed') DEFAULT 'pending',
overall_health_score FLOAT,
stress_score FLOAT,
total_exercise_time INT,
total_sleep_time INT,
avg_exercise_time FLOAT,
avg_heart_rate FLOAT,
calories_burned FLOAT,
sleep_score FLOAT,
spo2_variation FLOAT,
sleep_heart_rate FLOAT,
exercise_gpt_analysis TEXT,
sleep_disorder_insomnia_score FLOAT,
sleep_disorder_apnea_score FLOAT,
sleep_disorder_eds_score FLOAT,
sleep_disorder_insomnia_flag BOOLEAN,
sleep_disorder_apnea_flag BOOLEAN,
sleep_disorder_eds_flag BOOLEAN,
insomnia_graph_path VARCHAR(255),
sleep_apnea_graph_path VARCHAR(255),
eds_graph_path VARCHAR(255),
exercise_graph_path VARCHAR(255),
sleep_heartrate_path VARCHAR(255),
sleep_zone_graph_path VARCHAR(255),
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users(id)
);
-- 13. health_reports_pdf
CREATE TABLE health_reports_pdf (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
user_id BIGINT NOT NULL,
report_id BIGINT NOT NULL,
report_date DATE NOT NULL,
basic_pdf_key VARCHAR(255),
doctor_pdf_key VARCHAR(255),
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users(id),
FOREIGN KEY (report_id) REFERENCES daily_health_reports(id)
);
-- 14. fitbit_activity_data
CREATE TABLE fitbit_activity_data (
id BIGINT AUTO_INCREMENT PRIMARY KEY,
user_id BIGINT NOT NULL,
date DATETIME NOT NULL,
steps INT NOT NULL,
distance_km FLOAT NOT NULL,
calories_total FLOAT,
floors INT,
heart_rate FLOAT NOT NULL,
heart_rate_max INT,
heart_rate_min INT NOT NULL,
active_minutes INT NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
INDEX idx_user_time (user_id, date),
FOREIGN KEY (user_id) REFERENCES users(id)
);
-- 15. fitbit_activity_summary
CREATE TABLE fitbit_activity_summary (
id BIGINT AUTO_INCREMENT PRIMARY KEY,
user_id BIGINT NOT NULL,
date DATETIME NOT NULL,
total_steps INT,
total_distance FLOAT,
total_calories_out FLOAT,
total_activity_calories FLOAT,
sedentary_minutes INT,
lightly_active_minutes INT,
fairly_active_minutes INT,
very_active_minutes INT,
resting_heart_rate INT,
out_of_range_minutes INT,
fat_burn_minutes INT,
cardio_minutes INT,
peak_minutes INT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
INDEX idx_user_time (user_id, date),
FOREIGN KEY (user_id) REFERENCES users(id)
);
-- 16. fitbit_health_metrics
CREATE TABLE fitbit_health_metrics (
id BIGINT AUTO_INCREMENT PRIMARY KEY,
user_id BIGINT NOT NULL,
date DATETIME NOT NULL,
spo2 FLOAT,
hrv FLOAT,
rhr FLOAT,
respiratory_rate FLOAT,
skin_temperature FLOAT,
stress_score FLOAT,
readiness_score FLOAT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
INDEX idx_user_time (user_id, date),
FOREIGN KEY (user_id) REFERENCES users(id)
);
-- 17. fitbit_average
CREATE TABLE fitbit_average (
id BIGINT AUTO_INCREMENT PRIMARY KEY,
user_id BIGINT NOT NULL,
recorded_at DATE NOT NULL,
period_type ENUM('1D', '7D', '30D', '90D', '180D', '365D') NOT NULL,
avg_steps INT,
avg_calories_total FLOAT,
avg_distance_km FLOAT,
avg_floors INT,
avg_heart_rate FLOAT,
avg_resting_heart_rate INT,
avg_activity_duration FLOAT,
avg_sedentary_minutes INT,
avg_lightly_active_minutes INT,
avg_fairly_active_minutes INT,
avg_very_active_minutes INT,
avg_total_sleep_hours INT,
avg_deep_sleep_hours FLOAT,
avg_light_sleep_hours FLOAT,
avg_rem_sleep_hours FLOAT,
avg_awake_hours FLOAT,
avg_sleep_heart_rate FLOAT,
avg_sleep_breathing_rate FLOAT,
avg_sleep_spo2 FLOAT,
avg_spo2 FLOAT,
avg_hrv FLOAT,
avg_rhr FLOAT,
avg_respiratory_rate FLOAT,
avg_skin_temperature FLOAT,
avg_stress_score FLOAT,
avg_readiness_score FLOAT,
avg_sleep_score FLOAT NOT NULL,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
UNIQUE (user_id, recorded_at, period_type),
FOREIGN KEY (user_id) REFERENCES users(id)
);
-- 18. fitbit_sleep_data
CREATE TABLE fitbit_sleep_data (
id BIGINT AUTO_INCREMENT PRIMARY KEY,
user_id BIGINT NOT NULL,
date DATE NOT NULL,
session_start DATETIME NOT NULL,
session_end DATETIME NOT NULL,
total_sleep_hours FLOAT NOT NULL,
deep_sleep_hours FLOAT NOT NULL,
light_sleep_hours FLOAT NOT NULL,
rem_sleep_hours FLOAT NOT NULL,
awake_hours FLOAT NOT NULL,
awakenings_count INT DEFAULT 0,
time_in_bed_minutes INT,
minutes_asleep INT,
avg_heart_rate FLOAT NOT NULL,
avg_breathing_rate FLOAT NOT NULL,
avg_spo2 FLOAT NOT NULL,
overall_health_score FLOAT NOT NULL,
is_main_sleep BOOLEAN NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
INDEX idx_user_time (user_id, date),
FOREIGN KEY (user_id) REFERENCES users(id)
);
-- 19. comments
CREATE TABLE comments (
id BIGINT PRIMARY KEY AUTO_INCREMENT,
report_id BIGINT NOT NULL,
author_id BIGINT NOT NULL,
role ENUM('guardian', 'doctor') NOT NULL,
content TEXT NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
UNIQUE (report_id, author_id, role),
FOREIGN KEY (report_id) REFERENCES daily_health_reports(id),
FOREIGN KEY (author_id) REFERENCES users(id)
);
-- 20. s3_bucket
CREATE TABLE s3_bucket (
file_key VARCHAR(255) PRIMARY KEY,
bucket_name VARCHAR(50),
url VARCHAR(255),
storage_class VARCHAR(50),
file_type ENUM('pdf', 'image') NOT NULL,
category ENUM('basic_report', 'doctor_report', 'exercise_graph', 'sleep_graph', 'sleep_heart_graph', 'sleep_pattern_graph') NOT NULL,
related_report_id BIGINT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (related_report_id) REFERENCES daily_health_reports(id)
);
-- 21. fitbit_average_history
CREATE TABLE fitbit_average_history (
id BIGINT AUTO_INCREMENT PRIMARY KEY,
user_id BIGINT NOT NULL,
recorded_at DATE NOT NULL,
period_type ENUM('1D', '7D', '30D', '90D', '180D', '365D') NOT NULL,
avg_steps INT DEFAULT NULL,
avg_calories_total FLOAT DEFAULT NULL,
avg_distance_km FLOAT DEFAULT NULL,
avg_floors INT DEFAULT NULL,
avg_heart_rate FLOAT DEFAULT NULL,
avg_resting_heart_rate INT DEFAULT NULL,
avg_activity_duration FLOAT DEFAULT NULL,
avg_sedentary_minutes INT DEFAULT NULL,
avg_lightly_active_minutes INT DEFAULT NULL,
avg_fairly_active_minutes INT DEFAULT NULL,
avg_very_active_minutes INT DEFAULT NULL,
avg_total_sleep_hours INT DEFAULT NULL,
avg_deep_sleep_hours FLOAT DEFAULT NULL,
avg_light_sleep_hours FLOAT DEFAULT NULL,
avg_rem_sleep_hours FLOAT DEFAULT NULL,
avg_awake_hours FLOAT DEFAULT NULL,
avg_sleep_heart_rate FLOAT DEFAULT NULL,
avg_sleep_breathing_rate FLOAT DEFAULT NULL,
avg_sleep_spo2 FLOAT DEFAULT NULL,
avg_spo2 FLOAT DEFAULT NULL,
avg_hrv FLOAT DEFAULT NULL,
avg_rhr FLOAT DEFAULT NULL,
avg_respiratory_rate FLOAT DEFAULT NULL,
avg_skin_temperature FLOAT DEFAULT NULL,
avg_stress_score FLOAT DEFAULT NULL,
avg_readiness_score FLOAT DEFAULT NULL,
avg_sleep_score FLOAT NOT NULL,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
UNIQUE (user_id, recorded_at, period_type),
FOREIGN KEY (user_id) REFERENCES users(id)
);
Once everything has been created, insert the mock data with the SQL below. The fitbit_activity_data and fitbit_sleep_data tables have user_id as a foreign key, so at least one user must exist before inserting, or the inserts will fail.
INSERT INTO users (
id, username, birth_date, gender, address, phone_number,
height, weight, role, login_provider, is_profile_complete
)
VALUES (1, '최예름', '1950-10-04', 'female', '제주도 어딘가', '010-1234-5678', 160.5, 50.5, 'senior', 'fitbit', TRUE);
INSERT INTO fitbit_activity_data (user_id, date, steps, distance_km, calories_total, floors, heart_rate, heart_rate_max, heart_rate_min, active_minutes, created_at)
VALUES
(1, '2025-03-28 00:00:00', 4, 0.003, 27.31, 0, 94, 105, 87, 0, '2025-03-28 00:00:00'),
(1, '2025-03-28 00:15:00', 2, 0.002, 22.96, 0, 68, 76, 62, 0, '2025-03-28 00:15:00'),
(1, '2025-03-28 00:30:00', 3, 0.002, 23.92, 0, 61, 74, 54, 0, '2025-03-28 00:30:00'),
(1, '2025-03-28 00:45:00', 5, 0.004, 22.7, 0, 60, 67, 56, 0, '2025-03-28 00:45:00'),
(1, '2025-03-28 01:00:00', 4, 0.003, 38.87, 0, 78, 89, 72, 0, '2025-03-28 01:00:00'),
(1, '2025-03-28 01:15:00', 2, 0.002, 22.94, 0, 91, 106, 84, 0, '2025-03-28 01:15:00'),
(1, '2025-03-28 01:30:00', 5, 0.004, 36.15, 0, 95, 108, 91, 0, '2025-03-28 01:30:00'),
(1, '2025-03-28 01:45:00', 1, 0.001, 40.14, 0, 76, 81, 70, 0, '2025-03-28 01:45:00'),
(1, '2025-03-28 02:00:00', 0, 0.0, 10.49, 0, 91, 102, 85, 0, '2025-03-28 02:00:00'),
(1, '2025-03-28 02:15:00', 2, 0.002, 23.03, 0, 60, 68, 57, 0, '2025-03-28 02:15:00'),
(1, '2025-03-28 02:30:00', 5, 0.004, 22.25, 0, 71, 78, 64, 0, '2025-03-28 02:30:00'),
(1, '2025-03-28 02:45:00', 3, 0.002, 39.14, 0, 75, 90, 68, 0, '2025-03-28 02:45:00'),
(1, '2025-03-28 03:00:00', 5, 0.004, 38.4, 0, 85, 96, 79, 0, '2025-03-28 03:00:00'),
(1, '2025-03-28 03:15:00', 2, 0.002, 39.71, 0, 66, 77, 61, 0, '2025-03-28 03:15:00'),
(1, '2025-03-28 03:30:00', 0, 0.0, 11.11, 0, 79, 89, 72, 0, '2025-03-28 03:30:00'),
(1, '2025-03-28 03:45:00', 1, 0.001, 30.27, 0, 66, 81, 59, 0, '2025-03-28 03:45:00'),
(1, '2025-03-28 04:00:00', 1, 0.001, 24.77, 0, 78, 88, 75, 0, '2025-03-28 04:00:00'),
(1, '2025-03-28 04:15:00', 2, 0.002, 34.76, 0, 74, 88, 67, 0, '2025-03-28 04:15:00'),
(1, '2025-03-28 04:30:00', 0, 0.0, 10.24, 0, 92, 101, 89, 0, '2025-03-28 04:30:00'),
(1, '2025-03-28 04:45:00', 1, 0.001, 33.2, 0, 94, 103, 87, 0, '2025-03-28 04:45:00'),
(1, '2025-03-28 05:00:00', 4, 0.003, 29.61, 0, 79, 89, 72, 0, '2025-03-28 05:00:00'),
(1, '2025-03-28 05:15:00', 3, 0.002, 31.07, 0, 81, 96, 76, 0, '2025-03-28 05:15:00'),
(1, '2025-03-28 05:30:00', 4, 0.003, 37.36, 0, 82, 95, 75, 0, '2025-03-28 05:30:00'),
(1, '2025-03-28 05:45:00', 0, 0.0, 12.37, 0, 72, 86, 65, 0, '2025-03-28 05:45:00'),
(1, '2025-03-28 06:00:00', 150, 0.12, 76.93, 0, 72, 80, 65, 1, '2025-03-28 06:00:00'),
(1, '2025-03-28 06:15:00', 185, 0.148, 79.97, 0, 64, 74, 60, 1, '2025-03-28 06:15:00'),
(1, '2025-03-28 06:30:00', 120, 0.096, 60.48, 0, 90, 98, 84, 1, '2025-03-28 06:30:00'),
(1, '2025-03-28 06:45:00', 143, 0.114, 71.3, 0, 74, 83, 69, 1, '2025-03-28 06:45:00'),
(1, '2025-03-28 07:00:00', 127, 0.102, 69.27, 0, 87, 94, 83, 1, '2025-03-28 07:00:00'),
(1, '2025-03-28 07:15:00', 128, 0.102, 54.17, 0, 77, 90, 72, 1, '2025-03-28 07:15:00'),
(1, '2025-03-28 07:30:00', 156, 0.125, 67.0, 0, 84, 96, 79, 1, '2025-03-28 07:30:00'),
(1, '2025-03-28 07:45:00', 178, 0.142, 64.65, 0, 81, 93, 78, 1, '2025-03-28 07:45:00'),
(1, '2025-03-28 08:00:00', 43, 0.034, 34.39, 0, 63, 71, 58, 1, '2025-03-28 08:00:00'),
(1, '2025-03-28 08:15:00', 50, 0.04, 41.55, 0, 78, 84, 73, 1, '2025-03-28 08:15:00'),
(1, '2025-03-28 08:30:00', 59, 0.047, 44.51, 0, 95, 108, 88, 1, '2025-03-28 08:30:00'),
(1, '2025-03-28 08:45:00', 51, 0.041, 50.41, 0, 94, 106, 90, 1, '2025-03-28 08:45:00'),
(1, '2025-03-28 09:00:00', 1, 0.001, 20.38, 0, 73, 85, 70, 0, '2025-03-28 09:00:00'),
(1, '2025-03-28 09:15:00', 1, 0.001, 24.48, 0, 85, 99, 78, 0, '2025-03-28 09:15:00'),
(1, '2025-03-28 09:30:00', 4, 0.003, 28.54, 0, 78, 85, 73, 0, '2025-03-28 09:30:00'),
(1, '2025-03-28 09:45:00', 3, 0.002, 28.07, 0, 68, 79, 64, 0, '2025-03-28 09:45:00'),
(1, '2025-03-28 10:00:00', 3, 0.002, 40.42, 0, 66, 74, 62, 0, '2025-03-28 10:00:00'),
(1, '2025-03-28 10:15:00', 1, 0.001, 36.15, 0, 66, 71, 61, 0, '2025-03-28 10:15:00'),
(1, '2025-03-28 10:30:00', 2, 0.002, 23.75, 0, 67, 81, 61, 0, '2025-03-28 10:30:00'),
(1, '2025-03-28 10:45:00', 3, 0.002, 31.88, 0, 63, 74, 56, 0, '2025-03-28 10:45:00'),
(1, '2025-03-28 11:00:00', 160, 0.128, 62.52, 0, 77, 84, 70, 1, '2025-03-28 11:00:00'),
(1, '2025-03-28 11:15:00', 181, 0.145, 74.13, 0, 95, 101, 92, 1, '2025-03-28 11:15:00'),
(1, '2025-03-28 11:30:00', 178, 0.142, 73.52, 0, 84, 92, 79, 1, '2025-03-28 11:30:00'),
(1, '2025-03-28 11:45:00', 197, 0.158, 72.78, 0, 88, 95, 85, 1, '2025-03-28 11:45:00'),
(1, '2025-03-28 12:00:00', 163, 0.13, 69.27, 0, 76, 82, 71, 1, '2025-03-28 12:00:00'),
(1, '2025-03-28 12:15:00', 157, 0.126, 68.88, 0, 84, 90, 81, 1, '2025-03-28 12:15:00'),
(1, '2025-03-28 12:30:00', 157, 0.126, 73.35, 0, 77, 84, 73, 1, '2025-03-28 12:30:00'),
(1, '2025-03-28 12:45:00', 178, 0.142, 74.89, 0, 63, 76, 60, 1, '2025-03-28 12:45:00'),
(1, '2025-03-28 13:00:00', 70, 0.056, 43.52, 0, 93, 105, 88, 1, '2025-03-28 13:00:00'),
(1, '2025-03-28 13:15:00', 76, 0.061, 56.49, 0, 95, 110, 88, 1, '2025-03-28 13:15:00'),
(1, '2025-03-28 13:30:00', 52, 0.042, 40.76, 0, 67, 72, 61, 1, '2025-03-28 13:30:00'),
(1, '2025-03-28 13:45:00', 76, 0.061, 46.16, 0, 91, 106, 87, 1, '2025-03-28 13:45:00'),
(1, '2025-03-28 14:00:00', 5, 0.004, 35.72, 0, 76, 89, 71, 0, '2025-03-28 14:00:00'),
(1, '2025-03-28 14:15:00', 3, 0.002, 30.79, 0, 87, 94, 80, 0, '2025-03-28 14:15:00'),
(1, '2025-03-28 14:30:00', 0, 0.0, 13.38, 0, 69, 76, 63, 0, '2025-03-28 14:30:00'),
(1, '2025-03-28 14:45:00', 1, 0.001, 30.95, 0, 79, 84, 72, 0, '2025-03-28 14:45:00'),
(1, '2025-03-28 15:00:00', 0, 0.0, 13.07, 0, 91, 106, 84, 0, '2025-03-28 15:00:00'),
(1, '2025-03-28 15:15:00', 5, 0.004, 23.7, 0, 75, 85, 68, 0, '2025-03-28 15:15:00'),
(1, '2025-03-28 15:30:00', 5, 0.004, 25.5, 0, 81, 86, 74, 0, '2025-03-28 15:30:00'),
(1, '2025-03-28 15:45:00', 1, 0.001, 39.24, 0, 60, 70, 53, 0, '2025-03-28 15:45:00'),
(1, '2025-03-28 16:00:00', 4, 0.003, 32.07, 0, 71, 79, 66, 0, '2025-03-28 16:00:00'),
(1, '2025-03-28 16:15:00', 3, 0.002, 31.99, 0, 92, 107, 88, 0, '2025-03-28 16:15:00'),
(1, '2025-03-28 16:30:00', 3, 0.002, 35.66, 0, 72, 82, 68, 0, '2025-03-28 16:30:00'),
(1, '2025-03-28 16:45:00', 3, 0.002, 23.48, 0, 78, 84, 71, 0, '2025-03-28 16:45:00'),
(1, '2025-03-28 17:00:00', 154, 0.123, 78.04, 0, 72, 78, 65, 1, '2025-03-28 17:00:00'),
(1, '2025-03-28 17:15:00', 131, 0.105, 70.22, 0, 64, 77, 60, 1, '2025-03-28 17:15:00'),
(1, '2025-03-28 17:30:00', 154, 0.123, 62.71, 0, 68, 79, 64, 1, '2025-03-28 17:30:00'),
(1, '2025-03-28 17:45:00', 154, 0.123, 61.66, 0, 85, 91, 78, 1, '2025-03-28 17:45:00'),
(1, '2025-03-28 18:00:00', 153, 0.122, 75.31, 0, 93, 103, 90, 1, '2025-03-28 18:00:00'),
(1, '2025-03-28 18:15:00', 171, 0.137, 71.23, 0, 66, 80, 60, 1, '2025-03-28 18:15:00'),
(1, '2025-03-28 18:30:00', 177, 0.142, 76.22, 0, 81, 88, 78, 1, '2025-03-28 18:30:00'),
(1, '2025-03-28 18:45:00', 172, 0.138, 64.16, 0, 91, 96, 87, 1, '2025-03-28 18:45:00'),
(1, '2025-03-28 19:00:00', 76, 0.061, 51.92, 0, 72, 86, 68, 1, '2025-03-28 19:00:00'),
(1, '2025-03-28 19:15:00', 73, 0.058, 44.52, 0, 75, 83, 72, 1, '2025-03-28 19:15:00'),
(1, '2025-03-28 19:30:00', 54, 0.043, 46.41, 0, 81, 89, 78, 1, '2025-03-28 19:30:00'),
(1, '2025-03-28 19:45:00', 53, 0.042, 35.44, 0, 64, 77, 58, 1, '2025-03-28 19:45:00'),
(1, '2025-03-28 20:00:00', 0, 0.0, 10.3, 0, 80, 88, 75, 0, '2025-03-28 20:00:00'),
(1, '2025-03-28 20:15:00', 2, 0.002, 29.23, 0, 79, 87, 75, 0, '2025-03-28 20:15:00'),
(1, '2025-03-28 20:30:00', 2, 0.002, 21.24, 0, 73, 83, 66, 0, '2025-03-28 20:30:00'),
(1, '2025-03-28 20:45:00', 3, 0.002, 36.25, 0, 88, 100, 85, 0, '2025-03-28 20:45:00'),
(1, '2025-03-28 21:00:00', 0, 0.0, 10.87, 0, 93, 98, 88, 0, '2025-03-28 21:00:00'),
(1, '2025-03-28 21:15:00', 5, 0.004, 28.96, 0, 83, 90, 76, 0, '2025-03-28 21:15:00'),
(1, '2025-03-28 21:30:00', 5, 0.004, 39.99, 0, 65, 72, 61, 0, '2025-03-28 21:30:00'),
(1, '2025-03-28 21:45:00', 0, 0.0, 13.86, 0, 76, 81, 72, 0, '2025-03-28 21:45:00'),
(1, '2025-03-28 22:00:00', 4, 0.003, 22.18, 0, 78, 90, 73, 0, '2025-03-28 22:00:00'),
(1, '2025-03-28 22:15:00', 1, 0.001, 21.4, 0, 93, 101, 88, 0, '2025-03-28 22:15:00'),
(1, '2025-03-28 22:30:00', 0, 0.0, 12.77, 0, 94, 101, 89, 0, '2025-03-28 22:30:00'),
(1, '2025-03-28 22:45:00', 2, 0.002, 29.55, 0, 77, 92, 73, 0, '2025-03-28 22:45:00'),
(1, '2025-03-28 23:00:00', 4, 0.003, 35.36, 0, 93, 104, 89, 0, '2025-03-28 23:00:00'),
(1, '2025-03-28 23:15:00', 5, 0.004, 24.69, 0, 89, 95, 86, 0, '2025-03-28 23:15:00'),
(1, '2025-03-28 23:30:00', 4, 0.003, 38.72, 0, 89, 100, 84, 0, '2025-03-28 23:30:00'),
(1, '2025-03-28 23:45:00', 1, 0.001, 24.68, 0, 63, 77, 56, 0, '2025-03-28 23:45:00');
INSERT INTO fitbit_sleep_data
(user_id, date, session_start, session_end, total_sleep_hours,
deep_sleep_hours, light_sleep_hours, rem_sleep_hours, awake_hours,
awakenings_count, time_in_bed_minutes, minutes_asleep,
avg_heart_rate, avg_breathing_rate, avg_spo2, overall_health_score,
is_main_sleep, created_at)
VALUES
(1, '2025-03-24', '2025-03-24 07:00:00', '2025-03-24 07:53:00', 0.72, 0.11, 0.4, 0.14, 0.07, 1, 53, 43, 60, 15.5, 97.0, 70, 1, '2025-03-24 07:00:00'),
(1, '2025-03-24', '2025-03-24 10:00:00', '2025-03-24 11:19:00', 1.15, 0.17, 0.63, 0.23, 0.12, 0, 79, 69, 61, 16.5, 98.0, 70, 0, '2025-03-24 10:00:00'),
(1, '2025-03-24', '2025-03-24 14:00:00', '2025-03-24 15:08:00', 0.97, 0.15, 0.53, 0.19, 0.1, 0, 68, 58, 62, 17.5, 99.0, 70, 0, '2025-03-24 14:00:00'),
(1, '2025-03-24', '2025-03-24 19:00:00', '2025-03-24 20:38:00', 1.47, 0.22, 0.81, 0.29, 0.15, 0, 98, 88, 63, 18.5, 97.0, 67, 0, '2025-03-24 19:00:00'),
(1, '2025-03-24', '2025-03-24 00:00:00', '2025-03-24 01:23:00', 1.22, 0.18, 0.67, 0.24, 0.13, 0, 83, 73, 64, 15.5, 98.0, 67, 0, '2025-03-24 00:00:00'),
(1, '2025-03-25', '2025-03-25 07:00:00', '2025-03-25 07:53:00', 0.72, 0.11, 0.4, 0.14, 0.07, 1, 53, 43, 65, 16.5, 99.0, 67, 1, '2025-03-25 07:00:00'),
(1, '2025-03-25', '2025-03-25 10:00:00', '2025-03-25 11:19:00', 1.15, 0.17, 0.63, 0.23, 0.12, 0, 79, 69, 66, 17.5, 97.0, 64, 0, '2025-03-25 10:00:00'),
(1, '2025-03-25', '2025-03-25 14:00:00', '2025-03-25 15:08:00', 0.97, 0.15, 0.53, 0.19, 0.1, 0, 68, 58, 67, 18.5, 98.0, 64, 0, '2025-03-25 14:00:00'),
(1, '2025-03-25', '2025-03-25 19:00:00', '2025-03-25 20:38:00', 1.47, 0.22, 0.81, 0.29, 0.15, 0, 98, 88, 68, 15.5, 99.0, 64, 0, '2025-03-25 19:00:00'),
(1, '2025-03-25', '2025-03-25 00:00:00', '2025-03-25 01:23:00', 1.22, 0.18, 0.67, 0.24, 0.13, 0, 83, 73, 69, 16.5, 97.0, 61, 0, '2025-03-25 00:00:00'),
(1, '2025-03-26', '2025-03-26 07:00:00', '2025-03-26 07:53:00', 0.72, 0.11, 0.4, 0.14, 0.07, 1, 53, 43, 60, 17.5, 98.0, 71, 1, '2025-03-26 07:00:00'),
(1, '2025-03-26', '2025-03-26 10:00:00', '2025-03-26 11:19:00', 1.15, 0.17, 0.63, 0.23, 0.12, 0, 79, 69, 61, 18.5, 99.0, 71, 0, '2025-03-26 10:00:00'),
(1, '2025-03-26', '2025-03-26 14:00:00', '2025-03-26 15:08:00', 0.97, 0.15, 0.53, 0.19, 0.1, 0, 68, 58, 62, 15.5, 97.0, 68, 0, '2025-03-26 14:00:00'),
(1, '2025-03-26', '2025-03-26 19:00:00', '2025-03-26 20:38:00', 1.47, 0.22, 0.81, 0.29, 0.15, 0, 98, 88, 63, 16.5, 98.0, 68, 0, '2025-03-26 19:00:00'),
(1, '2025-03-26', '2025-03-26 00:00:00', '2025-03-26 01:23:00', 1.22, 0.18, 0.67, 0.24, 0.13, 0, 83, 73, 64, 17.5, 99.0, 68, 0, '2025-03-26 00:00:00'),
(1, '2025-03-27', '2025-03-27 07:00:00', '2025-03-27 07:53:00', 0.72, 0.11, 0.4, 0.14, 0.07, 1, 53, 43, 65, 18.5, 97.0, 65, 1, '2025-03-27 07:00:00'),
(1, '2025-03-27', '2025-03-27 10:00:00', '2025-03-27 11:19:00', 1.15, 0.17, 0.63, 0.23, 0.12, 0, 79, 69, 66, 15.5, 98.0, 65, 0, '2025-03-27 10:00:00'),
(1, '2025-03-27', '2025-03-27 14:00:00', '2025-03-27 15:08:00', 0.97, 0.15, 0.53, 0.19, 0.1, 0, 68, 58, 67, 16.5, 99.0, 65, 0, '2025-03-27 14:00:00'),
(1, '2025-03-27', '2025-03-27 19:00:00', '2025-03-27 20:38:00', 1.47, 0.22, 0.81, 0.29, 0.15, 0, 98, 88, 68, 17.5, 97.0, 62, 0, '2025-03-27 19:00:00'),
(1, '2025-03-27', '2025-03-27 00:00:00', '2025-03-27 01:23:00', 1.22, 0.18, 0.67, 0.24, 0.13, 0, 83, 73, 69, 18.5, 98.0, 62, 0, '2025-03-27 00:00:00'),
(1, '2025-03-28', '2025-03-28 07:00:00', '2025-03-28 07:53:00', 0.72, 0.11, 0.4, 0.14, 0.07, 1, 53, 43, 60, 15.5, 99.0, 72, 1, '2025-03-28 07:00:00'),
(1, '2025-03-28', '2025-03-28 10:00:00', '2025-03-28 11:19:00', 1.15, 0.17, 0.63, 0.23, 0.12, 0, 79, 69, 61, 16.5, 97.0, 69, 0, '2025-03-28 10:00:00'),
(1, '2025-03-28', '2025-03-28 14:00:00', '2025-03-28 15:08:00', 0.97, 0.15, 0.53, 0.19, 0.1, 0, 68, 58, 62, 17.5, 98.0, 69, 0, '2025-03-28 14:00:00'),
(1, '2025-03-28', '2025-03-28 19:00:00', '2025-03-28 20:38:00', 1.47, 0.22, 0.81, 0.29, 0.15, 0, 98, 88, 63, 18.5, 99.0, 69, 0, '2025-03-28 19:00:00'),
(1, '2025-03-28', '2025-03-28 00:00:00', '2025-03-28 01:23:00', 1.22, 0.18, 0.67, 0.24, 0.13, 0, 83, 73, 64, 15.5, 97.0, 66, 0, '2025-03-28 00:00:00'),
(1, '2025-03-29', '2025-03-29 07:00:00', '2025-03-29 07:53:00', 0.72, 0.11, 0.4, 0.14, 0.07, 1, 53, 43, 65, 16.5, 98.0, 66, 1, '2025-03-29 07:00:00'),
(1, '2025-03-29', '2025-03-29 10:00:00', '2025-03-29 11:19:00', 1.15, 0.17, 0.63, 0.23, 0.12, 0, 79, 69, 66, 17.5, 99.0, 66, 0, '2025-03-29 10:00:00'),
(1, '2025-03-29', '2025-03-29 14:00:00', '2025-03-29 15:08:00', 0.97, 0.15, 0.53, 0.19, 0.1, 0, 68, 58, 67, 18.5, 97.0, 63, 0, '2025-03-29 14:00:00'),
(1, '2025-03-29', '2025-03-29 19:00:00', '2025-03-29 20:38:00', 1.47, 0.22, 0.81, 0.29, 0.15, 0, 98, 88, 68, 15.5, 98.0, 63, 0, '2025-03-29 19:00:00'),
(1, '2025-03-29', '2025-03-29 00:00:00', '2025-03-29 01:23:00', 1.22, 0.18, 0.67, 0.24, 0.13, 0, 83, 73, 69, 16.5, 99.0, 63, 0, '2025-03-29 00:00:00');
Preliminary Setup (Creating the Lambda Function and Adding Layers)
Create the Lambda function and upload the code as a zip file. The libraries used by the code are registered as a Lambda Layer; they are too large to upload together with the function itself, which is why a Layer is used.
Debugging the Layer took a considerable amount of time: paths had to be specified exactly, various libraries were not recognized, and some transitive dependencies that never appear in an import statement were still required. It was a part that demanded patience.
To summarize the important points:
- Installing pymysql on its own caused no problems, but matplotlib required its dependent libraries to be installed alongside it. (I later learned that removing the --no-deps option installs the dependencies automatically.)
- Libraries must be installed for the same platform as the Lambda runtime, Linux x86_64.
- Use the directory structure below. The libraries must be installed inside the python directory so the paths are recognized.
- layer-library-graph
  |_ python
    |_ the libraries above
    |_ fonts/NanumGothic.ttf
- Library installation commands:
pip install pymysql matplotlib numpy packaging kiwisolver pyparsing cycler Pillow fonttools openai fpdf2 typing_extensions pydantic \
--platform manylinux2014_x86_64 \
--target ./ \
--implementation cp \
--python-version 3.11 \
--only-binary=:all: \
--no-deps
pip install openai fpdf2 \
--platform manylinux2014_x86_64 \
--target ./ \
--implementation cp \
--python-version 3.11 \
--only-binary=:all: \
- Put the desired font file at the path below. A Korean font is needed so that Hangul does not render as broken characters in the graphs.
cp NanumGothic-Regular.ttf python/matplotlib/mpl-data/fonts/ttf/
- Run the library compression commands.
zip -r layer-library-graph.zip python
zip -r layer-pydantic.zip python
- Upload the zip files to Lambda layers and add the layers to the Lambda function. A layer version can be specified when adding one. The version numbers in the screenshot are high because I registered the zip files several times while fixing library errors; unused versions can also be deleted.
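For reference, the layer registration can also be scripted with boto3 instead of clicking through the console. This is only a sketch; the layer name and zip path are assumptions.
import boto3

lambda_client = boto3.client("lambda")

# Hypothetical layer name and zip path; adjust to the actual artifacts.
# For very large zips, upload to S3 first and pass S3Bucket/S3Key in Content instead.
with open("layer-library-graph.zip", "rb") as f:
    response = lambda_client.publish_layer_version(
        LayerName="layer-library-graph",
        Content={"ZipFile": f.read()},
        CompatibleRuntimes=["python3.11"],
    )
print(response["LayerVersionArn"])  # attach this ARN to the Lambda function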
- Set the Lambda function's environment variables.
Open the [Lambda function > Configuration > Environment variables] tab.
Create DB_HOST, DB_NAME, DB_PASSWORD, DB_USER, S3_BUCKET_GRAPH, S3_BUCKET_PDF, and OPENAI_API_KEY and enter their values.
- Increase the timeout to 1 minute 30 seconds and the memory to 512 MB.
Open the [Lambda function > Configuration > General configuration] tab and edit the settings.
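The same settings can be applied with boto3 as well. A rough sketch, assuming the function is named fitbit-report-generator (a placeholder) and placeholder values for the variables:
import boto3

lambda_client = boto3.client("lambda")
lambda_client.update_function_configuration(
    FunctionName="fitbit-report-generator",  # placeholder function name
    Timeout=90,      # 1 minute 30 seconds
    MemorySize=512,  # MB
    Environment={
        "Variables": {
            "DB_HOST": "your-rds-endpoint",
            "DB_NAME": "ELDERLY_HEALTHCARE",
            "DB_USER": "admin",
            "DB_PASSWORD": "your-password",
            "S3_BUCKET_GRAPH": "fitbit-graph-s3-bucket",
            "S3_BUCKET_PDF": "fitbit-report-s3-bucket",
            "OPENAI_API_KEY": "sk-...",
        }
    },
)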
Fetching Data with pymysql
Fetch the data stored in the DB with the pymysql library.
def fetch_fitbit_data(query):
conn = pymysql.connect(
host=DB_HOST,
user=DB_USER,
password=DB_PASSWORD,
database=DB_NAME,
cursorclass=pymysql.cursors.DictCursor
)
try:
with conn.cursor() as cursor:
cursor.execute(query)
data = cursor.fetchall()
return data
finally:
conn.close()
sleep_data = fetch_fitbit_data("SELECT * FROM fitbit_sleep_data ORDER BY created_at DESC")
activity_data = fetch_fitbit_data("SELECT * FROM fitbit_activity_data ORDER BY created_at DESC")
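The two queries above pull every row in each table. Once real data accumulates, it will probably make sense to restrict the range; a sketch of the same helper with a parameterized query (the 7-day window and user id are assumptions):
def fetch_fitbit_data_params(query, params=None):
    conn = pymysql.connect(
        host=DB_HOST, user=DB_USER, password=DB_PASSWORD,
        database=DB_NAME, cursorclass=pymysql.cursors.DictCursor
    )
    try:
        with conn.cursor() as cursor:
            cursor.execute(query, params)  # pymysql escapes the parameters safely
            return cursor.fetchall()
    finally:
        conn.close()

# Example: only the last 7 days of sleep data for user 1
sleep_data = fetch_fitbit_data_params(
    "SELECT * FROM fitbit_sleep_data "
    "WHERE user_id = %s AND date >= CURDATE() - INTERVAL 7 DAY "
    "ORDER BY created_at DESC",
    (1,)
)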
matplotlib 그래프 시각화 및 S3 저장
S3에 fitbit-graph-s3-bucket을 생성하고, 내부에 graphs 폴더를 만든다.
[람다 함수 - 구성 - 권한 - 실행 역할 파란 링크 클릭 - IAM 콘솔로 이동 - 권한 추가 - 인라인 정책 생성 - JSON 업로드] 를 진행하여, S3에 대한 접근 권한을 부여한다.
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"s3:PutObject",
"s3:PutObjectAcl"
],
"Resource": "arn:aws:s3:::fitbit-graph-s3-bucket/*"
}
]
}
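Attaching the inline policy can also be done with boto3 instead of the console, for example as part of a deployment script. A sketch; the role and policy names are placeholders:
import json
import boto3

policy = {
    "Version": "2012-10-17",
    "Statement": [{
        "Effect": "Allow",
        "Action": ["s3:PutObject", "s3:PutObjectAcl"],
        "Resource": "arn:aws:s3:::fitbit-graph-s3-bucket/*"
    }]
}

boto3.client("iam").put_role_policy(
    RoleName="fitbit-lambda-execution-role",  # placeholder: the function's execution role
    PolicyName="fitbit-graph-s3-put",         # placeholder policy name
    PolicyDocument=json.dumps(policy)
)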
I wrote separate functions that generate 1. the sleep-stage graph, 2. the heart-rate graph, and 3. the steps and calories-burned graph. The full code is attached at the bottom of this post; for now, below are the generation code for the first graph and the calling code.
def generate_sleep_step_graph_week(data):
data = sorted(data, key=lambda x: x['created_at'])
timestamps = [datetime.strptime(d['created_at'], "%Y-%m-%d %H:%M:%S") if isinstance(d['created_at'], str) else d['created_at'] for d in data]
deep_list = [d['deep_sleep_hours'] for d in data]
light_list = [d['light_sleep_hours'] for d in data]
rem_list = [d['rem_sleep_hours'] for d in data]
awake_list = [d['awake_hours'] for d in data]
pastel_colors = {'deep': '#A0C4FF', 'light': '#BDB2FF', 'rem': '#FFC6FF', 'awake': '#FFD6A5'}
bar_width = timedelta(minutes=100)
plt.figure(figsize=(14, 7))
plt.bar(timestamps, deep_list, label='깊은 수면', color=pastel_colors['deep'], width=bar_width)
plt.bar(timestamps, light_list, bottom=deep_list, label='얕은 수면', color=pastel_colors['light'], width=bar_width)
plt.bar(timestamps, rem_list, bottom=[d + l for d, l in zip(deep_list, light_list)], label='렘 수면', color=pastel_colors['rem'], width=bar_width)
plt.bar(timestamps, awake_list, bottom=[d + l + r for d, l, r in zip(deep_list, light_list, rem_list)], label='깨어있는 시간', color=pastel_colors['awake'], width=bar_width)
plt.xlabel('시각', fontsize=12)
plt.ylabel('수면 시간\n(시간)', fontsize=12, rotation=0, labelpad=50)
plt.title('일주일간 수면 단계 분포', fontweight='bold', fontsize=15)
ax = plt.gca()
ax.xaxis.set_major_locator(mdates.AutoDateLocator())
ax.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d\n%H:%M'))
plt.legend()
plt.tight_layout()
img_buffer = BytesIO()
plt.savefig(img_buffer, format='png')
img_buffer.seek(0)
plt.close()
return img_buffer
sleep_graph = generate_sleep_step_graph_week(sleep_data)
hr_graph = generate_heart_rate(activity_data)
step_graph = generate_step_and_calories_graph(activity_data)
Save the generated graphs to S3.
def upload_to_s3(img_buffer, filename, bucket):
s3 = boto3.client('s3')
s3.put_object(Bucket=bucket, Key=filename, Body=img_buffer, ContentType='image/png')
return f"https://{bucket}.s3.amazonaws.com/{filename}"
sleep_graph_url = upload_to_s3(sleep_graph, f"graphs/{today}_sleep_step.png", S3_BUCKET_GRAPH)
hr_graph_url = upload_to_s3(hr_graph, f"graphs/{today}_activity_heart_rate.png", S3_BUCKET_GRAPH)
step_graph_url = upload_to_s3(step_graph, f"graphs/{today}_activity_step_calories.png", S3_BUCKET_GRAPH)
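One thing to keep in mind: the URL returned by upload_to_s3 only works if the object (or bucket) is publicly readable. If the bucket stays private, a presigned URL could be handed to the Flutter app instead. A sketch with an assumed one-hour expiry:
def get_presigned_url(bucket, key, expires=3600):
    s3 = boto3.client("s3")
    # Generates a temporary URL that allows GET access to a private object
    return s3.generate_presigned_url(
        "get_object",
        Params={"Bucket": bucket, "Key": key},
        ExpiresIn=expires,
    )

sleep_graph_presigned = get_presigned_url(S3_BUCKET_GRAPH, f"graphs/{today}_sleep_step.png")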
GPT API Data Analysis
Call the GPT API and store a brief text analysis of the data. The analysis prompt still needs to be refined.
The wrap_text function exists because text without line breaks exceeded the PDF's maximum width and raised an error; inserting line breaks manually resolved it.
def generate_gpt_analysis(prompt):
response = openai_client.chat.completions.create(
model="gpt-4",
messages=[
{
"role": "system",
"content": (
"당신은 친절한 건강 데이터 분석가입니다. "
"사용자의 활동 및 수면 데이터를 분석하여 통찰을 제공합니다. "
"분석 결과는 자연스러운 문장으로 구성해주세요."
)
},
{"role": "user", "content": prompt}
],
temperature=0.7
)
return response.choices[0].message.content
def format_activity_data(data):
return "[활동 데이터]\n" + "\n".join([f"{d['created_at']}, 심박수: {d['heart_rate']}, 걸음: {d['steps']}, 칼로리: {d['calories_total']}" for d in data])
def format_sleep_data(data):
return "[수면 데이터]\n" + "\n".join([f"{d['created_at']}, 깊은 수면: {d['deep_sleep_hours']}, 얕은 수면: {d['light_sleep_hours']}, 렘 수면: {d['rem_sleep_hours']}, 깨어있음: {d['awake_hours']}" for d in data])
def wrap_text(text, max_length=55):
import textwrap
wrapped = []
for paragraph in text.splitlines():
paragraph = paragraph.strip()
if paragraph:
wrapped += textwrap.wrap(
paragraph,
width=max_length,
break_long_words=True,
replace_whitespace=False
)
else:
wrapped.append('')
return '\n'.join(wrapped)
activity_prompt = format_activity_data(activity_data)
sleep_prompt = format_sleep_data(sleep_data)
activity_analysis = wrap_text(remove_unsupported_chars(generate_gpt_analysis(activity_prompt)))
sleep_analysis = wrap_text(remove_unsupported_chars(generate_gpt_analysis(sleep_prompt)))
fpdf2 건강 리포트 PDF 생성 및 S3 저장
S3에 fitbit-report-s3-bucket을 생성하고, 내부에 healthreport 폴더를 만든다. fonts 폴더도 만들고, 내부에 한글 폰트 파일(.ttf)을 업로드한다.
[람다 함수 - 구성 - 권한 - 실행 역할 파란 링크 클릭 - IAM 콘솔로 이동 - 권한 추가 - 인라인 정책 생성 - JSON 업로드] 를 진행하여, S3에 대한 접근 권한을 부여한다. 폰트 파일을 불러와야 해서 위쪽의 graph s3 권한과 달리, s3:GetObject도 추가되었다.
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"s3:PutObject",
"s3:PutObjectAcl",
"s3:GetObject"
],
"Resource": "arn:aws:s3:::fitbit-report-s3-bucket/*"
}
]
}
Generate the PDF containing the graph images and GPT analysis text, and save it to S3.
def generate_pdf_report(activity_analysis, sleep_analysis, img1, img2, img3, username):
pdf = FPDF()
pdf.add_page()
pdf.add_font("Nanum", "", "/tmp/NanumGothic-Regular.ttf", uni=True)
pdf.set_font("Nanum", size=12)
today_str = datetime.now().strftime('%Y-%m-%d')
pdf.set_font("Nanum", size=20)
pdf.set_text_color(33, 33, 33)
pdf.cell(0, 15, f"{username}님의 건강 리포트 - {today_str}", ln=True, align='C')
pdf.ln(5)
pdf.set_font("Nanum", size=14)
pdf.set_text_color(0, 102, 204)
pdf.cell(0, 10, "활동 분석", ln=True)
pdf.set_draw_color(0, 102, 204)
pdf.set_line_width(0.5)
pdf.line(10, pdf.get_y(), 200, pdf.get_y())
pdf.ln(3)
pdf.set_font("Nanum", size=12)
pdf.set_text_color(0, 0, 0)
pdf.multi_cell(0, 8, wrap_text(activity_analysis, 80))
pdf.ln(4)
pdf.image(img1, x=15, w=180)
pdf.ln(5)
pdf.image(img2, x=15, w=180)
pdf.ln(10)
pdf.set_font("Nanum", size=14)
pdf.set_text_color(102, 0, 153)
pdf.cell(0, 10, "수면 분석", ln=True)
pdf.set_draw_color(102, 0, 153)
pdf.set_line_width(0.5)
pdf.line(10, pdf.get_y(), 200, pdf.get_y())
pdf.ln(3)
pdf.set_font("Nanum", size=12)
pdf.set_text_color(0, 0, 0)
pdf.multi_cell(0, 8, wrap_text(sleep_analysis, 80))
pdf.ln(4)
pdf.image(img3, x=15, w=180)
output_buffer = BytesIO()
pdf.output(output_buffer)
output_buffer.seek(0)
return output_buffer
def download_font_from_s3(bucket, key, local_path="/tmp/NanumGothic-Regular.ttf"):
boto3.client("s3").download_file(bucket, key, local_path)
def upload_pdf_to_s3(pdf_buffer, username, today):
today = datetime.now().strftime('%Y-%m-%d')
file_name = f"healthreport/건강리포트_{today}_{username}.pdf"
s3 = boto3.client('s3')
s3.put_object(Bucket=S3_BUCKET_PDF, Key=file_name, Body=pdf_buffer, ContentType='application/pdf')
return f"https://{S3_BUCKET_PDF}.s3.amazonaws.com/{file_name}"
download_font_from_s3(S3_BUCKET_PDF, "fonts/NanumGothic-Regular.ttf")
pdf_buffer = generate_pdf_report(activity_analysis, sleep_analysis, hr_graph, step_graph, sleep_graph, username)
pdf_url = upload_pdf_to_s3(pdf_buffer, username, today)
Execution Results: Graphs and PDF Saved to S3
You can see that the files have been saved to the S3 buckets as expected.
To do
- Add an EventBridge trigger for the Lambda function (a rough sketch follows after this list)
- Add logic to store the S3 image URLs and report URL in the DB
- Add logic in the Flutter app to provide a PDF viewer and download via the S3 object URL
- Refine the GPT analysis prompt
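Regarding the first To do item, the daily trigger could look roughly like the boto3 sketch below; the schedule (21:00 UTC), function name, and ARNs are placeholders, and the EventBridge rule wizard in the console achieves the same thing.
import boto3

events = boto3.client("events")
lambda_client = boto3.client("lambda")

FUNCTION_ARN = "arn:aws:lambda:ap-northeast-2:123456789012:function:fitbit-report-generator"  # placeholder

# 1. Create a daily schedule rule (cron is in UTC)
rule = events.put_rule(Name="daily-health-report", ScheduleExpression="cron(0 21 * * ? *)")

# 2. Allow EventBridge to invoke the Lambda function
lambda_client.add_permission(
    FunctionName="fitbit-report-generator",
    StatementId="allow-eventbridge-daily-report",
    Action="lambda:InvokeFunction",
    Principal="events.amazonaws.com",
    SourceArn=rule["RuleArn"],
)

# 3. Point the rule at the function
events.put_targets(
    Rule="daily-health-report",
    Targets=[{"Id": "1", "Arn": FUNCTION_ARN}],
)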
[Attachment] Full Lambda Function Code
import os
os.environ['MPLCONFIGDIR'] = '/tmp'
import pymysql
import matplotlib.pyplot as plt
import boto3
from io import BytesIO
import matplotlib
import matplotlib.dates as mdates
from datetime import datetime, timedelta
from fpdf import FPDF
from openai import OpenAI
openai_client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])
DB_HOST = os.environ["DB_HOST"]
DB_USER = os.environ["DB_USER"]
DB_PASSWORD = os.environ["DB_PASSWORD"]
DB_NAME = os.environ["DB_NAME"]
S3_BUCKET_GRAPH = os.environ["S3_BUCKET_GRAPH"]
S3_BUCKET_PDF = os.environ["S3_BUCKET_PDF"]
def wrap_text(text, max_length=55):
import textwrap
wrapped = []
for paragraph in text.splitlines():
paragraph = paragraph.strip()
if paragraph:
wrapped += textwrap.wrap(
paragraph,
width=max_length,
break_long_words=True,
replace_whitespace=False
)
else:
wrapped.append('')
return '\n'.join(wrapped)
def remove_unsupported_chars(text):
return ''.join(c for c in text if 32 <= ord(c) <= 126 or ord(c) >= 0xAC00)
def fetch_fitbit_data(query):
conn = pymysql.connect(
host=DB_HOST,
user=DB_USER,
password=DB_PASSWORD,
database=DB_NAME,
cursorclass=pymysql.cursors.DictCursor
)
try:
with conn.cursor() as cursor:
cursor.execute(query)
data = cursor.fetchall()
return data
finally:
conn.close()
def upload_to_s3(img_buffer, filename, bucket):
s3 = boto3.client('s3')
s3.put_object(Bucket=bucket, Key=filename, Body=img_buffer, ContentType='image/png')
return f"https://{bucket}.s3.amazonaws.com/{filename}"
def upload_pdf_to_s3(pdf_buffer, username, today):
today = datetime.now().strftime('%Y-%m-%d')
file_name = f"healthreport/건강리포트_{today}_{username}.pdf"
s3 = boto3.client('s3')
s3.put_object(Bucket=S3_BUCKET_PDF, Key=file_name, Body=pdf_buffer, ContentType='application/pdf')
return f"https://{S3_BUCKET_PDF}.s3.amazonaws.com/{file_name}"
def generate_sleep_step_graph_week(data):
data = sorted(data, key=lambda x: x['created_at'])
timestamps = [datetime.strptime(d['created_at'], "%Y-%m-%d %H:%M:%S") if isinstance(d['created_at'], str) else d['created_at'] for d in data]
deep_list = [d['deep_sleep_hours'] for d in data]
light_list = [d['light_sleep_hours'] for d in data]
rem_list = [d['rem_sleep_hours'] for d in data]
awake_list = [d['awake_hours'] for d in data]
pastel_colors = {'deep': '#A0C4FF', 'light': '#BDB2FF', 'rem': '#FFC6FF', 'awake': '#FFD6A5'}
bar_width = timedelta(minutes=100)
plt.figure(figsize=(14, 7))
plt.bar(timestamps, deep_list, label='깊은 수면', color=pastel_colors['deep'], width=bar_width)
plt.bar(timestamps, light_list, bottom=deep_list, label='얕은 수면', color=pastel_colors['light'], width=bar_width)
plt.bar(timestamps, rem_list, bottom=[d + l for d, l in zip(deep_list, light_list)], label='렘 수면', color=pastel_colors['rem'], width=bar_width)
plt.bar(timestamps, awake_list, bottom=[d + l + r for d, l, r in zip(deep_list, light_list, rem_list)], label='깨어있는 시간', color=pastel_colors['awake'], width=bar_width)
plt.xlabel('시각', fontsize=12)
plt.ylabel('수면 시간\n(시간)', fontsize=12, rotation=0, labelpad=50)
plt.title('일주일간 수면 단계 분포', fontweight='bold', fontsize=15)
ax = plt.gca()
ax.xaxis.set_major_locator(mdates.AutoDateLocator())
ax.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d\n%H:%M'))
plt.legend()
plt.tight_layout()
img_buffer = BytesIO()
plt.savefig(img_buffer, format='png')
img_buffer.seek(0)
plt.close()
return img_buffer
def generate_heart_rate(data):
created_at = [d['created_at'] for d in data]
heart_rate = [d['heart_rate'] for d in data]
plt.figure(figsize=(10, 5))
plt.plot(created_at, heart_rate, marker='o', label='심박수', color='#0ABAB5')
plt.xlabel('시간')
plt.ylabel('평균 심박수\n(bpm)', rotation=0, labelpad=40)
plt.title('일일 평균 심박수 변화')
plt.legend()
ax = plt.gca()
ax.xaxis.set_major_locator(mdates.AutoDateLocator())
ax.xaxis.set_major_formatter(mdates.DateFormatter('%H:%M'))
plt.tight_layout()
img_buffer = BytesIO()
plt.savefig(img_buffer, format='png')
img_buffer.seek(0)
plt.close()
return img_buffer
def generate_step_and_calories_graph(data):
created_at = [d['created_at'] for d in data]
steps = [d['steps'] for d in data]
calories = [d['calories_total'] for d in data]
fig, ax1 = plt.subplots(figsize=(10, 5))
ax1.plot(created_at, steps, marker='o', linestyle='dashed', color='#FFB3A7', label='걸음 수')
ax1.set_ylabel('걸음 수', rotation=0, labelpad=40)
ax2 = ax1.twinx()
ax2.plot(created_at, calories, marker='s', color='#A5C8FF', label='칼로리 소모')
ax2.set_ylabel('칼로리\n소모', rotation=0, labelpad=40)
ax1.set_xlabel('시간')
ax1.xaxis.set_major_locator(mdates.AutoDateLocator())
ax1.xaxis.set_major_formatter(mdates.DateFormatter('%H:%M'))
plt.tight_layout()
img_buffer = BytesIO()
plt.savefig(img_buffer, format='png')
img_buffer.seek(0)
plt.close()
return img_buffer
def generate_gpt_analysis(prompt):
response = openai_client.chat.completions.create(
model="gpt-4",
messages=[
{
"role": "system",
"content": (
"당신은 친절한 건강 데이터 분석가입니다. "
"사용자의 활동 및 수면 데이터를 분석하여 통찰을 제공합니다. "
"분석 결과는 자연스러운 문장으로 구성해주세요."
)
},
{"role": "user", "content": prompt}
],
temperature=0.7
)
return response.choices[0].message.content
def format_activity_data(data):
return "[활동 데이터]\n" + "\n".join([f"{d['created_at']}, 심박수: {d['heart_rate']}, 걸음: {d['steps']}, 칼로리: {d['calories_total']}" for d in data])
def format_sleep_data(data):
return "[수면 데이터]\n" + "\n".join([f"{d['created_at']}, 깊은 수면: {d['deep_sleep_hours']}, 얕은 수면: {d['light_sleep_hours']}, 렘 수면: {d['rem_sleep_hours']}, 깨어있음: {d['awake_hours']}" for d in data])
def generate_pdf_report(activity_analysis, sleep_analysis, img1, img2, img3, username):
pdf = FPDF()
pdf.add_page()
pdf.add_font("Nanum", "", "/tmp/NanumGothic-Regular.ttf", uni=True)
pdf.set_font("Nanum", size=12)
today_str = datetime.now().strftime('%Y-%m-%d')
pdf.set_font("Nanum", size=20)
pdf.set_text_color(33, 33, 33)
pdf.cell(0, 15, f"{username}님의 건강 리포트 - {today_str}", ln=True, align='C')
pdf.ln(5)
pdf.set_font("Nanum", size=14)
pdf.set_text_color(0, 102, 204)
pdf.cell(0, 10, "활동 분석", ln=True)
pdf.set_draw_color(0, 102, 204)
pdf.set_line_width(0.5)
pdf.line(10, pdf.get_y(), 200, pdf.get_y())
pdf.ln(3)
pdf.set_font("Nanum", size=12)
pdf.set_text_color(0, 0, 0)
pdf.multi_cell(0, 8, wrap_text(activity_analysis, 80))
pdf.ln(4)
pdf.image(img1, x=15, w=180)
pdf.ln(5)
pdf.image(img2, x=15, w=180)
pdf.ln(10)
pdf.set_font("Nanum", size=14)
pdf.set_text_color(102, 0, 153)
pdf.cell(0, 10, "수면 분석", ln=True)
pdf.set_draw_color(102, 0, 153)
pdf.set_line_width(0.5)
pdf.line(10, pdf.get_y(), 200, pdf.get_y())
pdf.ln(3)
pdf.set_font("Nanum", size=12)
pdf.set_text_color(0, 0, 0)
pdf.multi_cell(0, 8, wrap_text(sleep_analysis, 80))
pdf.ln(4)
pdf.image(img3, x=15, w=180)
output_buffer = BytesIO()
pdf.output(output_buffer)
output_buffer.seek(0)
return output_buffer
def download_font_from_s3(bucket, key, local_path="/tmp/NanumGothic-Regular.ttf"):
boto3.client("s3").download_file(bucket, key, local_path)
def lambda_handler(event, context):
matplotlib.rcParams['font.family'] = 'NanumGothic'
matplotlib.rcParams['axes.unicode_minus'] = False
download_font_from_s3(S3_BUCKET_PDF, "fonts/NanumGothic-Regular.ttf")
username = "최예름"
today = datetime.now().strftime('%Y-%m-%d')
sleep_data = fetch_fitbit_data("SELECT * FROM fitbit_sleep_data ORDER BY created_at DESC")
activity_data = fetch_fitbit_data("SELECT * FROM fitbit_activity_data ORDER BY created_at DESC")
sleep_graph = generate_sleep_step_graph_week(sleep_data)
hr_graph = generate_heart_rate(activity_data)
step_graph = generate_step_and_calories_graph(activity_data)
sleep_graph_url = upload_to_s3(sleep_graph, f"graphs/{today}_sleep_step.png", S3_BUCKET_GRAPH)
hr_graph_url = upload_to_s3(hr_graph, f"graphs/{today}_activity_heart_rate.png", S3_BUCKET_GRAPH)
step_graph_url = upload_to_s3(step_graph, f"graphs/{today}_activity_step_calories.png", S3_BUCKET_GRAPH)
activity_prompt = format_activity_data(activity_data)
sleep_prompt = format_sleep_data(sleep_data)
activity_analysis = wrap_text(remove_unsupported_chars(generate_gpt_analysis(activity_prompt)))
sleep_analysis = wrap_text(remove_unsupported_chars(generate_gpt_analysis(sleep_prompt)))
pdf_buffer = generate_pdf_report(activity_analysis, sleep_analysis, hr_graph, step_graph, sleep_graph, username)
pdf_url = upload_pdf_to_s3(pdf_buffer, username, today)
return {
"statusCode": 200,
"body": {
"message": f"PDF 업로드 완료: {pdf_url}",
"pdf_url": pdf_url,
"image_urls": {
"heart_rate_graph": hr_graph_url,
"step_calories_graph": step_graph_url,
"sleep_step_graph": sleep_graph_url
}
}
}