jj
Browse files

- .gitignore +1 -1
- Linkedin_poster_dev +1 -1
- backend/services/content_service.py +36 -37
- docu_code/My_data_base_schema_.txt +20 -10
- .qwen/bmad-method/QWEN.md → mcp.md +0 -0
- sprint_change_proposal.md +87 -0
.gitignore
CHANGED

@@ -177,4 +177,4 @@ docker-compose.override.yml
 .kilocode/
 docs/
 backend/tests/
-.qwen/
+.qwen/
Linkedin_poster_dev
CHANGED

@@ -1 +1 @@
-Subproject commit
+Subproject commit c2eff437aa77ba366d92bbe61164823d4736c7e8
backend/services/content_service.py
CHANGED

@@ -499,44 +499,43 @@ class ContentService:
         user_rss_sources = rss_response.data if rss_response.data else []

         # Analyze each RSS source
-            rss_link = rss_source["source"]
+

+            # Check if the source matches the keyword or if it's any source
+            # We'll analyze any source that contains the keyword or is related to it
+
+            # Check if the source is a keyword rather than an RSS URL
+            # If it's a keyword, generate a Google News RSS URL
+            if self._is_url(keyword):
+                # It's a URL, use it directly
+                feed_url = keyword
+            else:
+                # It's a keyword, generate Google News RSS URL
+                feed_url = self._generate_google_news_rss_from_string(keyword)
+
+            # Parse the RSS feed
+            feed = feedparser.parse(feed_url)
+
+            # Log some debug information
+            current_app.logger.info(f"Processing RSS feed: {feed_url}")
+            current_app.logger.info(f"Number of entries in feed: {len(feed.entries)}")
+
+            # Extract ALL articles from the feed (without filtering by keyword again)
+            for entry in feed.entries:
+                # Use the same date handling as in the original ai_agent.py
+                article_data = {
+                    'title': entry.title,
+                    'link': entry.link,
+                    'summary': entry.summary,
+                    'date': entry.get('published', entry.get('updated', None)),
+                    'content': entry.get('summary', '') + ' ' + entry.get('title', '')
+                }
+
+                # Log individual article data for debugging
+                current_app.logger.info(f"Article title: {entry.title}")
+                current_app.logger.info(f"Article date: {article_data['date']}")
+
+                all_articles.append(article_data)

         # Create a DataFrame from the articles
         df_articles = pd.DataFrame(all_articles)
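The new code calls two helpers that are not shown in this hunk, `self._is_url` and `self._generate_google_news_rss_from_string`. A minimal sketch of what they could look like as methods on `ContentService`, assuming the Google News RSS search URL format; the actual implementations may differ:

```python
from urllib.parse import quote_plus, urlparse


def _is_url(self, value: str) -> bool:
    # Treat the string as a URL only if it parses with an http(s) scheme and a host.
    parsed = urlparse(value.strip())
    return parsed.scheme in ("http", "https") and bool(parsed.netloc)


def _generate_google_news_rss_from_string(self, keyword: str) -> str:
    # Build a Google News search feed for the keyword; the language/region parameters are assumptions.
    return (
        "https://news.google.com/rss/search?q="
        f"{quote_plus(keyword)}&hl=en-US&gl=US&ceid=US:en"
    )
```
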
docu_code/My_data_base_schema_.txt
CHANGED

@@ -2,49 +2,59 @@
 -- Table order and constraints may not be valid for execution.

 CREATE TABLE public.Post_content (
+  id bigint GENERATED ALWAYS AS IDENTITY NOT NULL,
   id_social bigint,
   Text_content text,
+  image_content_url bytea,
   Video_content text,
   post_time time without time zone,
-  id bigint GENERATED ALWAYS AS IDENTITY NOT NULL,
   created_at timestamp with time zone NOT NULL DEFAULT now(),
-  image_content_url bytea,
-  sched bigint,
   is_published boolean DEFAULT false,
+  sched bigint,
   CONSTRAINT Post_content_pkey PRIMARY KEY (id),
-  CONSTRAINT
-  CONSTRAINT
+  CONSTRAINT Post_content_sched_fkey FOREIGN KEY (sched) REFERENCES public.Scheduling(id),
+  CONSTRAINT Post_content_id_social_fkey FOREIGN KEY (id_social) REFERENCES public.Social_network(id)
 );
 CREATE TABLE public.Scheduling (
   id bigint GENERATED ALWAYS AS IDENTITY NOT NULL,
   id_social bigint,
-  created_at timestamp with time zone NOT NULL DEFAULT now(),
   schedule_time character varying,
+  created_at timestamp with time zone NOT NULL DEFAULT now(),
   adjusted_time character varying NOT NULL,
   CONSTRAINT Scheduling_pkey PRIMARY KEY (id),
   CONSTRAINT Scheduling_id_social_fkey FOREIGN KEY (id_social) REFERENCES public.Social_network(id)
 );
 CREATE TABLE public.Social_network (
-  created_at timestamp with time zone NOT NULL DEFAULT now(),
+  id bigint GENERATED ALWAYS AS IDENTITY NOT NULL,
   social_network character varying NOT NULL,
   token character varying NOT NULL UNIQUE,
+  id_utilisateur uuid DEFAULT gen_random_uuid(),
+  created_at timestamp with time zone NOT NULL DEFAULT now(),
   sub character varying NOT NULL UNIQUE,
   given_name character varying NOT NULL,
   picture character varying NOT NULL,
   family_name character varying NOT NULL,
-  id bigint GENERATED ALWAYS AS IDENTITY NOT NULL,
   account_name text NOT NULL UNIQUE,
+  expiration date,
   CONSTRAINT Social_network_pkey PRIMARY KEY (id),
   CONSTRAINT Social_network_id_utilisateur_fkey FOREIGN KEY (id_utilisateur) REFERENCES auth.users(id)
 );
 CREATE TABLE public.Source (
+  id bigint GENERATED ALWAYS AS IDENTITY NOT NULL UNIQUE,
   source text NOT NULL,
   categorie text,
   last_update timestamp without time zone DEFAULT now(),
   created_at timestamp with time zone NOT NULL DEFAULT now(),
   user_id uuid NOT NULL,
-  id bigint GENERATED ALWAYS AS IDENTITY NOT NULL UNIQUE,
   CONSTRAINT Source_pkey PRIMARY KEY (id, user_id),
   CONSTRAINT Source_user_id_fkey FOREIGN KEY (user_id) REFERENCES auth.users(id)
+);
+CREATE TABLE public.profiles (
+  id uuid NOT NULL,
+  email text UNIQUE,
+  raw_user_meta jsonb,
+  created_at timestamp with time zone DEFAULT now(),
+  updated_at timestamp with time zone,
+  CONSTRAINT profiles_pkey PRIMARY KEY (id),
+  CONSTRAINT profiles_id_fkey FOREIGN KEY (id) REFERENCES auth.users(id)
 );
.qwen/bmad-method/QWEN.md → mcp.md
RENAMED

File without changes
sprint_change_proposal.md
ADDED

@@ -0,0 +1,87 @@
# Sprint Change Proposal: LinkedIn Token Expiration Management System

## Analysis Summary

**Original Issue**: LinkedIn tokens expire after 2 months, requiring a manual reconnection process that creates a poor user experience.

**Impact Analysis**:
- The `Social_network` table already has an `expiration` column that can be leveraged
- No schema changes are needed, only implementation of business logic
- Requires background task scheduling and an email notification system
- Affects the user experience for LinkedIn integration users

**Rationale for Chosen Path**: Implementing automated token refresh on a 50-day cycle with daily background checks minimizes user disruption while maintaining security best practices.

## Specific Proposed Edits
### 1. Update Account Linking Process

**Current**: LinkedIn tokens are stored without expiration tracking

**Proposed Changes**:
- Modify the LinkedIn account linking process to automatically set the `expiration` column to 50 days after the link date (see the sketch after this list)
- Add validation to ensure the `expiration` column is properly set during the connection process
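A minimal sketch of the linking step, assuming the backend uses the supabase-py client and that `social_network_id` and `access_token` already come from the existing LinkedIn OAuth callback; the helper name and validation are illustrative, not the current implementation:

```python
from datetime import date, timedelta

from supabase import Client

REFRESH_CYCLE_DAYS = 50  # refresh well inside LinkedIn's ~60-day token lifetime


def link_linkedin_account(supabase: Client, social_network_id: int, access_token: str) -> None:
    """Store the new token and stamp the expiration column 50 days out."""
    expiration = date.today() + timedelta(days=REFRESH_CYCLE_DAYS)
    result = (
        supabase.table("Social_network")
        .update({"token": access_token, "expiration": expiration.isoformat()})
        .eq("id", social_network_id)
        .execute()
    )
    # Validation: the row must exist and carry an expiration after the update.
    if not result.data or not result.data[0].get("expiration"):
        raise ValueError("expiration was not set during LinkedIn account linking")
```
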
### 2. Create Background Task Scheduler

**Current**: No automated expiration checks exist

**Proposed Changes**:
- Implement a background task that runs at noon and midnight daily (see the scheduler sketch after this list)
- Create a function that checks for accounts whose expiration date equals the current date
- Add token refresh logic for LinkedIn tokens
- Implement error handling and logging
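A minimal sketch of the twice-daily check, assuming APScheduler is added as a dependency, that the `social_network` column stores the value `"linkedin"`, and reusing the hypothetical helpers from the neighbouring sketches (`refresh_linkedin_token`, `notify_user_reconnect`):

```python
from datetime import date

from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.cron import CronTrigger


def check_expiring_linkedin_accounts(supabase) -> None:
    """Find LinkedIn accounts expiring today and try to refresh each one."""
    today = date.today().isoformat()
    rows = (
        supabase.table("Social_network")
        .select("*")
        .eq("social_network", "linkedin")  # stored value is an assumption
        .eq("expiration", today)
        .execute()
    )
    for account in rows.data or []:
        try:
            refresh_linkedin_token(supabase, account)  # see the section 3 sketch
        except Exception as exc:
            notify_user_reconnect(account, reason=str(exc))  # see the section 4 sketch


def start_token_scheduler(supabase) -> BackgroundScheduler:
    scheduler = BackgroundScheduler()
    # Run at 00:00 and 12:00 every day, as proposed above.
    scheduler.add_job(
        check_expiring_linkedin_accounts,
        CronTrigger(hour="0,12", minute=0),
        args=[supabase],
    )
    scheduler.start()
    return scheduler
```
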
### 3. Implement Token Refresh Process

**Current**: No automated refresh mechanism exists

**Proposed Changes**:
- Create a function that refreshes LinkedIn tokens through LinkedIn's API (see the sketch after this list)
- Handle successful refresh (update tokens and expiration)
- Handle failed refresh (send user notification)
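A minimal sketch of the refresh call, assuming the app is approved for LinkedIn's refresh-token flow and that a `refresh_token` column exists as proposed in section 5. The endpoint shown is LinkedIn's OAuth token endpoint; verify the exact parameters against LinkedIn's current documentation before relying on this:

```python
import os
from datetime import date, timedelta

import requests

LINKEDIN_TOKEN_URL = "https://www.linkedin.com/oauth/v2/accessToken"


def refresh_linkedin_token(supabase, account: dict) -> None:
    """Exchange the stored refresh token for a new access token and re-stamp expiration."""
    response = requests.post(
        LINKEDIN_TOKEN_URL,
        data={
            "grant_type": "refresh_token",
            "refresh_token": account["refresh_token"],
            "client_id": os.environ["LINKEDIN_CLIENT_ID"],
            "client_secret": os.environ["LINKEDIN_CLIENT_SECRET"],
        },
        timeout=30,
    )
    response.raise_for_status()
    payload = response.json()

    supabase.table("Social_network").update(
        {
            "token": payload["access_token"],
            # LinkedIn may rotate the refresh token; keep the old one if it does not.
            "refresh_token": payload.get("refresh_token", account["refresh_token"]),
            "expiration": (date.today() + timedelta(days=50)).isoformat(),
        }
    ).eq("id", account["id"]).execute()
```
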
### 4. Implement Email Notification System

**Current**: No automated notification system exists for failed refreshes

**Proposed Changes**:
- Create a function that sends an email notification when a refresh fails (see the sketch after this list)
- Include clear instructions for users to reconnect their LinkedIn account
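A minimal sketch of the notification, using the standard-library `smtplib`; the SMTP settings, sender address, and the assumption that a user email can be resolved for the account (for example via the new `profiles` table) are all placeholders:

```python
import os
import smtplib
from email.message import EmailMessage


def notify_user_reconnect(account: dict, reason: str) -> None:
    """Tell the user their LinkedIn token could not be refreshed and how to reconnect."""
    message = EmailMessage()
    message["Subject"] = "Action required: reconnect your LinkedIn account"
    message["From"] = os.environ.get("NOTIFY_FROM", "no-reply@example.com")
    message["To"] = account["email"]  # assumes an email address is available for the account
    message.set_content(
        "We could not refresh your LinkedIn connection automatically "
        f"({reason}).\n\n"
        "Please sign in to the app and reconnect your LinkedIn account "
        "to keep your scheduled posts running."
    )
    with smtplib.SMTP(os.environ.get("SMTP_HOST", "localhost"), 587) as smtp:
        smtp.starttls()
        smtp.login(os.environ["SMTP_USER"], os.environ["SMTP_PASSWORD"])
        smtp.send_message(message)
```
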
### 5. Add Refresh Token Storage

**Current**: Only the access token is stored in the `token` column

**Proposed Changes**:
- Modify the initial LinkedIn connection flow to also store the refresh token (see the sketch after this list)
- Update the schema to add a `refresh_token` column, or modify the existing storage approach
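A minimal sketch extending the section 1 linking step, assuming the `refresh_token` column has been added and that the OAuth callback hands back both tokens; the names are illustrative:

```python
from datetime import date, timedelta


def store_linkedin_tokens(supabase, social_network_id: int, token_payload: dict) -> None:
    """Persist both tokens from the OAuth callback so later refreshes are possible."""
    supabase.table("Social_network").update(
        {
            "token": token_payload["access_token"],
            "refresh_token": token_payload["refresh_token"],  # proposed new column
            "expiration": (date.today() + timedelta(days=50)).isoformat(),
        }
    ).eq("id", social_network_id).execute()
```
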
## Additional Implementation Considerations

### 6. Logging and Monitoring
- Add comprehensive logging for the refresh process
- Create monitoring for failed refresh attempts
- Log successful refreshes for tracking

### 7. Error Handling Improvements
- Implement retry logic for temporary failures (see the sketch after this list)
- Handle rate limiting from the LinkedIn API
- Degrade gracefully when a refresh fails
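A minimal sketch of retry-with-backoff around the refresh call, treating HTTP 429 (rate limiting) and transient network errors as retryable; the attempt count and delays are illustrative:

```python
import time

import requests


def refresh_with_retries(supabase, account: dict, attempts: int = 3) -> None:
    """Retry transient failures with exponential backoff before giving up."""
    for attempt in range(1, attempts + 1):
        try:
            refresh_linkedin_token(supabase, account)  # from the section 3 sketch
            return
        except requests.HTTPError as exc:
            status = exc.response.status_code if exc.response is not None else None
            # 429 (rate limited) and 5xx responses are worth retrying; other 4xx are not.
            if status != 429 and (status is None or status < 500):
                raise
        except requests.ConnectionError:
            pass  # transient network error, retry
        if attempt < attempts:
            time.sleep(2 ** attempt)  # 2s, 4s, ... backoff
    raise RuntimeError(f"LinkedIn token refresh failed after {attempts} attempts")
```
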
### 8. Testing Requirements
- Unit tests for the refresh logic (see the sketch after this list)
- Integration tests for the scheduling system
- Tests for the email notification system
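A minimal unit-test sketch for the refresh helper from the section 3 sketch, assuming pytest with `unittest.mock`; the import path and patch target are hypothetical and should point at wherever the helper actually lives:

```python
import os
from unittest.mock import MagicMock, patch

# Hypothetical import path for the helper sketched in section 3.
from backend.services.token_service import refresh_linkedin_token


@patch.dict(os.environ, {"LINKEDIN_CLIENT_ID": "id", "LINKEDIN_CLIENT_SECRET": "secret"})
@patch("backend.services.token_service.requests.post")
def test_refresh_updates_token_and_expiration(mock_post):
    mock_post.return_value.json.return_value = {"access_token": "new-token"}
    supabase = MagicMock()
    account = {"id": 1, "refresh_token": "old-refresh"}

    refresh_linkedin_token(supabase, account)

    update_payload = supabase.table.return_value.update.call_args[0][0]
    assert update_payload["token"] == "new-token"
    assert update_payload["refresh_token"] == "old-refresh"  # kept when not rotated
```
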
## Success Metrics
- Reduction in manual reconnection requests
- Improved user retention for the LinkedIn integration
- Fewer support tickets related to token expiration

## Implementation Timeline
1. **Day 1**: Implement token refresh logic and database updates
2. **Day 2**: Implement background scheduler and email notifications
3. **Week 1-2**: Testing, monitoring, and adjustments

This proposal addresses the LinkedIn token expiration issue while leveraging the existing database structure. The system refreshes tokens automatically and notifies users only when an automatic refresh fails, significantly improving the user experience.