Distribution System
Parent: Shared Infrastructure Overview
Status: Active
Scope: Platforms → Scheduling → Publishing → Metrics → Optimization
Overview
The Distribution System is the delivery layer: it publishes content across all supported platforms, tracks performance, and optimizes posting timing.
DISTRIBUTION SYSTEM PIPELINE

  CONTENT SOURCES
  Generated Content | Manual Posts | Repurposed Content | Curated Content
        │
        ▼
  SCHEDULING ENGINE
  Optimal timing → Queue management → Conflict resolution
        │
        ▼
  PLATFORM ADAPTERS
  WordPress | Twitter | LinkedIn | YouTube | Email | Instagram | TikTok | Podcast
        │
        ▼
  METRICS COLLECTION
  Impressions → Engagement → Clicks → Conversions → Attribution
        │
        ▼
  OPTIMIZATION
  Performance analysis → Timing adjustment → A/B testing
Schema
Platforms (Extended)
Platform configurations with publishing settings.
-- Note: Basic platform schema in content-intelligence.md
-- This extends with publishing-specific fields
ALTER TABLE platforms
-- Publishing configuration
ADD COLUMN IF NOT EXISTS publish_config jsonb DEFAULT '{}', -- API endpoints, auth method
ADD COLUMN IF NOT EXISTS webhook_url text, -- For receiving updates
-- Credentials (encrypted reference)
ADD COLUMN IF NOT EXISTS credentials_secret_id text, -- Reference to secrets manager
-- Rate limiting
ADD COLUMN IF NOT EXISTS posts_per_hour integer,
ADD COLUMN IF NOT EXISTS posts_per_day integer,
ADD COLUMN IF NOT EXISTS min_interval_minutes integer DEFAULT 60,
-- Feature flags
ADD COLUMN IF NOT EXISTS supports_scheduling boolean DEFAULT true,
ADD COLUMN IF NOT EXISTS supports_editing boolean DEFAULT true,
ADD COLUMN IF NOT EXISTS supports_deletion boolean DEFAULT true,
ADD COLUMN IF NOT EXISTS supports_analytics boolean DEFAULT true,
-- Account info
ADD COLUMN IF NOT EXISTS account_id text,
ADD COLUMN IF NOT EXISTS account_name text,
ADD COLUMN IF NOT EXISTS followers_count integer,
-- Health
ADD COLUMN IF NOT EXISTS last_published_at timestamptz,
ADD COLUMN IF NOT EXISTS last_metrics_sync_at timestamptz,
ADD COLUMN IF NOT EXISTS api_errors_24h integer DEFAULT 0;
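publish_config is free-form JSON; the adapters below only read a few keys from it. A sketch of the shape this document assumes (the field names are illustrative, not a fixed contract):

// Assumed shape of platforms.publish_config as consumed by the adapters below.
// Each adapter reads only the keys it needs; anything else is platform-specific.
interface PublishConfig {
  api_url?: string        // WordPress: REST API base URL
  list_id?: string        // Mailchimp: audience/list ID
  auth_method?: 'basic' | 'oauth' | 'api_key'
  [key: string]: unknown  // Platform-specific extras
}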
Distributions
Scheduled and published content.
CREATE TABLE distributions (
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
-- Content reference
content_id uuid, -- Reference to source content
content_type text NOT NULL, -- 'generated', 'manual', 'repurposed'
-- Platform
platform_id uuid REFERENCES platforms(id) NOT NULL,
-- Content
title text,
body text NOT NULL,
media_urls text[],
link_url text,
-- Platform-specific
platform_specific jsonb DEFAULT '{}', -- Hashtags, mentions, etc.
-- Scheduling
scheduled_for timestamptz,
timezone text DEFAULT 'America/New_York',
-- Publishing
status text DEFAULT 'draft', -- 'draft', 'scheduled', 'publishing', 'published', 'failed'
published_at timestamptz,
external_id text, -- ID on the platform
external_url text, -- URL on the platform
-- Error handling
error_message text,
retry_count integer DEFAULT 0,
last_retry_at timestamptz,
-- Metrics (populated by sync)
impressions integer,
reach integer,
engagements integer,
clicks integer,
shares integer,
comments integer,
likes integer,
saves integer,
-- Calculated
engagement_rate numeric,
click_through_rate numeric,
-- A/B Testing
variant_group text,
is_control boolean DEFAULT false,
-- Cross-posting
cross_post_group_id uuid,
-- Ownership
business_unit text,
vertical_id uuid REFERENCES publisher_verticals(id),
client_id uuid,
created_at timestamptz DEFAULT now(),
updated_at timestamptz DEFAULT now()
);
CREATE INDEX idx_distributions_platform ON distributions(platform_id);
CREATE INDEX idx_distributions_status ON distributions(status);
CREATE INDEX idx_distributions_scheduled ON distributions(scheduled_for)
WHERE status = 'scheduled';
CREATE INDEX idx_distributions_published ON distributions(published_at DESC);
CREATE INDEX idx_distributions_content ON distributions(content_id);
CREATE INDEX idx_distributions_vertical ON distributions(vertical_id);
CREATE INDEX idx_distributions_external ON distributions(platform_id, external_id);
Distribution Metrics
Detailed performance tracking over time.
CREATE TABLE distribution_metrics (
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
distribution_id uuid REFERENCES distributions(id) NOT NULL,
-- Snapshot time
recorded_at timestamptz DEFAULT now(),
hours_since_publish integer,
-- Raw metrics
impressions integer,
reach integer,
engagements integer,
clicks integer,
shares integer,
comments integer,
likes integer,
saves integer,
video_views integer,
video_watch_time_seconds integer,
-- Follower impact
followers_gained integer,
followers_lost integer,
-- Platform-specific
platform_metrics jsonb DEFAULT '{}'
);
CREATE INDEX idx_distribution_metrics_distribution ON distribution_metrics(distribution_id);
CREATE INDEX idx_distribution_metrics_recorded ON distribution_metrics(recorded_at DESC);
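Snapshots store cumulative totals, so per-interval growth has to be derived by diffing consecutive rows. A small sketch using the same supabase client as the rest of this document (the helper name is illustrative):

// Hypothetical helper: derive engagement growth between consecutive snapshots.
async function getEngagementDeltas(distributionId: string) {
  const { data: snapshots } = await supabase
    .from('distribution_metrics')
    .select('recorded_at, hours_since_publish, engagements')
    .eq('distribution_id', distributionId)
    .order('recorded_at')

  if (!snapshots || snapshots.length < 2) return []

  // Each delta is the engagement gained since the previous snapshot
  return snapshots.slice(1).map((snap, i) => ({
    hours_since_publish: snap.hours_since_publish,
    engagements_gained: (snap.engagements ?? 0) - (snapshots[i].engagements ?? 0)
  }))
}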
Publishing Calendar
Content schedule overview.
CREATE TABLE publishing_calendar (
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
-- Time slot
date date NOT NULL,
time_slot time NOT NULL,
timezone text DEFAULT 'America/New_York',
-- Platform
platform_id uuid REFERENCES platforms(id) NOT NULL,
-- Assignment
distribution_id uuid REFERENCES distributions(id),
-- Slot properties
slot_type text DEFAULT 'regular', -- 'regular', 'prime', 'test'
is_locked boolean DEFAULT false, -- Prevent auto-scheduling
-- For recurring slots
recurring_pattern text, -- 'daily', 'weekly', 'monthly'
UNIQUE(date, time_slot, platform_id)
);
CREATE INDEX idx_publishing_calendar_date ON publishing_calendar(date);
CREATE INDEX idx_publishing_calendar_platform ON publishing_calendar(platform_id);
CREATE INDEX idx_publishing_calendar_distribution ON publishing_calendar(distribution_id);
Cross-Post Groups
Multi-platform posting coordination.
CREATE TABLE cross_post_groups (
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
-- Identity
name text,
-- Source content
source_content_id uuid,
-- Timing strategy
timing_strategy text DEFAULT 'simultaneous', -- 'simultaneous', 'staggered', 'optimized'
stagger_minutes integer,
-- Status
status text DEFAULT 'pending', -- 'pending', 'in_progress', 'completed', 'partial'
created_at timestamptz DEFAULT now()
);
CREATE INDEX idx_cross_post_groups_status ON cross_post_groups(status);
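How timing_strategy and stagger_minutes translate into per-platform publish times is not spelled out here. One plausible interpretation, sketched against findOptimalTime() defined later in this document (the contentType argument is a placeholder, since the current implementation does not use it):

// Sketch: derive per-platform publish times from a cross-post group's timing strategy.
async function resolveCrossPostTimes(
  group: { timing_strategy: string; stagger_minutes?: number },
  platformIds: string[],
  baseTime: Date
): Promise<Map<string, Date>> {
  const times = new Map<string, Date>()
  for (const [index, platformId] of platformIds.entries()) {
    if (group.timing_strategy === 'simultaneous') {
      times.set(platformId, baseTime)
    } else if (group.timing_strategy === 'staggered') {
      // Offset each platform by stagger_minutes (default 30) from the base time
      const offsetMs = index * (group.stagger_minutes ?? 30) * 60_000
      times.set(platformId, new Date(baseTime.getTime() + offsetMs))
    } else {
      // 'optimized': pick each platform's own best slot
      times.set(platformId, await findOptimalTime(platformId, 'article'))
    }
  }
  return times
}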
Platform Adapters
Base Adapter Interface
interface PlatformAdapter {
// Publishing
publish(content: DistributionContent): Promise<PublishResult>
update(externalId: string, content: DistributionContent): Promise<UpdateResult>
delete(externalId: string): Promise<DeleteResult>
// Scheduling (if supported)
schedule?(content: DistributionContent, scheduledFor: Date): Promise<ScheduleResult>
// Metrics
getMetrics(externalId: string): Promise<PlatformMetrics>
// Validation
validate(content: DistributionContent): ValidationResult
}
interface DistributionContent {
title?: string
body: string
mediaUrls?: string[]
linkUrl?: string
platformSpecific?: Record<string, any>
}
interface PublishResult {
success: boolean
externalId?: string
externalUrl?: string
error?: string
}
interface ScheduleResult {
success: boolean
externalId?: string
scheduledFor?: Date
error?: string
}
interface ValidationResult {
valid: boolean
errors: string[]
}
interface PlatformMetrics {
impressions?: number
reach?: number
engagements?: number
clicks?: number
shares?: number
comments?: number
likes?: number
saves?: number
platform_metrics?: Record<string, any>
}
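getAdapter() and getCredentials() are used throughout this document but never defined. A minimal sketch of the adapter factory, assuming one adapter class per platform name (the names are illustrative, not a fixed contract); getCredentials() is assumed to resolve credentials_secret_id against the secrets manager and is not shown:

// Sketch of the adapter registry assumed by the scheduling and publishing code.
function getAdapter(platform: Platform): PlatformAdapter {
  switch (platform.name) {
    case 'wordpress': return new WordPressAdapter(platform)
    case 'twitter':   return new TwitterAdapter(platform)
    case 'email':     return new MailchimpAdapter(platform)
    default:
      throw new Error(`No adapter registered for platform: ${platform.name}`)
  }
}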
WordPress Adapter
class WordPressAdapter implements PlatformAdapter {
private apiUrl: string
private credentials: WordPressCredentials
constructor(platform: Platform) {
this.apiUrl = platform.publish_config.api_url
this.credentials = getCredentials(platform.credentials_secret_id)
}
async publish(content: DistributionContent): Promise<PublishResult> {
try {
const response = await fetch(`${this.apiUrl}/wp-json/wp/v2/posts`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Basic ${Buffer.from(
`${this.credentials.username}:${this.credentials.app_password}`
).toString('base64')}`
},
body: JSON.stringify({
title: content.title,
content: content.body,
status: 'publish',
featured_media: content.platformSpecific?.featured_image_id,
categories: content.platformSpecific?.category_ids,
tags: content.platformSpecific?.tag_ids,
meta: content.platformSpecific?.meta
})
})
if (!response.ok) {
throw new Error(`WordPress API error: ${response.status}`)
}
const post = await response.json()
return {
success: true,
externalId: String(post.id),
externalUrl: post.link
}
} catch (error) {
return {
success: false,
error: error.message
}
}
}
async getMetrics(externalId: string): Promise<PlatformMetrics> {
// WordPress doesn't have built-in analytics
// Would integrate with Google Analytics or Jetpack
return {
impressions: 0,
engagements: 0
}
}
validate(content: DistributionContent): ValidationResult {
const errors: string[] = []
if (!content.title) {
errors.push('Title is required for WordPress posts')
}
if (!content.body || content.body.length < 100) {
errors.push('Body must be at least 100 characters')
}
return {
valid: errors.length === 0,
errors
}
}
}
Twitter/X Adapter
class TwitterAdapter implements PlatformAdapter {
private client: TwitterApi
constructor(platform: Platform) {
const credentials = getCredentials(platform.credentials_secret_id)
this.client = new TwitterApi({
appKey: credentials.api_key,
appSecret: credentials.api_secret,
accessToken: credentials.access_token,
accessSecret: credentials.access_secret
})
}
async publish(content: DistributionContent): Promise<PublishResult> {
try {
let mediaIds: string[] = []
// Upload media if present
if (content.mediaUrls?.length) {
for (const url of content.mediaUrls.slice(0, 4)) {
const mediaId = await this.uploadMedia(url)
mediaIds.push(mediaId)
}
}
const tweet = await this.client.v2.tweet({
text: content.body,
media: mediaIds.length ? { media_ids: mediaIds } : undefined
})
return {
success: true,
externalId: tweet.data.id,
externalUrl: `https://twitter.com/i/status/${tweet.data.id}`
}
} catch (error) {
return {
success: false,
error: error.message
}
}
}
async getMetrics(externalId: string): Promise<PlatformMetrics> {
const tweet = await this.client.v2.singleTweet(externalId, {
'tweet.fields': ['public_metrics']
})
const metrics = tweet.data.public_metrics
return {
impressions: metrics.impression_count,
engagements: metrics.like_count + metrics.retweet_count + metrics.reply_count,
likes: metrics.like_count,
shares: metrics.retweet_count,
comments: metrics.reply_count
}
}
validate(content: DistributionContent): ValidationResult {
const errors: string[] = []
if (content.body.length > 280) {
errors.push(`Tweet exceeds 280 characters (${content.body.length})`)
}
if ((content.mediaUrls?.length ?? 0) > 4) {
errors.push('Maximum 4 media items per tweet')
}
return {
valid: errors.length === 0,
errors
}
}
private async uploadMedia(url: string): Promise<string> {
const response = await fetch(url)
const buffer = await response.arrayBuffer()
// twitter-api-v2 needs a mime type when uploading from a Buffer
const mimeType = response.headers.get('content-type') || 'image/jpeg'
return await this.client.v1.uploadMedia(Buffer.from(buffer), { mimeType })
}
}
Email Adapter (Mailchimp)
class MailchimpAdapter implements PlatformAdapter {
private client: any
private listId: string
constructor(platform: Platform) {
const credentials = getCredentials(platform.credentials_secret_id)
this.client = require('@mailchimp/mailchimp_marketing')
this.client.setConfig({
apiKey: credentials.api_key,
server: credentials.server_prefix
})
this.listId = platform.publish_config.list_id
}
async publish(content: DistributionContent): Promise<PublishResult> {
try {
// Create campaign
const campaign = await this.client.campaigns.create({
type: 'regular',
recipients: {
list_id: this.listId,
segment_opts: content.platformSpecific?.segment_id
? { saved_segment_id: content.platformSpecific.segment_id }
: undefined
},
settings: {
subject_line: content.title,
preview_text: content.platformSpecific?.preview_text,
from_name: content.platformSpecific?.from_name || 'Trending Society',
reply_to: content.platformSpecific?.reply_to || '[email protected]'
}
})
// Set content
await this.client.campaigns.setContent(campaign.id, {
html: content.body
})
// Send
await this.client.campaigns.send(campaign.id)
return {
success: true,
externalId: campaign.id,
externalUrl: campaign.archive_url
}
} catch (error) {
return {
success: false,
error: error.message
}
}
}
async schedule(content: DistributionContent, scheduledFor: Date): Promise<ScheduleResult> {
try {
// Create campaign
const campaign = await this.client.campaigns.create({
type: 'regular',
recipients: { list_id: this.listId },
settings: {
subject_line: content.title,
from_name: 'Trending Society',
reply_to: '[email protected]'
}
})
await this.client.campaigns.setContent(campaign.id, {
html: content.body
})
// Schedule
await this.client.campaigns.schedule(campaign.id, {
schedule_time: scheduledFor.toISOString()
})
return {
success: true,
externalId: campaign.id,
scheduledFor
}
} catch (error) {
return {
success: false,
error: error.message
}
}
}
async getMetrics(externalId: string): Promise<PlatformMetrics> {
const report = await this.client.reports.getCampaignReport(externalId)
return {
impressions: report.emails_sent,
reach: report.emails_sent - report.bounces.hard_bounces - report.bounces.soft_bounces,
engagements: report.opens.unique_opens + report.clicks.unique_clicks,
clicks: report.clicks.unique_clicks,
platform_metrics: {
open_rate: report.opens.open_rate,
click_rate: report.clicks.click_rate,
unsubscribes: report.unsubscribed
}
}
}
validate(content: DistributionContent): ValidationResult {
const errors: string[] = []
if (!content.title) {
errors.push('Subject line is required')
}
if (!content.body.includes('*|UNSUB|*')) {
errors.push('Email must include unsubscribe link')
}
return {
valid: errors.length === 0,
errors
}
}
}
Scheduling Engine
Schedule Distribution
async function scheduleDistribution(
contentId: string,
platformIds: string[],
options: {
scheduledFor?: Date
optimizeTime?: boolean
crossPost?: boolean
} = {}
) {
const { data: content } = await supabase
.from('generated_content')
.select('*')
.eq('id', contentId)
.single()
if (!content) throw new Error('Content not found')
let crossPostGroupId: string | undefined
if (options.crossPost && platformIds.length > 1) {
const { data: group } = await supabase
.from('cross_post_groups')
.insert({
source_content_id: contentId,
timing_strategy: 'optimized'
})
.select()
.single()
crossPostGroupId = group.id
}
const distributions = []
for (const platformId of platformIds) {
const { data: platform } = await supabase
.from('platforms')
.select('*')
.eq('id', platformId)
.single()
// Determine scheduled time
let scheduledFor = options.scheduledFor
if (options.optimizeTime || !scheduledFor) {
scheduledFor = await findOptimalTime(platformId, content.content_type)
}
// Adapt content for platform
const adaptedContent = await adaptContentForPlatform(content, platform)
// Validate
const adapter = getAdapter(platform)
const validation = adapter.validate(adaptedContent)
if (!validation.valid) {
console.warn(`Validation failed for ${platform.name}:`, validation.errors)
continue
}
// Create distribution
const { data: distribution } = await supabase
.from('distributions')
.insert({
content_id: contentId,
content_type: 'generated',
platform_id: platformId,
title: adaptedContent.title,
body: adaptedContent.body,
media_urls: adaptedContent.mediaUrls,
link_url: adaptedContent.linkUrl,
platform_specific: adaptedContent.platformSpecific,
scheduled_for: scheduledFor.toISOString(),
status: 'scheduled',
cross_post_group_id: crossPostGroupId,
business_unit: content.business_unit
})
.select()
.single()
// Reserve calendar slot
await supabase
.from('publishing_calendar')
.upsert({
// Derive both fields from the same ISO timestamp so date and time stay consistent
date: scheduledFor.toISOString().split('T')[0],
time_slot: scheduledFor.toISOString().split('T')[1].slice(0, 8),
platform_id: platformId,
distribution_id: distribution.id
})
distributions.push(distribution)
}
return distributions
}
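A hypothetical invocation, cross-posting one generated piece to two platforms with optimized timing (the IDs are placeholders):

// Hypothetical usage; the UUIDs are placeholders.
const scheduled = await scheduleDistribution(
  'generated-content-uuid',
  ['twitter-platform-uuid', 'linkedin-platform-uuid'],
  { optimizeTime: true, crossPost: true }
)
console.log(`Scheduled ${scheduled.length} distributions`)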
Find Optimal Time
async function findOptimalTime(
platformId: string,
contentType: string
): Promise<Date> {
// Get platform patterns for timing
const { data: pattern } = await supabase
.from('platform_patterns')
.select('*')
.eq('platform_id', platformId)
.eq('pattern_type', 'timing')
.eq('pattern_name', 'optimal_posting_times')
.single()
const optimalTimes = pattern?.pattern_template
? JSON.parse(pattern.pattern_template)
: getDefaultTimes(platformId)
// Get already scheduled slots
const nextWeek = new Date()
nextWeek.setDate(nextWeek.getDate() + 7)
const { data: scheduled } = await supabase
.from('publishing_calendar')
.select('date, time_slot')
.eq('platform_id', platformId)
.gte('date', new Date().toISOString().split('T')[0])
.lte('date', nextWeek.toISOString().split('T')[0])
const scheduledSet = new Set(
scheduled?.map(s => `${s.date}T${s.time_slot}`) || []
)
// Find first available optimal slot
for (let daysOut = 1; daysOut <= 7; daysOut++) {
const targetDate = new Date()
targetDate.setDate(targetDate.getDate() + daysOut)
const dayOfWeek = targetDate.getDay()
for (const slot of optimalTimes) {
if (slot.day_of_week !== dayOfWeek) continue
const slotDateTime = `${targetDate.toISOString().split('T')[0]}T${String(slot.hour).padStart(2, '0')}:00:00`
if (!scheduledSet.has(slotDateTime)) {
const result = new Date(targetDate)
result.setHours(slot.hour, 0, 0, 0)
return result
}
}
}
// Fallback: next day at 10am
const fallback = new Date()
fallback.setDate(fallback.getDate() + 1)
fallback.setHours(10, 0, 0, 0)
return fallback
}
function getDefaultTimes(platformId: string) {
// Default optimal times by platform type
return [
{ day_of_week: 1, hour: 9 }, // Monday 9am
{ day_of_week: 2, hour: 10 }, // Tuesday 10am
{ day_of_week: 3, hour: 9 }, // Wednesday 9am
{ day_of_week: 4, hour: 10 }, // Thursday 10am
{ day_of_week: 5, hour: 9 }, // Friday 9am
]
}
Adapt Content for Platform
async function adaptContentForPlatform(
content: GeneratedContent,
platform: Platform
): Promise<DistributionContent> {
let body = content.content
let title = content.title
// Truncate if needed
if (platform.max_content_length && body.length > platform.max_content_length) {
body = truncateSmartly(body, platform.max_content_length)
}
if (platform.max_title_length && title && title.length > platform.max_title_length) {
title = truncateSmartly(title, platform.max_title_length)
}
// Platform-specific adaptations
const platformSpecific: Record<string, any> = {}
switch (platform.name) {
case 'twitter':
// Add hashtags
const hashtags = extractHashtags(content.metadata?.keywords || [])
if (hashtags.length && body.length + hashtags.join(' ').length + 2 <= 280) {
body = `${body}\n\n${hashtags.join(' ')}`
}
break
case 'linkedin':
// More professional formatting
body = body.replace(/\n\n/g, '\n\n---\n\n')
platformSpecific.visibility = 'PUBLIC'
break
case 'instagram':
// Hashtags at the end
platformSpecific.hashtags = extractHashtags(content.metadata?.keywords || [], 30)
break
case 'wordpress':
// Convert to HTML if markdown
body = markdownToHtml(body)
platformSpecific.category_ids = await getCategoryIds(content.metadata?.topics || [])
break
}
return {
title,
body,
mediaUrls: content.metadata?.media_urls,
linkUrl: content.metadata?.canonical_url,
platformSpecific
}
}
function truncateSmartly(text: string, maxLength: number): string {
if (text.length <= maxLength) return text
// Find last sentence break before limit
const truncated = text.slice(0, maxLength - 3)
const lastPeriod = truncated.lastIndexOf('.')
const lastQuestion = truncated.lastIndexOf('?')
const lastExclaim = truncated.lastIndexOf('!')
const breakPoint = Math.max(lastPeriod, lastQuestion, lastExclaim)
if (breakPoint > maxLength * 0.5) {
return text.slice(0, breakPoint + 1)
}
return truncated + '...'
}
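adaptContentForPlatform() leans on several helpers (extractHashtags, markdownToHtml, getCategoryIds) that are not defined in this document. A minimal sketch of extractHashtags, assuming keywords arrive as plain phrases:

// Sketch of the assumed extractHashtags() helper: turn keywords into #CamelCase tags.
function extractHashtags(keywords: string[], limit = 5): string[] {
  return keywords
    .map(k => '#' + k
      .split(/\s+/)
      .map(word => word.charAt(0).toUpperCase() + word.slice(1))
      .join('')
      .replace(/[^A-Za-z0-9]/g, ''))
    .filter(tag => tag.length > 1)
    .slice(0, limit)
}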
Publishing Engine
Process Scheduled Distributions
// Cron: Every minute
async function processScheduledDistributions() {
const now = new Date()
// Get distributions due for publishing
const { data: due } = await supabase
.from('distributions')
.select('*, platform:platforms(*)')
.eq('status', 'scheduled')
.lte('scheduled_for', now.toISOString())
.order('scheduled_for')
.limit(20)
for (const distribution of due ?? []) {
try {
// Mark as publishing
await supabase
.from('distributions')
.update({ status: 'publishing' })
.eq('id', distribution.id)
// Get adapter
const adapter = getAdapter(distribution.platform)
// Publish
const result = await adapter.publish({
title: distribution.title,
body: distribution.body,
mediaUrls: distribution.media_urls,
linkUrl: distribution.link_url,
platformSpecific: distribution.platform_specific
})
if (result.success) {
await supabase
.from('distributions')
.update({
status: 'published',
published_at: now.toISOString(),
external_id: result.externalId,
external_url: result.externalUrl
})
.eq('id', distribution.id)
// Update platform last published
await supabase
.from('platforms')
.update({ last_published_at: now.toISOString() })
.eq('id', distribution.platform_id)
} else {
throw new Error(result.error)
}
} catch (error) {
const shouldRetry = distribution.retry_count < 3
await supabase
.from('distributions')
.update({
status: shouldRetry ? 'scheduled' : 'failed',
error_message: error.message,
retry_count: distribution.retry_count + 1,
last_retry_at: now.toISOString(),
scheduled_for: shouldRetry
? new Date(now.getTime() + 300000).toISOString() // 5 min retry
: distribution.scheduled_for
})
.eq('id', distribution.id)
}
}
}
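The platforms table defines posts_per_hour, posts_per_day, and min_interval_minutes, but the loop above never checks them. One way to enforce them would be a pre-publish guard like the sketch below (a simplified example, not part of the current pipeline); a distribution that fails the check simply stays 'scheduled' and is picked up again on a later tick.

// Sketch: enforce the platform rate-limit fields before calling the adapter.
async function withinRateLimits(platform: Platform): Promise<boolean> {
  const now = Date.now()

  // Minimum interval since the last successful publish
  if (platform.last_published_at && platform.min_interval_minutes) {
    const elapsedMin = (now - new Date(platform.last_published_at).getTime()) / 60000
    if (elapsedMin < platform.min_interval_minutes) return false
  }

  // Hourly cap, counted from already-published distributions
  if (platform.posts_per_hour) {
    const { count } = await supabase
      .from('distributions')
      .select('id', { count: 'exact', head: true })
      .eq('platform_id', platform.id)
      .eq('status', 'published')
      .gte('published_at', new Date(now - 3600000).toISOString())
    if ((count ?? 0) >= platform.posts_per_hour) return false
  }

  return true
}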
Metrics Collection
Sync Platform Metrics
// Cron: Every hour
async function syncPlatformMetrics() {
// Get recently published distributions
const { data: distributions } = await supabase
.from('distributions')
.select('*, platform:platforms(*)')
.eq('status', 'published')
.gte('published_at', new Date(Date.now() - 7 * 86400000).toISOString())
.not('external_id', 'is', null)
for (const distribution of distributions ?? []) {
try {
const adapter = getAdapter(distribution.platform)
const metrics = await adapter.getMetrics(distribution.external_id)
const hoursSincePublish = Math.floor(
(Date.now() - new Date(distribution.published_at).getTime()) / 3600000
)
// Store metrics snapshot
await supabase
.from('distribution_metrics')
.insert({
distribution_id: distribution.id,
hours_since_publish: hoursSincePublish,
...metrics
})
// Update distribution summary
const engagementRate = metrics.impressions > 0
? (metrics.engagements / metrics.impressions) * 100
: 0
const ctr = metrics.impressions > 0
? (metrics.clicks / metrics.impressions) * 100
: 0
await supabase
.from('distributions')
.update({
impressions: metrics.impressions,
reach: metrics.reach,
engagements: metrics.engagements,
clicks: metrics.clicks,
shares: metrics.shares,
comments: metrics.comments,
likes: metrics.likes,
saves: metrics.saves,
engagement_rate: engagementRate,
click_through_rate: ctr
})
.eq('id', distribution.id)
// Update platform last sync
await supabase
.from('platforms')
.update({ last_metrics_sync_at: new Date().toISOString() })
.eq('id', distribution.platform_id)
} catch (error) {
console.error(`Failed to sync metrics for ${distribution.id}:`, error)
}
}
}
Feed Pattern Learning
// After metrics sync, feed data to pattern learning
async function feedPatternLearning(distributionId: string) {
const { data: distribution } = await supabase
.from('distributions')
.select(`
*,
content:generated_content(*)
`)
.eq('id', distributionId)
.single()
if (!distribution || !distribution.engagement_rate) return
// Get patterns that were applied to this content
const { data: appliedPatterns } = await supabase
.from('platform_patterns')
.select('*')
.eq('platform_id', distribution.platform_id)
for (const pattern of appliedPatterns ?? []) {
// Check if pattern was applied
const wasApplied = checkPatternApplication(distribution, pattern)
// Create feedback record
await supabase
.from('pattern_feedback')
.insert({
pattern_id: pattern.id,
content_id: distribution.content_id,
distribution_id: distributionId,
pattern_applied: wasApplied,
impressions: distribution.impressions,
engagements: distribution.engagements,
clicks: distribution.clicks,
shares: distribution.shares,
engagement_rate: distribution.engagement_rate,
posted_at: distribution.published_at,
day_of_week: new Date(distribution.published_at).getDay(),
hour_of_day: new Date(distribution.published_at).getHours()
})
}
}
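checkPatternApplication() is referenced above but not defined. One plausible, simplified interpretation that only handles timing patterns and template-style content patterns:

// Sketch of the assumed checkPatternApplication() helper.
// Heuristic only: a pattern counts as "applied" when its template text (or, for
// timing patterns, one of its target hours) matches the published distribution.
function checkPatternApplication(distribution: any, pattern: any): boolean {
  if (pattern.pattern_type === 'timing') {
    const slots = pattern.pattern_template ? JSON.parse(pattern.pattern_template) : []
    const publishedHour = new Date(distribution.published_at).getHours()
    return slots.some((s: { hour: number }) => s.hour === publishedHour)
  }
  // Content patterns: naive substring check against the published body
  return Boolean(
    pattern.pattern_template &&
    distribution.body?.includes(pattern.pattern_template)
  )
}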
Queries
Distribution Performance by Platform
SELECT
p.name as platform,
COUNT(d.id) as total_posts,
ROUND(AVG(d.impressions), 0) as avg_impressions,
ROUND(AVG(d.engagement_rate), 2) as avg_engagement_rate,
ROUND(AVG(d.click_through_rate), 2) as avg_ctr,
SUM(d.clicks) as total_clicks
FROM distributions d
JOIN platforms p ON d.platform_id = p.id
WHERE d.status = 'published'
AND d.published_at > now() - interval '30 days'
GROUP BY p.id, p.name
ORDER BY avg_engagement_rate DESC;
Best Performing Times
SELECT
p.name as platform,
EXTRACT(DOW FROM d.published_at) as day_of_week,
EXTRACT(HOUR FROM d.published_at) as hour,
COUNT(*) as posts,
ROUND(AVG(d.engagement_rate), 2) as avg_engagement
FROM distributions d
JOIN platforms p ON d.platform_id = p.id
WHERE d.status = 'published'
AND d.published_at > now() - interval '90 days'
AND d.engagement_rate IS NOT NULL
GROUP BY 1, 2, 3
HAVING COUNT(*) >= 3
ORDER BY p.name, avg_engagement DESC;
Publishing Calendar Overview
SELECT
pc.date,
p.name as platform,
COUNT(pc.distribution_id) as scheduled_posts,
array_agg(pc.time_slot ORDER BY pc.time_slot) as time_slots
FROM publishing_calendar pc
JOIN platforms p ON pc.platform_id = p.id
WHERE pc.date >= CURRENT_DATE
AND pc.date <= CURRENT_DATE + interval '7 days'
GROUP BY pc.date, p.name
ORDER BY pc.date, p.name;
Failed Distributions
SELECT
d.id,
p.name as platform,
d.title,
d.scheduled_for,
d.error_message,
d.retry_count
FROM distributions d
JOIN platforms p ON d.platform_id = p.id
WHERE d.status = 'failed'
ORDER BY d.scheduled_for DESC
LIMIT 20;
Related Documentation
| Document | Purpose |
|---|---|
| content-intelligence.md | Platform patterns |
| generation-engine.md | Content creation |
| cost-optimization.md | API rate limiting |
| SCHEMA.md | Full table definitions |