+ {/* Enrichment status indicator */}
+ {enrichStatus === 'loading' && (
+
+ )}
+
+# The issuer for signed JWTs. Defaults to the project's API URL (e.g. https://<project-ref>.supabase.co/auth/v1).
+# jwt_issuer = ""
+# Path to JWT signing key. DO NOT commit your signing keys file to git.
+# signing_keys_path = "./signing_keys.json"
+# If disabled, the refresh token will never expire.
+enable_refresh_token_rotation = true
+# Allows refresh tokens to be reused after expiry, up to the specified interval in seconds.
+# Requires enable_refresh_token_rotation = true.
+refresh_token_reuse_interval = 10
+# Allow/disallow new user signups to your project.
+enable_signup = true
+# Allow/disallow anonymous sign-ins to your project.
+enable_anonymous_sign_ins = false
+# Allow/disallow testing manual linking of accounts
+enable_manual_linking = false
+# Passwords shorter than this value will be rejected as weak. Minimum 6, recommended 8 or more.
+minimum_password_length = 6
+# Passwords that do not meet the following requirements will be rejected as weak. Supported values
+# are: `letters_digits`, `lower_upper_letters_digits`, `lower_upper_letters_digits_symbols`
+password_requirements = ""
+
+[auth.rate_limit]
+# Number of emails that can be sent per hour. Requires auth.email.smtp to be enabled.
+email_sent = 2
+# Number of SMS messages that can be sent per hour. Requires auth.sms to be enabled.
+sms_sent = 30
+# Number of anonymous sign-ins that can be made per hour per IP address. Requires enable_anonymous_sign_ins = true.
+anonymous_users = 30
+# Number of sessions that can be refreshed in a 5 minute interval per IP address.
+token_refresh = 150
+# Number of sign up and sign-in requests that can be made in a 5 minute interval per IP address (excludes anonymous users).
+sign_in_sign_ups = 30
+# Number of OTP / Magic link verifications that can be made in a 5 minute interval per IP address.
+token_verifications = 30
+# Number of Web3 logins that can be made in a 5 minute interval per IP address.
+web3 = 30
+
+# Configure one of the supported captcha providers: `hcaptcha`, `turnstile`.
+# [auth.captcha]
+# enabled = true
+# provider = "hcaptcha"
+# secret = ""
+
+[auth.email]
+# Allow/disallow new user signups via email to your project.
+enable_signup = true
+# If enabled, a user will be required to confirm any email change on both the old, and new email
+# addresses. If disabled, only the new email is required to confirm.
+double_confirm_changes = true
+# If enabled, users need to confirm their email address before signing in.
+enable_confirmations = false
+# If enabled, users will need to reauthenticate or have logged in recently to change their password.
+secure_password_change = false
+# Controls the minimum amount of time that must pass before sending another signup confirmation or password reset email.
+max_frequency = "1s"
+# Number of characters used in the email OTP.
+otp_length = 6
+# Number of seconds before the email OTP expires (defaults to 1 hour).
+otp_expiry = 3600
+
+# Use a production-ready SMTP server
+# [auth.email.smtp]
+# enabled = true
+# host = "smtp.sendgrid.net"
+# port = 587
+# user = "apikey"
+# pass = "env(SENDGRID_API_KEY)"
+# admin_email = "admin@email.com"
+# sender_name = "Admin"
+
+# Uncomment to customize email template
+# [auth.email.template.invite]
+# subject = "You have been invited"
+# content_path = "./supabase/templates/invite.html"
+
+# Uncomment to customize notification email template
+# [auth.email.notification.password_changed]
+# enabled = true
+# subject = "Your password has been changed"
+# content_path = "./templates/password_changed_notification.html"
+
+[auth.sms]
+# Allow/disallow new user signups via SMS to your project.
+enable_signup = false
+# If enabled, users need to confirm their phone number before signing in.
+enable_confirmations = false
+# Template for sending OTP to users
+template = "Your code is {{ .Code }}"
+# Controls the minimum amount of time that must pass before sending another sms otp.
+max_frequency = "5s"
+
+# Use pre-defined map of phone number to OTP for testing.
+# [auth.sms.test_otp]
+# 4152127777 = "123456"
+
+# Configure logged in session timeouts.
+# [auth.sessions]
+# Force log out after the specified duration.
+# timebox = "24h"
+# Force log out if the user has been inactive longer than the specified duration.
+# inactivity_timeout = "8h"
+
+# This hook runs before a new user is created and allows developers to reject the request based on the incoming user object.
+# [auth.hook.before_user_created]
+# enabled = true
+# uri = "pg-functions://postgres/auth/before-user-created-hook"
+
+# This hook runs before a token is issued and allows you to add additional claims based on the authentication method used.
+# [auth.hook.custom_access_token]
+# enabled = true
+# uri = "pg-functions://<database>/<schema>/<function>"
+
+# Configure one of the supported SMS providers: `twilio`, `twilio_verify`, `messagebird`, `textlocal`, `vonage`.
+[auth.sms.twilio]
+enabled = false
+account_sid = ""
+message_service_sid = ""
+# DO NOT commit your Twilio auth token to git. Use environment variable substitution instead:
+auth_token = "env(SUPABASE_AUTH_SMS_TWILIO_AUTH_TOKEN)"
+
+# Multi-factor-authentication is available to Supabase Pro plan.
+[auth.mfa]
+# Control how many MFA factors can be enrolled at once per user.
+max_enrolled_factors = 10
+
+# Control MFA via App Authenticator (TOTP)
+[auth.mfa.totp]
+enroll_enabled = false
+verify_enabled = false
+
+# Configure MFA via Phone Messaging
+[auth.mfa.phone]
+enroll_enabled = false
+verify_enabled = false
+otp_length = 6
+template = "Your code is {{ .Code }}"
+max_frequency = "5s"
+
+# Configure MFA via WebAuthn
+# [auth.mfa.web_authn]
+# enroll_enabled = true
+# verify_enabled = true
+
+# Use an external OAuth provider. The full list of providers are: `apple`, `azure`, `bitbucket`,
+# `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin_oidc`, `notion`, `twitch`,
+# `twitter`, `x`, `slack`, `spotify`, `workos`, `zoom`.
+[auth.external.apple]
+enabled = false
+client_id = ""
+# DO NOT commit your OAuth provider secret to git. Use environment variable substitution instead:
+secret = "env(SUPABASE_AUTH_EXTERNAL_APPLE_SECRET)"
+# Overrides the default auth redirectUrl.
+redirect_uri = ""
+# Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure,
+# or any other third-party OIDC providers.
+url = ""
+# If enabled, the nonce check will be skipped. Required for local sign in with Google auth.
+skip_nonce_check = false
+# If enabled, it will allow the user to successfully authenticate when the provider does not return an email address.
+email_optional = false
+
+# Allow Solana wallet holders to sign in to your project via the Sign in with Solana (SIWS, EIP-4361) standard.
+# You can configure "web3" rate limit in the [auth.rate_limit] section and set up [auth.captcha] if self-hosting.
+[auth.web3.solana]
+enabled = false
+
+# Use Firebase Auth as a third-party provider alongside Supabase Auth.
+[auth.third_party.firebase]
+enabled = false
+# project_id = "my-firebase-project"
+
+# Use Auth0 as a third-party provider alongside Supabase Auth.
+[auth.third_party.auth0]
+enabled = false
+# tenant = "my-auth0-tenant"
+# tenant_region = "us"
+
+# Use AWS Cognito (Amplify) as a third-party provider alongside Supabase Auth.
+[auth.third_party.aws_cognito]
+enabled = false
+# user_pool_id = "my-user-pool-id"
+# user_pool_region = "us-east-1"
+
+# Use Clerk as a third-party provider alongside Supabase Auth.
+[auth.third_party.clerk]
+enabled = false
+# Obtain from https://clerk.com/setup/supabase
+# domain = "example.clerk.accounts.dev"
+
+# OAuth server configuration
+[auth.oauth_server]
+# Enable OAuth server functionality
+enabled = false
+# Path for OAuth consent flow UI
+authorization_url_path = "/oauth/consent"
+# Allow dynamic client registration
+allow_dynamic_registration = false
+
+[edge_runtime]
+enabled = true
+# Supported request policies: `oneshot`, `per_worker`.
+# `per_worker` (default) — enables hot reload during local development.
+# `oneshot` — fallback mode if hot reload causes issues (e.g. in large repos or with symlinks).
+policy = "per_worker"
+# Port to attach the Chrome inspector for debugging edge functions.
+inspector_port = 8083
+# The Deno major version to use.
+deno_version = 2
+
+# [edge_runtime.secrets]
+# secret_key = "env(SECRET_VALUE)"
+
+[analytics]
+enabled = true
+port = 54327
+# Configure one of the supported backends: `postgres`, `bigquery`.
+backend = "postgres"
+
+# Experimental features may be deprecated any time
+[experimental]
+# Configures Postgres storage engine to use OrioleDB (S3)
+orioledb_version = ""
+# Configures S3 bucket URL, eg. <bucket_name>.s3-<region_name>.amazonaws.com
+s3_host = "env(S3_HOST)"
+# Configures S3 bucket region, eg. us-east-1
+s3_region = "env(S3_REGION)"
+# Configures AWS_ACCESS_KEY_ID for S3 bucket
+s3_access_key = "env(S3_ACCESS_KEY)"
+# Configures AWS_SECRET_ACCESS_KEY for S3 bucket
+s3_secret_key = "env(S3_SECRET_KEY)"
+
+# [experimental.pgdelta]
+# When enabled, pg-delta becomes the active engine for supported schema flows.
+# enabled = false
+# Directory under `supabase/` where declarative files are written.
+# declarative_schema_path = "./declarative"
+# JSON string passed through to pg-delta SQL formatting.
+# format_options = "{\"keywordCase\":\"upper\",\"indent\":2,\"maxWidth\":80,\"commaStyle\":\"trailing\"}"
+
+[functions.scrape-website]
+enabled = true
+verify_jwt = true
+import_map = "./functions/scrape-website/deno.json"
+# Uncomment to specify a custom file path to the entrypoint.
+# Supported file extensions are: .ts, .js, .mjs, .jsx, .tsx
+entrypoint = "./functions/scrape-website/index.ts"
+# Specifies static files to be bundled with the function. Supports glob patterns.
+# For example, if you want to serve static HTML pages in your function:
+# static_files = [ "./functions/scrape-website/*.html" ]
+
+[functions.analyze-market]
+enabled = true
+verify_jwt = true
+import_map = "./functions/analyze-market/deno.json"
+# Uncomment to specify a custom file path to the entrypoint.
+# Supported file extensions are: .ts, .js, .mjs, .jsx, .tsx
+entrypoint = "./functions/analyze-market/index.ts"
+# Specifies static files to be bundled with the function. Supports glob patterns.
+# For example, if you want to serve static HTML pages in your function:
+# static_files = [ "./functions/analyze-market/*.html" ]
+
+[functions.generate-report]
+enabled = true
+verify_jwt = true
+import_map = "./functions/generate-report/deno.json"
+# Uncomment to specify a custom file path to the entrypoint.
+# Supported file extensions are: .ts, .js, .mjs, .jsx, .tsx
+entrypoint = "./functions/generate-report/index.ts"
+# Specifies static files to be bundled with the function. Supports glob patterns.
+# For example, if you want to serve static HTML pages in your function:
+# static_files = [ "./functions/generate-report/*.html" ]
+
+[functions.enrich-channels]
+enabled = true
+verify_jwt = true
+import_map = "./functions/enrich-channels/deno.json"
+# Uncomment to specify a custom file path to the entrypoint.
+# Supported file extensions are: .ts, .js, .mjs, .jsx, .tsx
+entrypoint = "./functions/enrich-channels/index.ts"
+# Specifies static files to be bundled with the function. Supports glob patterns.
+# For example, if you want to serve static HTML pages in your function:
+# static_files = [ "./functions/enrich-channels/*.html" ]
diff --git a/supabase/functions/analyze-market/.npmrc b/supabase/functions/analyze-market/.npmrc
new file mode 100644
index 0000000..48c6388
--- /dev/null
+++ b/supabase/functions/analyze-market/.npmrc
@@ -0,0 +1,3 @@
+# Configuration for private npm package dependencies
+# For more information on using private registries with Edge Functions, see:
+# https://supabase.com/docs/guides/functions/import-maps#importing-from-private-registries
diff --git a/supabase/functions/analyze-market/deno.json b/supabase/functions/analyze-market/deno.json
new file mode 100644
index 0000000..758d070
--- /dev/null
+++ b/supabase/functions/analyze-market/deno.json
@@ -0,0 +1,5 @@
+{
+ "imports": {
+ "@supabase/functions-js": "jsr:@supabase/functions-js@^2"
+ }
+}
diff --git a/supabase/functions/analyze-market/index.ts b/supabase/functions/analyze-market/index.ts
new file mode 100644
index 0000000..543150e
--- /dev/null
+++ b/supabase/functions/analyze-market/index.ts
@@ -0,0 +1,113 @@
+import "@supabase/functions-js/edge-runtime.d.ts";
+
+const corsHeaders = {
+ "Access-Control-Allow-Origin": "*",
+ "Access-Control-Allow-Headers":
+ "authorization, x-client-info, apikey, content-type",
+};
+
+interface AnalyzeRequest {
+ clinicName: string;
+ services: string[];
+ address: string;
+ scrapeData?: Record<string, unknown>;
+}
+
+Deno.serve(async (req) => {
+ if (req.method === "OPTIONS") {
+ return new Response("ok", { headers: corsHeaders });
+ }
+
+ try {
+ const { clinicName, services, address } =
+ (await req.json()) as AnalyzeRequest;
+
+ const PERPLEXITY_API_KEY = Deno.env.get("PERPLEXITY_API_KEY");
+ if (!PERPLEXITY_API_KEY) {
+ throw new Error("PERPLEXITY_API_KEY not configured");
+ }
+
+ // Run multiple Perplexity queries in parallel
+ const queries = [
+ {
+ id: "competitors",
+ prompt: `${address} 근처 ${services.slice(0, 3).join(", ")} 전문 성형외과/피부과 경쟁 병원 5곳을 분석해줘. 각 병원의 이름, 주요 시술, 온라인 평판, 마케팅 채널(블로그, 인스타그램, 유튜브)을 포함해줘. JSON 형식으로 응답해줘.`,
+ },
+ {
+ id: "keywords",
+ prompt: `한국 ${services.slice(0, 3).join(", ")} 관련 검색 키워드 트렌드를 분석해줘. 네이버와 구글에서 월간 검색량이 높은 키워드 20개, 경쟁 강도, 추천 롱테일 키워드를 JSON 형식으로 제공해줘.`,
+ },
+ {
+ id: "market",
+ prompt: `한국 ${services[0] || "성형외과"} 시장 트렌드 2025-2026을 분석해줘. 시장 규모, 성장률, 주요 트렌드(비수술 시술 증가, AI 마케팅, 외국인 환자 유치 등), 마케팅 채널별 효과를 JSON 형식으로 제공해줘.`,
+ },
+ {
+ id: "targetAudience",
+ prompt: `${clinicName || services[0] + " 병원"}의 잠재 고객을 분석해줘. 연령대별, 성별, 관심 시술, 정보 탐색 채널(강남언니, 바비톡, 네이버, 인스타), 의사결정 요인을 JSON 형식으로 제공해줘.`,
+ },
+ ];
+
+ const perplexityResults = await Promise.allSettled(
+ queries.map(async (q) => {
+ const response = await fetch(
+ "https://api.perplexity.ai/chat/completions",
+ {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${PERPLEXITY_API_KEY}`,
+ },
+ body: JSON.stringify({
+ model: "sonar",
+ messages: [
+ {
+ role: "system",
+ content:
+ "You are a Korean medical marketing analyst. Always respond in Korean. Provide data in valid JSON format when requested.",
+ },
+ { role: "user", content: q.prompt },
+ ],
+ temperature: 0.3,
+ }),
+ }
+ );
+ const data = await response.json();
+ return {
+ id: q.id,
+ content: data.choices?.[0]?.message?.content || "",
+ citations: data.citations || [],
+ };
+ })
+ );
+
+ const analysis: Record<string, unknown> = {};
+ for (const result of perplexityResults) {
+ if (result.status === "fulfilled") {
+ const { id, content, citations } = result.value;
+ let parsed = content;
+ const jsonMatch = content.match(/```json\n?([\s\S]*?)```/);
+ if (jsonMatch) {
+ try {
+ parsed = JSON.parse(jsonMatch[1]);
+ } catch {
+ // Keep as string if JSON parse fails
+ }
+ }
+ analysis[id] = { data: parsed, citations };
+ }
+ }
+
+ return new Response(
+ JSON.stringify({
+ success: true,
+ data: { clinicName, services, address, analysis, analyzedAt: new Date().toISOString() },
+ }),
+ { headers: { ...corsHeaders, "Content-Type": "application/json" } }
+ );
+ } catch (error) {
+ return new Response(
+ JSON.stringify({ success: false, error: error instanceof Error ? error.message : String(error) }),
+ { status: 500, headers: { ...corsHeaders, "Content-Type": "application/json" } }
+ );
+ }
+});
diff --git a/supabase/functions/enrich-channels/.npmrc b/supabase/functions/enrich-channels/.npmrc
new file mode 100644
index 0000000..48c6388
--- /dev/null
+++ b/supabase/functions/enrich-channels/.npmrc
@@ -0,0 +1,3 @@
+# Configuration for private npm package dependencies
+# For more information on using private registries with Edge Functions, see:
+# https://supabase.com/docs/guides/functions/import-maps#importing-from-private-registries
diff --git a/supabase/functions/enrich-channels/deno.json b/supabase/functions/enrich-channels/deno.json
new file mode 100644
index 0000000..758d070
--- /dev/null
+++ b/supabase/functions/enrich-channels/deno.json
@@ -0,0 +1,5 @@
+{
+ "imports": {
+ "@supabase/functions-js": "jsr:@supabase/functions-js@^2"
+ }
+}
diff --git a/supabase/functions/enrich-channels/index.ts b/supabase/functions/enrich-channels/index.ts
new file mode 100644
index 0000000..63d123d
--- /dev/null
+++ b/supabase/functions/enrich-channels/index.ts
@@ -0,0 +1,262 @@
+import "@supabase/functions-js/edge-runtime.d.ts";
+import { createClient } from "https://esm.sh/@supabase/supabase-js@2";
+
+const corsHeaders = {
+ "Access-Control-Allow-Origin": "*",
+ "Access-Control-Allow-Headers":
+ "authorization, x-client-info, apikey, content-type",
+};
+
+const APIFY_BASE = "https://api.apify.com/v2";
+
+interface EnrichRequest {
+ reportId: string;
+ clinicName: string;
+ instagramHandle?: string;
+ youtubeChannelId?: string;
+ address?: string;
+}
+
+async function runApifyActor(
+ actorId: string,
+ input: Record<string, unknown>,
+ token: string
+): Promise<unknown[]> {
+ const res = await fetch(
+ `${APIFY_BASE}/acts/${actorId}/runs?token=${token}&waitForFinish=120`,
+ {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify(input),
+ }
+ );
+ const run = await res.json();
+ const datasetId = run.data?.defaultDatasetId;
+ if (!datasetId) return [];
+
+ const itemsRes = await fetch(
+ `${APIFY_BASE}/datasets/${datasetId}/items?token=${token}&limit=20`
+ );
+ return itemsRes.json();
+}
+
+Deno.serve(async (req) => {
+ if (req.method === "OPTIONS") {
+ return new Response("ok", { headers: corsHeaders });
+ }
+
+ try {
+ const { reportId, clinicName, instagramHandle, youtubeChannelId, address } =
+ (await req.json()) as EnrichRequest;
+
+ const APIFY_TOKEN = Deno.env.get("APIFY_API_TOKEN");
+ if (!APIFY_TOKEN) throw new Error("APIFY_API_TOKEN not configured");
+
+ const enrichment: Record<string, unknown> = {};
+
+ // Run all enrichment tasks in parallel
+ const tasks: Promise<void>[] = [];
+
+ // 1. Instagram Profile
+ if (instagramHandle) {
+ tasks.push(
+ (async () => {
+ const items = await runApifyActor(
+ "apify~instagram-profile-scraper",
+ { usernames: [instagramHandle], resultsLimit: 12 },
+ APIFY_TOKEN
+ );
+ const profile = (items as Record<string, unknown>[])[0];
+ if (profile && !profile.error) {
+ enrichment.instagram = {
+ username: profile.username,
+ followers: profile.followersCount,
+ following: profile.followsCount,
+ posts: profile.postsCount,
+ bio: profile.biography,
+ isBusinessAccount: profile.isBusinessAccount,
+ externalUrl: profile.externalUrl,
+ latestPosts: ((profile.latestPosts as Record<string, unknown>[]) || [])
+ .slice(0, 12)
+ .map((p) => ({
+ type: p.type,
+ likes: p.likesCount,
+ comments: p.commentsCount,
+ caption: (p.caption as string || "").slice(0, 200),
+ timestamp: p.timestamp,
+ })),
+ };
+ }
+ })()
+ );
+ }
+
+ // 2. Google Maps / Place Reviews
+ if (clinicName || address) {
+ tasks.push(
+ (async () => {
+ const searchQuery = `${clinicName} ${address || "강남"}`;
+ const items = await runApifyActor(
+ "compass~crawler-google-places",
+ {
+ searchStringsArray: [searchQuery],
+ maxCrawledPlacesPerSearch: 1,
+ language: "ko",
+ maxReviews: 10,
+ },
+ APIFY_TOKEN
+ );
+ const place = (items as Record<string, unknown>[])[0];
+ if (place) {
+ enrichment.googleMaps = {
+ name: place.title,
+ rating: place.totalScore,
+ reviewCount: place.reviewsCount,
+ address: place.address,
+ phone: place.phone,
+ website: place.website,
+ category: place.categoryName,
+ openingHours: place.openingHours,
+ topReviews: ((place.reviews as Record<string, unknown>[]) || [])
+ .slice(0, 10)
+ .map((r) => ({
+ stars: r.stars,
+ text: (r.text as string || "").slice(0, 200),
+ publishedAtDate: r.publishedAtDate,
+ })),
+ };
+ }
+ })()
+ );
+ }
+
+ // 3. YouTube Channel (using YouTube Data API v3)
+ if (youtubeChannelId) {
+ const YOUTUBE_API_KEY = Deno.env.get("YOUTUBE_API_KEY");
+ if (YOUTUBE_API_KEY) {
+ tasks.push(
+ (async () => {
+ const YT_BASE = "https://www.googleapis.com/youtube/v3";
+
+ // Resolve handle/username to channel ID
+ let channelId = youtubeChannelId;
+ if (channelId.startsWith("@") || !channelId.startsWith("UC")) {
+ // Use forHandle for @handles, forUsername for legacy usernames
+ const param = channelId.startsWith("@") ? "forHandle" : "forUsername";
+ const handle = channelId.startsWith("@") ? channelId.slice(1) : channelId;
+ const lookupRes = await fetch(
+ `${YT_BASE}/channels?part=id&${param}=${handle}&key=${YOUTUBE_API_KEY}`
+ );
+ const lookupData = await lookupRes.json();
+ channelId = lookupData.items?.[0]?.id || "";
+ }
+
+ if (!channelId) return;
+
+ // Step 1: Get channel statistics & snippet (1 quota unit)
+ const channelRes = await fetch(
+ `${YT_BASE}/channels?part=snippet,statistics,brandingSettings&id=${channelId}&key=${YOUTUBE_API_KEY}`
+ );
+ const channelData = await channelRes.json();
+ const channel = channelData.items?.[0];
+
+ if (!channel) return;
+
+ const stats = channel.statistics || {};
+ const snippet = channel.snippet || {};
+
+ // Step 2: Get recent/popular videos (100 quota units)
+ const searchRes = await fetch(
+ `${YT_BASE}/search?part=snippet&channelId=${channelId}&order=viewCount&type=video&maxResults=10&key=${YOUTUBE_API_KEY}`
+ );
+ const searchData = await searchRes.json();
+ const videoIds = (searchData.items || [])
+ .map((item: Record<string, unknown>) => (item.id as Record<string, string>)?.videoId)
+ .filter(Boolean)
+ .join(",");
+
+ // Step 3: Get video details — views, likes, duration (1 quota unit)
+ let videos: Record<string, unknown>[] = [];
+ if (videoIds) {
+ const videosRes = await fetch(
+ `${YT_BASE}/videos?part=snippet,statistics,contentDetails&id=${videoIds}&key=${YOUTUBE_API_KEY}`
+ );
+ const videosData = await videosRes.json();
+ videos = videosData.items || [];
+ }
+
+ enrichment.youtube = {
+ channelId,
+ channelName: snippet.title,
+ handle: snippet.customUrl || youtubeChannelId,
+ description: snippet.description?.slice(0, 500),
+ publishedAt: snippet.publishedAt,
+ thumbnailUrl: snippet.thumbnails?.default?.url,
+ subscribers: parseInt(stats.subscriberCount || "0", 10),
+ totalViews: parseInt(stats.viewCount || "0", 10),
+ totalVideos: parseInt(stats.videoCount || "0", 10),
+ videos: videos.slice(0, 10).map((v) => {
+ const vs = v.statistics as Record<string, string> || {};
+ const vSnippet = v.snippet as Record<string, unknown> || {};
+ const vContent = v.contentDetails as Record<string, string> || {};
+ return {
+ title: vSnippet.title,
+ views: parseInt(vs.viewCount || "0", 10),
+ likes: parseInt(vs.likeCount || "0", 10),
+ comments: parseInt(vs.commentCount || "0", 10),
+ date: vSnippet.publishedAt,
+ duration: vContent.duration,
+ url: `https://www.youtube.com/watch?v=${(v.id as string)}`,
+ thumbnail: (vSnippet.thumbnails as Record<string, Record<string, string>>)?.medium?.url,
+ };
+ }),
+ };
+ })()
+ );
+ }
+ }
+
+ await Promise.allSettled(tasks);
+
+ // Save enrichment data to Supabase
+ const supabaseUrl = Deno.env.get("SUPABASE_URL")!;
+ const supabaseKey = Deno.env.get("SUPABASE_SERVICE_ROLE_KEY")!;
+ const supabase = createClient(supabaseUrl, supabaseKey);
+
+ if (reportId) {
+ // Get existing report
+ const { data: existing } = await supabase
+ .from("marketing_reports")
+ .select("report")
+ .eq("id", reportId)
+ .single();
+
+ if (existing) {
+ const updatedReport = {
+ ...existing.report,
+ channelEnrichment: enrichment,
+ enrichedAt: new Date().toISOString(),
+ };
+
+ await supabase
+ .from("marketing_reports")
+ .update({ report: updatedReport, updated_at: new Date().toISOString() })
+ .eq("id", reportId);
+ }
+ }
+
+ return new Response(
+ JSON.stringify({
+ success: true,
+ data: enrichment,
+ enrichedAt: new Date().toISOString(),
+ }),
+ { headers: { ...corsHeaders, "Content-Type": "application/json" } }
+ );
+ } catch (error) {
+ return new Response(
+ JSON.stringify({ success: false, error: error instanceof Error ? error.message : String(error) }),
+ { status: 500, headers: { ...corsHeaders, "Content-Type": "application/json" } }
+ );
+ }
+});
diff --git a/supabase/functions/generate-report/.npmrc b/supabase/functions/generate-report/.npmrc
new file mode 100644
index 0000000..48c6388
--- /dev/null
+++ b/supabase/functions/generate-report/.npmrc
@@ -0,0 +1,3 @@
+# Configuration for private npm package dependencies
+# For more information on using private registries with Edge Functions, see:
+# https://supabase.com/docs/guides/functions/import-maps#importing-from-private-registries
diff --git a/supabase/functions/generate-report/deno.json b/supabase/functions/generate-report/deno.json
new file mode 100644
index 0000000..758d070
--- /dev/null
+++ b/supabase/functions/generate-report/deno.json
@@ -0,0 +1,5 @@
+{
+ "imports": {
+ "@supabase/functions-js": "jsr:@supabase/functions-js@^2"
+ }
+}
diff --git a/supabase/functions/generate-report/index.ts b/supabase/functions/generate-report/index.ts
new file mode 100644
index 0000000..d4934b0
--- /dev/null
+++ b/supabase/functions/generate-report/index.ts
@@ -0,0 +1,205 @@
+import "@supabase/functions-js/edge-runtime.d.ts";
+import { createClient } from "https://esm.sh/@supabase/supabase-js@2";
+
+const corsHeaders = {
+ "Access-Control-Allow-Origin": "*",
+ "Access-Control-Allow-Headers":
+ "authorization, x-client-info, apikey, content-type",
+};
+
+interface ReportRequest {
+ url: string;
+ clinicName?: string;
+}
+
+Deno.serve(async (req) => {
+ if (req.method === "OPTIONS") {
+ return new Response("ok", { headers: corsHeaders });
+ }
+
+ try {
+ const { url, clinicName } = (await req.json()) as ReportRequest;
+
+ if (!url) {
+ return new Response(
+ JSON.stringify({ error: "URL is required" }),
+ { status: 400, headers: { ...corsHeaders, "Content-Type": "application/json" } }
+ );
+ }
+
+ const PERPLEXITY_API_KEY = Deno.env.get("PERPLEXITY_API_KEY");
+ if (!PERPLEXITY_API_KEY) {
+ throw new Error("PERPLEXITY_API_KEY not configured");
+ }
+
+ const supabaseUrl = Deno.env.get("SUPABASE_URL")!;
+ const supabaseKey = Deno.env.get("SUPABASE_SERVICE_ROLE_KEY")!;
+
+ // Step 1: Call scrape-website function
+ const scrapeRes = await fetch(`${supabaseUrl}/functions/v1/scrape-website`, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${supabaseKey}`,
+ },
+ body: JSON.stringify({ url, clinicName }),
+ });
+ const scrapeResult = await scrapeRes.json();
+
+ if (!scrapeResult.success) {
+ throw new Error(`Scraping failed: ${scrapeResult.error}`);
+ }
+
+ const clinic = scrapeResult.data.clinic;
+ const resolvedName = clinicName || clinic.clinicName || url;
+ const services = clinic.services || [];
+ const address = clinic.address || "";
+
+ // Step 2: Call analyze-market function
+ const analyzeRes = await fetch(`${supabaseUrl}/functions/v1/analyze-market`, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${supabaseKey}`,
+ },
+ body: JSON.stringify({
+ clinicName: resolvedName,
+ services,
+ address,
+ scrapeData: scrapeResult.data,
+ }),
+ });
+ const analyzeResult = await analyzeRes.json();
+
+ // Step 3: Generate final report with Gemini
+ const reportPrompt = `
+당신은 프리미엄 의료 마케팅 전문 분석가입니다. 아래 데이터를 기반으로 종합 마케팅 인텔리전스 리포트를 생성해주세요.
+
+## 수집된 데이터
+
+### 병원 정보
+${JSON.stringify(scrapeResult.data, null, 2)}
+
+### 시장 분석
+${JSON.stringify(analyzeResult.data?.analysis || {}, null, 2)}
+
+## 리포트 형식 (반드시 아래 JSON 구조로 응답)
+
+{
+ "clinicInfo": {
+ "name": "병원명",
+ "address": "주소",
+ "phone": "전화번호",
+ "services": ["시술1", "시술2"],
+ "doctors": [{"name": "의사명", "specialty": "전문분야"}]
+ },
+ "executiveSummary": "경영진 요약 (3-5문장)",
+ "overallScore": 0-100,
+ "channelAnalysis": {
+ "naverBlog": { "score": 0-100, "status": "active|inactive|weak", "posts": 0, "recommendation": "추천사항" },
+ "instagram": { "score": 0-100, "status": "active|inactive|weak", "followers": 0, "recommendation": "추천사항" },
+ "youtube": { "score": 0-100, "status": "active|inactive|weak", "subscribers": 0, "recommendation": "추천사항" },
+ "naverPlace": { "score": 0-100, "rating": 0, "reviews": 0, "recommendation": "추천사항" },
+ "gangnamUnni": { "score": 0-100, "rating": 0, "reviews": 0, "recommendation": "추천사항" },
+ "website": { "score": 0-100, "issues": [], "recommendation": "추천사항" }
+ },
+ "competitors": [
+ { "name": "경쟁병원명", "strengths": ["강점1"], "weaknesses": ["약점1"], "marketingChannels": ["채널1"] }
+ ],
+ "keywords": {
+ "primary": [{"keyword": "키워드", "monthlySearches": 0, "competition": "high|medium|low"}],
+ "longTail": [{"keyword": "롱테일 키워드", "monthlySearches": 0}]
+ },
+ "targetAudience": {
+ "primary": { "ageRange": "25-35", "gender": "female", "interests": ["관심사1"], "channels": ["채널1"] },
+ "secondary": { "ageRange": "35-45", "gender": "female", "interests": ["관심사1"], "channels": ["채널1"] }
+ },
+ "recommendations": [
+ { "priority": "high|medium|low", "category": "카테고리", "title": "제목", "description": "설명", "expectedImpact": "기대 효과" }
+ ],
+ "marketTrends": ["트렌드1", "트렌드2"]
+}
+`;
+
+ const aiRes = await fetch("https://api.perplexity.ai/chat/completions", {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${PERPLEXITY_API_KEY}`,
+ },
+ body: JSON.stringify({
+ model: "sonar",
+ messages: [
+ {
+ role: "system",
+ content: "You are a Korean medical marketing analyst. Respond ONLY with valid JSON, no markdown code blocks. Always respond in Korean for text fields.",
+ },
+ { role: "user", content: reportPrompt },
+ ],
+ temperature: 0.3,
+ }),
+ });
+
+ const aiData = await aiRes.json();
+ let reportText = aiData.choices?.[0]?.message?.content || "";
+ // Strip markdown code blocks if present
+ const jsonMatch = reportText.match(/```(?:json)?\n?([\s\S]*?)```/);
+ if (jsonMatch) reportText = jsonMatch[1];
+
+ let report;
+ try {
+ report = JSON.parse(reportText);
+ } catch {
+ report = { raw: reportText, parseError: true };
+ }
+
+ // Save to Supabase
+ const supabase = createClient(supabaseUrl, supabaseKey);
+ const { data: saved, error: saveError } = await supabase
+ .from("marketing_reports")
+ .insert({
+ url,
+ clinic_name: resolvedName,
+ report,
+ scrape_data: scrapeResult.data,
+ analysis_data: analyzeResult.data,
+ })
+ .select("id")
+ .single();
+
+ // Extract social handles from scrape data for frontend enrichment
+ const socialMedia = clinic.socialMedia || {};
+
+ return new Response(
+ JSON.stringify({
+ success: true,
+ reportId: saved?.id || null,
+ report,
+ metadata: {
+ url,
+ clinicName: resolvedName,
+ generatedAt: new Date().toISOString(),
+ dataSources: {
+ scraping: scrapeResult.success,
+ marketAnalysis: analyzeResult.success,
+ aiGeneration: !report.parseError,
+ },
+ socialHandles: {
+ instagram: socialMedia.instagram || null,
+ youtube: socialMedia.youtube || null,
+ facebook: socialMedia.facebook || null,
+ blog: socialMedia.blog || null,
+ },
+ address,
+ services,
+ },
+ }),
+ { headers: { ...corsHeaders, "Content-Type": "application/json" } }
+ );
+ } catch (error) {
+ return new Response(
+ JSON.stringify({ success: false, error: error instanceof Error ? error.message : String(error) }),
+ { status: 500, headers: { ...corsHeaders, "Content-Type": "application/json" } }
+ );
+ }
+});
diff --git a/supabase/functions/scrape-website/.npmrc b/supabase/functions/scrape-website/.npmrc
new file mode 100644
index 0000000..48c6388
--- /dev/null
+++ b/supabase/functions/scrape-website/.npmrc
@@ -0,0 +1,3 @@
+# Configuration for private npm package dependencies
+# For more information on using private registries with Edge Functions, see:
+# https://supabase.com/docs/guides/functions/import-maps#importing-from-private-registries
diff --git a/supabase/functions/scrape-website/deno.json b/supabase/functions/scrape-website/deno.json
new file mode 100644
index 0000000..758d070
--- /dev/null
+++ b/supabase/functions/scrape-website/deno.json
@@ -0,0 +1,5 @@
+{
+ "imports": {
+ "@supabase/functions-js": "jsr:@supabase/functions-js@^2"
+ }
+}
diff --git a/supabase/functions/scrape-website/index.ts b/supabase/functions/scrape-website/index.ts
new file mode 100644
index 0000000..5c00433
--- /dev/null
+++ b/supabase/functions/scrape-website/index.ts
@@ -0,0 +1,154 @@
+import "@supabase/functions-js/edge-runtime.d.ts";
+import { createClient } from "https://esm.sh/@supabase/supabase-js@2";
+
+const corsHeaders = {
+  "Access-Control-Allow-Origin": "*",
+  "Access-Control-Allow-Headers":
+    "authorization, x-client-info, apikey, content-type",
+};
+
+interface ScrapeRequest {
+  url: string;
+  clinicName?: string;
+}
+
+// Convert an unknown thrown value into a readable message (catch vars are `unknown`).
+const errorMessage = (e: unknown): string =>
+  e instanceof Error ? e.message : String(e);
+
+Deno.serve(async (req) => {
+  // Handle CORS preflight
+  if (req.method === "OPTIONS") {
+    return new Response("ok", { headers: corsHeaders });
+  }
+
+  try {
+    const { url, clinicName } = (await req.json()) as ScrapeRequest;
+
+    if (!url) {
+      return new Response(
+        JSON.stringify({ error: "URL is required" }),
+        { status: 400, headers: { ...corsHeaders, "Content-Type": "application/json" } }
+      );
+    }
+
+    const FIRECRAWL_API_KEY = Deno.env.get("FIRECRAWL_API_KEY");
+    if (!FIRECRAWL_API_KEY) {
+      throw new Error("FIRECRAWL_API_KEY not configured");
+    }
+
+    // POST JSON to a Firecrawl v1 endpoint. Non-JSON error responses are
+    // reported as { success: false } instead of throwing an opaque parse
+    // error, so best-effort steps (map/search) cannot 500 the whole request.
+    const firecrawl = async (path: string, body: Record<string, unknown>) => {
+      const res = await fetch(`https://api.firecrawl.dev/v1/${path}`, {
+        method: "POST",
+        headers: {
+          "Content-Type": "application/json",
+          Authorization: `Bearer ${FIRECRAWL_API_KEY}`,
+        },
+        body: JSON.stringify(body),
+      });
+      try {
+        return await res.json();
+      } catch {
+        return { success: false, error: `HTTP ${res.status} from /${path}` };
+      }
+    };
+
+    // Step 1: Scrape the main website (required; failure aborts the request).
+    const scrapeData = await firecrawl("scrape", {
+      url,
+      formats: ["json", "links"],
+      jsonOptions: {
+        prompt:
+          "Extract clinic information: clinic name, address, phone number, services offered, doctors with specialties, social media links (instagram, youtube, blog, facebook), business hours, and any marketing-related content like slogans or key messages",
+        schema: {
+          type: "object",
+          properties: {
+            clinicName: { type: "string" },
+            address: { type: "string" },
+            phone: { type: "string" },
+            businessHours: { type: "string" },
+            slogan: { type: "string" },
+            services: {
+              type: "array",
+              items: { type: "string" },
+            },
+            doctors: {
+              type: "array",
+              items: {
+                type: "object",
+                properties: {
+                  name: { type: "string" },
+                  title: { type: "string" },
+                  specialty: { type: "string" },
+                },
+              },
+            },
+            socialMedia: {
+              type: "object",
+              properties: {
+                instagram: { type: "string" },
+                youtube: { type: "string" },
+                blog: { type: "string" },
+                facebook: { type: "string" },
+              },
+            },
+          },
+        },
+      },
+      waitFor: 5000,
+    });
+
+    if (!scrapeData.success) {
+      throw new Error(scrapeData.error || "Scraping failed");
+    }
+
+    // Step 2: Map the site to discover all pages (best-effort enrichment).
+    const mapData = await firecrawl("map", { url, limit: 50 });
+
+    // Step 3: Search for reviews and ratings (best-effort enrichment).
+    const searchName = clinicName || scrapeData.data?.json?.clinicName || url;
+    const searchData = await firecrawl("search", {
+      query: `${searchName} 리뷰 평점 후기 강남언니 바비톡`,
+      limit: 5,
+    });
+
+    // Combine all data
+    const result = {
+      clinic: scrapeData.data?.json || {},
+      siteLinks: scrapeData.data?.links || [],
+      siteMap: mapData.success ? mapData.links || [] : [],
+      reviews: searchData.data || [],
+      scrapedAt: new Date().toISOString(),
+      sourceUrl: url,
+    };
+
+    // Save to Supabase if configured (best-effort cache: supabase-js returns
+    // insert errors rather than throwing; they are intentionally ignored here).
+    const supabaseUrl = Deno.env.get("SUPABASE_URL");
+    const supabaseKey = Deno.env.get("SUPABASE_SERVICE_ROLE_KEY");
+
+    if (supabaseUrl && supabaseKey) {
+      const supabase = createClient(supabaseUrl, supabaseKey);
+      await supabase.from("scrape_results").insert({
+        url,
+        clinic_name: result.clinic.clinicName || searchName,
+        data: result,
+      });
+    }
+
+    return new Response(JSON.stringify({ success: true, data: result }), {
+      headers: { ...corsHeaders, "Content-Type": "application/json" },
+    });
+  } catch (error) {
+    return new Response(
+      JSON.stringify({ success: false, error: errorMessage(error) }),
+      {
+        status: 500,
+        headers: { ...corsHeaders, "Content-Type": "application/json" },
+      }
+    );
+  }
+});
diff --git a/supabase/migrations/20260330_create_tables.sql b/supabase/migrations/20260330_create_tables.sql
new file mode 100644
index 0000000..69e276c
--- /dev/null
+++ b/supabase/migrations/20260330_create_tables.sql
@@ -0,0 +1,40 @@
+-- Scrape results cache
+CREATE TABLE IF NOT EXISTS scrape_results (
+  id UUID DEFAULT gen_random_uuid() PRIMARY KEY,
+  url TEXT NOT NULL,
+  clinic_name TEXT,
+  data JSONB NOT NULL DEFAULT '{}',
+  created_at TIMESTAMPTZ DEFAULT NOW()
+);
+
+-- Marketing intelligence reports
+CREATE TABLE IF NOT EXISTS marketing_reports (
+  id UUID DEFAULT gen_random_uuid() PRIMARY KEY,
+  url TEXT NOT NULL,
+  clinic_name TEXT,
+  report JSONB NOT NULL DEFAULT '{}',
+  scrape_data JSONB DEFAULT '{}',
+  analysis_data JSONB DEFAULT '{}',
+  created_at TIMESTAMPTZ DEFAULT NOW(),
+  updated_at TIMESTAMPTZ DEFAULT NOW()
+);
+
+-- Enable RLS
+ALTER TABLE scrape_results ENABLE ROW LEVEL SECURITY;
+ALTER TABLE marketing_reports ENABLE ROW LEVEL SECURITY;
+
+-- Service role can do everything (Edge Functions use service_role key)
+CREATE POLICY "service_role_all_scrape" ON scrape_results
+  FOR ALL USING (auth.role() = 'service_role');
+
+CREATE POLICY "service_role_all_reports" ON marketing_reports
+  FOR ALL USING (auth.role() = 'service_role');
+
+-- Anon users can read their own reports (future: add user_id column)
+CREATE POLICY "anon_read_reports" ON marketing_reports
+  FOR SELECT USING (true);
+
+-- Indexes (IF NOT EXISTS keeps the migration re-runnable, matching the tables)
+CREATE INDEX IF NOT EXISTS idx_scrape_results_url ON scrape_results(url);
+CREATE INDEX IF NOT EXISTS idx_marketing_reports_url ON marketing_reports(url);
+CREATE INDEX IF NOT EXISTS idx_marketing_reports_created ON marketing_reports(created_at DESC);