# o2o-castad-backend/poc/crawling/nvMapScraper.py

import re
import aiohttp
import json
import asyncio
import bs4
# Captures the numeric place id from a Naver Map URL path segment.
PLACE_PATTERN = r"/place/(\d+)"
# Internal GraphQL endpoint used by Naver's pcmap web client.
GRAPHQL_URL = "https://pcmap-api.place.naver.com/graphql"
# SECURITY NOTE(review): hard-coded personal session cookies (NID_AUT/NID_SES
# grant logged-in access and will expire). Move this to an environment
# variable or secret store and rotate the exposed values.
NAVER_COOKIES="NAC=mQ7mBownbQf4A; NNB=TQPII6AKDBFGQ; PLACE_LANGUAGE=ko; NACT=1; nid_inf=1431570813; NID_AUT=k2T7FraXOdIMRCHzEZIFtHQup+I7b87M5fd7+p65AXZTdGB/gelRmW8s/Q4oDxm8; tooltipDisplayed=true; SRT30=1762660151; NID_SES=AAAB1Lpy3y3hGzuPbJpJl8vvFx18C+HXXuZEFou/YPgocHe7k2/5MpFlgE48X1JF7c7IPoU2khZKkkuLx+tsvWAzOf0TnG/G8RrBGeawnSluSJcKcTdKKRJ4cygKc/OabVxoc3TNZJWxer3vFtXBoXkDS5querVNS6wvcMhA/p4vkPKOeepwKLR+1IJERlQJWZw4q29IdAysrbBNn3Akf9mDA5eTYvMDLYyRkToRh10TVMW/yhyNQeMXlIdnR8U1ZCNqe/9ErYdos5gQDstswEJQQA0T2cHFGJOtmlYMPlnhWado5w521iZXGJyKcA9ZawizM/i5nK5xNYtPGS3cvImUYl6B5ulIipUJSqpj8v2XstK0TZlOGxHToXaVDrCNmSfCA9vFYbTb6xJHB2JRAT3Jik/z6QgLjJLBWRnsucMDqldxoiEDAUHEhY3pjgZ89quR3c3hwAuTlI9hBn5I3e5VQR0Y/GxoS9mIkMF8pJmcGneqnE0BNIt91RN6Se5rDM69B+JWppBXtSir1JGuXADaRLLMP8VlxJX949iH0UYTKWKsrD4OgNNK5aUx24nAH494WPknBMlx4fCMIeWzy7K3sEZkNUn/+A+eHraqIFfbGpveSCNM+8EqEjMgA+YRgg3eig==; _naver_usersession_=Kkgzim/64JicPJzgkIIvqQ==; page_uid=jesTPsqVWUZssE4qJeossssssD0-011300; SRT5=1762662010; BUC=z5Fu3sAYtFwpbRDrrDFYdn4AgK5hNkOqX-DdaLU7VJM="
# GraphQL query fetching a place's base details plus its image lists.
OVERVIEW_QUERY = '''
query getAccommodation($id: String!, $deviceType: String) {
business: placeDetail(input: {id: $id, isNx: true, deviceType: $deviceType}) {
base {
id
name
category
roadAddress
address
phone
virtualPhone
microReviews
conveniences
visitorReviewsTotal
}
images { images { origin url } }
cpImages(source: [ugcImage]) { images { origin url } }
}
}'''
# Browser-like headers (plus the session cookie) so requests look like the
# pcmap web client rather than a bot.
REQUEST_HEADERS = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36",
    "Referer": "https://map.naver.com/",
    "Origin": "https://map.naver.com",
    "Content-Type": "application/json",
    "Cookie": NAVER_COOKIES
}
class GraphQLException(Exception):
    """Raised when the GraphQL scrape path cannot proceed (e.g. the URL
    carries no /place/<id> segment)."""
    pass
class nvMapScraper():
    """Scrape a Naver Map place (accommodation) through Naver's internal
    pcmap GraphQL API.

    Typical use::

        scraper = nvMapScraper(url)
        await scraper.scrap()
        scraper.image_link_list / scraper.base_info / scraper.rawdata
    """

    # All of these stay None until scrap() succeeds.
    url : str = None                      # input URL; rewritten to the resolved long URL
    scrap_type : str = None               # "GraphQL" on success, "Playwright" on fallback
    rawdata : dict = None                 # full GraphQL response, plus a 'facilities' key
    image_link_list : list[str] = None    # origin-size image URLs
    base_info : dict = None               # 'base' section of the GraphQL response

    def __init__(self, url):
        self.url = url

    async def parse_url(self):
        """Return the numeric place id embedded in ``self.url``.

        A naver.me short link is first expanded by following its redirect
        (this also rewrites ``self.url`` to the resolved URL).

        Raises:
            GraphQLException: if no /place/<id> segment can be found.
        """
        if 'place' not in self.url:
            if 'naver.me' in self.url:
                # Short link: follow the redirect chain to the canonical URL.
                async with aiohttp.ClientSession() as session:
                    async with session.get(self.url) as response:
                        self.url = str(response.url)
            else:
                raise GraphQLException("this shorten url not have place id")
        # Explicit None check instead of the original broad try/except
        # around re.search(...)[1].
        match = re.search(PLACE_PATTERN, self.url)
        if match is None:
            raise GraphQLException("Cannot find place id")
        return match[1]

    async def scrap(self):
        """Populate rawdata / image_link_list / base_info / facility_info.

        Any GraphQLException is swallowed: the method records
        ``scrap_type = "Playwright"`` as a hook for a future
        Playwright-based fallback instead of raising.
        """
        try:
            place_id = await self.parse_url()
            data = await self.call_get_accomodation(place_id)
            self.rawdata = data
            fac_data = await self.get_facility_string(place_id)
            self.rawdata['facilities'] = fac_data
            self.image_link_list = [nv_image['origin'] for nv_image in data['data']['business']['images']['images']]
            self.base_info = data['data']['business']['base']
            self.facility_info = fac_data
            self.scrap_type = "GraphQL"
        except GraphQLException as G:
            print(G)
            print("fallback")
            self.scrap_type = "Playwright"
            # TODO: add Playwright-based crawling as the actual fallback.
        return

    async def call_get_accomodation(self, place_id):
        """POST the getAccommodation GraphQL query and return parsed JSON.

        Raises:
            GraphQLException: on a non-200 response, so scrap() can fall
            back (the original raised a bare Exception, which scrap()'s
            ``except GraphQLException`` never caught).
        """
        payload = {
            "operationName" : "getAccommodation",
            "variables": { "id": place_id, "deviceType": "pc" },
            "query": OVERVIEW_QUERY,
        }
        json_payload = json.dumps(payload)
        async with aiohttp.ClientSession() as session:
            async with session.post(GRAPHQL_URL, data=json_payload, headers=REQUEST_HEADERS) as response:
                # NOTE: the original set `response.encoding = 'utf-8'`,
                # a requests-ism; aiohttp detects the charset itself in
                # .json()/.text(), so that line was dropped.
                if response.status == 200:  # request succeeded
                    return await response.json()
                # Request failed: log status and body, then raise so the
                # caller falls back.
                print('실패 상태 코드:', response.status)
                # BUGFIX: response.text is a coroutine method in aiohttp;
                # the original printed the bound method object, not the body.
                print(await response.text())
                raise GraphQLException(f"GraphQL request failed with status {response.status}")

    async def get_facility_string(self, place_id):
        """Fetch the place's pcmap home page and return its facility text.

        Raises:
            GraphQLException: when the facility section is missing, so
            scrap() can fall back instead of dying on an AttributeError.
        """
        url = f"https://pcmap.place.naver.com/accommodation/{place_id}/home"
        async with aiohttp.ClientSession() as session:
            async with session.get(url, headers=REQUEST_HEADERS) as response:
                html = await response.read()
        soup = bs4.BeautifulSoup(html, 'html.parser')
        # The facility block is anchored by a screen-reader label
        # ('편의' = amenities/conveniences).
        c_elem = soup.find('span', 'place_blind', string='편의')
        if c_elem is None:
            raise GraphQLException("facility section not found")
        return c_elem.parent.parent.find('div').string
# Example usage (manual smoke test — performs live network requests):
# url = "https://naver.me/IgJGCCic"
# scraper = nvMapScraper(url)
# asyncio.run(scraper.scrap())
# print(scraper.image_link_list)
# print(len(scraper.image_link_list))