본문 바로가기

데이터 분석

데이터 분석 - [네이버 API]

# 강남역 맛집 검색 시 나오는 블로그 리스트들을 csv파일로 저장하기.

 

# 네이버 검색 API예제는 블로그를 비롯 전문자료까지 호출방법이 동일하므로 blog검색만 대표로 예제를 올렸습니다.
# 네이버 검색 Open API 예제 - 블로그 검색
# Naver Search Open API example - blog search.
# The call pattern is identical for blogs through academic material, so the
# blog search serves as the representative example.
# Fetches blog posts matching "강남역 맛집" (restaurants near Gangnam Station)
# and writes title/URL/post-date/description rows to naver.csv.
import os
import sys
import urllib.request
import urllib.parse  # explicit import; previously relied on a CPython side effect of urllib.request
import csv
import json

client_id = "YOUR_CLIENT_ID"
client_secret = "YOUR_CLIENT_SECRET"

# URL-encode the Korean query string for use in the request URL.
encText = urllib.parse.quote("강남역 맛집")
url = "https://openapi.naver.com/v1/search/blog?query=" + encText  # JSON result
# url = "https://openapi.naver.com/v1/search/blog.xml?query=" + encText  # XML result

request = urllib.request.Request(url)
request.add_header("X-Naver-Client-Id", client_id)
request.add_header("X-Naver-Client-Secret", client_secret)

# `with` closes the HTTP response deterministically (the original leaked it).
with urllib.request.urlopen(request) as response:
    rescode = response.getcode()
    if rescode == 200:
        # Decode the JSON payload; "items" holds the list of blog results.
        dic = json.loads(response.read().decode("utf-8"))
        lis = dic["items"]

        header = ["제목", "URL", "등록일", "설명"]
        # newline="" is required by the csv module to avoid blank rows on Windows.
        with open("naver.csv", "w", encoding="utf-8", newline="") as s_file:
            writer = csv.writer(s_file, delimiter=",", quotechar="'", quoting=csv.QUOTE_ALL)
            writer.writerow(header)
            for row in lis:
                print(row)
                # Column order must match the header above.
                writer.writerow([row["title"], row["link"], row["postdate"], row["description"]])
    else:
        # BUG FIX: rescode is an int — the original `"Error Code:" + rescode`
        # raised TypeError instead of printing the error.
        print("Error Code:" + str(rescode))

반응형