The problem I am facing is that my Scrapy code, specifically the pipeline, raises a ProgrammingError:
mysql.connector.errors.ProgrammingError: Not all parameters were used in the SQL statement
This is my code for the pipeline:
import csv
from scrapy.exceptions import DropItem
from scrapy import log
import sys
import mysql.connector


class CsvWriterPipeline(object):

    def __init__(self):
        self.connection = mysql.connector.connect(host='localhost', user='test', password='test', db='test')
        self.cursor = self.connection.cursor()

    def process_item(self, item, spider):
        self.cursor.execute("SELECT title, url FROM items WHERE title= %s", item['title'])
        result = self.cursor.fetchone()
        if result:
            log.msg("Item already in database: %s" % item, level=log.DEBUG)
        else:
            self.cursor.execute(
                "INSERT INTO items (title, url) VALUES (%s, %s)",
                (item['title'][0], item['link'][0]))
            self.connection.commit()
log.msg("Item stored : " % item, level=log.DEBUG)
        return item

    def handle_error(self, e):
        log.err(e)
It gives me this exact error when I run the spider: http://hastebin.com/xakotugaha.py
As you can see, it clearly crawls, so I doubt there is anything wrong with the spider itself.
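For reference, this is how I understand a plain (non-Scrapy) mysql.connector session is supposed to pass query parameters. It is only a minimal sketch with placeholder credentials, table, and sample values mirroring my pipeline, and I am not sure whether my process_item above does this correctly:

# Minimal standalone mysql.connector sketch; host/user/password/database and
# the sample title/url below are placeholders, not my real setup.
import mysql.connector

connection = mysql.connector.connect(
    host='localhost', user='test', password='test', database='test')
cursor = connection.cursor()

title = 'Example title'
url = 'http://example.com/page'

# As I understand the docs, query parameters must be a tuple or list,
# even for a single %s placeholder (hence the trailing comma below).
cursor.execute("SELECT title, url FROM items WHERE title = %s", (title,))
row = cursor.fetchone()

if row is None:
    # Insert the row only if it is not already present.
    cursor.execute(
        "INSERT INTO items (title, url) VALUES (%s, %s)",
        (title, url))
    connection.commit()

cursor.close()
connection.close()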
I am currently using the Scrapy web crawler with a MySQL database. Thanks for your help.