can't insert foreign keys with sqlite3 after scraping with bs4


I'm scraping data from Google Scholar and want to store it in a database. The first two tables insert fine, but I can't insert the foreign keys into the third one. I think the table definitions are correct, because they are created without any problem; the error only appears when I try to insert the data into that table. I also want to know how to insert a paper only once into the "Publicacion" table: the same paper can appear in several authors' profiles, but I only need it in that table one time. A sketch of one possible approach is included after the code below.

import sys
import time
import sqlite3
import urllib.parse

from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver import ActionChains
from selenium.webdriver.support.ui import WebDriverWait as W

chrome_path = r"chromedriver.exe"
driver = webdriver.Chrome(chrome_path)
wait = W(driver, 1)
papers = []
try:
    conn = sqlite3.connect('scholar.db')

except:
    print("No connection")
    sys.exit(0)

conn.execute("PRAGMA foreign_keys = 1")
cur=conn.cursor()

cur.execute("""CREATE TABLE IF NOT EXISTS Autor (
                                 id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
                                 Nombre_Autor TEXT NOT NULL,
                                 Titulo_Paper TEXT NOT NULL,
                                 Citado_Por TEXT NOT NULL,
                                 Año TEXT NOT NULL,
                                 Id_Paper TEXT NOT NULL,
                                 Link_Paper TEXT NOT NULL
                                 )""")
cur.execute("""CREATE TABLE IF NOT EXISTS Publicacion (
                                 id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
                                 Nombre_Autor TEXT NOT NULL,
                                 Titulo_Paper TEXT NOT NULL,      
                                 Id_Paper TEXT NOT NULL
                                 )""")

cur.execute("""CREATE TABLE IF NOT EXISTS Autor_Publicacion(
                                A_id INTEGER,
                                P_id INTEGER,
                                FOREIGN KEY(A_id) REFERENCES Autor(id),
                                FOREIGN KEY(P_id) REFERENCES Publicacion(id)
                                )""")
conn.commit()
# https://scholar.google.com/citations?hl=en&user=2BTxUj4AAAAJ
urls = []
with open(r'perfil.csv', 'r') as f:
    for line in f:
        urls.append(line.strip())  # strip the trailing newline from each URL

for url in urls:
    driver.get(url)
    # click the "Show more" button a few times to load additional publications
    more = driver.find_element_by_class_name('gs_btnPD')
    for _ in range(0, 5):
        ActionChains(driver).click(more).perform()
        time.sleep(1)

    while True:
        soup = BeautifulSoup(driver.page_source, 'html.parser')
        citation_indices = soup.find('table', attrs={'id': 'gsc_rsb_st'})
        research_article = soup.find_all('tr', {'class': 'gsc_a_tr'})
        author_details = soup.find('div', {'id': 'gsc_prf_i'})
        time.sleep(3)
        try:
            for i, research in enumerate(research_article, 1):
              
                name = author_details.find('div', attrs={'id': 'gsc_prf_in'}).text
                
                pub_details = research.find('td', attrs={'class': 'gsc_a_t'})
                pub_ref = pub_details.a['href']
                pub_meta = pub_details.find_all('div')
                title = pub_details.a.text
                
                cited_by = research.find('a', attrs={'class': 'gsc_a_ac'}).text or ''
                year = research.find('span', attrs={'class': 'gsc_a_h'}).text or ''
                idpaper = research.find('a', attrs={'class': 'gsc_a_at'})
                d = idpaper.get('data-href')
                linkpaper = urllib.parse.urljoin("https://scholar.google.com", d)
                parsed = urllib.parse.urlparse(d)
                mydata = ([name,  title, cited_by, year, parsed.query, linkpaper])

                mydata2 = ([name, title, parsed.query])
                papers.append(mydata)
                papers.append(mydata2)
                cur.executemany("""INSERT INTO Autor (Nombre_Autor, Titulo_Paper,
                                              Citado_Por, Año, Id_Paper, Link_Paper) VALUES (?, ?, ?, ?, 
                                            ?, ?)""", [mydata])
                A_id = cur.lastrowid

                cur.executemany("""INSERT INTO Publicacion (Nombre_Autor, Titulo_Paper, Id_Paper) VALUES (?, ?, ?)""", [mydata2])
                P_id = cur.lastrowid
                cur.executemany("""INSERT INTO Autor_Publicacion (A_id, P_id) VALUES(?, ?)""", [A_id, P_id])
                conn.commit()
        except:
            # a bare except silently swallows any parsing or insert error
            pass

        if len(research_article) != 100:
            print(f'Page {url} scraped')
            break 
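
The failing line passes [A_id, P_id] to executemany(), which expects a sequence of parameter tuples, so the two integers are treated as separate (invalid) parameter rows; a single execute() call with one tuple is what the junction-table insert needs. Below is a minimal sketch, assuming the schema from the question and that parsed.query is the paper identifier; the insert_link() helper and the example values at the bottom are made up for illustration. A UNIQUE constraint on Id_Paper together with INSERT OR IGNORE keeps each paper in Publicacion only once, and the publication id is looked up afterwards so the link row always points at the stored record.

import sqlite3

conn = sqlite3.connect('scholar.db')
conn.execute("PRAGMA foreign_keys = 1")
cur = conn.cursor()

cur.execute("""CREATE TABLE IF NOT EXISTS Autor (
                   id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
                   Nombre_Autor TEXT NOT NULL,
                   Titulo_Paper TEXT NOT NULL,
                   Citado_Por TEXT NOT NULL,
                   Año TEXT NOT NULL,
                   Id_Paper TEXT NOT NULL,
                   Link_Paper TEXT NOT NULL
                   )""")

# UNIQUE on Id_Paper lets INSERT OR IGNORE skip papers that were already stored
cur.execute("""CREATE TABLE IF NOT EXISTS Publicacion (
                   id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
                   Nombre_Autor TEXT NOT NULL,
                   Titulo_Paper TEXT NOT NULL,
                   Id_Paper TEXT NOT NULL UNIQUE
                   )""")

cur.execute("""CREATE TABLE IF NOT EXISTS Autor_Publicacion(
                   A_id INTEGER,
                   P_id INTEGER,
                   FOREIGN KEY(A_id) REFERENCES Autor(id),
                   FOREIGN KEY(P_id) REFERENCES Publicacion(id)
                   )""")
conn.commit()

# insert_link() is a made-up helper name; mydata and mydata2 are the same
# lists built inside the scraping loop
def insert_link(cur, mydata, mydata2):
    # execute() takes a single parameter tuple; executemany() expects a
    # sequence of tuples, which is why [A_id, P_id] fails
    cur.execute("""INSERT INTO Autor (Nombre_Autor, Titulo_Paper, Citado_Por,
                                      Año, Id_Paper, Link_Paper)
                   VALUES (?, ?, ?, ?, ?, ?)""", mydata)
    a_id = cur.lastrowid

    # store the paper only the first time this Id_Paper is seen
    cur.execute("""INSERT OR IGNORE INTO Publicacion (Nombre_Autor, Titulo_Paper, Id_Paper)
                   VALUES (?, ?, ?)""", mydata2)

    # look the publication id up instead of using lastrowid, which is not
    # updated when OR IGNORE skips the insert
    cur.execute("SELECT id FROM Publicacion WHERE Id_Paper = ?", (mydata2[2],))
    p_id = cur.fetchone()[0]

    cur.execute("INSERT INTO Autor_Publicacion (A_id, P_id) VALUES (?, ?)", (a_id, p_id))

# example values only, standing in for one scraped row
insert_link(cur, ["Some Author", "Some Title", "10", "2020", "paper-id", "https://scholar.google.com/..."],
            ["Some Author", "Some Title", "paper-id"])
conn.commit()

Inside the scraping loop, the three executemany() calls would be replaced by a call like insert_link(cur, mydata, mydata2) followed by conn.commit(). Note that CREATE TABLE IF NOT EXISTS does not alter an existing table, so the UNIQUE constraint only takes effect on a freshly created scholar.db.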