# 2025-09-17 17:05:27 +02:00

from time import sleep

import SPARQLWrapper

import requests
# SPARQL endpoint and paging configuration for the DBpedia film-triple scrape.
BASE_URL = "https://dbpedia.org/sparql"

# Serialization requested from the endpoint; CSV keeps downstream parsing trivial.
TYPE = SPARQLWrapper.CSV

# Pause between successive requests, in seconds, to be polite to the endpoint.
TIMEOUT_SECONDS = 1.5

# Rows fetched per request; the offset advances by one full page per iteration.
LIMIT = int(1E4)

OFFSET = LIMIT

# Index of the first page requested by main().
INITIAL_OFFSET = 0

# Safety cap so the pagination loop can never run unbounded.
MAX_PAGES = int(1E9)

# Destination CSV, appended to page by page.
FILE_URI = "./Assets/Dataset/1-hop/dataset.csv"

# Base query: every (subject, relationship, object) triple whose subject is a
# dbo:Film and is the primary topic of some page, excluding wiki-navigation
# predicates.  LIMIT/OFFSET clauses are appended per page in main().
#
# NOTE: the rdf: prefix is declared explicitly — the original relied on the
# Virtuoso endpoint's built-in prefixes, which is a syntax error under strict
# SPARQL 1.1.  Commas in the SELECT projection (a Virtuoso extension) were
# also removed for standards compliance.
QUERY = """
PREFIX dbo: <http://dbpedia.org/ontology/>
PREFIX dbp: <http://dbpedia.org/property/>
PREFIX dbr: <http://dbpedia.org/resource/>
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
PREFIX foaf: <http://xmlns.com/foaf/0.1/>

SELECT ?subject ?relationship ?object
WHERE {
    ?subject ?relationship ?object .
    ?subject rdf:type dbo:Film .
    ?a foaf:primaryTopic ?subject
    FILTER (?relationship NOT IN (
        dbo:wikiPageRedirects,
        dbo:wikiPageExternalLink,
        dbo:wikiPageWikiLink
    ))
}"""
def main():
    """Page through the DBpedia SPARQL endpoint and append each CSV page of
    film triples to FILE_URI.

    Stops when a page comes back empty (result set exhausted) or when the
    MAX_PAGES safety cap is reached.  Individual page failures are logged
    and skipped (best-effort scrape), and the loop sleeps TIMEOUT_SECONDS
    between requests to avoid hammering the endpoint.
    """
    # `done` instead of the original `exit`, which shadowed the builtin.
    done = False
    page = INITIAL_OFFSET

    while not done:
        print(f"Starting to get page {page}")

        current_offset = OFFSET * page

        sparql = SPARQLWrapper.SPARQLWrapper(BASE_URL)
        sparql.setReturnFormat(TYPE)

        current_page_query = "\n".join([
            QUERY,
            f"LIMIT {LIMIT}",
            f"OFFSET {current_offset}",
        ])

        print(f"\nCurrent Query:\n{current_page_query}\n")

        sparql.setQuery(current_page_query)

        try:
            res = sparql.queryAndConvert()
            text = ""

            if isinstance(res, bytes):
                # Keep the CSV header row only on the very first page so the
                # concatenated output file ends up with a single header.
                skip = 0 if page == 0 else 1
                lines = res.decode("utf-8", "ignore").split("\n")
                text = "\n".join(lines[skip:])

            if text == "":
                # An empty page means the result set is exhausted.
                done = True
                continue

            with open(FILE_URI, "a+", encoding="utf-8") as dataset:
                print(f"Writing page {page} on {FILE_URI}")
                dataset.write(
                    text
                )
        except Exception as ex:
            # Best-effort: log the failure and move on to the next page.
            print(f"Something went wrong during page {page}:\n\t{ex}")

        print(f"Sleeping for {TIMEOUT_SECONDS}")

        page += 1

        if page == MAX_PAGES - 1:
            done = True

        sleep(TIMEOUT_SECONDS)
# Guard the entry point so importing this module does not kick off the scrape.
if __name__ == "__main__":
    main()