WDQS data loading queries in case of disaster
commandswdqsdataloading.txt (new file, 29 lines)

HOW TO MANUALLY PUT DATA FROM WIKIBASE INTO WDQS

# dump the data:

docker exec wikibase-docker_wikibase_1 php ./extensions/Wikibase/repo/maintenance/dumpRdf.php --server http://daap.bannerrepeater.org --output /tmp/rdfOutput
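
# optional sanity check (not part of the original notes): confirm the dump exists and is non-empty before copying
docker exec wikibase-docker_wikibase_1 ls -lh /tmp/rdfOutput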

# copy from container to container (replace [date] consistently, e.g. 15052020)

docker cp wikibase-docker_wikibase_1:/tmp/rdfOutput /root/rdf_dump_[date]

docker cp /root/rdf_dump_[date] wikibase-docker_wdqs_1:/tmp/rdf_dump_[date]
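
# one way to fill in [date] automatically (assumes the ddmmyyyy format from the example above)
DUMP=rdf_dump_$(date +%d%m%Y)
docker cp wikibase-docker_wikibase_1:/tmp/rdfOutput /root/$DUMP
docker cp /root/$DUMP wikibase-docker_wdqs_1:/tmp/$DUMP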

# jump into the container

docker exec -it wikibase-docker_wdqs_1 bash
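
# the munge command below uses relative paths (lib/...), so it assumes the shell starts in the
# WDQS service directory; if it does not, cd there first (the /wdqs path is an assumption for this image)
cd /wdqs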

# munge the dump using the java executable directly

java -cp lib/wikidata-query-tools-*-jar-with-dependencies.jar org.wikidata.query.rdf.tool.Munge --from /tmp/rdf_dump_[date] --to /tmp/mungeOut/wikidump-%09d.ttl.gz --skipSiteLinks --chunkSize 100000 -w daap.bannerrepeater.org
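
# the munge output is written as chunked .ttl.gz files under /tmp/mungeOut; creating the directory
# up front avoids a possible write failure (an assumption -- the tool may create it itself)
mkdir -p /tmp/mungeOut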

# load the data into the wdqs (note: run from the host again, via docker exec)

docker exec wikibase-docker_wdqs_1 ./loadData.sh -n wdq -d /tmp/mungeOut
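
# optional verification (assumes the Blazegraph SPARQL endpoint is reachable inside the wdqs container
# at the usual port 9999 / namespace wdq path, and that curl is installed there): count the loaded triples
docker exec wikibase-docker_wdqs_1 curl -s http://localhost:9999/bigdata/namespace/wdq/sparql --data-urlencode 'query=SELECT (COUNT(*) AS ?n) WHERE { ?s ?p ?o }'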