from qwikidata.entity import WikidataItem
from qwikidata.json_dump import WikidataJsonDump
import pyarrow as pa
import pyarrow.parquet as pq
import pandas as pd
# Create an instance of WikidataJsonDump, which streams entities from the
# compressed dump one JSON object at a time
wjd_dump_path = "wikidata-20240304-all.json.bz2"
wjd = WikidataJsonDump(wjd_dump_path)
# Create an empty list to store the entity dictionaries
data = []
# Iterate over the entities in wjd and collect the first 1000 items
for ii, entity_dict in enumerate(wjd):
    if ii >= 1000:
        break
    if entity_dict["type"] == "item":
        data.append(entity_dict)
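# Optional sanity check, a minimal sketch: wrap the first collected dict in
# qwikidata's WikidataItem to confirm the dump parsed as expected
# (entity_id and get_label() are WikidataItem accessors).
if data:
    first_item = WikidataItem(data[0])
    print(first_item.entity_id, first_item.get_label())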
# Create a Parquet schema for the [Wikidata Snak Format](https://doc.wikimedia.org/Wikibase/master/php/docs_topics_json.html#json_snaks)
# {
#   "snaktype": "value",
#   "property": "P17",
#   "datatype": "wikibase-item",
#   "datavalue": {
#     "value": {
#       "entity-type": "item",
#       "id": "Q30",
#       "numeric-id": 30
#     },
#     "type": "wikibase-entityid"
#   }
# }
snak = pa.struct([
    ("snaktype", pa.string()),
    ("property", pa.string()),
    ("datatype", pa.string()),
    ("datavalue", pa.struct([
        ("value", pa.struct([
            ("entity-type", pa.string()),
            ("id", pa.string()),
            ("numeric-id", pa.int64())
        ])),
        ("type", pa.string())
    ]))
])
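# In the dump, each claim is a full statement object (see the claims example
# below) whose "mainsnak" holds the snak, so wrap the snak schema in a
# statement struct. Qualifiers and references are omitted here to keep the
# schema compact; pyarrow ignores dict keys with no matching struct field.
statement = pa.struct([
    ("id", pa.string()),
    ("mainsnak", snak),
    ("type", pa.string()),
    ("rank", pa.string())
])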
# Schema for the full entity record, based on the
# [Wikidata JSON Format Docs](https://doc.wikimedia.org/Wikibase/master/php/docs_topics_json.html)
# {
#   "id": "Q60",
#   "type": "item",
#   "labels": {},
#   "descriptions": {},
#   "aliases": {},
#   "claims": {},
#   "sitelinks": {},
#   "lastrevid": 195301613,
#   "modified": "2020-02-10T12:42:02Z"
# }
schema = pa.schema([
    ("id", pa.string()),
    ("type", pa.string()),
    # "labels": {
    #   "en": {
    #     "language": "en",
    #     "value": "New York City"
    #   },
    #   "ar": {
    #     "language": "ar",
    #     "value": "مدينة نيو يورك"
    #   }
    # }
    ("labels", pa.map_(pa.string(), pa.struct([
        ("language", pa.string()),
        ("value", pa.string())
    ]))),
| # "descriptions": { | |
| # "en": { | |
| # "language": "en", | |
| # "value": "largest city in New York and the United States of America" | |
| # }, | |
| # "it": { | |
| # "language": "it", | |
| # "value": "citt\u00e0 degli Stati Uniti d'America" | |
| # } | |
| # } | |
| ("descriptions", pa.map_(pa.string(), pa.struct([ | |
| ("language", pa.string()), | |
| ("value", pa.string()) | |
| ]))), | |
| # "aliases": { | |
| # "en": [ | |
| # { | |
| # "language": "en",pa.string | |
| # "value": "New York" | |
| # } | |
| # ], | |
| # "fr": [ | |
| # { | |
| # "language": "fr", | |
| # "value": "New York City" | |
| # }, | |
| # { | |
| # "language": "fr", | |
| # "value": "NYC" | |
| # }, | |
| # { | |
| # "language": "fr", | |
| # "value": "The City" | |
| # }, | |
| # { | |
| # "language": "fr", | |
| # "value": "La grosse pomme" | |
| # } | |
| # ] | |
| # } | |
| # } | |
| ("aliases", pa.map_(pa.string(), pa.list_(pa.struct([ | |
| ("language", pa.string()), | |
| ("value", pa.string()) | |
| ])))), | |
    # "claims": {
    #   "P17": [
    #     {
    #       "id": "q60$5083E43C-228B-4E3E-B82A-4CB20A22A3FB",
    #       "mainsnak": {},
    #       "type": "statement",
    #       "rank": "normal",
    #       "qualifiers": {
    #         "P580": [],
    #         "P5436": []
    #       },
    #       "references": [
    #         {
    #           "hash": "d103e3541cc531fa54adcaffebde6bef28d87d32",
    #           "snaks": []
    #         }
    #       ]
    #     }
    #   ]
    # }
    # Each property maps to a list of statements, so use the statement struct
    ("claims", pa.map_(pa.string(), pa.list_(statement))),
| ("sitelinks", pa.struct([ | |
| ("site", pa.string()), | |
| ("title", pa.string()) | |
| ])), | |
| ("lastrevid", pa.int64()), | |
| ("modified", pa.string()) | |
| ]) | |
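# Schema smoke test, a minimal sketch: convert a single entity first so any
# schema mismatch surfaces on one record instead of the whole batch.
if data:
    _ = pa.Table.from_pylist(data[:1], schema=schema)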
# Create a table from the list of dictionaries and the schema
table = pa.Table.from_pandas(pd.DataFrame(data), schema=schema)
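# Alternative sketch: build the table directly from the list of dicts, as in
# the smoke test above, skipping the pandas round trip (pyarrow >= 7.0
# handles missing keys as nulls):
# table = pa.Table.from_pylist(data, schema=schema)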
# Write the table to disk as Parquet
parquet_path = "wikidata-20240304-all.parquet"
pq.write_table(table, parquet_path)
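# Quick read-back check, a minimal sketch: confirm the row count matches the
# number of entities collected above.
check = pq.read_table(parquet_path)
print(f"wrote {check.num_rows} rows to {parquet_path}")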