diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3204261..d94289a 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -14,7 +14,7 @@ repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: v0.4.8
+    rev: v0.6.3
     hooks:
       # Run the linter.
       - id: ruff
diff --git a/example/example_ntv_numpy.ipynb b/example/example_ntv_numpy.ipynb
index 46244c5..159cac5 100644
--- a/example/example_ntv_numpy.ipynb
+++ b/example/example_ntv_numpy.ipynb
@@ -142,7 +142,15 @@
    "source": [
     "from base64 import b64encode\n",
     "from IPython.display import Image, display\n",
-    "display(Image(url=\"https://mermaid.ink/img/\" + b64encode(open('struc.mmd', 'r', encoding=\"utf-8\").read().encode(\"ascii\")).decode(\"ascii\")))"
+    "\n",
+    "display(\n",
+    " Image(\n",
+    " url=\"https://mermaid.ink/img/\"\n",
+    " + b64encode(\n",
+    " open(\"struc.mmd\", \"r\", encoding=\"utf-8\").read().encode(\"ascii\")\n",
+    " ).decode(\"ascii\")\n",
+    " )\n",
+    ")"
    ]
   },
   {
@@ -235,25 +243,22 @@
    "outputs": [],
    "source": [
     "example = {\n",
-    " 'example:xdataset': {\n",
-    " 'var1:float[kg]': [['x', 'y'], [2, 2], [10.1, 0.4, 3.4, 8.2]],\n",
-    " 'var1.variance': [[2, 2], [0.1, 0.2, 0.3, 0.4]],\n",
-    " 'var1.mask1': [['x'], [True, False]],\n",
-    " 'var1.mask2': [[2, 2], [True, False, False, True]],\n",
-    " \n",
-    " 'var2': [['x', 'y'], 'var2.ntv'], \n",
-    " \n",
-    " 'x:string': [{'test': 21}, ['23F0AE', '578B98']],\n",
-    " 'y:date': [['2021-01-01', '2022-02-02']],\n",
-    " \n",
-    " 'ranking:month': [['var1'], [2, 2], [1, 2, 3, 4]],\n",
-    " 'z:float': [['x'], [10, 20]],\n",
-    " 'z.uncertainty': [[0.1, 0.2]],\n",
-    " \n",
-    " 'z_bis': [['z1_bis', 'z2_bis']],\n",
-    " \n",
-    " 'info': {'path': 'https://github.com/loco-philippe/ntv-numpy/tree/main/example/'},\n",
-    " 'location:string': [['paris']]\n",
+    " \"example:xdataset\": {\n",
+    " \"var1:float[kg]\": [[\"x\", \"y\"], [2, 2], [10.1, 0.4, 3.4, 8.2]],\n",
+    " \"var1.variance\": [[2, 2], [0.1, 0.2, 0.3, 0.4]],\n",
+    " \"var1.mask1\": [[\"x\"], [True, False]],\n",
+    " \"var1.mask2\": [[2, 2], [True, False, False, True]],\n",
+    " \"var2\": [[\"x\", \"y\"], \"var2.ntv\"],\n",
+    " \"x:string\": [{\"test\": 21}, [\"23F0AE\", \"578B98\"]],\n",
+    " \"y:date\": [[\"2021-01-01\", \"2022-02-02\"]],\n",
+    " \"ranking:month\": [[\"var1\"], [2, 2], [1, 2, 3, 4]],\n",
+    " \"z:float\": [[\"x\"], [10, 20]],\n",
+    " \"z.uncertainty\": [[0.1, 0.2]],\n",
+    " \"z_bis\": [[\"z1_bis\", \"z2_bis\"]],\n",
+    " \"info\": {\n",
+    " \"path\": \"https://github.com/loco-philippe/ntv-numpy/tree/main/example/\"\n",
+    " },\n",
+    " \"location:string\": [[\"paris\"]],\n",
     " }\n",
     "}"
    ]
   },
   {
@@ -319,10 +324,9 @@
    ],
    "source": [
     "from ntv_numpy import Xdataset\n",
-    "import ntv_pandas as npd\n",
     "\n",
     "x_example = Xdataset.read_json(example)\n",
-    "x_example.info['structure']"
+    "x_example.info[\"structure\"]"
    ]
   },
   {
@@ -387,7 +391,14 @@
    }
    ],
    "source": [
-    "display(Image(url=\"https://mermaid.ink/img/\" + b64encode(open('interop.mmd', 'r', encoding=\"utf-8\").read().encode(\"ascii\")).decode(\"ascii\")))"
+    "display(\n",
+    " Image(\n",
+    " url=\"https://mermaid.ink/img/\"\n",
+    " + b64encode(\n",
+    " open(\"interop.mmd\", \"r\", encoding=\"utf-8\").read().encode(\"ascii\")\n",
+    " ).decode(\"ascii\")\n",
+    " )\n",
+    ")"
    ]
   },
   {
@@ -1305,7 +1316,9 @@
    }
    ],
    "source": [
-    "x_dataframe_empty_bis = Xdataset.from_dataframe(x_dataframe_empty).to_dataframe(json_name=False, info=False)[list(x_dataframe_empty.columns)]\n",
+    "x_dataframe_empty_bis = Xdataset.from_dataframe(x_dataframe_empty).to_dataframe(\n",
+    " json_name=False, info=False\n",
+    ")[list(x_dataframe_empty.columns)]\n",
     "x_dataframe_empty_bis.equals(x_dataframe_empty)"
    ]
   },
@@ -1787,7 +1800,7 @@
    ],
    "source": [
     "x_scipp = x_example.to_scipp()\n",
-    "print(x_scipp['example'])"
+    "print(x_scipp[\"example\"])"
    ]
   },
   {
@@ -1927,19 +1940,38 @@
    ],
    "source": [
     "example = {\n",
-    " 'example:xdataset': {\n",
-    " 'data:float64[erg / s]': [[1,2,3,4]],\n",
-    " 'data.mask': [[False, False, True, True]],\n",
-    " 'data.uncertainty:float64[std]': [[1.0, 1.414, 1.732, 2.0]],\n",
-    " 'meta': {'object': 'fictional data.'},\n",
-    " 'wcs': {'WCSAXES': 2, 'CRPIX1': 2048.0, 'CRPIX2': 1024.0, 'PC1_1': 1.2905625619716e-05,\n",
-    " 'PC1_2': 5.9530912331034e-06, 'PC2_1': 5.0220581265601e-06, 'PC2_2': -1.2644774105568e-05,\n",
-    " 'CDELT1': 1.0, 'CDELT2': 1.0, 'CUNIT1': 'deg', 'CUNIT2': 'deg', 'CTYPE1': 'RA---TAN',\n",
-    " 'CTYPE2': 'DEC--TAN', 'CRVAL1': 5.63056810618, 'CRVAL2': -72.05457184279, 'LONPOLE': 180.0,\n",
-    " 'LATPOLE': -72.05457184279, 'WCSNAME': 'IDC_qbu1641sj', 'MJDREF': 0.0, 'RADESYS': 'ICRS'},\n",
-    " 'psf:float64[erg / s]': [[1,2,3,4]]}} \n",
+    " \"example:xdataset\": {\n",
+    " \"data:float64[erg / s]\": [[1, 2, 3, 4]],\n",
+    " \"data.mask\": [[False, False, True, True]],\n",
+    " \"data.uncertainty:float64[std]\": [[1.0, 1.414, 1.732, 2.0]],\n",
+    " \"meta\": {\"object\": \"fictional data.\"},\n",
+    " \"wcs\": {\n",
+    " \"WCSAXES\": 2,\n",
+    " \"CRPIX1\": 2048.0,\n",
+    " \"CRPIX2\": 1024.0,\n",
+    " \"PC1_1\": 1.2905625619716e-05,\n",
+    " \"PC1_2\": 5.9530912331034e-06,\n",
+    " \"PC2_1\": 5.0220581265601e-06,\n",
+    " \"PC2_2\": -1.2644774105568e-05,\n",
+    " \"CDELT1\": 1.0,\n",
+    " \"CDELT2\": 1.0,\n",
+    " \"CUNIT1\": \"deg\",\n",
+    " \"CUNIT2\": \"deg\",\n",
+    " \"CTYPE1\": \"RA---TAN\",\n",
+    " \"CTYPE2\": \"DEC--TAN\",\n",
+    " \"CRVAL1\": 5.63056810618,\n",
+    " \"CRVAL2\": -72.05457184279,\n",
+    " \"LONPOLE\": 180.0,\n",
+    " \"LATPOLE\": -72.05457184279,\n",
+    " \"WCSNAME\": \"IDC_qbu1641sj\",\n",
+    " \"MJDREF\": 0.0,\n",
+    " \"RADESYS\": \"ICRS\",\n",
+    " },\n",
+    " \"psf:float64[erg / s]\": [[1, 2, 3, 4]],\n",
+    " }\n",
+    "}\n",
     "n_example = Xdataset.read_json(example)\n",
-    "n_example.info['structure']"
+    "n_example.info[\"structure\"]"
    ]
   },
   {
@@ -2089,32 +2121,29 @@
    }
    ],
    "source": [
-    "#only structural data\n",
+    "# only structural data\n",
     "example = {\n",
-    " 'example:xdataset': {\n",
-    " 'var1:float[kg]': [['x', 'y'], [2, 2], 'var1.ntv'],\n",
-    " 'var1.variance': [[2, 2], 'var1_variance.ntv'],\n",
-    " 'var1.mask1': [['x'],'var1_mask1.ntv'], \n",
-    " 'var1.mask2': [[2, 2], 'var1_mask2.ntv'],\n",
-    " \n",
-    " 'var2': [['x', 'y'],'var2.ntv'], \n",
-    " \n",
-    " 'x': [{'test': 21},'x.ntv'], \n",
-    " 'y:date': ['y.ntv'],\n",
-    " \n",
-    " 'ranking:month': [['var1'], [2, 2], 'ranking.ntv'],\n",
-    " 'z:float': [['x'], 'z.ntv'], \n",
-    " 'z.uncertainty': ['z_uncertainty.ntv'],\n",
-    " \n",
-    " 'z_bis': ['z_bis.ntv'],\n",
-    " \n",
-    " 'info': {'path': 'https://github.com/loco-philippe/ntv-numpy/tree/main/example/'},\n",
-    " 'location:string': [['paris']]\n",
+    " \"example:xdataset\": {\n",
+    " \"var1:float[kg]\": [[\"x\", \"y\"], [2, 2], \"var1.ntv\"],\n",
+    " \"var1.variance\": [[2, 2], \"var1_variance.ntv\"],\n",
+    " \"var1.mask1\": [[\"x\"], \"var1_mask1.ntv\"],\n",
+    " \"var1.mask2\": [[2, 2], \"var1_mask2.ntv\"],\n",
+    " \"var2\": [[\"x\", \"y\"], \"var2.ntv\"],\n",
+    " \"x\": [{\"test\": 21}, \"x.ntv\"],\n",
+    " \"y:date\": [\"y.ntv\"],\n",
+    " \"ranking:month\": [[\"var1\"], [2, 2], \"ranking.ntv\"],\n",
+    " \"z:float\": [[\"x\"], \"z.ntv\"],\n",
+    " \"z.uncertainty\": [\"z_uncertainty.ntv\"],\n",
+    " \"z_bis\": [\"z_bis.ntv\"],\n",
+    " \"info\": {\n",
+    " \"path\": \"https://github.com/loco-philippe/ntv-numpy/tree/main/example/\"\n",
+    " },\n",
+    " \"location:string\": [[\"paris\"]],\n",
     " }\n",
     "}\n",
     "\n",
     "x_example_mixte = Xdataset.read_json(example)\n",
-    "x_example_mixte.info['structure']"
+    "x_example_mixte.info[\"structure\"]"
    ]
   },
   {
@@ -2147,21 +2176,33 @@
     "from copy import copy\n",
     "\n",
     "# simulation of reading \".ntv\" json files at the indicated \"path\"\n",
-    "var1 = [[10.1, 0.4, 3.4, 8.2]]\n",
-    "var1_variance = ['float', [0.1, 0.2, 0.3, 0.4]]\n",
-    "var1_mask1 = [[True, False]]\n",
-    "var1_mask2 = [[True, False, False, True]]\n",
-    "var2 = ['var2.ntv']\n",
-    "x = ['string', ['23F0AE', '578B98']]\n",
-    "y = ['date', ['2021-01-01', '2022-02-02']]\n",
-    "ranking = [[1, 2, 3, 4]]\n",
-    "z = [[10.0, 20.0]]\n",
+    "var1 = [[10.1, 0.4, 3.4, 8.2]]\n",
+    "var1_variance = [\"float\", [0.1, 0.2, 0.3, 0.4]]\n",
+    "var1_mask1 = [[True, False]]\n",
+    "var1_mask2 = [[True, False, False, True]]\n",
+    "var2 = [\"var2.ntv\"]\n",
+    "x = [\"string\", [\"23F0AE\", \"578B98\"]]\n",
+    "y = [\"date\", [\"2021-01-01\", \"2022-02-02\"]]\n",
+    "ranking = [[1, 2, 3, 4]]\n",
+    "z = [[10.0, 20.0]]\n",
     "z_uncertainty = [[0.1, 0.2]]\n",
-    "z_bis = [['z1_bis', 'z2_bis']]\n",
-    "\n",
-    "json_files = [var1, var1_variance, var1_mask1, var1_mask2, var2, x, y, ranking, z, z_uncertainty, z_bis]\n",
+    "z_bis = [[\"z1_bis\", \"z2_bis\"]]\n",
+    "\n",
+    "json_files = [\n",
+    " var1,\n",
+    " var1_variance,\n",
+    " var1_mask1,\n",
+    " var1_mask2,\n",
+    " var2,\n",
+    " x,\n",
+    " y,\n",
+    " ranking,\n",
+    " z,\n",
+    " z_uncertainty,\n",
+    " z_bis,\n",
+    "]\n",
     "\n",
-    "x_example_mixte_json =copy(x_example_mixte)\n",
+    "x_example_mixte_json = copy(x_example_mixte)\n",
     "\n",
     "for data, xnda in zip(json_files, x_example_mixte_json.xnd):\n",
     " xnda.set_ndarray(Ndarray.read_json(data))\n",
@@ -2190,19 +2231,31 @@
     "import numpy as np\n",
     "\n",
     "# simulation of reading files at the indicated \"path\"\n",
-    "var1 = np.array([10.1, 0.4, 3.4, 8.2])\n",
-    "var1_variance = Ndarray([0.1, 0.2, 0.3, 0.4], ntv_type='float')\n",
-    "var1_mask1 = np.array([True, False])\n",
-    "var1_mask2 = np.array([True, False, False, True])\n",
-    "var2 = Ndarray('var2.ntv')\n",
-    "x = np.array(['23F0AE', '578B98'])\n",
-    "y = np.array(['2021-01-01', '2022-02-02'], dtype='datetime64[D]')\n",
-    "ranking = np.array([1, 2, 3, 4])\n",
-    "z = np.array([10.0, 20.0])\n",
+    "var1 = np.array([10.1, 0.4, 3.4, 8.2])\n",
+    "var1_variance = Ndarray([0.1, 0.2, 0.3, 0.4], ntv_type=\"float\")\n",
+    "var1_mask1 = np.array([True, False])\n",
+    "var1_mask2 = np.array([True, False, False, True])\n",
+    "var2 = Ndarray(\"var2.ntv\")\n",
+    "x = np.array([\"23F0AE\", \"578B98\"])\n",
+    "y = np.array([\"2021-01-01\", \"2022-02-02\"], dtype=\"datetime64[D]\")\n",
+    "ranking = np.array([1, 2, 3, 4])\n",
+    "z = np.array([10.0, 20.0])\n",
     "z_uncertainty = np.array([0.1, 0.2])\n",
-    "z_bis = np.array(['z1_bis', 'z2_bis'])\n",
-    "\n",
-    "array_data = [var1, var1_variance, var1_mask1, var1_mask2, var2, x, y, ranking, z, z_uncertainty, z_bis]\n",
+    "z_bis = np.array([\"z1_bis\", \"z2_bis\"])\n",
+    "\n",
+    "array_data = [\n",
+    " var1,\n",
+    " var1_variance,\n",
+    " var1_mask1,\n",
+    " var1_mask2,\n",
+    " var2,\n",
+    " x,\n",
+    " y,\n",
+    " ranking,\n",
+    " z,\n",
+    " z_uncertainty,\n",
+    " z_bis,\n",
+    "]\n",
     "\n",
     "x_example_mixte_numpy = copy(x_example_mixte)\n",
     "for data, xnda in zip(array_data, x_example_mixte_numpy.xnd):\n",
@@ -2561,17 +2614,18 @@
     "from ntv_numpy import Ndarray\n",
     "\n",
     "examples = [\n",
-    " np.array([10, 10, 20, 10, 30, 50]).astype('int64').reshape((2, 3)),\n",
-    " np.array(['test1', 'test2'], dtype='str'),\n",
-    " np.array(['2022-01-01', '2023-01-01'], dtype='datetime64[D]'),\n",
-    " np.array(['2022', '2023'], dtype='datetime64[Y]'),\n",
-    " np.array([b'abc\\x09', b'abc'], dtype='bytes'),\n",
-    " np.array([True, False], dtype='bool')]\n",
+    " np.array([10, 10, 20, 10, 30, 50]).astype(\"int64\").reshape((2, 3)),\n",
+    " np.array([\"test1\", \"test2\"], dtype=\"str\"),\n",
+    " np.array([\"2022-01-01\", \"2023-01-01\"], dtype=\"datetime64[D]\"),\n",
+    " np.array([\"2022\", \"2023\"], dtype=\"datetime64[Y]\"),\n",
+    " np.array([b\"abc\\x09\", b\"abc\"], dtype=\"bytes\"),\n",
+    " np.array([True, False], dtype=\"bool\"),\n",
+    "]\n",
     "\n",
     "for example in examples:\n",
     " equal = np.array_equal(example, Ndarray(example).ndarray)\n",
     " jsn = Ndarray(example).to_json(header=False)\n",
-    " print('reversibility : ', equal, ', JSON representation : ', jsn)"
+    " print(\"reversibility : \", equal, \", JSON representation : \", jsn)"
    ]
   },
   {
@@ -2622,23 +2676,29 @@
     "\n",
     "examples = [\n",
     " np.array([time(10, 2, 3), time(20, 2, 3)]),\n",
-    " np.array([Decimal('10.5'), Decimal('20.5')]),\n",
-    " np.array([Point([1,2]), Point([3,4])]),\n",
     " np.array([None, None]),\n",
-    " np.array([{'one':1}, {'two':2}]),\n",
-    " np.fromiter([[1,2], [3,4]], dtype='object')]\n",
+    " np.array([Decimal(\"10.5\"), Decimal(\"20.5\")]),\n",
+    " np.array([Point([1, 2]), Point([3, 4])]),\n",
+    " np.array([None, None]),\n",
+    " np.array([{\"one\": 1}, {\"two\": 2}]),\n",
+    " np.fromiter([[1, 2], [3, 4]], dtype=\"object\"),\n",
+    "]\n",
     "\n",
     "for example in examples:\n",
     " reverse = Ndarray(example).ndarray\n",
     " equal = np.array_equal(example, reverse)\n",
     " jsn_example = Ndarray(example).to_json(header=False)\n",
-    " print('reversibility : ', equal, ', JSON representation : ', jsn_example)\n",
+    " print(\"reversibility : \", equal, \", JSON representation : \", jsn_example)\n",
     "\n",
     "examples = [\n",
-    " np.fromiter([np.array([1, 2], dtype='int64'), \n",
-    " np.array(['test1', 'test2'], dtype='str')], dtype='object'),\n",
-    " np.fromiter([Ntv.obj({':point':[1,2]}), NtvSingle(12, 'noon', 'hour')], dtype='object'),\n",
-    " np.fromiter([pd.Series([1,2,3]), pd.Series([4,5,6])], dtype='object')]\n",
+    " np.fromiter(\n",
+    " [np.array([1, 2], dtype=\"int64\"), np.array([\"test1\", \"test2\"], dtype=\"str\")],\n",
+    " dtype=\"object\",\n",
+    " ),\n",
+    " np.fromiter(\n",
+    " [Ntv.obj({\":point\": [1, 2]}), NtvSingle(12, \"noon\", \"hour\")], dtype=\"object\"\n",
+    " ),\n",
+    " np.fromiter([pd.Series([1, 2, 3]), pd.Series([4, 5, 6])], dtype=\"object\"),\n",
+    "]\n",
     "\n",
     "for example in examples:\n",
     " reverse = Ndarray(example).ndarray\n",
@@ -2646,7 +2706,7 @@
     " jsn_reverse = Ndarray(reverse).to_json(header=False)\n",
     " equal = jsn_example == jsn_reverse\n",
     "\n",
-    " print('reversibility : ', equal, ', JSON representation : ', jsn_example)"
+    " print(\"reversibility : \", equal, \", JSON representation : \", jsn_example)"
    ]
   },
   {
@@ -2690,17 +2750,18 @@
    }
    ],
    "source": [
-    "examples_json = [\n",
-    " ['int64[kg]', [1, 2, 3, 4]],\n",
-    " ['month', [2, 2], [1, 2, 3, 4]],\n",
-    " ['binary', ['010011', '001101', '101']],\n",
-    " ['json', [1, 'two', {'three': 3}]],\n",
-    " ['email', ['John Doe ', 'Anna Doe ']]]\n",
-    " \n",
+    "examples_json = [\n",
+    " [\"int64[kg]\", [1, 2, 3, 4]],\n",
+    " [\"month\", [2, 2], [1, 2, 3, 4]],\n",
+    " [\"binary\", [\"010011\", \"001101\", \"101\"]],\n",
+    " [\"json\", [1, \"two\", {\"three\": 3}]],\n",
+    " [\"email\", [\"John Doe \", \"Anna Doe \"]],\n",
+    "]\n",
+    "\n",
     "for ex_json in examples_json:\n",
-    " nda = Ndarray.read_json(ex_json) \n",
+    " nda = Ndarray.read_json(ex_json)\n",
     " equal = nda == Ndarray.read_json(nda.to_json())\n",
-    " print('reversibility : ', equal, ', Numpy entity : ', repr(nda.ndarray))"
+    " print(\"reversibility : \", equal, \", Numpy entity : \", repr(nda.ndarray))"
    ]
   },
   {
@@ -2740,14 +2801,12 @@
    "source": [
     "from ntv_numpy import Xndarray\n",
     "\n",
-    "examples_json = [\n",
-    " {'unit': 'kg'},\n",
-    " {'meta': {'everything': 1}}]\n",
+    "examples_json = [{\"unit\": \"kg\"}, {\"meta\": {\"everything\": 1}}]\n",
     "\n",
     "for ex_json in examples_json:\n",
-    " xnda = Xndarray.read_json(ex_json) \n",
+    " xnda = Xndarray.read_json(ex_json)\n",
     " equal = xnda == Xndarray.read_json(xnda.to_json())\n",
-    " print('reversibility : ', equal, ', Xndarray entity : ', xnda.to_json(header=False))"
+    " print(\"reversibility : \", equal, \", Xndarray entity : \", xnda.to_json(header=False))"
    ]
   },
   {
@@ -2781,21 +2840,28 @@
    }
    ],
    "source": [
-    "examples = [Ndarray(np.array([1, 'two', {'three': 3}], dtype='object'), ntv_type='json'),\n",
-    " Ndarray(np.array([1, 2, 3, 4], dtype='object'), ntv_type='int64[kg]'),\n",
-    " Ndarray(np.array([1, 2, 3, 4], dtype='object'), ntv_type='month'),\n",
-    " Ndarray(np.array([[1, 2], [3, 4]], dtype='object'), ntv_type='int'),\n",
-    " Ndarray(np.array(['1F23', '236A5E'], dtype='object'), ntv_type='base16'),\n",
-    " Ndarray(np.array(['P3Y6M4DT12H30M5S'], dtype='object'), ntv_type='duration'),\n",
-    " Ndarray(np.array(['geo:13.4125,103.86673'], dtype='object'), ntv_type='uri'),\n",
-    " Ndarray(np.array(['192.168.1.1'], dtype='object'), ntv_type='ipv4'),\n",
-    " Ndarray(np.array(['John Doe ', 'Ann Doe '], dtype='object'), ntv_type='email')\n",
-    " ]\n",
+    "examples = [\n",
+    " Ndarray(np.array([1, \"two\", {\"three\": 3}], dtype=\"object\"), ntv_type=\"json\"),\n",
+    " Ndarray(np.array([1, 2, 3, 4], dtype=\"object\"), ntv_type=\"int64[kg]\"),\n",
+    " Ndarray(np.array([1, 2, 3, 4], dtype=\"object\"), ntv_type=\"month\"),\n",
+    " Ndarray(np.array([[1, 2], [3, 4]], dtype=\"object\"), ntv_type=\"int\"),\n",
+    " Ndarray(np.array([\"1F23\", \"236A5E\"], dtype=\"object\"), ntv_type=\"base16\"),\n",
+    " Ndarray(np.array([\"P3Y6M4DT12H30M5S\"], dtype=\"object\"), ntv_type=\"duration\"),\n",
+    " Ndarray(np.array([\"geo:13.4125,103.86673\"], dtype=\"object\"), ntv_type=\"uri\"),\n",
+    " Ndarray(np.array([\"192.168.1.1\"], dtype=\"object\"), ntv_type=\"ipv4\"),\n",
+    " Ndarray(\n",
+    " np.array(\n",
+    " [\"John Doe \", \"Ann Doe \"],\n",
+    " dtype=\"object\",\n",
+    " ),\n",
+    " ntv_type=\"email\",\n",
+    " ),\n",
+    "]\n",
     "\n",
     "for example in examples:\n",
-    " xnda = Xndarray('example', example)\n",
+    " xnda = Xndarray(\"example\", example)\n",
     " equal = xnda == Xndarray.read_json(xnda.to_json())\n",
-    " print('reversibility : ', equal, ', Xndarray entity : ', xnda.to_json(header=False))"
+    " print(\"reversibility : \", equal, \", Xndarray entity : \", xnda.to_json(header=False))"
    ]
   },
   {
@@ -2823,10 +2889,10 @@
    }
    ],
    "source": [
-    "xnda = Xndarray('example', np.array(['x1', 'x2']))\n",
+    "xnda = Xndarray(\"example\", np.array([\"x1\", \"x2\"]))\n",
     "equal = xnda == Xndarray.read_json(xnda.to_json())\n",
     "\n",
-    "print('reversibility : ', equal, ', Xndarray entity : ', xnda.to_json(header=False))"
+    "print(\"reversibility : \", equal, \", Xndarray entity : \", xnda.to_json(header=False))"
    ]
   },
   {
@@ -2854,10 +2920,10 @@
    }
    ],
    "source": [
-    "xnda = Xndarray('x.mask', np.array([True, False]))\n",
+    "xnda = Xndarray(\"x.mask\", np.array([True, False]))\n",
     "equal = xnda == Xndarray.read_json(xnda.to_json())\n",
     "\n",
-    "print('reversibility : ', equal, ', Xndarray entity : ', xnda.to_json(header=False))"
+    "print(\"reversibility : \", equal, \", Xndarray entity : \", xnda.to_json(header=False))"
    ]
   },
   {
@@ -2885,10 +2951,14 @@
    }
    ],
    "source": [
-    "xnda = Xndarray('var2', Ndarray(np.array([10.1, 0.4, 3.4, 8.2]).reshape([2, 2]), ntv_type='float[kg]'), links = ['x', 'y'])\n",
+    "xnda = Xndarray(\n",
+    " \"var2\",\n",
+    " Ndarray(np.array([10.1, 0.4, 3.4, 8.2]).reshape([2, 2]), ntv_type=\"float[kg]\"),\n",
+    " links=[\"x\", \"y\"],\n",
+    ")\n",
     "equal = xnda == Xndarray.read_json(xnda.to_json())\n",
     "\n",
-    "print('reversibility : ', equal, ', Xndarray entity : ', xnda.to_json(header=False))"
+    "print(\"reversibility : \", equal, \", Xndarray entity : \", xnda.to_json(header=False))"
    ]
   },
   {
@@ -2916,10 +2986,16 @@
    }
    ],
    "source": [
-    "xnda = Xndarray('var1', Ndarray('https://raw.githubusercontent.com/loco-philippe/ntv-numpy/master/example/ex_ndarray.ntv'), links = ['x', 'y'])\n",
+    "xnda = Xndarray(\n",
+    " \"var1\",\n",
+    " Ndarray(\n",
+    " \"https://raw.githubusercontent.com/loco-philippe/ntv-numpy/master/example/ex_ndarray.ntv\"\n",
+    " ),\n",
+    " links=[\"x\", \"y\"],\n",
+    ")\n",
     "equal = xnda == Xndarray.read_json(xnda.to_json())\n",
     "\n",
-    "print('reversibility : ', equal, ', Xndarray entity : ', xnda.to_json(header=False))"
+    "print(\"reversibility : \", equal, \", Xndarray entity : \", xnda.to_json(header=False))"
    ]
   },
   {
@@ -2958,12 +3034,17 @@
    "source": [
     "from ntv_numpy import Xdataset\n",
     "\n",
-    "xn1 = Xndarray.read_json({'x:string': [['x1', 'x2']]})\n",
-    "xn2 = Xndarray.read_json({'z:string': [['x'], ['z1', 'z2']]})\n",
+    "xn1 = Xndarray.read_json({\"x:string\": [[\"x1\", \"x2\"]]})\n",
+    "xn2 = Xndarray.read_json({\"z:string\": [[\"x\"], [\"z1\", \"z2\"]]})\n",
     "\n",
     "xd = Xdataset([xn1, xn2])\n",
     "\n",
-    "print(\"reversibility : \", Xdataset.read_json(xd.to_json()) == xd, ', Xdataset entity : ', xd.to_json(header=False))"
+    "print(\n",
+    " \"reversibility : \",\n",
+    " Xdataset.read_json(xd.to_json()) == xd,\n",
+    " \", Xdataset entity : \",\n",
+    " xd.to_json(header=False),\n",
+    ")"
    ]
   },
   {
@@ -3007,20 +3088,26 @@
    "source": [
     "from pprint import pprint\n",
     "\n",
-    "example = {'test': {\n",
-    " 'var1': [['x', 'y'], 'https://github.com/loco-philippe/ntv-numpy/tree/main/example/ex_ndarray.ntv'], \n",
-    " 'var2:float[kg]': [['x', 'y'], [2, 2], [10.1, 0.4, 3.4, 8.2]], \n",
-    " 'ranking': [['var2'], [2, 2], [1, 2, 3, 4]], \n",
-    " 'x': [{'test': 21}, ['x1', 'x2']],\n",
-    " 'y': [['y1', 'y2']],\n",
-    " 'z': [['x'], ['z1', 'z2']],\n",
-    " 'z_bis': [['z1_bis', 'z2_bis']],\n",
-    " 'x.mask': [['x'], [True, False]],\n",
-    " 'x.variance': [['x'], [0.1, 0.2]], \n",
-    " 'z.variance': [['x'], [0.1, 0.2]],\n",
-    " 'unit': 'kg',\n",
-    " 'info': {'example': 'everything'}}}\n",
-    "xd = Xdataset.read_json(example) \n",
+    "example = {\n",
+    " \"test\": {\n",
+    " \"var1\": [\n",
+    " [\"x\", \"y\"],\n",
+    " \"https://github.com/loco-philippe/ntv-numpy/tree/main/example/ex_ndarray.ntv\",\n",
+    " ],\n",
+    " \"var2:float[kg]\": [[\"x\", \"y\"], [2, 2], [10.1, 0.4, 3.4, 8.2]],\n",
+    " \"ranking\": [[\"var2\"], [2, 2], [1, 2, 3, 4]],\n",
+    " \"x\": [{\"test\": 21}, [\"x1\", \"x2\"]],\n",
+    " \"y\": [[\"y1\", \"y2\"]],\n",
+    " \"z\": [[\"x\"], [\"z1\", \"z2\"]],\n",
+    " \"z_bis\": [[\"z1_bis\", \"z2_bis\"]],\n",
+    " \"x.mask\": [[\"x\"], [True, False]],\n",
+    " \"x.variance\": [[\"x\"], [0.1, 0.2]],\n",
+    " \"z.variance\": [[\"x\"], [0.1, 0.2]],\n",
+    " \"unit\": \"kg\",\n",
+    " \"info\": {\"example\": \"everything\"},\n",
+    " }\n",
+    "}\n",
+    "xd = Xdataset.read_json(example)\n",
     "\n",
     "pprint(xd.to_json())\n",
     "print(\"\\nreversibility : \", Xdataset.read_json(xd.to_json()) == xd)"
@@ -3072,7 +3159,7 @@
    }
    ],
    "source": [
-    "xd.info['structure']"
+    "xd.info[\"structure\"]"
    ]
   },
   {
@@ -3103,8 +3190,8 @@
    }
    ],
    "source": [
-    "del(xd[('var1')])\n",
-    "xd.info['structure']"
+    "del xd[(\"var1\")]\n",
+    "xd.info[\"structure\"]"
    ]
   },
   {
@@ -3144,11 +3231,11 @@
     "from json_ntv import NtvList\n",
     "\n",
     "nd = np.array([1, 2, 3, 4])\n",
-    "xn1 = Xndarray('example', nda=np.array(['x1', 'x2']))\n",
-    "xn2 = Xndarray.read_json({'z:string': [['x'], ['z1', 'z2']]})\n",
-    "xd = Xdataset([xn1, xn2], 'test')\n",
+    "xn1 = Xndarray(\"example\", nda=np.array([\"x1\", \"x2\"]))\n",
+    "xn2 = Xndarray.read_json({\"z:string\": [[\"x\"], [\"z1\", \"z2\"]]})\n",
+    "xd = Xdataset([xn1, xn2], \"test\")\n",
     "\n",
-    "print('example NTVsingle\\n')\n",
+    "print(\"example NTVsingle\\n\")\n",
     "print(NtvSingle(nd))"
    ]
   },
@@ -3169,7 +3256,7 @@
    ],
    "source": [
-    "print('example NTVlist\\n')\n",
+    "print(\"example NTVlist\\n\")\n",
     "print(NtvList([nd, xn1, xd]))"
    ]
   },
@@ -3190,8 +3277,16 @@
    ],
    "source": [
-    "print('example with mixed data (Json representation)\\n')\n",
-    "mixte = Ntv.obj({'mixed': [xn1, {'coordinate':Point(1,2)}, {'pandas series': pd.Series([1,2,3])}]})\n",
+    "print(\"example with mixed data (Json representation)\\n\")\n",
+    "mixte = Ntv.obj(\n",
+    " {\n",
+    " \"mixed\": [\n",
+    " xn1,\n",
+    " {\"coordinate\": Point(1, 2)},\n",
+    " {\"pandas series\": pd.Series([1, 2, 3])},\n",
+    " ]\n",
+    " }\n",
+    ")\n",
     "print(mixte)"
    ]
   },
@@ -3217,8 +3312,8 @@
    }
    ],
    "source": [
-    "print('example with mixed data (object representation)\\n')\n",
-    "pprint(mixte.to_obj(format='obj'), width=20)"
+    "print(\"example with mixed data (object representation)\\n\")\n",
+    "pprint(mixte.to_obj(format=\"obj\"), width=20)"
    ]
   },
   {
@@ -3245,7 +3340,7 @@
    ],
    "source": [
     "sr = pd.Series([1, 2, nd, xn1])\n",
-    "mixin = Ntv.obj({'mixin': sr})\n",
+    "mixin = Ntv.obj({\"mixin\": sr})\n",
     "print(mixin)"
    ]
   },
@@ -3299,8 +3394,15 @@
    }
    ],
    "source": [
-    "df= pd.DataFrame({'a': [2,2,3,4,2], 'b': [1,1,0,0,1], 'c': [1,1,1,0,1], 'd': [1,1,1,1,1]})\n",
-    "xnd =Xdataset.from_dataframe(df)\n",
+    "df = pd.DataFrame(\n",
+    " {\n",
+    " \"a\": [2, 2, 3, 4, 2],\n",
+    " \"b\": [1, 1, 0, 0, 1],\n",
+    " \"c\": [1, 1, 1, 0, 1],\n",
+    " \"d\": [1, 1, 1, 1, 1],\n",
+    " }\n",
+    ")\n",
+    "xnd = Xdataset.from_dataframe(df)\n",
     "print(xnd.to_json())"
    ]
   },
@@ -3369,19 +3471,19 @@
    ],
    "source": [
     "example_tab = {\n",
-    " 'tabular:tab': {\n",
-    " 'first_name::string': ['john', 'judith', 'jim'],\n",
-    " 'equinox::date': ['2023-09-23', '2023-03-20', '2024-09-22']\n",
+    " \"tabular:tab\": {\n",
+    " \"first_name::string\": [\"john\", \"judith\", \"jim\"],\n",
+    " \"equinox::date\": [\"2023-09-23\", \"2023-03-20\", \"2024-09-22\"],\n",
     " }\n",
     "}\n",
     "\n",
     "example_multi = {\n",
-    " 'multi:xdataset': {\n",
-    " 'first_name:string': [['john', 'judith', 'jim']],\n",
-    " 'equinox:date': [['2023-09-23', '2023-03-20', '2024-09-22']]\n",
+    " \"multi:xdataset\": {\n",
+    " \"first_name:string\": [[\"john\", \"judith\", \"jim\"]],\n",
+    " \"equinox:date\": [[\"2023-09-23\", \"2023-03-20\", \"2024-09-22\"]],\n",
     " }\n",
-    "} \n",
-    "Xdataset.read_json(example_multi).to_dataframe() "
+    "}\n",
+    "Xdataset.read_json(example_multi).to_dataframe()"
    ]
   },
   {
@@ -3430,7 +3532,7 @@
    }
    ],
    "source": [
-    "NtvSingle('kg', 'mass', 'unit')"
+    "NtvSingle(\"kg\", \"mass\", \"unit\")"
    ]
   },
   {
@@ -3460,8 +3562,10 @@
    ],
    "source": [
     "examples = [\n",
-    " Ndarray([2., 2.5, 3., 3.5, 4., 4.5, 5.], ntv_type='float[m/s]'),\n",
-    " Xndarray('example', Ndarray([2., 2.5, 3., 3.5, 4., 4.5, 5.], ntv_type='float[m/s]'))\n",
+    " Ndarray([2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0], ntv_type=\"float[m/s]\"),\n",
+    " Xndarray(\n",
+    " \"example\", Ndarray([2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0], ntv_type=\"float[m/s]\")\n",
+    " ),\n",
     "]\n",
     "for example in examples:\n",
     " print(example.to_json(header=False))\n",
@@ -3490,11 +3594,18 @@
    }
    ],
    "source": [
-    "ntv = Ntv.obj({\"list_of_ndarray::ndarray[kg]\": { \"array1\": [[2, 2], [1, 2, 3, 4]], \"array2\": [[2, 2], [5, 6, 7, 8]]}})\n",
+    "ntv = Ntv.obj(\n",
+    " {\n",
+    " \"list_of_ndarray::ndarray[kg]\": {\n",
+    " \"array1\": [[2, 2], [1, 2, 3, 4]],\n",
+    " \"array2\": [[2, 2], [5, 6, 7, 8]],\n",
+    " }\n",
+    " }\n",
+    ")\n",
     "\n",
-    "print('json representation :\\n', ntv[0])\n",
-    "print('\\njson representation of a list :\\n', ntv)\n",
-    "print('\\nNdarray representation :\\n', ntv[0].to_obj(format='obj', type=True))"
+    "print(\"json representation :\\n\", ntv[0])\n",
+    "print(\"\\njson representation of a list :\\n\", ntv)\n",
+    "print(\"\\nNdarray representation :\\n\", ntv[0].to_obj(format=\"obj\", type=True))"
    ]
   },
   {