How to use the to_json method in Avocado

Best Python code snippets using avocado_python

test_pandas.py

Source: test_pandas.py (GitHub)

copy

Full Screen

...65 [["a", "b"], ["c", "d"]],66 index=['index " 1', "index / 2"],67 columns=["a \\ b", "y / z"],68 )69 assert_frame_equal(df, read_json(df.to_json(orient="split"), orient="split"))70 assert_frame_equal(71 df, read_json(df.to_json(orient="columns"), orient="columns")72 )73 assert_frame_equal(df, read_json(df.to_json(orient="index"), orient="index"))74 df_unser = read_json(df.to_json(orient="records"), orient="records")75 assert_index_equal(df.columns, df_unser.columns)76 tm.assert_numpy_array_equal(df.values, df_unser.values)77 def test_frame_non_unique_index(self):78 df = DataFrame([["a", "b"], ["c", "d"]], index=[1, 1], columns=["x", "y"])79 msg = "DataFrame index must be unique for orient='index'"80 with pytest.raises(ValueError, match=msg):81 df.to_json(orient="index")82 msg = "DataFrame index must be unique for orient='columns'"83 with pytest.raises(ValueError, match=msg):84 df.to_json(orient="columns")85 assert_frame_equal(df, read_json(df.to_json(orient="split"), orient="split"))86 unser = read_json(df.to_json(orient="records"), orient="records")87 tm.assert_index_equal(df.columns, unser.columns)88 tm.assert_almost_equal(df.values, unser.values)89 unser = read_json(df.to_json(orient="values"), orient="values")90 tm.assert_numpy_array_equal(df.values, unser.values)91 def test_frame_non_unique_columns(self):92 df = DataFrame([["a", "b"], ["c", "d"]], index=[1, 2], columns=["x", "x"])93 msg = "DataFrame columns must be unique for orient='index'"94 with pytest.raises(ValueError, match=msg):95 df.to_json(orient="index")96 msg = "DataFrame columns must be unique for orient='columns'"97 with pytest.raises(ValueError, match=msg):98 df.to_json(orient="columns")99 msg = "DataFrame columns must be unique for orient='records'"100 with pytest.raises(ValueError, match=msg):101 df.to_json(orient="records")102 assert_frame_equal(103 df, read_json(df.to_json(orient="split"), orient="split", dtype=False)104 )105 unser = read_json(df.to_json(orient="values"), 
orient="values")106 tm.assert_numpy_array_equal(df.values, unser.values)107 # GH4377; duplicate columns not processing correctly108 df = DataFrame([["a", "b"], ["c", "d"]], index=[1, 2], columns=["x", "y"])109 result = read_json(df.to_json(orient="split"), orient="split")110 assert_frame_equal(result, df)111 def _check(df):112 result = read_json(113 df.to_json(orient="split"), orient="split", convert_dates=["x"]114 )115 assert_frame_equal(result, df)116 for o in [117 [["a", "b"], ["c", "d"]],118 [[1.5, 2.5], [3.5, 4.5]],119 [[1, 2.5], [3, 4.5]],120 [[Timestamp("20130101"), 3.5], [Timestamp("20130102"), 4.5]],121 ]:122 _check(DataFrame(o, index=[1, 2], columns=["x", "x"]))123 def test_frame_from_json_to_json(self):124 def _check_orient(125 df,126 orient,127 dtype=None,128 numpy=False,129 convert_axes=True,130 check_dtype=True,131 raise_ok=None,132 sort=None,133 check_index_type=True,134 check_column_type=True,135 check_numpy_dtype=False,136 ):137 if sort is not None:138 df = df.sort_values(sort)139 else:140 df = df.sort_index()141 # if we are not unique, then check that we are raising ValueError142 # for the appropriate orients143 if not df.index.is_unique and orient in ["index", "columns"]:144 msg = "DataFrame index must be unique for orient='{}'".format(orient)145 with pytest.raises(ValueError, match=msg):146 df.to_json(orient=orient)147 return148 if not df.columns.is_unique and orient in ["index", "columns", "records"]:149 # TODO: not executed. 
fix this.150 with pytest.raises(ValueError, match="ksjkajksfjksjfkjs"):151 df.to_json(orient=orient)152 return153 dfjson = df.to_json(orient=orient)154 try:155 unser = read_json(156 dfjson,157 orient=orient,158 dtype=dtype,159 numpy=numpy,160 convert_axes=convert_axes,161 )162 except Exception as detail:163 if raise_ok is not None:164 if isinstance(detail, raise_ok):165 return166 raise167 if sort is not None and sort in unser.columns:168 unser = unser.sort_values(sort)169 else:170 unser = unser.sort_index()171 if not dtype:172 check_dtype = False173 if not convert_axes and df.index.dtype.type == np.datetime64:174 unser.index = DatetimeIndex(unser.index.values.astype("i8") * 1e6)175 if orient == "records":176 # index is not captured in this orientation177 tm.assert_almost_equal(178 df.values, unser.values, check_dtype=check_numpy_dtype179 )180 tm.assert_index_equal(181 df.columns, unser.columns, exact=check_column_type182 )183 elif orient == "values":184 # index and cols are not captured in this orientation185 if numpy is True and df.shape == (0, 0):186 assert unser.shape[0] == 0187 else:188 tm.assert_almost_equal(189 df.values, unser.values, check_dtype=check_numpy_dtype190 )191 elif orient == "split":192 # index and col labels might not be strings193 unser.index = [str(i) for i in unser.index]194 unser.columns = [str(i) for i in unser.columns]195 if sort is None:196 unser = unser.sort_index()197 tm.assert_almost_equal(198 df.values, unser.values, check_dtype=check_numpy_dtype199 )200 else:201 if convert_axes:202 tm.assert_frame_equal(203 df,204 unser,205 check_dtype=check_dtype,206 check_index_type=check_index_type,207 check_column_type=check_column_type,208 )209 else:210 tm.assert_frame_equal(211 df, unser, check_less_precise=False, check_dtype=check_dtype212 )213 def _check_all_orients(214 df,215 dtype=None,216 convert_axes=True,217 raise_ok=None,218 sort=None,219 check_index_type=True,220 check_column_type=True,221 ):222 # numpy=False223 if convert_axes:224 
_check_orient(225 df,226 "columns",227 dtype=dtype,228 sort=sort,229 check_index_type=False,230 check_column_type=False,231 )232 _check_orient(233 df,234 "records",235 dtype=dtype,236 sort=sort,237 check_index_type=False,238 check_column_type=False,239 )240 _check_orient(241 df,242 "split",243 dtype=dtype,244 sort=sort,245 check_index_type=False,246 check_column_type=False,247 )248 _check_orient(249 df,250 "index",251 dtype=dtype,252 sort=sort,253 check_index_type=False,254 check_column_type=False,255 )256 _check_orient(257 df,258 "values",259 dtype=dtype,260 sort=sort,261 check_index_type=False,262 check_column_type=False,263 )264 _check_orient(df, "columns", dtype=dtype, convert_axes=False, sort=sort)265 _check_orient(df, "records", dtype=dtype, convert_axes=False, sort=sort)266 _check_orient(df, "split", dtype=dtype, convert_axes=False, sort=sort)267 _check_orient(df, "index", dtype=dtype, convert_axes=False, sort=sort)268 _check_orient(df, "values", dtype=dtype, convert_axes=False, sort=sort)269 # numpy=True and raise_ok might be not None, so ignore the error270 if convert_axes:271 _check_orient(272 df,273 "columns",274 dtype=dtype,275 numpy=True,276 raise_ok=raise_ok,277 sort=sort,278 check_index_type=False,279 check_column_type=False,280 )281 _check_orient(282 df,283 "records",284 dtype=dtype,285 numpy=True,286 raise_ok=raise_ok,287 sort=sort,288 check_index_type=False,289 check_column_type=False,290 )291 _check_orient(292 df,293 "split",294 dtype=dtype,295 numpy=True,296 raise_ok=raise_ok,297 sort=sort,298 check_index_type=False,299 check_column_type=False,300 )301 _check_orient(302 df,303 "index",304 dtype=dtype,305 numpy=True,306 raise_ok=raise_ok,307 sort=sort,308 check_index_type=False,309 check_column_type=False,310 )311 _check_orient(312 df,313 "values",314 dtype=dtype,315 numpy=True,316 raise_ok=raise_ok,317 sort=sort,318 check_index_type=False,319 check_column_type=False,320 )321 _check_orient(322 df,323 "columns",324 dtype=dtype,325 numpy=True,326 
convert_axes=False,327 raise_ok=raise_ok,328 sort=sort,329 )330 _check_orient(331 df,332 "records",333 dtype=dtype,334 numpy=True,335 convert_axes=False,336 raise_ok=raise_ok,337 sort=sort,338 )339 _check_orient(340 df,341 "split",342 dtype=dtype,343 numpy=True,344 convert_axes=False,345 raise_ok=raise_ok,346 sort=sort,347 )348 _check_orient(349 df,350 "index",351 dtype=dtype,352 numpy=True,353 convert_axes=False,354 raise_ok=raise_ok,355 sort=sort,356 )357 _check_orient(358 df,359 "values",360 dtype=dtype,361 numpy=True,362 convert_axes=False,363 raise_ok=raise_ok,364 sort=sort,365 )366 # basic367 _check_all_orients(self.frame)368 assert self.frame.to_json() == self.frame.to_json(orient="columns")369 _check_all_orients(self.intframe, dtype=self.intframe.values.dtype)370 _check_all_orients(self.intframe, dtype=False)371 # big one372 # index and columns are strings as all unserialised JSON object keys373 # are assumed to be strings374 biggie = DataFrame(375 np.zeros((200, 4)),376 columns=[str(i) for i in range(4)],377 index=[str(i) for i in range(200)],378 )379 _check_all_orients(biggie, dtype=False, convert_axes=False)380 # dtypes381 _check_all_orients(382 DataFrame(biggie, dtype=np.float64), dtype=np.float64, convert_axes=False383 )384 _check_all_orients(385 DataFrame(biggie, dtype=np.int), dtype=np.int, convert_axes=False386 )387 _check_all_orients(388 DataFrame(biggie, dtype="U3"),389 dtype="U3",390 convert_axes=False,391 raise_ok=ValueError,392 )393 # categorical394 _check_all_orients(self.categorical, sort="sort", raise_ok=ValueError)395 # empty396 _check_all_orients(397 self.empty_frame, check_index_type=False, check_column_type=False398 )399 # time series data400 _check_all_orients(self.tsframe)401 # mixed data402 index = pd.Index(["a", "b", "c", "d", "e"])403 data = {404 "A": [0.0, 1.0, 2.0, 3.0, 4.0],405 "B": [0.0, 1.0, 0.0, 1.0, 0.0],406 "C": ["foo1", "foo2", "foo3", "foo4", "foo5"],407 "D": [True, False, True, False, True],408 }409 df = 
DataFrame(data=data, index=index)410 _check_orient(df, "split", check_dtype=False)411 _check_orient(df, "records", check_dtype=False)412 _check_orient(df, "values", check_dtype=False)413 _check_orient(df, "columns", check_dtype=False)414 # index oriented is problematic as it is read back in in a transposed415 # state, so the columns are interpreted as having mixed data and416 # given object dtypes.417 # force everything to have object dtype beforehand418 _check_orient(df.transpose().transpose(), "index", dtype=False)419 def test_frame_from_json_bad_data(self):420 with pytest.raises(ValueError, match="Expected object or value"):421 read_json(StringIO('{"key":b:a:d}'))422 # too few indices423 json = StringIO(424 '{"columns":["A","B"],'425 '"index":["2","3"],'426 '"data":[[1.0,"1"],[2.0,"2"],[null,"3"]]}'427 )428 msg = r"Shape of passed values is \(3, 2\), indices imply \(2, 2\)"429 with pytest.raises(ValueError, match=msg):430 read_json(json, orient="split")431 # too many columns432 json = StringIO(433 '{"columns":["A","B","C"],'434 '"index":["1","2","3"],'435 '"data":[[1.0,"1"],[2.0,"2"],[null,"3"]]}'436 )437 msg = "3 columns passed, passed data had 2 columns"438 with pytest.raises(ValueError, match=msg):439 read_json(json, orient="split")440 # bad key441 json = StringIO(442 '{"badkey":["A","B"],'443 '"index":["2","3"],'444 '"data":[[1.0,"1"],[2.0,"2"],[null,"3"]]}'445 )446 with pytest.raises(ValueError, match=r"unexpected key\(s\): badkey"):447 read_json(json, orient="split")448 def test_frame_from_json_nones(self):449 df = DataFrame([[1, 2], [4, 5, 6]])450 unser = read_json(df.to_json())451 assert np.isnan(unser[2][0])452 df = DataFrame([["1", "2"], ["4", "5", "6"]])453 unser = read_json(df.to_json())454 assert np.isnan(unser[2][0])455 unser = read_json(df.to_json(), dtype=False)456 assert unser[2][0] is None457 unser = read_json(df.to_json(), convert_axes=False, dtype=False)458 assert unser["2"]["0"] is None459 unser = read_json(df.to_json(), numpy=False)460 
assert np.isnan(unser[2][0])461 unser = read_json(df.to_json(), numpy=False, dtype=False)462 assert unser[2][0] is None463 unser = read_json(df.to_json(), numpy=False, convert_axes=False, dtype=False)464 assert unser["2"]["0"] is None465 # infinities get mapped to nulls which get mapped to NaNs during466 # deserialisation467 df = DataFrame([[1, 2], [4, 5, 6]])468 df.loc[0, 2] = np.inf469 unser = read_json(df.to_json())470 assert np.isnan(unser[2][0])471 unser = read_json(df.to_json(), dtype=False)472 assert np.isnan(unser[2][0])473 df.loc[0, 2] = np.NINF474 unser = read_json(df.to_json())475 assert np.isnan(unser[2][0])476 unser = read_json(df.to_json(), dtype=False)477 assert np.isnan(unser[2][0])478 @pytest.mark.skipif(479 is_platform_32bit(), reason="not compliant on 32-bit, xref #15865"480 )481 def test_frame_to_json_float_precision(self):482 df = pd.DataFrame([dict(a_float=0.95)])483 encoded = df.to_json(double_precision=1)484 assert encoded == '{"a_float":{"0":1.0}}'485 df = pd.DataFrame([dict(a_float=1.95)])486 encoded = df.to_json(double_precision=1)487 assert encoded == '{"a_float":{"0":2.0}}'488 df = pd.DataFrame([dict(a_float=-1.95)])489 encoded = df.to_json(double_precision=1)490 assert encoded == '{"a_float":{"0":-2.0}}'491 df = pd.DataFrame([dict(a_float=0.995)])492 encoded = df.to_json(double_precision=2)493 assert encoded == '{"a_float":{"0":1.0}}'494 df = pd.DataFrame([dict(a_float=0.9995)])495 encoded = df.to_json(double_precision=3)496 assert encoded == '{"a_float":{"0":1.0}}'497 df = pd.DataFrame([dict(a_float=0.99999999999999944)])498 encoded = df.to_json(double_precision=15)499 assert encoded == '{"a_float":{"0":1.0}}'500 def test_frame_to_json_except(self):501 df = DataFrame([1, 2, 3])502 msg = "Invalid value 'garbage' for option 'orient'"503 with pytest.raises(ValueError, match=msg):504 df.to_json(orient="garbage")505 def test_frame_empty(self):506 df = DataFrame(columns=["jim", "joe"])507 assert not df._is_mixed_type508 
assert_frame_equal(509 read_json(df.to_json(), dtype=dict(df.dtypes)), df, check_index_type=False510 )511 # GH 7445512 result = pd.DataFrame({"test": []}, index=[]).to_json(orient="columns")513 expected = '{"test":{}}'514 assert result == expected515 def test_frame_empty_mixedtype(self):516 # mixed type517 df = DataFrame(columns=["jim", "joe"])518 df["joe"] = df["joe"].astype("i8")519 assert df._is_mixed_type520 assert_frame_equal(521 read_json(df.to_json(), dtype=dict(df.dtypes)), df, check_index_type=False522 )523 def test_frame_mixedtype_orient(self): # GH10289524 vals = [525 [10, 1, "foo", 0.1, 0.01],526 [20, 2, "bar", 0.2, 0.02],527 [30, 3, "baz", 0.3, 0.03],528 [40, 4, "qux", 0.4, 0.04],529 ]530 df = DataFrame(531 vals, index=list("abcd"), columns=["1st", "2nd", "3rd", "4th", "5th"]532 )533 assert df._is_mixed_type534 right = df.copy()535 for orient in ["split", "index", "columns"]:536 inp = df.to_json(orient=orient)537 left = read_json(inp, orient=orient, convert_axes=False)538 assert_frame_equal(left, right)539 right.index = np.arange(len(df))540 inp = df.to_json(orient="records")541 left = read_json(inp, orient="records", convert_axes=False)542 assert_frame_equal(left, right)543 right.columns = np.arange(df.shape[1])544 inp = df.to_json(orient="values")545 left = read_json(inp, orient="values", convert_axes=False)546 assert_frame_equal(left, right)547 def test_v12_compat(self):548 df = DataFrame(549 [550 [1.56808523, 0.65727391, 1.81021139, -0.17251653],551 [-0.2550111, -0.08072427, -0.03202878, -0.17581665],552 [1.51493992, 0.11805825, 1.629455, -1.31506612],553 [-0.02765498, 0.44679743, 0.33192641, -0.27885413],554 [0.05951614, -2.69652057, 1.28163262, 0.34703478],555 ],556 columns=["A", "B", "C", "D"],557 index=pd.date_range("2000-01-03", "2000-01-07"),558 )559 df["date"] = pd.Timestamp("19920106 18:21:32.12")560 df.iloc[3, df.columns.get_loc("date")] = pd.Timestamp("20130101")561 df["modified"] = df["date"]562 df.iloc[1, df.columns.get_loc("modified")] 
= pd.NaT563 v12_json = os.path.join(self.dirpath, "tsframe_v012.json")564 df_unser = pd.read_json(v12_json)565 assert_frame_equal(df, df_unser)566 df_iso = df.drop(["modified"], axis=1)567 v12_iso_json = os.path.join(self.dirpath, "tsframe_iso_v012.json")568 df_unser_iso = pd.read_json(v12_iso_json)569 assert_frame_equal(df_iso, df_unser_iso)570 def test_blocks_compat_GH9037(self):571 index = pd.date_range("20000101", periods=10, freq="H")572 df_mixed = DataFrame(573 OrderedDict(574 float_1=[575 -0.92077639,576 0.77434435,577 1.25234727,578 0.61485564,579 -0.60316077,580 0.24653374,581 0.28668979,582 -2.51969012,583 0.95748401,584 -1.02970536,585 ],586 int_1=[587 19680418,588 75337055,589 99973684,590 65103179,591 79373900,592 40314334,593 21290235,594 4991321,595 41903419,596 16008365,597 ],598 str_1=[599 "78c608f1",600 "64a99743",601 "13d2ff52",602 "ca7f4af2",603 "97236474",604 "bde7e214",605 "1a6bde47",606 "b1190be5",607 "7a669144",608 "8d64d068",609 ],610 float_2=[611 -0.0428278,612 -1.80872357,613 3.36042349,614 -0.7573685,615 -0.48217572,616 0.86229683,617 1.08935819,618 0.93898739,619 -0.03030452,620 1.43366348,621 ],622 str_2=[623 "14f04af9",624 "d085da90",625 "4bcfac83",626 "81504caf",627 "2ffef4a9",628 "08e2f5c4",629 "07e1af03",630 "addbd4a7",631 "1f6a09ba",632 "4bfc4d87",633 ],634 int_2=[635 86967717,636 98098830,637 51927505,638 20372254,639 12601730,640 20884027,641 34193846,642 10561746,643 24867120,644 76131025,645 ],646 ),647 index=index,648 )649 # JSON deserialisation always creates unicode strings650 df_mixed.columns = df_mixed.columns.astype("unicode")651 df_roundtrip = pd.read_json(df_mixed.to_json(orient="split"), orient="split")652 assert_frame_equal(653 df_mixed,654 df_roundtrip,655 check_index_type=True,656 check_column_type=True,657 check_frame_type=True,658 by_blocks=True,659 check_exact=True,660 )661 def test_frame_nonprintable_bytes(self):662 # GH14256: failing column caused segfaults, if it is not the last one663 class BinaryThing:664 
def __init__(self, hexed):665 self.hexed = hexed666 self.binary = bytes.fromhex(hexed)667 def __str__(self):668 return self.hexed669 hexed = "574b4454ba8c5eb4f98a8f45"670 binthing = BinaryThing(hexed)671 # verify the proper conversion of printable content672 df_printable = DataFrame({"A": [binthing.hexed]})673 assert df_printable.to_json() == '{{"A":{{"0":"{hex}"}}}}'.format(hex=hexed)674 # check if non-printable content throws appropriate Exception675 df_nonprintable = DataFrame({"A": [binthing]})676 msg = "Unsupported UTF-8 sequence length when encoding string"677 with pytest.raises(OverflowError, match=msg):678 df_nonprintable.to_json()679 # the same with multiple columns threw segfaults680 df_mixed = DataFrame({"A": [binthing], "B": [1]}, columns=["A", "B"])681 with pytest.raises(OverflowError):682 df_mixed.to_json()683 # default_handler should resolve exceptions for non-string types684 assert df_nonprintable.to_json(685 default_handler=str686 ) == '{{"A":{{"0":"{hex}"}}}}'.format(hex=hexed)687 assert df_mixed.to_json(688 default_handler=str689 ) == '{{"A":{{"0":"{hex}"}},"B":{{"0":1}}}}'.format(hex=hexed)690 def test_label_overflow(self):691 # GH14256: buffer length not checked when writing label692 df = pd.DataFrame({"bar" * 100000: [1], "foo": [1337]})693 assert df.to_json() == '{{"{bar}":{{"0":1}},"foo":{{"0":1337}}}}'.format(694 bar=("bar" * 100000)695 )696 def test_series_non_unique_index(self):697 s = Series(["a", "b"], index=[1, 1])698 msg = "Series index must be unique for orient='index'"699 with pytest.raises(ValueError, match=msg):700 s.to_json(orient="index")701 assert_series_equal(702 s, read_json(s.to_json(orient="split"), orient="split", typ="series")703 )704 unser = read_json(s.to_json(orient="records"), orient="records", typ="series")705 tm.assert_numpy_array_equal(s.values, unser.values)706 def test_series_from_json_to_json(self):707 def _check_orient(708 series, orient, dtype=None, numpy=False, check_index_type=True709 ):710 series = 
series.sort_index()711 unser = read_json(712 series.to_json(orient=orient),713 typ="series",714 orient=orient,715 numpy=numpy,716 dtype=dtype,717 )718 unser = unser.sort_index()719 if orient == "records" or orient == "values":720 assert_almost_equal(series.values, unser.values)721 else:722 if orient == "split":723 assert_series_equal(724 series, unser, check_index_type=check_index_type725 )726 else:727 assert_series_equal(728 series,729 unser,730 check_names=False,731 check_index_type=check_index_type,732 )733 def _check_all_orients(series, dtype=None, check_index_type=True):734 _check_orient(735 series, "columns", dtype=dtype, check_index_type=check_index_type736 )737 _check_orient(738 series, "records", dtype=dtype, check_index_type=check_index_type739 )740 _check_orient(741 series, "split", dtype=dtype, check_index_type=check_index_type742 )743 _check_orient(744 series, "index", dtype=dtype, check_index_type=check_index_type745 )746 _check_orient(series, "values", dtype=dtype)747 _check_orient(748 series,749 "columns",750 dtype=dtype,751 numpy=True,752 check_index_type=check_index_type,753 )754 _check_orient(755 series,756 "records",757 dtype=dtype,758 numpy=True,759 check_index_type=check_index_type,760 )761 _check_orient(762 series,763 "split",764 dtype=dtype,765 numpy=True,766 check_index_type=check_index_type,767 )768 _check_orient(769 series,770 "index",771 dtype=dtype,772 numpy=True,773 check_index_type=check_index_type,774 )775 _check_orient(776 series,777 "values",778 dtype=dtype,779 numpy=True,780 check_index_type=check_index_type,781 )782 # basic783 _check_all_orients(self.series)784 assert self.series.to_json() == self.series.to_json(orient="index")785 objSeries = Series(786 [str(d) for d in self.objSeries],787 index=self.objSeries.index,788 name=self.objSeries.name,789 )790 _check_all_orients(objSeries, dtype=False)791 # empty_series has empty index with object dtype792 # which cannot be revert793 assert self.empty_series.index.dtype == np.object_794 
_check_all_orients(self.empty_series, check_index_type=False)795 _check_all_orients(self.ts)796 # dtype797 s = Series(range(6), index=["a", "b", "c", "d", "e", "f"])798 _check_all_orients(Series(s, dtype=np.float64), dtype=np.float64)799 _check_all_orients(Series(s, dtype=np.int), dtype=np.int)800 def test_series_to_json_except(self):801 s = Series([1, 2, 3])802 msg = "Invalid value 'garbage' for option 'orient'"803 with pytest.raises(ValueError, match=msg):804 s.to_json(orient="garbage")805 def test_series_from_json_precise_float(self):806 s = Series([4.56, 4.56, 4.56])807 result = read_json(s.to_json(), typ="series", precise_float=True)808 assert_series_equal(result, s, check_index_type=False)809 def test_series_with_dtype(self):810 # GH 21986811 s = Series([4.56, 4.56, 4.56])812 result = read_json(s.to_json(), typ="series", dtype=np.int64)813 expected = Series([4] * 3)814 assert_series_equal(result, expected)815 def test_frame_from_json_precise_float(self):816 df = DataFrame([[4.56, 4.56, 4.56], [4.56, 4.56, 4.56]])817 result = read_json(df.to_json(), precise_float=True)818 assert_frame_equal(result, df, check_index_type=False, check_column_type=False)819 def test_typ(self):820 s = Series(range(6), index=["a", "b", "c", "d", "e", "f"], dtype="int64")821 result = read_json(s.to_json(), typ=None)822 assert_series_equal(result, s)823 def test_reconstruction_index(self):824 df = DataFrame([[1, 2, 3], [4, 5, 6]])825 result = read_json(df.to_json())826 assert_frame_equal(result, df)827 df = DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]}, index=["A", "B", "C"])828 result = read_json(df.to_json())829 assert_frame_equal(result, df)830 def test_path(self):831 with ensure_clean("test.json") as path:832 for df in [833 self.frame,834 self.frame2,835 self.intframe,836 self.tsframe,837 self.mixed_frame,838 ]:839 df.to_json(path)840 read_json(path)841 def test_axis_dates(self):842 # frame843 json = self.tsframe.to_json()844 result = read_json(json)845 assert_frame_equal(result, 
self.tsframe)846 # series847 json = self.ts.to_json()848 result = read_json(json, typ="series")849 assert_series_equal(result, self.ts, check_names=False)850 assert result.name is None851 def test_convert_dates(self):852 # frame853 df = self.tsframe.copy()854 df["date"] = Timestamp("20130101")855 json = df.to_json()856 result = read_json(json)857 assert_frame_equal(result, df)858 df["foo"] = 1.0859 json = df.to_json(date_unit="ns")860 result = read_json(json, convert_dates=False)861 expected = df.copy()862 expected["date"] = expected["date"].values.view("i8")863 expected["foo"] = expected["foo"].astype("int64")864 assert_frame_equal(result, expected)865 # series866 ts = Series(Timestamp("20130101"), index=self.ts.index)867 json = ts.to_json()868 result = read_json(json, typ="series")869 assert_series_equal(result, ts)870 def test_convert_dates_infer(self):871 # GH10747872 from pandas.io.json import dumps873 infer_words = [874 "trade_time",875 "date",876 "datetime",877 "sold_at",878 "modified",879 "timestamp",880 "timestamps",881 ]882 for infer_word in infer_words:883 data = [{"id": 1, infer_word: 1036713600000}, {"id": 2}]884 expected = DataFrame(885 [[1, Timestamp("2002-11-08")], [2, pd.NaT]], columns=["id", infer_word]886 )887 result = read_json(dumps(data))[["id", infer_word]]888 assert_frame_equal(result, expected)889 def test_date_format_frame(self):890 df = self.tsframe.copy()891 def test_w_date(date, date_unit=None):892 df["date"] = Timestamp(date)893 df.iloc[1, df.columns.get_loc("date")] = pd.NaT894 df.iloc[5, df.columns.get_loc("date")] = pd.NaT895 if date_unit:896 json = df.to_json(date_format="iso", date_unit=date_unit)897 else:898 json = df.to_json(date_format="iso")899 result = read_json(json)900 expected = df.copy()901 expected.index = expected.index.tz_localize("UTC")902 expected["date"] = expected["date"].dt.tz_localize("UTC")903 assert_frame_equal(result, expected)904 test_w_date("20130101 20:43:42.123")905 test_w_date("20130101 20:43:42", 
date_unit="s")906 test_w_date("20130101 20:43:42.123", date_unit="ms")907 test_w_date("20130101 20:43:42.123456", date_unit="us")908 test_w_date("20130101 20:43:42.123456789", date_unit="ns")909 msg = "Invalid value 'foo' for option 'date_unit'"910 with pytest.raises(ValueError, match=msg):911 df.to_json(date_format="iso", date_unit="foo")912 def test_date_format_series(self):913 def test_w_date(date, date_unit=None):914 ts = Series(Timestamp(date), index=self.ts.index)915 ts.iloc[1] = pd.NaT916 ts.iloc[5] = pd.NaT917 if date_unit:918 json = ts.to_json(date_format="iso", date_unit=date_unit)919 else:920 json = ts.to_json(date_format="iso")921 result = read_json(json, typ="series")922 expected = ts.copy()923 expected.index = expected.index.tz_localize("UTC")924 expected = expected.dt.tz_localize("UTC")925 assert_series_equal(result, expected)926 test_w_date("20130101 20:43:42.123")927 test_w_date("20130101 20:43:42", date_unit="s")928 test_w_date("20130101 20:43:42.123", date_unit="ms")929 test_w_date("20130101 20:43:42.123456", date_unit="us")930 test_w_date("20130101 20:43:42.123456789", date_unit="ns")931 ts = Series(Timestamp("20130101 20:43:42.123"), index=self.ts.index)932 msg = "Invalid value 'foo' for option 'date_unit'"933 with pytest.raises(ValueError, match=msg):934 ts.to_json(date_format="iso", date_unit="foo")935 def test_date_unit(self):936 df = self.tsframe.copy()937 df["date"] = Timestamp("20130101 20:43:42")938 dl = df.columns.get_loc("date")939 df.iloc[1, dl] = Timestamp("19710101 20:43:42")940 df.iloc[2, dl] = Timestamp("21460101 20:43:42")941 df.iloc[4, dl] = pd.NaT942 for unit in ("s", "ms", "us", "ns"):943 json = df.to_json(date_format="epoch", date_unit=unit)944 # force date unit945 result = read_json(json, date_unit=unit)946 assert_frame_equal(result, df)947 # detect date unit948 result = read_json(json, date_unit=None)949 assert_frame_equal(result, df)950 def test_weird_nested_json(self):951 # this used to core dump the parser952 s = 
r"""{953 "status": "success",954 "data": {955 "posts": [956 {957 "id": 1,958 "title": "A blog post",959 "body": "Some useful content"960 },961 {962 "id": 2,963 "title": "Another blog post",964 "body": "More content"965 }966 ]967 }968 }"""969 read_json(s)970 def test_doc_example(self):971 dfj2 = DataFrame(np.random.randn(5, 2), columns=list("AB"))972 dfj2["date"] = Timestamp("20130101")973 dfj2["ints"] = range(5)974 dfj2["bools"] = True975 dfj2.index = pd.date_range("20130101", periods=5)976 json = dfj2.to_json()977 result = read_json(json, dtype={"ints": np.int64, "bools": np.bool_})978 assert_frame_equal(result, result)979 def test_misc_example(self):980 # parsing unordered input fails981 result = read_json('[{"a": 1, "b": 2}, {"b":2, "a" :1}]', numpy=True)982 expected = DataFrame([[1, 2], [1, 2]], columns=["a", "b"])983 error_msg = """DataFrame\\.index are different984DataFrame\\.index values are different \\(100\\.0 %\\)985\\[left\\]: Index\\(\\['a', 'b'\\], dtype='object'\\)986\\[right\\]: RangeIndex\\(start=0, stop=2, step=1\\)"""987 with pytest.raises(AssertionError, match=error_msg):988 assert_frame_equal(result, expected, check_index_type=False)989 result = read_json('[{"a": 1, "b": 2}, {"b":2, "a" :1}]')990 expected = DataFrame([[1, 2], [1, 2]], columns=["a", "b"])991 assert_frame_equal(result, expected)992 @network993 @pytest.mark.single994 def test_round_trip_exception_(self):995 # GH 3867996 csv = "https://raw.github.com/hayd/lahman2012/master/csvs/Teams.csv"997 df = pd.read_csv(csv)998 s = df.to_json()999 result = pd.read_json(s)1000 assert_frame_equal(result.reindex(index=df.index, columns=df.columns), df)1001 @network1002 @pytest.mark.single1003 @pytest.mark.parametrize(1004 "field,dtype",1005 [1006 ["created_at", pd.DatetimeTZDtype(tz="UTC")],1007 ["closed_at", "datetime64[ns]"],1008 ["updated_at", pd.DatetimeTZDtype(tz="UTC")],1009 ],1010 )1011 def test_url(self, field, dtype):1012 url = 
"https://api.github.com/repos/pandas-dev/pandas/issues?per_page=5" # noqa1013 result = read_json(url, convert_dates=True)1014 assert result[field].dtype == dtype1015 def test_timedelta(self):1016 converter = lambda x: pd.to_timedelta(x, unit="ms")1017 s = Series([timedelta(23), timedelta(seconds=5)])1018 assert s.dtype == "timedelta64[ns]"1019 result = pd.read_json(s.to_json(), typ="series").apply(converter)1020 assert_series_equal(result, s)1021 s = Series([timedelta(23), timedelta(seconds=5)], index=pd.Index([0, 1]))1022 assert s.dtype == "timedelta64[ns]"1023 result = pd.read_json(s.to_json(), typ="series").apply(converter)1024 assert_series_equal(result, s)1025 frame = DataFrame([timedelta(23), timedelta(seconds=5)])1026 assert frame[0].dtype == "timedelta64[ns]"1027 assert_frame_equal(frame, pd.read_json(frame.to_json()).apply(converter))1028 frame = DataFrame(1029 {1030 "a": [timedelta(days=23), timedelta(seconds=5)],1031 "b": [1, 2],1032 "c": pd.date_range(start="20130101", periods=2),1033 }1034 )1035 result = pd.read_json(frame.to_json(date_unit="ns"))1036 result["a"] = pd.to_timedelta(result.a, unit="ns")1037 result["c"] = pd.to_datetime(result.c)1038 assert_frame_equal(frame, result)1039 def test_mixed_timedelta_datetime(self):1040 frame = DataFrame(1041 {"a": [timedelta(23), pd.Timestamp("20130101")]}, dtype=object1042 )1043 expected = DataFrame(1044 {"a": [pd.Timedelta(frame.a[0]).value, pd.Timestamp(frame.a[1]).value]}1045 )1046 result = pd.read_json(frame.to_json(date_unit="ns"), dtype={"a": "int64"})1047 assert_frame_equal(result, expected, check_index_type=False)1048 def test_default_handler(self):1049 value = object()1050 frame = DataFrame({"a": [7, value]})1051 expected = DataFrame({"a": [7, str(value)]})1052 result = pd.read_json(frame.to_json(default_handler=str))1053 assert_frame_equal(expected, result, check_index_type=False)1054 def test_default_handler_indirect(self):1055 from pandas.io.json import dumps1056 def default(obj):1057 if 
isinstance(obj, complex):1058 return [("mathjs", "Complex"), ("re", obj.real), ("im", obj.imag)]1059 return str(obj)1060 df_list = [1061 9,1062 DataFrame(1063 {"a": [1, "STR", complex(4, -5)], "b": [float("nan"), None, "N/A"]},1064 columns=["a", "b"],1065 ),1066 ]1067 expected = (1068 '[9,[[1,null],["STR",null],[[["mathjs","Complex"],'1069 '["re",4.0],["im",-5.0]],"N\\/A"]]]'1070 )1071 assert dumps(df_list, default_handler=default, orient="values") == expected1072 def test_default_handler_numpy_unsupported_dtype(self):1073 # GH12554 to_json raises 'Unhandled numpy dtype 15'1074 df = DataFrame(1075 {"a": [1, 2.3, complex(4, -5)], "b": [float("nan"), None, complex(1.2, 0)]},1076 columns=["a", "b"],1077 )1078 expected = (1079 '[["(1+0j)","(nan+0j)"],'1080 '["(2.3+0j)","(nan+0j)"],'1081 '["(4-5j)","(1.2+0j)"]]'1082 )1083 assert df.to_json(default_handler=str, orient="values") == expected1084 def test_default_handler_raises(self):1085 msg = "raisin"1086 def my_handler_raises(obj):1087 raise TypeError(msg)1088 with pytest.raises(TypeError, match=msg):1089 DataFrame({"a": [1, 2, object()]}).to_json(1090 default_handler=my_handler_raises1091 )1092 with pytest.raises(TypeError, match=msg):1093 DataFrame({"a": [1, 2, complex(4, -5)]}).to_json(1094 default_handler=my_handler_raises1095 )1096 def test_categorical(self):1097 # GH4377 df.to_json segfaults with non-ndarray blocks1098 df = DataFrame({"A": ["a", "b", "c", "a", "b", "b", "a"]})1099 df["B"] = df["A"]1100 expected = df.to_json()1101 df["B"] = df["A"].astype("category")1102 assert expected == df.to_json()1103 s = df["A"]1104 sc = df["B"]1105 assert s.to_json() == sc.to_json()1106 def test_datetime_tz(self):1107 # GH4377 df.to_json segfaults with non-ndarray blocks1108 tz_range = pd.date_range("20130101", periods=3, tz="US/Eastern")1109 tz_naive = tz_range.tz_convert("utc").tz_localize(None)1110 df = DataFrame({"A": tz_range, "B": pd.date_range("20130101", periods=3)})1111 df_naive = df.copy()1112 df_naive["A"] = 
tz_naive1113 expected = df_naive.to_json()1114 assert expected == df.to_json()1115 stz = Series(tz_range)1116 s_naive = Series(tz_naive)1117 assert stz.to_json() == s_naive.to_json()1118 @pytest.mark.filterwarnings("ignore:Sparse:FutureWarning")1119 @pytest.mark.filterwarnings("ignore:DataFrame.to_sparse:FutureWarning")1120 @pytest.mark.filterwarnings("ignore:Series.to_sparse:FutureWarning")1121 def test_sparse(self):1122 # GH4377 df.to_json segfaults with non-ndarray blocks1123 df = pd.DataFrame(np.random.randn(10, 4))1124 df.loc[:8] = np.nan1125 sdf = df.to_sparse()1126 expected = df.to_json()1127 assert expected == sdf.to_json()1128 s = pd.Series(np.random.randn(10))1129 s.loc[:8] = np.nan1130 ss = s.to_sparse()1131 expected = s.to_json()1132 assert expected == ss.to_json()1133 def test_tz_is_utc(self):1134 from pandas.io.json import dumps1135 exp = '"2013-01-10T05:00:00.000Z"'1136 ts = Timestamp("2013-01-10 05:00:00Z")1137 assert dumps(ts, iso_dates=True) == exp1138 dt = ts.to_pydatetime()1139 assert dumps(dt, iso_dates=True) == exp1140 ts = Timestamp("2013-01-10 00:00:00", tz="US/Eastern")1141 assert dumps(ts, iso_dates=True) == exp1142 dt = ts.to_pydatetime()1143 assert dumps(dt, iso_dates=True) == exp1144 ts = Timestamp("2013-01-10 00:00:00-0500")1145 assert dumps(ts, iso_dates=True) == exp1146 dt = ts.to_pydatetime()1147 assert dumps(dt, iso_dates=True) == exp1148 def test_tz_range_is_utc(self):1149 from pandas.io.json import dumps1150 exp = '["2013-01-01T05:00:00.000Z","2013-01-02T05:00:00.000Z"]'1151 dfexp = (1152 '{"DT":{'1153 '"0":"2013-01-01T05:00:00.000Z",'1154 '"1":"2013-01-02T05:00:00.000Z"}}'1155 )1156 tz_range = pd.date_range("2013-01-01 05:00:00Z", periods=2)1157 assert dumps(tz_range, iso_dates=True) == exp1158 dti = pd.DatetimeIndex(tz_range)1159 assert dumps(dti, iso_dates=True) == exp1160 df = DataFrame({"DT": dti})1161 result = dumps(df, iso_dates=True)1162 assert result == dfexp1163 tz_range = pd.date_range("2013-01-01 00:00:00", periods=2, 
tz="US/Eastern")1164 assert dumps(tz_range, iso_dates=True) == exp1165 dti = pd.DatetimeIndex(tz_range)1166 assert dumps(dti, iso_dates=True) == exp1167 df = DataFrame({"DT": dti})1168 assert dumps(df, iso_dates=True) == dfexp1169 tz_range = pd.date_range("2013-01-01 00:00:00-0500", periods=2)1170 assert dumps(tz_range, iso_dates=True) == exp1171 dti = pd.DatetimeIndex(tz_range)1172 assert dumps(dti, iso_dates=True) == exp1173 df = DataFrame({"DT": dti})1174 assert dumps(df, iso_dates=True) == dfexp1175 def test_read_inline_jsonl(self):1176 # GH91801177 result = read_json('{"a": 1, "b": 2}\n{"b":2, "a" :1}\n', lines=True)1178 expected = DataFrame([[1, 2], [1, 2]], columns=["a", "b"])1179 assert_frame_equal(result, expected)1180 @td.skip_if_not_us_locale1181 def test_read_s3_jsonl(self, s3_resource):1182 # GH172001183 result = read_json("s3n://pandas-test/items.jsonl", lines=True)1184 expected = DataFrame([[1, 2], [1, 2]], columns=["a", "b"])1185 assert_frame_equal(result, expected)1186 def test_read_local_jsonl(self):1187 # GH172001188 with ensure_clean("tmp_items.json") as path:1189 with open(path, "w") as infile:1190 infile.write('{"a": 1, "b": 2}\n{"b":2, "a" :1}\n')1191 result = read_json(path, lines=True)1192 expected = DataFrame([[1, 2], [1, 2]], columns=["a", "b"])1193 assert_frame_equal(result, expected)1194 def test_read_jsonl_unicode_chars(self):1195 # GH15132: non-ascii unicode characters1196 # \u201d == RIGHT DOUBLE QUOTATION MARK1197 # simulate file handle1198 json = '{"a": "foo”", "b": "bar"}\n{"a": "foo", "b": "bar"}\n'1199 json = StringIO(json)1200 result = read_json(json, lines=True)1201 expected = DataFrame([["foo\u201d", "bar"], ["foo", "bar"]], columns=["a", "b"])1202 assert_frame_equal(result, expected)1203 # simulate string1204 json = '{"a": "foo”", "b": "bar"}\n{"a": "foo", "b": "bar"}\n'1205 result = read_json(json, lines=True)1206 expected = DataFrame([["foo\u201d", "bar"], ["foo", "bar"]], columns=["a", "b"])1207 assert_frame_equal(result, 
expected)1208 def test_read_json_large_numbers(self):1209 # GH188421210 json = '{"articleId": "1404366058080022500245"}'1211 json = StringIO(json)1212 result = read_json(json, typ="series")1213 expected = Series(1.404366e21, index=["articleId"])1214 assert_series_equal(result, expected)1215 json = '{"0": {"articleId": "1404366058080022500245"}}'1216 json = StringIO(json)1217 result = read_json(json)1218 expected = DataFrame(1.404366e21, index=["articleId"], columns=[0])1219 assert_frame_equal(result, expected)1220 def test_to_jsonl(self):1221 # GH91801222 df = DataFrame([[1, 2], [1, 2]], columns=["a", "b"])1223 result = df.to_json(orient="records", lines=True)1224 expected = '{"a":1,"b":2}\n{"a":1,"b":2}'1225 assert result == expected1226 df = DataFrame([["foo}", "bar"], ['foo"', "bar"]], columns=["a", "b"])1227 result = df.to_json(orient="records", lines=True)1228 expected = '{"a":"foo}","b":"bar"}\n{"a":"foo\\"","b":"bar"}'1229 assert result == expected1230 assert_frame_equal(pd.read_json(result, lines=True), df)1231 # GH15096: escaped characters in columns and data1232 df = DataFrame([["foo\\", "bar"], ['foo"', "bar"]], columns=["a\\", "b"])1233 result = df.to_json(orient="records", lines=True)1234 expected = '{"a\\\\":"foo\\\\","b":"bar"}\n' '{"a\\\\":"foo\\"","b":"bar"}'1235 assert result == expected1236 assert_frame_equal(pd.read_json(result, lines=True), df)1237 # TODO: there is a near-identical test for pytables; can we share?1238 def test_latin_encoding(self):1239 # GH 137741240 pytest.skip("encoding not implemented in .to_json(), xref #13774")1241 values = [1242 [b"E\xc9, 17", b"", b"a", b"b", b"c"],1243 [b"E\xc9, 17", b"a", b"b", b"c"],1244 [b"EE, 17", b"", b"a", b"b", b"c"],1245 [b"E\xc9, 17", b"\xf8\xfc", b"a", b"b", b"c"],1246 [b"", b"a", b"b", b"c"],1247 [b"\xf8\xfc", b"a", b"b", b"c"],1248 [b"A\xf8\xfc", b"", b"a", b"b", b"c"],1249 [np.nan, b"", b"b", b"c"],1250 [b"A\xf8\xfc", np.nan, b"", b"b", b"c"],1251 ]1252 def _try_decode(x, 
encoding="latin-1"):1253 try:1254 return x.decode(encoding)1255 except AttributeError:1256 return x1257 # not sure how to remove latin-1 from code in python 2 and 31258 values = [[_try_decode(x) for x in y] for y in values]1259 examples = []1260 for dtype in ["category", object]:1261 for val in values:1262 examples.append(Series(val, dtype=dtype))1263 def roundtrip(s, encoding="latin-1"):1264 with ensure_clean("test.json") as path:1265 s.to_json(path, encoding=encoding)1266 retr = read_json(path, encoding=encoding)1267 assert_series_equal(s, retr, check_categorical=False)1268 for s in examples:1269 roundtrip(s)1270 def test_data_frame_size_after_to_json(self):1271 # GH153441272 df = DataFrame({"a": [str(1)]})1273 size_before = df.memory_usage(index=True, deep=True).sum()1274 df.to_json()1275 size_after = df.memory_usage(index=True, deep=True).sum()1276 assert size_before == size_after1277 @pytest.mark.parametrize(1278 "index", [None, [1, 2], [1.0, 2.0], ["a", "b"], ["1", "2"], ["1.", "2."]]1279 )1280 @pytest.mark.parametrize("columns", [["a", "b"], ["1", "2"], ["1.", "2."]])1281 def test_from_json_to_json_table_index_and_columns(self, index, columns):1282 # GH25433 GH254351283 expected = DataFrame([[1, 2], [3, 4]], index=index, columns=columns)1284 dfjson = expected.to_json(orient="table")1285 result = pd.read_json(dfjson, orient="table")1286 assert_frame_equal(result, expected)1287 def test_from_json_to_json_table_dtypes(self):1288 # GH213451289 expected = pd.DataFrame({"a": [1, 2], "b": [3.0, 4.0], "c": ["5", "6"]})1290 dfjson = expected.to_json(orient="table")1291 result = pd.read_json(dfjson, orient="table")1292 assert_frame_equal(result, expected)1293 @pytest.mark.parametrize("dtype", [True, {"b": int, "c": int}])1294 def test_read_json_table_dtype_raises(self, dtype):1295 # GH213451296 df = pd.DataFrame({"a": [1, 2], "b": [3.0, 4.0], "c": ["5", "6"]})1297 dfjson = df.to_json(orient="table")1298 msg = "cannot pass both dtype and orient='table'"1299 with 
pytest.raises(ValueError, match=msg):1300 pd.read_json(dfjson, orient="table", dtype=dtype)1301 def test_read_json_table_convert_axes_raises(self):1302 # GH25433 GH254351303 df = DataFrame([[1, 2], [3, 4]], index=[1.0, 2.0], columns=["1.", "2."])1304 dfjson = df.to_json(orient="table")1305 msg = "cannot pass both convert_axes and orient='table'"1306 with pytest.raises(ValueError, match=msg):1307 pd.read_json(dfjson, orient="table", convert_axes=True)1308 @pytest.mark.parametrize(1309 "data, expected",1310 [1311 (1312 DataFrame([[1, 2], [4, 5]], columns=["a", "b"]),1313 {"columns": ["a", "b"], "data": [[1, 2], [4, 5]]},1314 ),1315 (1316 DataFrame([[1, 2], [4, 5]], columns=["a", "b"]).rename_axis("foo"),1317 {"columns": ["a", "b"], "data": [[1, 2], [4, 5]]},1318 ),1319 (1320 DataFrame(1321 [[1, 2], [4, 5]], columns=["a", "b"], index=[["a", "b"], ["c", "d"]]1322 ),1323 {"columns": ["a", "b"], "data": [[1, 2], [4, 5]]},1324 ),1325 (Series([1, 2, 3], name="A"), {"name": "A", "data": [1, 2, 3]}),1326 (1327 Series([1, 2, 3], name="A").rename_axis("foo"),1328 {"name": "A", "data": [1, 2, 3]},1329 ),1330 (1331 Series([1, 2], name="A", index=[["a", "b"], ["c", "d"]]),1332 {"name": "A", "data": [1, 2]},1333 ),1334 ],1335 )1336 def test_index_false_to_json_split(self, data, expected):1337 # GH 173941338 # Testing index=False in to_json with orient='split'1339 result = data.to_json(orient="split", index=False)1340 result = json.loads(result)1341 assert result == expected1342 @pytest.mark.parametrize(1343 "data",1344 [1345 (DataFrame([[1, 2], [4, 5]], columns=["a", "b"])),1346 (DataFrame([[1, 2], [4, 5]], columns=["a", "b"]).rename_axis("foo")),1347 (1348 DataFrame(1349 [[1, 2], [4, 5]], columns=["a", "b"], index=[["a", "b"], ["c", "d"]]1350 )1351 ),1352 (Series([1, 2, 3], name="A")),1353 (Series([1, 2, 3], name="A").rename_axis("foo")),1354 (Series([1, 2], name="A", index=[["a", "b"], ["c", "d"]])),1355 ],1356 )1357 def test_index_false_to_json_table(self, data):1358 # GH 
173941359 # Testing index=False in to_json with orient='table'1360 result = data.to_json(orient="table", index=False)1361 result = json.loads(result)1362 expected = {1363 "schema": pd.io.json.build_table_schema(data, index=False),1364 "data": DataFrame(data).to_dict(orient="records"),1365 }1366 assert result == expected1367 @pytest.mark.parametrize("orient", ["records", "index", "columns", "values"])1368 def test_index_false_error_to_json(self, orient):1369 # GH 173941370 # Testing error message from to_json with index=False1371 df = pd.DataFrame([[1, 2], [4, 5]], columns=["a", "b"])1372 msg = "'index=False' is only valid when 'orient' is 'split' or 'table'"1373 with pytest.raises(ValueError, match=msg):1374 df.to_json(orient=orient, index=False)1375 @pytest.mark.parametrize("orient", ["split", "table"])1376 @pytest.mark.parametrize("index", [True, False])1377 def test_index_false_from_json_to_json(self, orient, index):1378 # GH251701379 # Test index=False in from_json to_json1380 expected = DataFrame({"a": [1, 2], "b": [3, 4]})1381 dfjson = expected.to_json(orient=orient, index=index)1382 result = read_json(dfjson, orient=orient)1383 assert_frame_equal(result, expected)1384 def test_read_timezone_information(self):1385 # GH 255461386 result = read_json(1387 '{"2019-01-01T11:00:00.000Z":88}', typ="series", orient="index"1388 )1389 expected = Series([88], index=DatetimeIndex(["2019-01-01 11:00:00"], tz="UTC"))...

Full Screen

Full Screen

streamlined_syntax.py

Source:streamlined_syntax.py Github

copy

Full Screen

# NOTE(review): this span was extraction-garbled (the original file's line
# numbers were fused into the text); it is reconstructed below with
# conventional formatting.  The first class's header was cut off by the chunk
# boundary -- from its body (Test(Switch(value))) and the sibling SwitchNotEq
# it is presumed to be SwitchEq; confirm against the full file.


def _to_nonneg_int(value):
    """Coerce *value* (an int, or a numeric str) to int and require it >= 0."""
    if type(value) == str:
        value = int(value)
    assert type(value) == int and value >= 0
    return value


def _assert_stringlike(value):
    """Require *value* to be a str/unicode.

    BUGFIX: the original wrote ``type(value) == str or type(value == unicode)``
    -- the second operand takes ``type()`` of a *boolean comparison* (always a
    truthy ``bool`` type), so the assertion could never fail.
    """
    assert isinstance(value, (str, unicode))


def _check_mask(mask):
    """Require *mask* to be an int when given (None means "no mask")."""
    if mask is not None:
        assert type(mask) == int


class SwitchEq(Pred):  # name presumed; header truncated in source chunk
    """Predicate: packet is at the switch with the given datapath id."""

    def __init__(self, value):
        self.hv = Test(Switch(_to_nonneg_int(value)))

    def to_json(self):
        return self.hv.to_json()


class PortEq(Pred):
    """Predicate: packet arrived on the given physical port."""

    def __init__(self, value):
        self.hv = Test(Location(Physical(_to_nonneg_int(value))))

    def to_json(self):
        return self.hv.to_json()


class EthSrcEq(Pred):
    """Predicate: Ethernet source address equals *value*."""

    def __init__(self, value):
        _assert_stringlike(value)
        self.hv = Test(EthSrc(value))

    def to_json(self):
        return self.hv.to_json()


class EthDstEq(Pred):
    """Predicate: Ethernet destination address equals *value*."""

    def __init__(self, value):
        _assert_stringlike(value)
        self.hv = Test(EthDst(value))

    def to_json(self):
        return self.hv.to_json()


class VlanEq(Pred):
    """Predicate: VLAN id equals *value*."""

    def __init__(self, value):
        self.hv = Test(Vlan(_to_nonneg_int(value)))

    def to_json(self):
        return self.hv.to_json()


class VlanPcpEq(Pred):
    """Predicate: VLAN priority (PCP) equals *value*."""

    def __init__(self, value):
        self.hv = Test(VlanPcp(_to_nonneg_int(value)))

    def to_json(self):
        return self.hv.to_json()


class EthTypeEq(Pred):
    """Predicate: EtherType equals *value*."""

    def __init__(self, value):
        self.hv = Test(EthType(_to_nonneg_int(value)))

    def to_json(self):
        return self.hv.to_json()


class IPProtoEq(Pred):
    """Predicate: IP protocol number equals *value*."""

    def __init__(self, value):
        self.hv = Test(IPProto(_to_nonneg_int(value)))

    def to_json(self):
        return self.hv.to_json()


class IP4SrcEq(Pred):
    """Predicate: IPv4 source address equals *value* (optionally masked)."""

    def __init__(self, value, mask=None):
        _assert_stringlike(value)
        _check_mask(mask)
        self.hv = Test(IP4Src(value, mask))

    def to_json(self):
        return self.hv.to_json()


class IP4DstEq(Pred):
    """Predicate: IPv4 destination address equals *value* (optionally masked)."""

    def __init__(self, value, mask=None):
        _assert_stringlike(value)
        _check_mask(mask)
        self.hv = Test(IP4Dst(value, mask))

    def to_json(self):
        return self.hv.to_json()


class TCPSrcPortEq(Pred):
    """Predicate: TCP source port equals *value*.

    BUGFIX: the original tested ``IPProto(value)`` (copy-paste from
    IPProtoEq); it now tests the TCP source-port header.  Assumes the
    underlying syntax module defines ``TCPSrcPort`` -- confirm.
    """

    def __init__(self, value):
        self.hv = Test(TCPSrcPort(_to_nonneg_int(value)))

    def to_json(self):
        return self.hv.to_json()


class TCPDstPortEq(Pred):
    """Predicate: TCP destination port equals *value*.

    BUGFIX: the original tested ``IPProto(value)``; see TCPSrcPortEq.
    """

    def __init__(self, value):
        self.hv = Test(TCPDstPort(_to_nonneg_int(value)))

    def to_json(self):
        return self.hv.to_json()


########## ___NotEq


class SwitchNotEq(Pred):
    """Predicate: packet is NOT at the switch with the given datapath id."""

    def __init__(self, value):
        self.hv = Not(Test(Switch(_to_nonneg_int(value))))

    def to_json(self):
        return self.hv.to_json()


class PortNotEq(Pred):
    """Predicate: packet did NOT arrive on the given physical port."""

    def __init__(self, value):
        self.hv = Not(Test(Location(Physical(_to_nonneg_int(value)))))

    def to_json(self):
        return self.hv.to_json()


class EthSrcNotEq(Pred):
    """Predicate: Ethernet source address does NOT equal *value*."""

    def __init__(self, value):
        _assert_stringlike(value)
        self.hv = Not(Test(EthSrc(value)))

    def to_json(self):
        return self.hv.to_json()


class EthDstNotEq(Pred):
    """Predicate: Ethernet destination address does NOT equal *value*."""

    def __init__(self, value):
        _assert_stringlike(value)
        self.hv = Not(Test(EthDst(value)))

    def to_json(self):
        return self.hv.to_json()


class VlanNotEq(Pred):
    """Predicate: VLAN id does NOT equal *value*."""

    def __init__(self, value):
        self.hv = Not(Test(Vlan(_to_nonneg_int(value))))

    def to_json(self):
        return self.hv.to_json()


class VlanPcpNotEq(Pred):
    """Predicate: VLAN priority (PCP) does NOT equal *value*."""

    def __init__(self, value):
        self.hv = Not(Test(VlanPcp(_to_nonneg_int(value))))

    def to_json(self):
        return self.hv.to_json()


class EthTypeNotEq(Pred):
    """Predicate: EtherType does NOT equal *value*."""

    def __init__(self, value):
        self.hv = Not(Test(EthType(_to_nonneg_int(value))))

    def to_json(self):
        return self.hv.to_json()


class IPProtoNotEq(Pred):
    """Predicate: IP protocol number does NOT equal *value*."""

    def __init__(self, value):
        self.hv = Not(Test(IPProto(_to_nonneg_int(value))))

    def to_json(self):
        return self.hv.to_json()


class IP4SrcNotEq(Pred):
    """Predicate: IPv4 source address does NOT equal *value* (optionally masked)."""

    def __init__(self, value, mask=None):
        _assert_stringlike(value)
        _check_mask(mask)
        self.hv = Not(Test(IP4Src(value, mask)))

    def to_json(self):
        return self.hv.to_json()


class IP4DstNotEq(Pred):
    """Predicate: IPv4 destination address does NOT equal *value* (optionally masked)."""

    def __init__(self, value, mask=None):
        _assert_stringlike(value)
        _check_mask(mask)
        self.hv = Not(Test(IP4Dst(value, mask)))

    def to_json(self):
        return self.hv.to_json()


class TCPSrcPortNotEq(Pred):
    """Predicate: TCP source port does NOT equal *value*.

    BUGFIX: the original tested ``IPProto(value)``; see TCPSrcPortEq.
    """

    def __init__(self, value):
        self.hv = Not(Test(TCPSrcPort(_to_nonneg_int(value))))

    def to_json(self):
        return self.hv.to_json()


class TCPDstPortNotEq(Pred):
    """Predicate: TCP destination port does NOT equal *value*.

    BUGFIX: the original tested ``IPProto(value)``; see TCPSrcPortEq.
    """

    def __init__(self, value):
        self.hv = Not(Test(TCPDstPort(_to_nonneg_int(value))))

    def to_json(self):
        return self.hv.to_json()


########## Set___


class SetEthSrc(Policy):
    """Policy: rewrite the Ethernet source address to *value*."""

    def __init__(self, value):
        _assert_stringlike(value)
        self.hv = Mod(EthSrc(value))

    def to_json(self):
        return self.hv.to_json()


class SetEthDst(Policy):
    """Policy: rewrite the Ethernet destination address to *value*."""

    def __init__(self, value):
        _assert_stringlike(value)
        self.hv = Mod(EthDst(value))

    def to_json(self):
        return self.hv.to_json()


class SetVlan(Policy):
    """Policy: rewrite the VLAN id to *value*."""

    def __init__(self, value):
        self.hv = Mod(Vlan(_to_nonneg_int(value)))

    def to_json(self):
        return self.hv.to_json()


class SetVlanPcp(Policy):
    """Policy: rewrite the VLAN priority (PCP) to *value*."""

    def __init__(self, value):
        self.hv = Mod(VlanPcp(_to_nonneg_int(value)))

    def to_json(self):
        return self.hv.to_json()


class SetEthType(Policy):
    """Policy: rewrite the EtherType to *value*."""

    def __init__(self, value):
        self.hv = Mod(EthType(_to_nonneg_int(value)))

    def to_json(self):
        return self.hv.to_json()


class SetIPProto(Policy):
    """Policy: rewrite the IP protocol number to *value*."""

    def __init__(self, value):
        self.hv = Mod(IPProto(_to_nonneg_int(value)))

    def to_json(self):
        return self.hv.to_json()


class SetIP4Src(Policy):
    """Policy: rewrite the IPv4 source address to *value* (optionally masked)."""

    def __init__(self, value, mask=None):
        _assert_stringlike(value)
        _check_mask(mask)
        self.hv = Mod(IP4Src(value, mask))

    def to_json(self):
        return self.hv.to_json()


class SetIP4Dst(Policy):
    """Policy: rewrite the IPv4 destination address to *value* (optionally masked)."""

    def __init__(self, value, mask=None):
        _assert_stringlike(value)
        _check_mask(mask)
        self.hv = Mod(IP4Dst(value, mask))

    def to_json(self):
        return self.hv.to_json()


class SetTCPSrcPort(Policy):
    """Policy: rewrite the TCP source port to *value*.

    BUGFIX: the original modified ``IPProto(value)`` (copy-paste from
    SetIPProto); it now modifies the TCP source-port header.
    """

    def __init__(self, value):
        self.hv = Mod(TCPSrcPort(_to_nonneg_int(value)))

    def to_json(self):
        return self.hv.to_json()


class SetTCPDstPort(Policy):
    """Policy: rewrite the TCP destination port to *value*.

    BUGFIX: the original modified ``IPProto(value)``; see SetTCPSrcPort.
    """

    def __init__(self, value):
        self.hv = Mod(TCPDstPort(_to_nonneg_int(value)))

    def to_json(self):
        return self.hv.to_json()


############### Misc.


class Send(Policy):
    """Policy: forward the packet out of physical port *value* (1..65535)."""

    def __init__(self, value):
        if type(value) == str:
            value = int(value)
        # Valid OpenFlow physical port numbers start at 1, hence the
        # tighter bound than _to_nonneg_int.
        assert type(value) == int and 1 <= value <= 65535
        self.hv = Mod(Location(Physical(value)))

    def to_json(self):
        return self.hv.to_json()


class SendToController(Policy):
    """Policy: send the packet to the controller pipe named *value*."""

    def __init__(self, value):
        _assert_stringlike(value)
        self.hv = Mod(Location(Pipe(value)))

    # NOTE(review): the original's to_json was truncated by the chunk
    # boundary; restored to the uniform delegate used by every sibling class.
    def to_json(self):
        return self.hv.to_json()

Full Screen

Full Screen

graphql_inputs.py

Source:graphql_inputs.py Github

copy

Full Screen

...25 province: str26 postalCode: str27 unitNumber: str28 poBox: str29 def to_json(self):30 return {31 "streetNumber": self.streetNumber,32 "streetName": self.streetName,33 "city": self.city,34 "province": self.province,35 "postalCode": self.postalCode,36 "unitNumber": self.unitNumber,37 "poBox": self.poBox38 }39 40@strawberry.input41class EmailInput:42 email: str43 def to_json(self): 44 return {45 "email": self.email46 }47@strawberry.input48class ContactInfoInput:49 contact: str50 def to_json(self): 51 return {52 "contact": self.contact53 }54@strawberry.input55class LandlordInfoInput:56 fullName: str57 receiveDocumentsByEmail: bool58 emails: List[EmailInput]59 contactInfo: bool60 contacts: List[ContactInfoInput]61 def to_json(self): 62 return {63 "fullName": self.fullName,64 "receiveDocumentsByEmail": self.receiveDocumentsByEmail,65 "contactInfo": self.contactInfo,66 "contacts": [contact.to_json() for contact in self.contacts],67 "emails": [email.to_json() for email in self.emails]68 }69@strawberry.input70class ParkingDescriptionInput:71 description: str72 def to_json(self): 73 return {74 "description": self.description75 }76@strawberry.input77class RentalAddressInput:78 streetNumber: str79 streetName: str80 city: str81 province: str82 postalCode: str83 unitName: str84 isCondo: bool85 parkingDescriptions: List[ParkingDescriptionInput]86 def to_json(self):87 return {88 "streetNumber": self.streetNumber,89 "streetName": self.streetName,90 "city": self.city,91 "province": self.province,92 "postalCode": self.postalCode,93 "unitName": self.unitName,94 "isCondo": self.isCondo,95 "parkingDescriptions": [parkingDescription.to_json() for parkingDescription in self.parkingDescriptions]96 }97@strawberry.input98class RentServiceInput:99 name: str100 amount: str101 def to_json(self):102 return {103 "name": self.name,104 "amount": self.amount105 }106@strawberry.input107class PaymentOptionInput:108 name: str109 def to_json(self):110 return {111 "name": self.name112 
}113@strawberry.input114class RentInput:115 baseRent: str116 rentMadePayableTo: str117 rentServices: List[RentServiceInput]118 paymentOptions: List[PaymentOptionInput]119 def to_json(self):120 return {121 "baseRent": self.baseRent,122 "rentMadePayableTo": self.rentMadePayableTo,123 "rentServices": [rentService.to_json() for rentService in self.rentServices],124 "paymentOptions": [paymentOption.to_json() for paymentOption in self.paymentOptions]125 }126@strawberry.input127class RentalPeriodInput:128 rentalPeriod: str129 endDate: str130 def to_json(self):131 return {132 "rentalPeriod": self.rentalPeriod,133 "endDate": self.endDate134 }135@strawberry.input136class PartialPeriodInput:137 amount: str138 dueDate: str139 startDate: str140 endDate: str141 isEnabled: bool142 def to_json(self):143 return {144 "amount": self.amount,145 "dueDate": self.dueDate,146 "startDate": self.startDate,147 "endDate": self.endDate,148 "isEnabled": self.isEnabled149 }150@strawberry.input151class TenancyTermsInput:152 rentalPeriod: RentalPeriodInput153 startDate: str154 rentDueDate: str155 paymentPeriod: str156 partialPeriod: PartialPeriodInput157 def to_json(self):158 return {159 "startDate": self.startDate,160 "rentDueDate": self.rentDueDate,161 "paymentPeriod": self.paymentPeriod,162 "rentalPeriod": self.rentalPeriod.to_json(),163 "partialPeriod": self.partialPeriod.to_json()164 }165 166@strawberry.input167class DetailInput:168 detail: str169 def to_json(self):170 return {171 "detail": self.detail.replace("$", "\$")172 }173@strawberry.input174class ServiceInput:175 name: str176 isIncludedInRent: bool177 isPayPerUse: Optional[bool]178 details: List[DetailInput]179 def to_json(self):180 return {181 "name": self.name,182 "isIncludedInRent": self.isIncludedInRent,183 "isPayPerUse": self.isPayPerUse,184 "details": [detail.to_json() for detail in self.details]185 }186@strawberry.input187class UtilityInput:188 name: str189 responsibility: str190 details: List[DetailInput]191 def 
to_json(self):192 return {193 "name": self.name,194 "responsibility": self.responsibility,195 "details": [detail.to_json() for detail in self.details]196 }197@strawberry.input198class RentDiscoutInput:199 name: str200 amount: str201 details: List[DetailInput]202 def to_json(self):203 return {204 "name": self.name,205 "amount": self.amount,206 "details": [detail.to_json() for detail in self.details]207 }208@strawberry.input209class RentDepositInput:210 name: str211 amount: str212 details: List[DetailInput]213 def to_json(self):214 return {215 "name": self.name,216 "amount": self.amount,217 "details": [detail.to_json() for detail in self.details]218 }219@strawberry.input220class AdditionalTermInput:221 name: str 222 details: List[DetailInput]223 def to_json(self):224 return {225 "name": self.name,226 "details": [detail.to_json() for detail in self.details]227 }228 229@strawberry.input230class TenantNameInput:231 name: str232 def to_json(self):233 return {234 "name": self.name235 }236@strawberry.input237class LeaseInput:238 landlordInfo: LandlordInfoInput239 landlordAddress: LandlordAddressInput240 rentalAddress: RentalAddressInput241 rent: RentInput242 tenancyTerms: TenancyTermsInput243 services: List[ServiceInput]244 utilities: List[UtilityInput]245 rentDeposits: List[RentDepositInput]246 rentDiscounts: List[RentDiscoutInput]247 additionalTerms: List[AdditionalTermInput]248 tenantNames: List[TenantNameInput]249 def to_json(self):250 return {251 "landlordInfo": self.landlordInfo.to_json(),252 "landlordAddress": self.landlordAddress.to_json(),253 "rentalAddress": self.rentalAddress.to_json(),254 "rent": self.rent.to_json(),255 "tenancyTerms": self.tenancyTerms.to_json(),256 "services": [service.to_json() for service in self.services],257 "utilities": [utility.to_json() for utility in self.utilities],258 "rentDiscounts": [rentDiscount.to_json() for rentDiscount in self.rentDiscounts],259 "rentDeposits": [rentDeposit.to_json() for rentDeposit in self.rentDeposits],260 
"additionalTerms": [additionalTerm.to_json() for additionalTerm in self.additionalTerms],261 "tenantNames": [tenantName.to_json() for tenantName in self.tenantNames]262 }263 264@strawberry.input265class HouseInput:266 firebaseId: str...

Full Screen

Full Screen

constraint.py

Source:constraint.py Github

copy

Full Screen

1from .version import Version2class Constraint(object):3 def __init__(self) -> None:4 super().__init__()5 def to_json(self):6 pass7class ConstraintExactly(Constraint):8 def __init__(self, v: Version) -> None:9 super().__init__()10 self.v = v11 def to_json(self):12 return {"exactly": self.v.to_json()}13class ConstraintGeq(Constraint):14 def __init__(self, v: Version) -> None:15 super().__init__()16 self.v = v17 def to_json(self):18 return {"geq": self.v.to_json()}19class ConstraintGt(Constraint):20 def __init__(self, v: Version) -> None:21 super().__init__()22 self.v = v23 def to_json(self):24 return {"gt": self.v.to_json()}25class ConstraintLeq(Constraint):26 def __init__(self, v: Version) -> None:27 super().__init__()28 self.v = v29 def to_json(self):30 return {"leq": self.v.to_json()}31class ConstraintLt(Constraint):32 def __init__(self, v: Version) -> None:33 super().__init__()34 self.v = v35 def to_json(self):36 return {"lt": self.v.to_json()}37class ConstraintCaret(Constraint):38 def __init__(self, v: Version) -> None:39 super().__init__()40 self.v = v41 def to_json(self):42 return {"caret": self.v.to_json()}43class ConstraintTilde(Constraint):44 def __init__(self, v: Version) -> None:45 super().__init__()46 self.v = v47 def to_json(self):48 return {"tilde": self.v.to_json()}49class ConstraintAnd(Constraint):50 def __init__(self, left: Constraint, right: Constraint) -> None:51 super().__init__()52 self.left = left53 self.right = right54 def to_json(self):55 return {"and": {"left": self.left.to_json(), "right": self.right.to_json()}}56class ConstraintOr(Constraint):57 def __init__(self, left: Constraint, right: Constraint) -> None:58 super().__init__()59 self.left = left60 self.right = right61 def to_json(self):62 return {"or": {"left": self.left.to_json(), "right": self.right.to_json()}}63 64class ConstraintWildcardBug(Constraint):65 def __init__(self, major: int, minor: int) -> None:66 super().__init__()67 self.major = major68 self.minor = minor69 def 
to_json(self):70 return {"wildcardBug": {"major": self.major, "minor": self.minor}}71class ConstraintWildcardMinor(Constraint):72 def __init__(self, major: int) -> None:73 super().__init__()74 self.major = major75 76 def to_json(self):77 return {"wildcardMinor": {"major": self.major}}78class ConstraintWildcardMajor(Constraint):79 def __init__(self) -> None:80 super().__init__()81 def to_json(self):82 return {"wildcardMajor": None}83class ConstraintNot(Constraint):84 def __init__(self, c: Constraint) -> None:85 super().__init__()86 self.c = c87 88 def to_json(self):...

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub — from setting up the prerequisites and running your first automation test, to following best practices and diving into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g., Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run avocado automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 automation testing minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful