How to use raise_exception method in Slash

Best Python code snippets using slash

_datatype_validation_helper.py

Source: _datatype_validation_helper.py (GitHub)


1"""Internal helpers for dataset validation."""2from pathlib import Path3from typing import Any, Iterable, List, Optional, Sequence, Tuple, Union4import numpy as np5import pandas as pd6from biopsykit.utils._types import _Hashable, path_t7from biopsykit.utils.exceptions import FileExtensionError, ValidationError, ValueRangeError8def _assert_is_dir(path: path_t, raise_exception: Optional[bool] = True) -> Optional[bool]:9 """Check if a path is a directory.10 Parameters11 ----------12 path : path or str13 path to check if it's a directory14 raise_exception : bool, optional15 whether to raise an exception or return a bool value16 Returns17 -------18 ``True`` if ``path`` is a directory, ``False`` otherwise (if ``raise_exception`` is ``False``)19 Raises20 ------21 ValueError22 if ``raise_exception`` is ``True`` and ``path`` is not a directory23 """24 # ensure pathlib25 file_name = Path(path)26 if not file_name.is_dir():27 if raise_exception:28 raise ValueError("The path '{}' is expected to be a directory, but it's not!".format(path))29 return False30 return True31def _assert_file_extension(32 file_name: path_t, expected_extension: Union[str, Sequence[str]], raise_exception: Optional[bool] = True33) -> Optional[bool]:34 """Check if a file has the correct file extension.35 Parameters36 ----------37 file_name : path or str38 file name to check for correct extension39 expected_extension : str or list of str40 file extension (or a list of file extensions) to check for41 raise_exception : bool, optional42 whether to raise an exception or return a bool value43 Returns44 -------45 ``True`` if ``file_name`` ends with one of the specified file extensions, ``False`` otherwise46 (if ``raise_exception`` is ``False``)47 Raises48 ------49 :exc:`~biopsykit.exceptions.FileExtensionError`50 if ``raise_exception`` is ``True`` and ``file_name`` does not end with any of the specified51 ``expected_extension``52 """53 # ensure pathlib54 file_name = Path(file_name)55 if isinstance(expected_extension, str):56 expected_extension = [expected_extension]57 if file_name.suffix not in expected_extension:58 if raise_exception:59 raise FileExtensionError(60 "The file name extension is expected to be one of {}. "61 "Instead it has the following extension: {}".format(expected_extension, file_name.suffix)62 )63 return False64 return True65def _assert_is_dtype(66 obj, dtype: Union[type, Tuple[type, ...]], raise_exception: Optional[bool] = True67) -> Optional[bool]:68 """Check if an object has a specific data type.69 Parameters70 ----------71 obj : any object72 object to check73 dtype : type or list of type74 data type of tuple of data types to check75 raise_exception : bool, optional76 whether to raise an exception or return a bool value77 Returns78 -------79 ``True`` if ``obj`` is one of the expected data types, ``False`` otherwise (if ``raise_exception`` is ``False``)80 Raises81 ------82 :exc:`~biopsykit.exceptions.ValidationError`83 if ``raise_exception`` is ``True`` and ``obj`` is none of the expected data types84 """85 if not isinstance(obj, dtype):86 if raise_exception:87 raise ValidationError(88 "The data object is expected to be one of ({},). 
But it is a {}".format(dtype, type(obj))89 )90 return False91 return True92def _assert_has_multiindex(93 df: pd.DataFrame,94 expected: Optional[bool] = True,95 nlevels: Optional[int] = 2,96 nlevels_atleast: Optional[int] = False,97 raise_exception: Optional[bool] = True,98) -> Optional[bool]:99 """Check if a :any:`pandas.DataFrame` has a :any:`pandas.MultiIndex` as index.100 Parameters101 ----------102 df : :class:`~pandas.DataFrame`103 The dataframe to check104 expected : bool, optional105 Whether the df is expected to have a MultiIndex index or not106 nlevels : int, optional107 If MultiIndex is expected, how many levels the MultiIndex index should have108 nlevels_atleast : bool, optional109 Whether the MultiIndex has to have at least ``nlevels`` (``True``)110 or exactly match the number of levels (``False``)111 raise_exception : bool, optional112 whether to raise an exception or return a bool value113 Returns114 -------115 ``True`` if ``df`` meets the expected index format, ``False`` otherwise (if ``raise_exception`` is ``False``)116 Raises117 ------118 :exc:`~biopsykit.exceptions.ValidationError`119 if ``raise_exception`` is ``True`` and ``df`` does not meet the expected index format120 """121 return _multiindex_check_helper(122 df=df,123 idx_or_col="index",124 expected=expected,125 nlevels=nlevels,126 nlevels_atleast=nlevels_atleast,127 raise_exception=raise_exception,128 )129def _assert_has_index_levels(130 df: pd.DataFrame,131 index_levels: Iterable[_Hashable],132 match_atleast: Optional[bool] = False,133 match_order: Optional[bool] = False,134 raise_exception: Optional[bool] = True,135) -> Optional[bool]:136 """Check if the dataframe has all index level names.137 Parameters138 ----------139 df : :class:`~pandas.DataFrame`140 The dataframe to check141 index_levels : list142 Set of index level names to check143 match_atleast : bool, optional144 Whether the MultiIndex columns have to have at least the specified column levels (``True``)145 or exactly match the column levels (``False``)146 match_order : bool, optional147 Whether to also match the level order148 raise_exception : bool, optional149 whether to raise an exception or return a bool value150 Returns151 -------152 ``True`` if ``df`` has the expected index level names, ``False`` otherwise (if ``raise_exception`` is ``False``)153 Raises154 ------155 :exc:`~biopsykit.exceptions.ValidationError`156 if ``raise_exception`` is ``True`` and ``df`` does not have the expected index level names157 """158 return _multiindex_level_names_helper(159 df,160 level_names=index_levels,161 idx_or_col="index",162 match_atleast=match_atleast,163 match_order=match_order,164 raise_exception=raise_exception,165 )166def _assert_has_columns(167 df: pd.DataFrame,168 columns_sets: Sequence[Union[List[_Hashable], List[str], pd.Index]],169 raise_exception: Optional[bool] = True,170) -> Optional[bool]:171 """Check if the dataframe has at least all columns sets.172 Parameters173 ----------174 df : :class:`~pandas.DataFrame`175 The dataframe to check176 columns_sets : list177 Column set or list of column sets to check178 raise_exception : bool, optional179 whether to raise an exception or return a bool value180 Returns181 -------182 ``True`` if ``df`` has the expected column names, ``False`` otherwise (if ``raise_exception`` is ``False``)183 Raises184 ------185 :exc:`~biopsykit.exceptions.ValidationError`186 if ``raise_exception`` is ``True`` and ``df`` does not have the expected index level names187 Examples188 --------189 >>> df = pd.DataFrame()190 >>> 
df.columns = ["col1", "col2"]191 >>> _assert_has_columns(df, [["other_col1", "other_col2"], ["col1", "col2"]])192 >>> # This raises no error, as df contains all columns of the second set193 """194 columns = df.columns195 result = False196 for col_set in columns_sets:197 result = result or all(v in columns for v in col_set)198 if result is False:199 if len(columns_sets) == 1:200 helper_str = "the following columns: {}".format(columns_sets[0])201 else:202 helper_str = "one of the following sets of columns: {}".format(columns_sets)203 if raise_exception:204 raise ValidationError(205 "The dataframe is expected to have {}. Instead it has the following columns: {}".format(206 helper_str, list(df.columns)207 )208 )209 return result210def _assert_has_column_multiindex(211 df: pd.DataFrame,212 expected: Optional[bool] = True,213 nlevels: Optional[int] = 2,214 nlevels_atleast: Optional[int] = False,215 raise_exception: Optional[bool] = True,216) -> Optional[bool]:217 """Check if a :any:`pandas.DataFrame` has a :any:`pandas.MultiIndex` as columns.218 Parameters219 ----------220 df : :class:`~pandas.DataFrame`221 The dataframe to check222 expected : bool, optional223 Whether the df is expected to have MultiIndex column or not224 nlevels : int, optional225 If MultiIndex is expected, how many levels the MultiIndex columns should have226 nlevels_atleast : bool, optional227 Whether the MultiIndex has to have at least ``nlevels`` (``True``)228 or exactly match the number of levels (``False``)229 raise_exception : bool, optional230 Whether to raise an exception or return a bool value231 Returns232 -------233 ``True`` if ``df`` meets the expected column index format, ``False`` otherwise (if ``raise_exception`` is ``False``)234 Raises235 ------236 :exc:`~biopsykit.exceptions.ValidationError`237 if ``raise_exception` is ``True`` and ``df`` does not meet the expected column index format238 """239 return _multiindex_check_helper(240 df=df,241 idx_or_col="column",242 expected=expected,243 nlevels=nlevels,244 nlevels_atleast=nlevels_atleast,245 raise_exception=raise_exception,246 )247def _assert_has_columns_any_level(248 df: pd.DataFrame,249 columns_sets: Sequence[Union[List[_Hashable], List[str], pd.Index]],250 raise_exception: Optional[bool] = True,251) -> Optional[bool]:252 """Check if the dataframe has the expected set of column names at any level of a :any:`pandas.MultiIndex`.253 Parameters254 ----------255 df : :class:`~pandas.DataFrame`256 The dataframe to check257 columns_sets : list258 Column set of list of column sets to check259 raise_exception : bool, optional260 whether to raise an exception or return a bool value261 Returns262 -------263 ``True`` if ``df`` has the expected column names at any :any:`pandas.MultiIndex` level,264 ``False`` otherwise (if ``raise_exception`` is ``False``)265 Raises266 ------267 :exc:`~biopsykit.exceptions.ValidationError`268 if ``raise_exception`` is ``True`` and ``df`` does not have the expected column names269 Examples270 --------271 >>> df = pd.DataFrame()272 >>> df.columns = pd.MultiIndex.from_tuples([("Name", "col1"), ("Name", "col2")])273 >>> _assert_has_columns_any_level(df, [["col1", "col2"]])274 >>> # This raises no error, as df contains all columns in the seconds level275 """276 _assert_has_column_multiindex(df, expected=True, nlevels_atleast=True)277 column_levels = [np.array(df.columns.get_level_values(i)) for i in range(df.columns.nlevels)]278 result = False279 for columns in column_levels:280 for col_set in columns_sets:281 result = result or all(v in 
columns for v in col_set)282 if result is False:283 if len(columns_sets) == 1:284 helper_str = "the following columns: {}".format(columns_sets[0])285 else:286 helper_str = "one of the following sets of columns: {}".format(columns_sets)287 if raise_exception:288 raise ValidationError(289 "The dataframe is expected to have {} at any level of the MultiIndex. Instead it has the "290 "following MultiIndex columns: {}".format(helper_str, column_levels)291 )292 return result293def _assert_has_column_levels(294 df: pd.DataFrame,295 column_levels: Iterable[_Hashable],296 match_atleast: Optional[bool] = False,297 match_order: Optional[bool] = False,298 raise_exception: Optional[bool] = True,299) -> Optional[bool]:300 """Check if the dataframe has all column level names of a MultiIndex column.301 Parameters302 ----------303 df : :class:`~pandas.DataFrame`304 The dataframe to check305 column_levels : list306 Set of column level names to check307 match_atleast : bool, optional308 Whether the MultiIndex columns have to have at least the specified column levels (``True``)309 or exactly match the column levels (``False``)310 match_order : bool, optional311 Whether to also match the level order312 raise_exception : bool, optional313 Whether to raise an exception or return a bool value314 Returns315 -------316 ``True`` if ``df`` has the expected column level names, ``False`` otherwise (if ``raise_exception`` is ``False``)317 Raises318 ------319 :exc:`~biopsykit.exceptions.ValidationError`320 if ``raise_exception`` is ``True`` and ``df`` does not have the expected index level names321 """322 return _multiindex_level_names_helper(323 df,324 level_names=column_levels,325 idx_or_col="column",326 match_atleast=match_atleast,327 match_order=match_order,328 raise_exception=raise_exception,329 )330def _assert_value_range(331 data: Union[pd.DataFrame, pd.Series],332 value_range: Sequence[Union[int, float]],333 raise_exception: Optional[bool] = True,334) -> Optional[bool]:335 """Check if all values are within the specified range.336 Parameters337 ----------338 data : :class:`~pandas.DataFrame`339 data to check values340 value_range : tuple of numbers341 value range in the format [min_val, max_val]342 raise_exception : bool, optional343 Whether to raise an exception or return a bool value344 Returns345 -------346 ``True`` if all values in ``data`` are within ``value_range``, ``False`` otherwise347 (if ``raise_exception`` is ``False``)348 Raises349 ------350 :exc:`~biopsykit.exceptions.ValueRangeError`351 if ``raise_exception`` is ``True`` and any value of ``data`` is not within ``value_range``352 """353 max_val = np.nanmax(data)354 min_val = np.nanmin(data)355 if not (min_val >= value_range[0] and max_val <= value_range[1]):356 if raise_exception:357 raise ValueRangeError(358 "Some of the values are out of the expected range. "359 "Expected were values in the range {}, got values in the range {}. 
"360 "If values are part of questionnaire scores, "361 "you can convert questionnaire items into the correct range by calling "362 "`biopsykit.questionnaire.utils.convert_scale()`.".format(value_range, [min_val, max_val])363 )364 return False365 return True366def _assert_num_columns(367 data: pd.DataFrame, num_cols: Union[int, Sequence[int]], raise_exception: Optional[bool] = True368) -> Optional[bool]:369 """Check if dataframe has (any of) the required number of columns.370 Parameters371 ----------372 data : :class:`~pandas.DataFrame`373 data to check374 num_cols : int or list of int375 the required number of columns (or any of the required number of columns in case ``num_cols`` is a list)376 raise_exception : bool, optional377 Whether to raise an exception or return a bool value378 Returns379 -------380 ``True`` if ``data`` has the required number of columns, ``False`` otherwise (if ``raise_exception`` is ``False``)381 Raises382 ------383 :exc:`~biopsykit.exceptions.ValidationError`384 if ``raise_exception`` is ``True`` and ``data`` does not have the required number of columns385 """386 if isinstance(num_cols, int):387 num_cols = [num_cols]388 if not any(len(data.columns) == num for num in num_cols):389 if raise_exception:390 raise ValidationError(391 "The dataframe does not have the required number of columns. "392 "Expected were any of {} columns, but has {} columns.".format(num_cols, len(data.columns))393 )394 return False395 return True396def _assert_len_list(data: Sequence, length: int, raise_exception: Optional[bool] = True) -> Optional[bool]:397 """Check if a list has the required length.398 Parameters399 ----------400 data : list401 list to check402 length : int403 the required length or the list404 raise_exception : bool, optional405 Whether to raise an exception or return a bool value406 Returns407 -------408 ``True`` if ``data`` has the required length, ``False`` otherwise (if ``raise_exception`` is ``False``)409 Raises410 ------411 :exc:`~biopsykit.exceptions.ValidationError`412 if ``raise_exception`` is ``True`` and ``data`` does not have the required length413 """414 _assert_is_dtype(data, (list, tuple, np.ndarray))415 if len(data) != length:416 if raise_exception:417 raise ValidationError(418 "The list does not have the required length. 
"419 "Expected was length {}, but it has length {}.".format(length, len(data))420 )421 return False422 return True423def _assert_dataframes_same_length(424 df_list: Sequence[pd.DataFrame], raise_exception: Optional[bool] = True425) -> Optional[bool]:426 """Check if all dataframes have same length.427 Parameters428 ----------429 df_list : list430 list of dataframes to check431 raise_exception : bool, optional432 Whether to raise an exception or return a bool value433 Returns434 -------435 ``True`` if all dataframes in ``df_list`` have same length, ``False`` otherwise436 (if ``raise_exception`` is ``False``)437 Raises438 ------439 :exc:`~biopsykit.exceptions.ValidationError`440 if ``raise_exception`` is ``True`` and ``data`` does not have the required length441 """442 if len(set(len(df) for df in df_list)) != 1:443 if raise_exception:444 raise ValidationError("Not all dataframes have the same length!")445 return False446 return True447def _multiindex_level_names_helper_get_expected_levels(448 ac_levels: Sequence[str],449 ex_levels: Sequence[str],450 match_atleast: Optional[bool] = False,451 match_order: Optional[bool] = False,452) -> bool:453 if match_order:454 if match_atleast:455 ac_levels_slice = ac_levels[: len(ex_levels)]456 expected = ex_levels == ac_levels_slice457 else:458 expected = ex_levels == ac_levels459 else:460 if match_atleast:461 expected = all(level in ac_levels for level in ex_levels)462 else:463 expected = sorted(ex_levels) == sorted(ac_levels)464 return expected465def _multiindex_level_names_helper(466 df: pd.DataFrame,467 level_names: Iterable[_Hashable],468 idx_or_col: str,469 match_atleast: Optional[bool] = False,470 match_order: Optional[bool] = False,471 raise_exception: Optional[bool] = True,472) -> Optional[bool]:473 if isinstance(level_names, str):474 level_names = [level_names]475 ex_levels = list(level_names)476 if idx_or_col == "index":477 ac_levels = list(df.index.names)478 else:479 ac_levels = list(df.columns.names)480 expected = _multiindex_level_names_helper_get_expected_levels(ac_levels, ex_levels, match_atleast, match_order)481 if not expected:482 if raise_exception:483 raise ValidationError(484 "The dataframe is expected to have exactly the following {} level names {}, "485 "but it has {}".format(idx_or_col, level_names, ac_levels)486 )487 return False488 return True489def _multiindex_check_helper(490 df: pd.DataFrame,491 idx_or_col: str,492 expected: Optional[bool] = True,493 nlevels: Optional[int] = 2,494 nlevels_atleast: Optional[int] = False,495 raise_exception: Optional[bool] = True,496) -> Optional[bool]:497 has_multiindex, nlevels_act = _multiindex_check_helper_get_levels(df, idx_or_col)498 if has_multiindex is not expected:499 return _multiindex_check_helper_not_expected(idx_or_col, nlevels, nlevels_act, expected, raise_exception)500 if has_multiindex is True:501 if nlevels_atleast:502 expected = nlevels_act >= nlevels503 else:504 expected = nlevels_act == nlevels505 if not expected:506 if raise_exception:507 raise ValidationError(508 "The dataframe is expected to have a MultiIndex with {0} {1} levels. 
"509 "But it has a MultiIndex with {2} {1} levels.".format(nlevels, idx_or_col, nlevels_act)510 )511 return False512 return True513def _multiindex_check_helper_get_levels(df: pd.DataFrame, idx_or_col: str) -> Tuple[bool, int]:514 if idx_or_col == "index":515 has_multiindex = isinstance(df.index, pd.MultiIndex)516 nlevels_act = df.index.nlevels517 else:518 has_multiindex = isinstance(df.columns, pd.MultiIndex)519 nlevels_act = df.columns.nlevels520 return has_multiindex, nlevels_act521def _multiindex_check_helper_not_expected(522 idx_or_col: str, nlevels: int, nlevels_act: int, expected: bool, raise_exception: bool523) -> Optional[bool]:524 if not expected:525 if raise_exception:526 raise ValidationError(527 "The dataframe is expected to have a single level as {0}. "528 "But it has a MultiIndex with {1} {0} levels.".format(idx_or_col, nlevels_act)529 )530 return False531 if raise_exception:532 raise ValidationError(533 "The dataframe is expected to have a MultiIndex with {0} {1} levels. "534 "It has just a single normal {1} level.".format(nlevels, idx_or_col)535 )536 return False537def _assert_has_column_prefix(538 columns: Sequence[str], prefix: str, raise_exception: Optional[bool] = True539) -> Optional[bool]:540 """Check whether all columns start with the same prefix.541 Parameters542 ----------543 columns : list of str544 list of column names545 prefix : str546 expected prefix of all columns547 raise_exception : bool, optional548 Whether to raise an exception or return a bool value549 Returns550 -------551 ``True`` if ``columns`` all start with ``prefix``, ``False`` otherwise (if ``raise_exception`` is ``False``)552 Raises553 ------554 ValidationError555 if ``raise_exception`` is ``True`` and one of ``columns`` is not a string or does not start with ``prefix``556 """557 if prefix is None or len(prefix) == 0:558 if raise_exception:559 raise ValidationError("'prefix' is None or empty!")560 return False561 for col in columns:562 return _check_has_column_prefix_single_col(columns, col, prefix, raise_exception)563 return True564def _check_has_column_prefix_single_col(565 columns: Sequence[str], col: Any, prefix: str, raise_exception: bool566) -> Optional[bool]:567 if not _assert_is_dtype(col, str, raise_exception=False):568 if raise_exception:569 raise ValidationError("Column '{}' from {} is not a string!".format(col, columns))570 return False571 if not col.startswith(prefix):572 if raise_exception:573 raise ValidationError(574 "Column '{}' from {} are starting with the required prefix '{}'!".format(col, columns, prefix)575 )576 return False...


test_spire_models.py

Source: test_spire_models.py (GitHub)


import datetime
import pytest
from sqlalchemy.exc import DataError, IntegrityError
from tests.fixtures.factories import (
    SPIREApplicationFactory,
    SPIREBatchFactory,
    SPIREFootnoteEntryFactory,
    SPIREFootnoteFactory,
    SPIREGoodsIncidentFactory,
    SPIREIncidentFactory,
    SPIREMediaFootnoteCountryFactory,
    SPIREMediaFootnoteDetailFactory,
    SPIREReturnFactory,
    SPIREThirdPartyFactory,
    SPIREUltimateEndUserFactory,
)
def test_application_fk_constraint(app_with_db):
    with pytest.raises(IntegrityError):
        SPIREApplicationFactory(batch=None)
@pytest.mark.parametrize(
    'case_type,raise_exception',
    (
        ('HELLO', True),
        ('OIEL', False),
    ),
)
def test_application_check_constraint_1(app_with_db, case_type, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREApplicationFactory(case_type=case_type)
    else:
        SPIREApplicationFactory(case_type=case_type)
@pytest.mark.parametrize(
    'case_type,case_sub_type,raise_exception',
    (
        ('SIEL', 'PERMANENT', False),
        ('SIEL', 'TEMPORARY', False),
        ('SIEL', 'TRANSHIPMENT', False),
        ('SIEL', 'DEALER', True),
        ('OIEL', 'MEDIA', False),
        ('OIEL', 'MIL_DUAL', False),
        ('OIEL', 'UKCONTSHELF', False),
        ('OIEL', 'CRYPTO', False),
        ('OIEL', 'TEMPORARY', True),
        ('SITCL', None, False),
        ('OITCL', None, False),
        ('OGEL', None, False),
        ('GPL', None, False),
        ('TA_SIEL', None, False),
        ('TA_OIEL', None, False),
        ('GPL', 'MEDIA', True),
    ),
)
def test_application_check_constraint_2(app_with_db, case_type, case_sub_type, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREApplicationFactory(case_type=case_type, case_sub_type=case_sub_type)
    else:
        SPIREApplicationFactory(case_type=case_type, case_sub_type=case_sub_type)
@pytest.mark.parametrize(
    'withheld_status,raise_exception',
    (
        ('HELLO', True),
        ('PENDING', False),
        ('WITHHELD', False),
    ),
)
def test_application_check_constraint_3(app_with_db, withheld_status, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREApplicationFactory(withheld_status=withheld_status)
    else:
        SPIREApplicationFactory(withheld_status=withheld_status)
@pytest.mark.parametrize(
    'status,raise_exception',
    (
        ('HELLO', True),
        ('RELEASED', False),
        ('STAGING', False),
    ),
)
def test_batch_check_constraint_1(app_with_db, status, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREBatchFactory(status=status)
    else:
        SPIREBatchFactory(status=status)
@pytest.mark.parametrize(
    'batch_ref,start_date,end_date,raise_exception',
    (
        ('10', None, None, True),
        ('10', datetime.datetime(2020, 1, 1), datetime.datetime(2020, 2, 1), False),
        ('C10', None, None, False),
        ('10', datetime.datetime(2020, 1, 1), None, True),
    ),
)
def test_batch_check_constraint_2(app_with_db, batch_ref, start_date, end_date, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREBatchFactory(batch_ref=batch_ref, start_date=start_date, end_date=end_date)
    else:
        SPIREBatchFactory(batch_ref=batch_ref, start_date=start_date, end_date=end_date)
@pytest.mark.parametrize(
    'status,raise_exception',
    (
        ('HELLO', True),
        ('CURRENT', False),
        ('DELETED', False),
        ('ARCHIVED', False),
    ),
)
def test_footnotes_check_constraint(app_with_db, status, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREFootnoteFactory(status=status)
    else:
        SPIREFootnoteFactory(status=status)
@pytest.mark.parametrize(
    'goods_item_id,country_id,fnr_id,raise_exception',
    (
        (1, None, None, False),
        (None, None, None, False),
        (None, 1, None, False),
        (None, 1, 1, False),
        (1, 1, 1, True),
        (1, None, 1, True),
        (1, 1, None, True),
    ),
)
def test_footnote_entries_check_constraint_1(
    app_with_db, goods_item_id, country_id, fnr_id, raise_exception
):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREFootnoteEntryFactory(
                goods_item_id=goods_item_id, country_id=country_id, fnr_id=fnr_id
            )
    else:
        SPIREFootnoteEntryFactory(goods_item_id=goods_item_id, country_id=country_id, fnr_id=fnr_id)
@pytest.mark.parametrize(
    'version_no,raise_exception',
    ((1, False), (0, False), (-1, IntegrityError), ('HELLO', DataError)),
)
def test_footnote_entries_check_constraint_2(app_with_db, version_no, raise_exception):
    if raise_exception:
        with pytest.raises(raise_exception):
            SPIREFootnoteEntryFactory(version_no=version_no)
    else:
        SPIREFootnoteEntryFactory(version_no=version_no)
@pytest.mark.parametrize(
    'footnote,media_footnote_detail,mf_free_text,mf_grp_id,raise_exception',
    (
        (True, None, None, None, False),
        (None, True, None, 1, False),
        (None, None, 'HELLO', 1, False),
        (True, True, None, 1, True),
        (None, None, None, 1, True),
        (None, True, None, None, True),
        (None, None, None, None, True),
        (None, None, 'HELLO', None, True),
    ),
)
def test_footnote_entries_check_constraint_3(
    app_with_db, footnote, media_footnote_detail, mf_free_text, mf_grp_id, raise_exception
):
    if footnote:
        footnote = SPIREFootnoteFactory()
    if media_footnote_detail:
        media_footnote_detail = SPIREMediaFootnoteDetailFactory()
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREFootnoteEntryFactory(
                footnote=footnote,
                mf_grp_id=mf_grp_id,
                media_footnote_detail=media_footnote_detail,
                mf_free_text=mf_free_text,
            )
    else:
        SPIREFootnoteEntryFactory(
            footnote=footnote,
            mf_grp_id=mf_grp_id,
            media_footnote_detail=media_footnote_detail,
            mf_free_text=mf_free_text,
        )
@pytest.mark.parametrize(
    '_type,raise_exception',
    (
        ('HELLO', True),
        ('REFUSAL', False),
        ('WITHHELD', True),
        ('ISSUE', False),
        ('REVOKE', False),
        ('SURRENDER', False),
    ),
)
def test_goods_incident_check_constraint_1(app_with_db, _type, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREGoodsIncidentFactory(type=_type)
    else:
        SPIREGoodsIncidentFactory(type=_type)
@pytest.mark.parametrize(
    'version_no,raise_exception',
    ((1, False), (0, False), (-1, IntegrityError), ('HELLO', DataError)),
)
def test_goods_incident_check_constraint_2(app_with_db, version_no, raise_exception):
    if raise_exception:
        with pytest.raises(raise_exception):
            SPIREGoodsIncidentFactory(version_no=version_no)
    else:
        SPIREGoodsIncidentFactory(version_no=version_no)
@pytest.mark.parametrize(
    'status,raise_exception',
    (
        ('HELLO', True),
        ('RELEASED', True),
        ('STAGING', True),
        ('READY', False),
        ('FOR_ATTENTION', False),
    ),
)
def test_incident_check_constraint_1(app_with_db, status, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREIncidentFactory(status=status)
    else:
        SPIREIncidentFactory(status=status)
@pytest.mark.parametrize(
    'version_no,raise_exception',
    ((1, False), (0, False), (-1, IntegrityError), ('HELLO', DataError)),
)
def test_incident_check_constraint_2(app_with_db, version_no, raise_exception):
    if raise_exception:
        with pytest.raises(raise_exception):
            SPIREIncidentFactory(version_no=version_no)
    else:
        SPIREIncidentFactory(version_no=version_no)
@pytest.mark.parametrize(
    'case_type,ogl_id,raise_exception',
    (
        ('GPL', None, False),
        ('OGEL', 1, False),
        ('OGEL', None, True),
        ('GPL', 1, True),
    ),
)
def test_incident_check_constraint_3(app_with_db, case_type, ogl_id, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREIncidentFactory(case_type=case_type, ogl_id=ogl_id)
    else:
        SPIREIncidentFactory(case_type=case_type, ogl_id=ogl_id)
@pytest.mark.parametrize(
    'temporary_licence_flag,raise_exception',
    ((1, False), (0, False), (-1, IntegrityError), (2, IntegrityError), ('HELLO', DataError)),
)
def test_incident_check_constraint_4(app_with_db, temporary_licence_flag, raise_exception):
    if raise_exception:
        with pytest.raises(raise_exception):
            SPIREIncidentFactory(temporary_licence_flag=temporary_licence_flag)
    else:
        SPIREIncidentFactory(temporary_licence_flag=temporary_licence_flag)
@pytest.mark.parametrize(
    '_type,licence_id,raise_exception',
    (
        ('REFUSAL', None, False),
        ('REFUSAL', 1, True),
        (None, None, True),
        ('ISSUE', 1, False),
    ),
)
def test_incident_check_constraint_5(app_with_db, _type, licence_id, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREIncidentFactory(type=_type, licence_id=licence_id)
    else:
        SPIREIncidentFactory(type=_type, licence_id=licence_id)
@pytest.mark.parametrize(
    '_type,else_id,raise_exception',
    (
        ('SUSPENSION', 1, False),
        ('SUSPENSION', None, True),
        (None, None, True),
        ('ISSUE', 1, True),
    ),
)
def test_incident_check_constraint_6(app_with_db, _type, else_id, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREIncidentFactory(type=_type, else_id=else_id)
    else:
        SPIREIncidentFactory(type=_type, else_id=else_id)
@pytest.mark.parametrize(
    '_type,raise_exception',
    (
        ('SUSPENSION', False),
        ('REFUSAL', False),
        ('ISSUE', False),
        ('REDUCTION', False),
        ('REVOKE', False),
        ('DEREGISTRATION', False),
        ('SURRENDER', False),
        ('HELLO', True),
    ),
)
def test_incident_check_constraint_7(app_with_db, _type, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREIncidentFactory(type=_type)
    else:
        SPIREIncidentFactory(type=_type)
@pytest.mark.parametrize(
    'case_type,raise_exception',
    (
        ('SIEL', False),
        ('OIEL', False),
        ('SITCL', False),
        ('OITCL', False),
        ('OGEL', False),
        ('GPL', False),
        ('TA_SIEL', False),
        ('TA_OIEL', False),
        ('HELLO', True),
    ),
)
def test_incident_check_constraint_8(app_with_db, case_type, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREIncidentFactory(case_type=case_type)
    else:
        SPIREIncidentFactory(case_type=case_type)
@pytest.mark.parametrize(
    'case_type,case_sub_type,raise_exception',
    (
        ('SIEL', 'PERMANENT', False),
        ('SIEL', 'TEMPORARY', False),
        ('SIEL', 'TRANSHIPMENT', False),
        ('SIEL', 'DEALER', True),
        ('OIEL', 'MEDIA', False),
        ('OIEL', 'MIL_DUAL', False),
        ('OIEL', 'UKCONTSHELF', False),
        ('OIEL', 'CRYPTO', False),
        ('OIEL', 'TEMPORARY', True),
        ('SITCL', None, False),
        ('OITCL', None, False),
        ('OGEL', None, False),
        ('GPL', None, False),
        ('TA_SIEL', None, False),
        ('TA_OIEL', None, False),
        ('GPL', 'MEDIA', True),
    ),
)
def test_incident_check_constraint_9(app_with_db, case_type, case_sub_type, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREIncidentFactory(case_type=case_type, case_sub_type=case_sub_type)
    else:
        SPIREIncidentFactory(case_type=case_type, case_sub_type=case_sub_type)
@pytest.mark.parametrize(
    'licence_conversion_flag,raise_exception',
    ((1, False), (0, False), (-1, IntegrityError), (2, IntegrityError), ('HELLO', DataError)),
)
def test_incident_check_constraint_10(app_with_db, licence_conversion_flag, raise_exception):
    if raise_exception:
        with pytest.raises(raise_exception):
            SPIREIncidentFactory(licence_conversion_flag=licence_conversion_flag)
    else:
        SPIREIncidentFactory(licence_conversion_flag=licence_conversion_flag)
@pytest.mark.parametrize(
    'incorporation_flag,raise_exception',
    ((1, False), (0, False), (-1, IntegrityError), (2, IntegrityError), ('HELLO', DataError)),
)
def test_incident_check_constraint_11(app_with_db, incorporation_flag, raise_exception):
    if raise_exception:
        with pytest.raises(raise_exception):
            SPIREIncidentFactory(incorporation_flag=incorporation_flag)
    else:
        SPIREIncidentFactory(incorporation_flag=incorporation_flag)
@pytest.mark.parametrize(
    'mil_flag,raise_exception',
    ((1, False), (0, False), (-1, IntegrityError), (2, IntegrityError), ('HELLO', DataError)),
)
def test_incident_check_constraint_12(app_with_db, mil_flag, raise_exception):
    if raise_exception:
        with pytest.raises(raise_exception):
            SPIREIncidentFactory(mil_flag=mil_flag)
    else:
        SPIREIncidentFactory(mil_flag=mil_flag)
@pytest.mark.parametrize(
    'other_flag,raise_exception',
    ((1, False), (0, False), (-1, IntegrityError), (2, IntegrityError), ('HELLO', DataError)),
)
def test_incident_check_constraint_13(app_with_db, other_flag, raise_exception):
    if raise_exception:
        with pytest.raises(raise_exception):
            SPIREIncidentFactory(other_flag=other_flag)
    else:
        SPIREIncidentFactory(other_flag=other_flag)
@pytest.mark.parametrize(
    'torture_flag,raise_exception',
    ((1, False), (0, False), (-1, IntegrityError), (2, IntegrityError), ('HELLO', DataError)),
)
def test_incident_check_constraint_14(app_with_db, torture_flag, raise_exception):
    if raise_exception:
        with pytest.raises(raise_exception):
            SPIREIncidentFactory(torture_flag=torture_flag)
    else:
        SPIREIncidentFactory(torture_flag=torture_flag)
@pytest.mark.parametrize(
    'status_control,end_datetime,raise_exception',
    (
        ('C', None, False),
        ('A', None, True),
        ('Z', None, True),
        (None, datetime.datetime(2020, 1, 1), False),
        ('C', datetime.datetime(2020, 1, 1), True),
    ),
)
def test_media_footnote_countries_check_1(
    app_with_db, status_control, end_datetime, raise_exception
):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREMediaFootnoteCountryFactory(
                status_control=status_control, end_datetime=end_datetime
            )
    else:
        SPIREMediaFootnoteCountryFactory(status_control=status_control, end_datetime=end_datetime)
@pytest.mark.parametrize(
    'status_control,end_datetime,raise_exception',
    (
        ('C', None, False),
        ('A', None, True),
        ('Z', None, True),
        (None, datetime.datetime(2020, 1, 1), False),
        ('C', datetime.datetime(2020, 1, 1), True),
    ),
)
def test_media_footnote_detail_check_1(app_with_db, status_control, end_datetime, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREMediaFootnoteDetailFactory(
                status_control=status_control, end_datetime=end_datetime
            )
    else:
        SPIREMediaFootnoteDetailFactory(status_control=status_control, end_datetime=end_datetime)
@pytest.mark.parametrize(
    'footnote_type,raise_exception',
    (
        ('STANDARD', False),
        ('END_USER', False),
        ('HELLO', True),
    ),
)
def test_media_footnote_detail_check_2(app_with_db, footnote_type, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREMediaFootnoteDetailFactory(footnote_type=footnote_type)
    else:
        SPIREMediaFootnoteDetailFactory(footnote_type=footnote_type)
@pytest.mark.parametrize(
    'elr_version,raise_exception',
    ((10, None), (1, None), (0, IntegrityError), (-1, IntegrityError), ('HELLO', DataError)),
)
def test_return_check_constraint_1(app_with_db, elr_version, raise_exception):
    if raise_exception:
        with pytest.raises(raise_exception):
            SPIREReturnFactory(elr_version=elr_version)
    else:
        SPIREReturnFactory(elr_version=elr_version)
@pytest.mark.parametrize(
    'status,raise_exception',
    (
        ('HELLO', True),
        ('WITHDRAWN', False),
        ('ACTIVE', False),
    ),
)
def test_return_check_constraint_2(app_with_db, status, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREReturnFactory(status=status)
    else:
        SPIREReturnFactory(status=status)
@pytest.mark.parametrize(
    'status_control,raise_exception',
    (
        ('HELLO', True),
        ('A', False),
        ('P', False),
        ('C', False),
        ('D', True),
    ),
)
def test_return_check_constraint_3(app_with_db, status_control, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREReturnFactory(status_control=status_control)
    else:
        SPIREReturnFactory(status_control=status_control)
@pytest.mark.parametrize(
    'licence_type,ogl_id,raise_exception',
    (
        ('OGEL', 1, False),
        ('OGEL', None, True),
        ('OIEL', None, False),
        ('OITCL', None, False),
        ('OITCL', 1, True),
        ('HELLO', None, True),
    ),
)
def test_return_check_constraint_4_and_5(app_with_db, licence_type, ogl_id, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREReturnFactory(licence_type=licence_type, ogl_id=ogl_id)
    else:
        SPIREReturnFactory(licence_type=licence_type, ogl_id=ogl_id),
@pytest.mark.parametrize(
    'ultimate_end_user_flag,raise_exception',
    (
        (1, None),
        (0, None),
        (None, None),
        (-1, IntegrityError),
        (20, IntegrityError),
        ('HELLO', DataError),
    ),
)
def test_third_party_check_constraint_1(app_with_db, ultimate_end_user_flag, raise_exception):
    if raise_exception:
        with pytest.raises(raise_exception):
            SPIREThirdPartyFactory(ultimate_end_user_flag=ultimate_end_user_flag)
    else:
        SPIREThirdPartyFactory(ultimate_end_user_flag=ultimate_end_user_flag)
@pytest.mark.parametrize(
    'version_no,raise_exception',
    ((56, False), (1, False), (0, False), (-1, IntegrityError), ('HELLO', DataError)),
)
def test_third_party_check_constraint_2(app_with_db, version_no, raise_exception):
    if raise_exception:
        with pytest.raises(raise_exception):
            SPIREThirdPartyFactory(version_no=version_no)
    else:
        SPIREThirdPartyFactory(version_no=version_no)
@pytest.mark.parametrize(
    'version_no,raise_exception',
    ((56, False), (1, False), (0, False), (-1, IntegrityError), ('HELLO', DataError)),
)
def test_ultimate_end_user_check_constraint_1(app_with_db, version_no, raise_exception):
    if raise_exception:
        with pytest.raises(raise_exception):
            SPIREUltimateEndUserFactory(version_no=version_no)
    else:
        SPIREUltimateEndUserFactory(version_no=version_no)
@pytest.mark.parametrize(
    'status_control,raise_exception',
    (
        ('HELLO', True),
        ('A', False),
        ('P', False),
        ('C', False),
        ('D', False),
        ('X', True),
    ),
)
def test_ultimate_end_user_check_constraint_2(app_with_db, status_control, raise_exception):
    if raise_exception:
        with pytest.raises(IntegrityError):
            SPIREUltimateEndUserFactory(status_control=status_control)
    else:
...


test_data_nodes.py

Source: test_data_nodes.py (GitHub)


from django.test import TestCase
from rest_framework import serializers
from . import get_mock_context, get_mock_request
from api.serializers.data_nodes import URLDataNodeSerializer, \
    DataNodeSerializer
from api.serializers.data_objects import DataObjectSerializer
class TestURLDataNodeSerializer(TestCase):
    def testNoContents(self):
        raw_data = 7
        s = DataNodeSerializer(
            data={'contents': raw_data},
            context={'type': 'integer'})
        s.is_valid()
        data_object = s.save()
        s = URLDataNodeSerializer(
            data_object,
            context=get_mock_context())
        data = s.data

        # only uuid and url should be rendered
        self.assertTrue('uuid' in data)
        self.assertTrue('url' in data)
        self.assertEqual(len(data.keys()), 2)

class TestDataNodeSerializer(TestCase):
    def testValidateString(self):
        raw_data = "[3,[]]]"
        s = DataNodeSerializer(
            data={'contents': raw_data},
            context={'type': 'string'})
        self.assertTrue(s.is_valid(raise_exception=True))
    def testValidateInteger(self):
        raw_data = 7
        s = DataNodeSerializer(
            data={'contents': raw_data},
            context={'type': 'integer'})
        self.assertTrue(s.is_valid(raise_exception=True))
    def testValidateFloat(self):
        raw_data = 3.2
        s = DataNodeSerializer(
            data={'contents': raw_data},
            context={'type': 'float'})
        self.assertTrue(s.is_valid(raise_exception=True))
    def testValidateBoolean(self):
        raw_data = False
        s = DataNodeSerializer(
            data={'contents': raw_data},
            context={'type': 'boolean'})
        self.assertTrue(s.is_valid(raise_exception=True))
    def testValidateLists(self):
        for data in [['word', 'draw'],
                     [3.2, 2.3],
                     [7, 3],
                     [True, False]]:
            s = DataNodeSerializer(data={'contents': data})
            self.assertTrue(s.is_valid(raise_exception=True))
    def testValidateListOfLists(self):
        s = DataNodeSerializer(data={
            'contents': [[['word', 'drow'], ['word', 'drow']],
                         [['word', 'drow'], ['word', 'drow']]]},
            context={'type': 'string'})
        self.assertTrue(s.is_valid(raise_exception=True))
    def testValidateEmptyList(self):
        s = DataNodeSerializer(data={'contents': []},
                               context={'type': 'string'})
        self.assertTrue(s.is_valid(raise_exception=True))
    def testValidateDict(self):
        s = DataNodeSerializer(
            data={'contents': {'type': 'integer', 'contents': 3}},
            context={'type': 'integer'})
        self.assertTrue(s.is_valid(raise_exception=True))
    def testValidateListOfDicts(self):
        s = DataNodeSerializer(
            data={'contents':
                  [{'type': 'integer', 'contents': 3},
                   {'type': 'integer', 'contents': 4}]},
            context={'type': 'string'})
        self.assertTrue(s.is_valid(raise_exception=True))
    def testNegValidationError(self):
        s = DataNodeSerializer(
            data={'contents': [[["string"],
                                [{"not": "string"}]]]},
            context={'type': 'string'})
        with self.assertRaises(serializers.ValidationError):
            s.is_valid(raise_exception=True)
    def testNegValidateMixed(self):
        s = DataNodeSerializer(data={'contents': ['x', 3]})
        with self.assertRaises(serializers.ValidationError):
            s.is_valid(raise_exception=True)
    def testNegValidateNonuniformDepth(self):
        s = DataNodeSerializer(data={'contents': [3, [4, 5]]})
        with self.assertRaises(serializers.ValidationError):
            s.is_valid(raise_exception=True)
    def testNegValidateMismatchedTypes(self):
        s = DataNodeSerializer(data={'contents': [[3, 4], ['a', 'b']]})
        with self.assertRaises(serializers.ValidationError):
            s.is_valid(raise_exception=True)
    def testNegValidateMismatchedObjectTypes(self):
        s = DataNodeSerializer(
            data={'contents': [
                [{'type': 'integer', 'contents': 3}],
                [{'type': 'string', 'contents': 'a'}]
            ]},
            context={'type': 'string'})
        with self.assertRaises(serializers.ValidationError):
            s.is_valid(raise_exception=True)
    def testCreateString(self):
        raw_data = "[3,[]]]"
        s = DataNodeSerializer(
            data={'contents': raw_data},
            context={'type': 'string'})
        s.is_valid(raise_exception=True)
        m = s.save()
        data = DataNodeSerializer(m, context=get_mock_context()).data
        self.assertEqual(data['contents']['value'], raw_data)
    def testCreateInteger(self):
        raw_data = 7
        s = DataNodeSerializer(
            data={'contents': raw_data},
            context={'type': 'integer'})
        s.is_valid(raise_exception=True)
        m = s.save()
        data = DataNodeSerializer(m, context=get_mock_context()).data
        self.assertEqual(data['contents']['value'], raw_data)
    def testCreateFloat(self):
        raw_data = 3.2
        s = DataNodeSerializer(
            data={'contents': raw_data},
            context={'type': 'float'})
        s.is_valid(raise_exception=True)
        m = s.save()
        data = DataNodeSerializer(m, context=get_mock_context()).data
        self.assertEqual(data['contents']['value'], raw_data)
    def testCreateBoolean(self):
        raw_data = False
        s = DataNodeSerializer(
            data={'contents': raw_data},
            context={'type': 'boolean'})
        s.is_valid(raise_exception=True)
        m = s.save()
        data = DataNodeSerializer(m, context=get_mock_context()).data
        self.assertEqual(data['contents']['value'], raw_data)
    def testCreateReference(self):
        value = True
        s = DataObjectSerializer(data={'type': 'boolean', 'value': value})
        s.is_valid(raise_exception=True)
        do = s.save()
        contents = {'uuid': do.uuid,
                    'url': 'http://127.0.0.1/data-objects/%s/' % do.uuid}
        s = DataNodeSerializer(
            data={'contents': contents},
            context={'type': 'boolean'})
        s.is_valid(raise_exception=True)
        m = s.save()
        self.assertEqual(m.data_object.value, value)
    def testCreateList(self):
        raw_data = ['word', 'draw']
        s = DataNodeSerializer(data={'contents': raw_data},
                               context={'type': 'string'})
        s.is_valid(raise_exception=True)
        m = s.save()
        data = DataNodeSerializer(m, context=get_mock_context()).data
        self.assertEqual(data['contents'][0]['value'], raw_data[0])
    def testCreateListOfLists(self):
        s = DataNodeSerializer(data={
            'contents': [[['word', 'drow'], ['word', 'drow']],
                         [['word', 'drow'], ['word', 'drow']]]},
            context={'type': 'string'})
        self.assertTrue(s.is_valid(raise_exception=True))
    def testCreateEmptyList(self):
        s = DataNodeSerializer(data={'contents': []},
                               context={'type': 'string'})
        s.is_valid(raise_exception=True)
        m = s.save()
        data = DataNodeSerializer(m, context=get_mock_context()).data
        self.assertEqual(data['contents'], [])
    def testCreateDict(self):
        raw_data = {'type': 'integer', 'value': 3}
        s = DataNodeSerializer(
            data={'contents': raw_data},
            context={'type': 'integer'})
        s.is_valid(raise_exception=True)
        m = s.save()
        data = DataNodeSerializer(m, context=get_mock_context()).data
        self.assertEqual(data['contents']['value'], raw_data['value'])
    def testCreateListOfDicts(self):
        raw_data = [{'type': 'integer', 'value': 3},
                    {'type': 'integer', 'value': 4}]
        s = DataNodeSerializer(
            data={'contents': raw_data},
            context={'type': 'integer'})
        s.is_valid(raise_exception=True)
        m = s.save()
        data = DataNodeSerializer(m, context=get_mock_context()).data
        self.assertEqual(data['contents'][0]['value'], raw_data[0]['value'])
        self.assertEqual(data['contents'][1]['value'], raw_data[1]['value'])
    def testDataToData(self):
        raw_data = "something"
        s = DataNodeSerializer(
            data={'contents': raw_data},
            context={'type': 'string',
                     'request': get_mock_request()})
        s.is_valid(raise_exception=True)
        s.save()
...
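Here raise_exception is the keyword argument of Django REST Framework's Serializer.is_valid(): with raise_exception=True, a failed validation raises serializers.ValidationError instead of just returning False and populating serializer.errors. A minimal sketch of that behaviour follows; ItemSerializer is a hypothetical serializer, and the settings.configure() boilerplate is only there so the example runs outside a full Django project (it assumes djangorestframework is installed).

# Sketch: is_valid(raise_exception=True) with a plain DRF serializer.
import django
from django.conf import settings

if not settings.configured:
    settings.configure(USE_TZ=True)  # minimal settings so DRF can be imported standalone
    django.setup()

from rest_framework import serializers

class ItemSerializer(serializers.Serializer):
    # Hypothetical serializer, not part of the test suite above.
    name = serializers.CharField(max_length=10)
    count = serializers.IntegerField(min_value=0)

s = ItemSerializer(data={'name': 'apple', 'count': -1})
try:
    # Raises instead of returning False because count violates min_value.
    s.is_valid(raise_exception=True)
except serializers.ValidationError as exc:
    print(exc.detail)  # e.g. {'count': [ErrorDetail(...)]}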


views.py

Source: views.py (GitHub)


from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned, ValidationError
from django.db import IntegrityError
from django.http import JsonResponse, Http404
from django.views.decorators.csrf import csrf_exempt
from rest_framework import viewsets
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.viewsets import ViewSet
from diet_app.serializers import *
from diet_app.models import *
class DiaryView(APIView):
    def get(self, request):
        serializer = DiarySerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        data = serializer.data
        status = 200 if data else 400
        return Response(data, status)
    def post(self, request):
        serializer = DiarySerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.create(serializer.validated_data)
        return Response(serializer.data)
class ActivityView(APIView):
    def get(self, request):
        serializer = ActivityGetSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        data = serializer.data
        status = 200 if data else 400
        return Response(data, status)
    def post(self, request):
        serializer = ActivityCreateSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.create(serializer.validated_data)
        return Response(serializer.data)
    def delete(self, request):
        serializer = ActivityDeleteSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.delete(serializer.validated_data)
        return Response(serializer.data)
class ActivitiesView(APIView):
    def get(self, request):
        serializer = ActivitiesListSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data)
class DisciplineView(APIView):
    def get(self, request):
        serializer = DisciplineSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data)
class DisciplinesView(APIView):
    def get(self, request):
        serializer = DisciplinesSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data)
class ProductView(APIView):
    def get(self, request):
        serializer = ProductGetSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data)
    def post(self, request):
        serializer = ProductCreateSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.create(serializer.validated_data)
        return Response(serializer.data)
class ProductsView(APIView):
    def get(self, request):
        serializer = ProductsGetSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data)
class IngredientView(APIView):
    def post(self, request):
        serializer = IngredientCreateSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.create(serializer.validated_data)
        return Response(serializer.data)
    def delete(self, request):
        serializer = IngredientDeleteSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.delete(serializer.validated_data)
        return Response(serializer.data)
class MealView(APIView):
    def get(self, request):
        serializer = MealGetSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data)
    def post(self, request):
        serializer = MealCreateSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.create(serializer.validated_data)
        return Response(serializer.data)
    def put(self, request):
        serializer = MealUpdateSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.update(serializer.data, serializer.validated_data)
        return Response(serializer.data)
    def delete(self, request):
        serializer = MealDeleteSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.delete(serializer.validated_data)
        return Response(serializer.data)
class MealTypeView(APIView):
    def get(self, request):
        serializer = MealTypeGetSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data)
    def post(self, request):
        serializer = MealTypeCreateSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.create(serializer.validated_data)
        return Response(serializer.data)
    def delete(self, request):
        serializer = MealTypeDeleteSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.delete(serializer.validated_data)
        return Response(serializer.data)
class MealTypesView(APIView):
    def get(self, request):
        serializer = MealTypesSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data)
class UserView(APIView):
    def post(self, request):
        serializer = UserCreateSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.create(serializer.validated_data)
        return Response(serializer.data)
    def put(self, request):
        serializer = UserUpdateSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.update(serializer.data, serializer.validated_data)
        return Response(serializer.data)
    def delete(self, request):
        serializer = UserDeleteSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.delete(serializer.validated_data)
        return Response(serializer.data)
class ProfileView(APIView):
    def post(self, request):
        serializer = ProfileGetSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data)
class WeightsView(APIView):
    def get(self, request):
        serializer = WeightListGetSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data)
class WeightView(APIView):
    def get(self, request):
        serializer = WeightGetSerializer(data=request.query_params)
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data)
    def post(self, request):
        serializer = WeightCreateSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.create(serializer.validated_data)
        return Response(serializer.data)
    def delete(self, request):
        serializer = WeightDeleteSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.delete(serializer.validated_data)
        return Response(serializer.data)
class LoginView(APIView):
    def post(self, request):
        serializer = LoginSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
...
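These views call is_valid(raise_exception=True) without any try/except because DRF's default exception handler converts the raised serializers.ValidationError into an HTTP 400 response for the client. A short sketch of that conversion, reusing the standalone-settings setup from the previous example (DiaryQuerySerializer is a hypothetical stand-in for DiarySerializer):

# Sketch: how a ValidationError from raise_exception=True becomes an HTTP 400.
import django
from django.conf import settings

if not settings.configured:
    settings.configure(USE_TZ=True)
    django.setup()

from rest_framework import serializers, status
from rest_framework.views import exception_handler

class DiaryQuerySerializer(serializers.Serializer):
    # Hypothetical stand-in for DiarySerializer in the snippet above.
    date = serializers.DateField()

serializer = DiaryQuerySerializer(data={'date': 'not-a-date'})
try:
    serializer.is_valid(raise_exception=True)
except serializers.ValidationError as exc:
    # Inside an APIView this is what DRF does for you automatically.
    response = exception_handler(exc, context={})
    print(response.status_code == status.HTTP_400_BAD_REQUEST)  # True
    print(response.data)  # field errors, e.g. for 'date'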


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub: from setting up the prerequisites and running your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run Slash automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.
