How to use the _filter method in assertpy

Best Python code snippet using assertpy_python

managers.py

Source:managers.py Github

copy

Full Screen

# --- snippet truncated above on the source page: tail of a filter-value
# casting function (shown here as it appeared, for reference only) ---
#     elif _filter.type == 'Datetime':
#         _filter.value = datetime.strptime(_filter.value, DATETIME_FORMAT)
#     elif _filter.type == 'Boolean':
#         _filter.value = cast_bool_from_str(_filter.value)


def add_result_filter(field_name, _filter, filter_expressions, project):
    """Append an ORM expression for an annotations/predictions result filter.

    Returns:
        'continue' when an expression was appended,
        'exit' when the filter value is not valid JSON (caller should return
        an empty queryset), or None when the operator is not handled here.
    """
    from django.db.models.expressions import RawSQL
    from tasks.models import Annotation, Prediction

    # New approach: substring match on the raw JSON text (contains)
    # instead of icontains on the related field.
    if flag_set('ff_back_2214_annotation_result_12052022_short', project.organization.created_by):
        _class = Annotation if field_name == 'annotations_results' else Prediction
        subquery = Exists(
            _class.objects
            .annotate(json_str=RawSQL('cast(result as text)', ''))
            .filter(Q(task=OuterRef('pk')) & Q(json_str__contains=_filter.value))
        )
        if _filter.operator in [Operator.EQUAL, Operator.NOT_EQUAL]:
            try:
                value = json.loads(_filter.value)
            # Narrowed from a bare `except:` — only JSON/type failures mean
            # "bad filter value"; anything else should propagate.
            except (json.JSONDecodeError, TypeError):
                return 'exit'
            q = Exists(_class.objects.filter(Q(task=OuterRef('pk')) & Q(result=value)))
            filter_expressions.append(q if _filter.operator == Operator.EQUAL else ~q)
            return 'continue'
        elif _filter.operator == Operator.CONTAINS:
            filter_expressions.append(Q(subquery))
            return 'continue'
        elif _filter.operator == Operator.NOT_CONTAINS:
            filter_expressions.append(~Q(subquery))
            return 'continue'
    # Old approach: icontains on the related result field.
    else:
        name = 'annotations__result' if field_name == 'annotations_results' else 'predictions__result'
        if _filter.operator in [Operator.EQUAL, Operator.NOT_EQUAL]:
            try:
                value = json.loads(_filter.value)
            except (json.JSONDecodeError, TypeError):
                return 'exit'
            q = Q(**{name: value})
            filter_expressions.append(q if _filter.operator == Operator.EQUAL else ~q)
            return 'continue'
        elif _filter.operator == Operator.CONTAINS:
            filter_expressions.append(Q(**{name + '__icontains': _filter.value}))
            return 'continue'
        elif _filter.operator == Operator.NOT_CONTAINS:
            filter_expressions.append(~Q(**{name + '__icontains': _filter.value}))
            return 'continue'


def add_user_filter(enabled, key, _filter, filter_expressions):
    """Append a user-id filter expression on `key` when `enabled` is true.

    Returns 'continue' when an expression was appended, else None.
    """
    if enabled and _filter.operator == Operator.CONTAINS:
        filter_expressions.append(Q(**{key: int(_filter.value)}))
        return 'continue'
    elif enabled and _filter.operator == Operator.NOT_CONTAINS:
        filter_expressions.append(~Q(**{key: int(_filter.value)}))
        return 'continue'
    elif enabled and _filter.operator == Operator.EMPTY:
        value = cast_bool_from_str(_filter.value)
        filter_expressions.append(Q(**{key + '__isnull': value}))
        return 'continue'


def apply_filters(queryset, filters, project):
    """Convert the data-manager filter conjunction into Django ORM expressions."""
    if not filters:
        return queryset
    # convert conjunction to orm statement
    filter_expressions = []
    custom_filter_expressions = load_func(settings.DATA_MANAGER_CUSTOM_FILTER_EXPRESSIONS)
    for _filter in filters.items:
        # we can also have annotations filters
        if not _filter.filter.startswith("filter:tasks:") or _filter.value is None:
            continue
        # django orm loop expression attached to column name
        preprocess_field_name = load_func(settings.PREPROCESS_FIELD_NAME)
        field_name, _ = preprocess_field_name(_filter.filter, project.only_undefined_field)
        # filter preprocessing, value type conversion, etc..
        preprocess_filter = load_func(settings.DATA_MANAGER_PREPROCESS_FILTER)
        _filter = preprocess_filter(_filter, field_name)
        # custom expressions for enterprise
        filter_expression = custom_filter_expressions(_filter, field_name, project)
        if filter_expression:
            filter_expressions.append(filter_expression)
            continue
        # annotators
        result = add_user_filter(field_name == 'annotators', 'annotations__completed_by', _filter, filter_expressions)
        if result == 'continue':
            continue
        # updated_by
        result = add_user_filter(field_name == 'updated_by', 'updated_by', _filter, filter_expressions)
        if result == 'continue':
            continue
        # annotations results & predictions results
        if field_name in ['annotations_results', 'predictions_results']:
            result = add_result_filter(field_name, _filter, filter_expressions, project)
            if result == 'exit':
                return queryset.none()
            elif result == 'continue':
                continue
        # annotation ids
        if field_name == 'annotations_ids':
            field_name = 'annotations__id'
            if 'contains' in _filter.operator:
                # convert string like "1 2,3" => [1,2,3]
                _filter.value = [int(value)
                                 for value in re.split(',|;| ', _filter.value)
                                 if value and value.isdigit()]
                _filter.operator = 'in_list' if _filter.operator == 'contains' else 'not_in_list'
            elif 'equal' in _filter.operator:
                ...  # snippet truncated here on the source page

Full Screen

Full Screen

person.py

Source:person.py Github

copy

Full Screen

import json
from datetime import timedelta
from typing import Dict, List, Optional, Tuple

from dateutil.relativedelta import relativedelta
from django.utils import timezone

from posthog.constants import NON_TIME_SERIES_DISPLAY_TYPES, TRENDS_CUMULATIVE, PropertyOperatorType
from posthog.models.cohort import Cohort
from posthog.models.entity import Entity
from posthog.models.filters import Filter
from posthog.models.filters.mixins.utils import cached_property
from posthog.models.person.sql import GET_ACTORS_FROM_EVENT_QUERY
from posthog.models.property import Property
from posthog.models.team import Team
from posthog.queries.actor_base_query import ActorBaseQuery
from posthog.queries.trends.trend_event_query import TrendsEventQuery


def _handle_date_interval(filter: Filter) -> Filter:
    """Return a copy of *filter* with date_to clamped to one interval after date_from.

    adhoc date handling. parsed differently with django orm
    """
    date_from = filter.date_from or timezone.now()
    data: Dict = {}
    if filter.interval == "month":
        data.update({"date_to": (date_from + relativedelta(months=1) - timedelta(days=1)).strftime("%Y-%m-%d")})
    elif filter.interval == "week":
        data.update({"date_to": (date_from + relativedelta(weeks=1) - timedelta(days=1)).strftime("%Y-%m-%d")})
    elif filter.interval == "day":
        data.update({"date_to": (date_from).strftime("%Y-%m-%d 23:59:59")})
    elif filter.interval == "hour":
        data.update({"date_to": date_from + timedelta(hours=1)})
    return filter.with_data(data)


class TrendsActors(ActorBaseQuery):
    """Actor query for a single trends entity, optionally narrowed by breakdown."""

    entity: Entity
    _filter: Filter

    def __init__(self, team: Team, entity: Optional[Entity], filter: Filter, **kwargs):
        if not entity:
            raise ValueError("Entity is required")
        # Non-cumulative time-series displays need the interval-based date clamp.
        if filter.display != TRENDS_CUMULATIVE and filter.display not in NON_TIME_SERIES_DISPLAY_TYPES:
            filter = _handle_date_interval(filter)
        super().__init__(team, filter, entity, **kwargs)

    @cached_property
    def aggregation_group_type_index(self):
        if self.entity.math == "unique_group":
            return self.entity.math_group_type_index
        return None

    def actor_query(self, limit_actors: Optional[bool] = True) -> Tuple[str, Dict]:
        """Build the (SQL, params) pair that selects actors matching this trend."""
        if self._filter.breakdown_type == "cohort" and self._filter.breakdown_value != "all":
            # Narrow to members of the selected cohort via an extra property group.
            cohort = Cohort.objects.get(pk=self._filter.breakdown_value, team_id=self._team.pk)
            self._filter = self._filter.with_data(
                {
                    "properties": self._filter.property_groups.combine_properties(
                        PropertyOperatorType.AND, [Property(key="id", value=cohort.pk, type="cohort")]
                    ).to_dict()
                }
            )
        elif (
            self._filter.breakdown_type
            and isinstance(self._filter.breakdown, str)
            and isinstance(self._filter.breakdown_value, str)
        ):
            if self._filter.using_histogram:
                # Histogram buckets are encoded as a JSON [lower, upper) pair.
                lower_bound, upper_bound = json.loads(self._filter.breakdown_value)
                breakdown_props = [
                    Property(
                        key=self._filter.breakdown,
                        value=lower_bound,
                        operator="gte",
                        type=self._filter.breakdown_type,
                        group_type_index=self._filter.breakdown_group_type_index
                        if self._filter.breakdown_type == "group"
                        else None,
                    ),
                    Property(
                        key=self._filter.breakdown,
                        value=upper_bound,
                        operator="lt",
                        type=self._filter.breakdown_type,
                        group_type_index=self._filter.breakdown_group_type_index
                        if self._filter.breakdown_type == "group"
                        else None,
                    ),
                ]
            else:
                breakdown_props = [
                    Property(
                        key=self._filter.breakdown,
                        value=self._filter.breakdown_value,
                        type=self._filter.breakdown_type,
                        group_type_index=self._filter.breakdown_group_type_index
                        if self._filter.breakdown_type == "group"
                        else None,
                    )
                ]
            self._filter = self._filter.with_data(
                {
                    "properties": self._filter.property_groups.combine_properties(
                        PropertyOperatorType.AND, breakdown_props
                    ).to_dict()
                }
            )
        extra_fields: List[str] = ["distinct_id", "team_id"] if not self.is_aggregating_by_groups else []
        if self._filter.include_recordings:
            extra_fields += ["uuid"]
        events_query, params = TrendsEventQuery(
            filter=self._filter,
            team=self._team,
            entity=self.entity,
            should_join_distinct_ids=not self.is_aggregating_by_groups
            and not self._team.actor_on_events_querying_enabled,
            # TODO: this fails on sessions because `$session_id` also comes from the event query, causing ambiguity :$
            extra_event_properties=["$window_id", "$session_id"] if self._filter.include_recordings else [],
            extra_fields=extra_fields,
            using_person_on_events=self._team.actor_on_events_querying_enabled,
        ).get_query()
        matching_events_select_statement = (
            ", groupUniqArray(10)((timestamp, uuid, $session_id, $window_id)) as matching_events"
            if self._filter.include_recordings
            else ""
        )
        return (
            GET_ACTORS_FROM_EVENT_QUERY.format(
                id_field=self._aggregation_actor_field,
                matching_events_select_statement=matching_events_select_statement,
                events_query=events_query,
                limit="LIMIT %(limit)s" if limit_actors else "",
                offset="OFFSET %(offset)s" if limit_actors else "",
            ),
            {**params, "offset": self._filter.offset, "limit": 200},
        )

    @cached_property
    def _aggregation_actor_field(self) -> str:
        if self.is_aggregating_by_groups:
            group_type_index = self.entity.math_group_type_index
            return f"$group_{group_type_index}"
        else:
            ...  # snippet truncated here on the source page

Full Screen

Full Screen

PathNamesDenoised.py

Source:PathNamesDenoised.py Github

copy

Full Screen

from util import PROJECT_PATH, SEGMENTED

IMAGE_NUMBER = "1"  # 1, 2, 3, 4
FILE_EXTENSION = ".png"
DENOISED_FOLDER = "\\images\\denoised\\denoised_image" + IMAGE_NUMBER + "\\"
# bilateral, gaussian, median, tv, wavelet, fourier_butterworthLP, fourier_idealLP, fourier_gaussianLP
CHOSEN_FILTER = "nl_means"

DEFAULT_IMAGE = PROJECT_PATH + "\\images\\default_image" + IMAGE_NUMBER + FILE_EXTENSION


def _denoised_path(noise, level):
    """Build the path of a denoised image for a given noise type and intensity level.

    NOTE(review): paths use Windows-style '\\' separators as in the original;
    consider os.path.join / pathlib for portability — confirm the project
    targets Windows only.
    """
    return (PROJECT_PATH + DENOISED_FOLDER + CHOSEN_FILTER + "_filter\\"
            + noise + "\\" + noise + "_" + level + "_" + CHOSEN_FILTER + FILE_EXTENSION)


# Deduplicated: each constant was a copy-pasted expression differing only in
# noise type and level; values are identical to the originals.
GAUSSIAN_LOW = _denoised_path("gaussian", "low")
GAUSSIAN_MODERATE = _denoised_path("gaussian", "moderate")
GAUSSIAN_HIGH = _denoised_path("gaussian", "high")
LAPLACIAN_LOW = _denoised_path("laplacian", "low")
LAPLACIAN_MODERATE = _denoised_path("laplacian", "moderate")
LAPLACIAN_HIGH = _denoised_path("laplacian", "high")
POISSON_LOW = _denoised_path("poisson", "low")
POISSON_MODERATE = _denoised_path("poisson", "moderate")
POISSON_HIGH = _denoised_path("poisson", "high")
SPECKLE_LOW = _denoised_path("speckle", "low")
SPECKLE_MODERATE = _denoised_path("speckle", "moderate")
SPECKLE_HIGH = _denoised_path("speckle", "high")
UNIFORM_LOW = _denoised_path("uniform", "low")
UNIFORM_MODERATE = _denoised_path("uniform", "moderate")
UNIFORM_HIGH = _denoised_path("uniform", "high")
PEPPER_LOW = _denoised_path("pepper", "low")
PEPPER_MODERATE = _denoised_path("pepper", "moderate")
PEPPER_HIGH = _denoised_path("pepper", "high")
SALT_LOW = _denoised_path("salt", "low")
SALT_MODERATE = _denoised_path("salt", "moderate")
SALT_HIGH = _denoised_path("salt", "high")
SP_LOW = _denoised_path("salt&pepper", "low")
SP_MODERATE = _denoised_path("salt&pepper", "moderate")
# snippet truncated here on the source page (remaining constants not visible)

Full Screen

Full Screen

filter_constructor.py

Source:filter_constructor.py Github

copy

Full Screen

1def get_filter(**kwargs):2 import sys3 from collections import namedtuple4 from .exceptions import FilterKeywordError5 from .filter_api import recognized_kw, incompatible_pairs6 _filter = namedtuple('_filter',7 ['from_is_re',8 'from_string',9 'skip_above',10 'skip_below',11 'ignore_sign',12 'ignore_order',13 'mask',14 'num_lines',15 'to_is_re',...

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You could also refer to video tutorials over LambdaTest YouTube channel to get step by step demonstration from industry experts.

Run assertpy automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 automation test minutes FREE!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful