How to use the marked_obj method in Slash

Best Python code snippets using Slash

api_views.py

Source:api_views.py Github

copy

Full Screen

import random

from django.db.models import Max, Q, F
from django.http import JsonResponse
from django.utils import timezone
from rest_framework import generics
from rest_framework.response import Response
from rest_framework.views import APIView

from accounts.models import SubUser

from .serializers import *


def _random_movie_queryset():
    """Return a queryset holding one randomly chosen movie.

    Returns an empty queryset when no movies exist (the original version
    looped forever / raised on an empty table because ``max_id`` was None).
    """
    max_id = Movie.objects.aggregate(max_id=Max('id'))['max_id']
    if max_id is None:
        return Movie.objects.none()
    while True:
        # Primary keys may have gaps (deleted rows), so retry until a hit.
        picked = Movie.objects.filter(pk=random.randint(1, max_id))
        if picked:
            return picked


def _rating_row_for(movie, sub_user):
    """Fetch or create the ``LikeDisLikeMarked`` row for (movie, sub_user).

    Returns the ``(obj, created)`` pair from ``update_or_create`` and
    refreshes the ``updated`` timestamp. Shared by AddLike / AddDisLike /
    MyList, which previously each duplicated this call.
    """
    return LikeDisLikeMarked.objects.update_or_create(
        movie__name=movie.name,
        sub_user__name=sub_user.name,
        defaults={
            'movie': movie,
            'sub_user': sub_user,
            'updated': timezone.now(),
            'movie_id': movie.id,
            'sub_user_id': sub_user.id,
        },
    )


class MovieList(generics.ListAPIView):
    """Full movie list: id, name, horizontal_image_path, vertical_image."""
    queryset = Movie.objects.all()
    serializer_class = MovieSerializer


class HomePage(generics.ListAPIView):
    """Home screen data; the single movie returned is the large header item.

    Requires headers ``Authorization: Token <token>`` and ``subuserid``
    (no underscore in the header name).
    """
    serializer_class = HomePageSerializer

    def get_queryset(self):
        return _random_movie_queryset()

    def get_serializer_context(self):
        context = super().get_serializer_context()
        context['sub_user_id'] = self.request.META['HTTP_SUBUSERID']
        return context


class GenreSelectBefore(generics.ListAPIView):
    """Movies-tab landing data; the single movie returned is the header item.

    Requires headers ``Authorization: Token <token>`` and ``subuserid``.
    Returns id, name, horizontal_image_path, vertical_image plus a genre
    list via the serializer context.
    """
    serializer_class = GenreSelectBeforeSerializer

    def get_queryset(self):
        return _random_movie_queryset()

    def get_serializer_context(self):
        genre_list = ['한국 영화', '외국 영화', '어린이', '가족', '액션', '스릴러', 'SF',
                      '판타지', '범죄', '호러', '다큐멘터리', '로맨스', '코미디', '애니', '오리지널']
        context = super().get_serializer_context()
        context['genre_list'] = genre_list
        context['sub_user_id'] = self.request.META['HTTP_SUBUSERID']
        return context


class PreviewCellList(generics.ListAPIView):
    """Preview cells for the mobile app: 10 random movies (GET)."""
    serializer_class = PreviewCellListSerializer

    def get_queryset(self):
        return Movie.objects.order_by('?')[:10]


class GenreList(generics.ListAPIView):
    """All movie genres (id, name). Requires ``Authorization`` header."""
    queryset = Genre.objects.all()
    serializer_class = GenreListSerializer


class MovieListFirstGenre(generics.ListAPIView):
    """Movies of one genre, e.g. ``movie/genre/액션/list/``.

    Excludes movies the sub user disliked (like_or_dislike == 2) and caps
    the result at 18 rows.
    """
    serializer_class = MovieListSerializer

    def get_queryset(self):
        kind = self.kwargs.get('kind')
        sub_user_id = self.request.META['HTTP_SUBUSERID']
        return Movie.objects.filter(genre__name__icontains=kind).exclude(
            like__sub_user_id=sub_user_id,
            like__like_or_dislike=2,
        ).distinct()[:18]


class MarkedList(generics.ListAPIView):
    """The sub user's "my list" (marked) movies, at ``/movies/my_list/``.

    Requires headers ``Authorization: Token <token>`` and ``subuserid``.
    """
    serializer_class = MarkedListSerializer

    def get_queryset(self):
        sub_user_id = self.request.META['HTTP_SUBUSERID']
        return Movie.objects.filter(like__sub_user=sub_user_id, like__marked=True)


class MovieDetail(generics.RetrieveAPIView):
    """Movie detail page, at ``/movie/<id>``.

    Requires headers ``Authorization: Token <token>`` and ``subuserid``.
    The serializer adds per-sub-user fields (marked, like, to_be_continue,
    remaining_time, match_rate, ...) via the context below.
    """
    queryset = Movie.objects.all()
    serializer_class = MovieDetailSerializer

    def get_serializer_context(self):
        context = super().get_serializer_context()
        context['sub_user_id'] = self.request.META['HTTP_SUBUSERID']
        return context


class FollowUpMovies(generics.ListAPIView):
    """"Continue watching" list for the main screen, at ``/movie/followup/``.

    Requires the ``subuserid`` header.
    """
    serializer_class = MovieContinueSerializer

    def get_queryset(self):
        sub_user_id = self.request.META['HTTP_SUBUSERID']
        return MovieContinue.objects.filter(sub_user_id=sub_user_id)


class MovieListByGenre(APIView):
    """Movie lists per genre for ``/movies/list_by_genre/<genre_key>/``.

    Builds one shuffled list per related genre (skipping combinations with
    fewer than 3 movies) plus a list for the selected genre itself, always
    excluding movies the sub user disliked.
    """

    def get(self, request, format=None, **kwargs):
        vertical_genre = self.kwargs['genre_key']
        sub_user = self.request.META['HTTP_SUBUSERID']
        genre_list = [
            '한국', '미국', '어린이', '액션', '스릴러', 'sf', '판타지',
            '범죄', '호러', '다큐', '로맨스', '코미디', '애니', '외국',
        ]
        context = {}
        vertical_q = Q(genre__name__icontains=vertical_genre)
        for genre in genre_list:
            if vertical_genre == genre:
                continue
            horizontal_q = Q(genre__name__icontains=genre)
            base = Movie.objects.exclude(like__sub_user=sub_user, like__like_or_dislike=2)
            # "외국" (foreign) is modelled as "not Korean" rather than a real genre row.
            if vertical_genre == '외국':
                queryset = base.exclude(genre__name__icontains='한국').filter(horizontal_q)
            elif genre == '외국':
                queryset = base.exclude(genre__name__icontains='한국').filter(vertical_q)
            else:
                queryset = base.filter(vertical_q).filter(horizontal_q)
            if queryset.count() < 3:
                continue
            serializer_data = MovieListByGenreSerializer(queryset.distinct(), many=True).data
            random.shuffle(serializer_data)
            context[f'{genre}'] = serializer_data
        # BUG FIX: the original hardcoded like__sub_user=1 here; use the
        # requesting sub user, as everywhere else in this method.
        vertical_base = Movie.objects.exclude(like__sub_user=sub_user, like__like_or_dislike=2)
        if vertical_genre == '외국':
            vertical_queryset = vertical_base.exclude(genre__name__icontains='한국').distinct()
        else:
            vertical_queryset = vertical_base.filter(vertical_q).distinct()
        vertical_serializer_data = MovieListByGenreSerializer(
            vertical_queryset.order_by('?'), many=True).data
        random.shuffle(vertical_serializer_data)
        context[f'{vertical_genre}'] = vertical_serializer_data
        return Response(context)


class RecommendMovieAfterCreateSubUser(generics.ListAPIView):
    """60 random movies for the "pick 3 favourites" step after sign-up.

    Requires the ``Authorization: Token <token>`` header.
    """
    serializer_class = MovieSerializer

    def get_queryset(self):
        return Movie.objects.order_by('?')[:60]


class AddLike(APIView):
    """Toggle "like" for a movie.

    POST body: ``movieid``, ``subuserid`` (plus ``Authorization`` header).
    Returns 좋아요 등록 성공 / 좋아요 취소 성공 and keeps ``like_count`` in sync.
    """

    def post(self, request, *args, **kwargs):
        movie = Movie.objects.get(id=request.data.get('movieid'))
        sub_user = SubUser.objects.get(id=request.data.get('subuserid'))
        obj, created = _rating_row_for(movie, sub_user)
        if obj.like_or_dislike == 1:
            # Already liked -> cancel the like.
            obj.like_or_dislike = 0
            movie.like_count = F('like_count') - 1
            movie.save()
            obj.save()
            return JsonResponse({'response': "좋아요 취소 성공"}, status=201)
        obj.like_or_dislike = 1
        movie.like_count = F('like_count') + 1
        movie.save()
        obj.save()
        return JsonResponse({'response': "좋아요 등록 성공"}, status=201)


class AddDisLike(APIView):
    """Toggle "dislike" for a movie.

    POST body: ``movieid``, ``subuserid`` (plus ``Authorization`` header).
    Returns 싫어요 등록 성공 / 싫어요 취소 성공; mirrors AddLike with the
    opposite ``like_count`` adjustment.
    """

    def post(self, request, *args, **kwargs):
        movie = Movie.objects.get(id=request.data.get('movieid'))
        sub_user = SubUser.objects.get(id=request.data.get('subuserid'))
        obj, created = _rating_row_for(movie, sub_user)
        if obj.like_or_dislike == 2:
            # Already disliked -> cancel the dislike.
            obj.like_or_dislike = 0
            movie.like_count = F('like_count') + 1
            movie.save()
            obj.save()
            return JsonResponse({'response': "싫어요 취소 성공"}, status=201)
        obj.like_or_dislike = 2
        movie.like_count = F('like_count') - 1
        movie.save()
        obj.save()
        return JsonResponse({'response': "싫어요 등록 성공"}, status=201)


class MyList(APIView):
    """Toggle a movie in the sub user's "my list".

    POST body: ``movieid``, ``subuserid`` (plus ``Authorization`` header).
    Returns 찜목록 추가 성공 / 찜목록 제거 성공.
    """

    def post(self, request, *args, **kwargs):
        movie = Movie.objects.get(id=request.data.get('movieid'))
        sub_user = SubUser.objects.get(id=request.data.get('subuserid'))
        obj, created = _rating_row_for(movie, sub_user)
        if created or not obj.marked:
            obj.marked = True
            obj.save()
            return JsonResponse({'response': "찜목록 추가 성공"}, status=201)
        obj.marked = False
        obj.save()
        return JsonResponse({'response': "찜목록 제거 성공"}, status=201)


class BrandNewMovieList(generics.ListAPIView):
    """10 most recently added movies, at ``/movies/brand_new/``.

    Requires the ``subuserid`` header; disliked movies are excluded.
    """
    serializer_class = MovieListByGenreSerializer

    def get_queryset(self):
        sub_user = self.request.META['HTTP_SUBUSERID']
        return Movie.objects.exclude(
            like__sub_user=sub_user, like__like_or_dislike=2
        ).order_by('-created')[:10]


class BigSizeVideo(generics.RetrieveAPIView):
    """"Now streaming" hero video, at ``/movies/big_size_video/``.

    Requires the ``subuserid`` header.
    """
    serializer_class = BigSizeVideoSerializer

    # TODO(review): the featured movie is hardcoded to pk=354; consider
    # making this configurable or data-driven.
    def get_object(self):
        return Movie.objects.get(pk=354)

    def get_serializer_context(self):
        context = super().get_serializer_context()
        context['sub_user_id'] = self.request.META['HTTP_SUBUSERID']
        return context


class MostLikesMoives(generics.ListAPIView):
    """Top 10 movies by like count, at ``/movies/most_likes/``.

    Requires the ``subuserid`` header; disliked movies are excluded.
    """
    serializer_class = MovieListByGenreSerializer

    def get_queryset(self):
        sub_user_id = self.request.META['HTTP_SUBUSERID']
        return Movie.objects.exclude(
            like__sub_user=sub_user_id, like__like_or_dislike=2
        ).order_by('-like_count')[:10]


class SavePausedVideoTime(APIView):
    """Persist playback position, at ``/movies/paused_time/``.

    POST body: ``sub_user_id`` (int), ``movie_id`` (int),
    ``paused_time`` ("HH:MM:SS"). Returns ``{'saved': True}``.
    """

    def post(self, *args, **kwargs):
        data = self.request.data
        movie_obj = Movie.objects.get(pk=data.get('movie_id'))
        sub_user_obj = SubUser.objects.get(pk=data.get('sub_user_id'))
        progress, _ = MovieContinue.objects.get_or_create(
            movie=movie_obj, sub_user=sub_user_obj)
        progress.to_be_continue = data.get('paused_time')
        progress.save()
        return Response({'saved': True})


class Search(APIView):
    """Search movies by name, genre, or actor via ``?search_key=``.

    Matching is whitespace-insensitive: spaces are stripped from the key and
    ``\\s*`` is allowed between every character in the regex.
    """

    def get(self, *args, **kwargs):
        search_key = self.request.GET.get('search_key')
        if not search_key:
            return JsonResponse({'search_error': False}, status=403)
        compact = search_key.replace(" ", "")
        re_search_key = r'\s*'.join(compact)
        # The original ran the name query twice (first_movies / movies_name)
        # and left debug prints in; both removed — the union is unchanged.
        name_hits = Movie.objects.filter(name__iregex=re_search_key)
        genre_hits = Movie.objects.prefetch_related('genre').filter(
            genre__name__iregex=re_search_key)
        actor_hits = Movie.objects.prefetch_related('actors').filter(
            actors__name__iregex=re_search_key)
        queryset = (name_hits | genre_hits | actor_hits).distinct()
        data = MovieSerializer(queryset, many=True).data
        return JsonResponse({'movie_list': data}, status=201)


class MatchRate(APIView):
    # NOTE(review): the source is truncated here. As written, these
    # class-level attributes execute a DB query at import time and will
    # raise if SubUser pk=8 does not exist — this belongs inside a request
    # handler. Reproduced as found; confirm against the full file.
    sub_user_id = 8
    sub_user = SubUser.objects.get(pk=sub_user_id)

Full Screen

Full Screen

build_and_run.py

Source:build_and_run.py Github

copy

Full Screen

#!/usr/bin/env python3
"""Build, mark, analyze, and benchmark loop kernels across architectures.

Workflow per kernel/compiler/flags combination:
* build the kernel, instrument ("mark") its inner loop assembly,
* analyze with OSACA, IACA, and LLVM-MCA (and Ithemal when remote),
* run scaling measurements to find the best cycles/iteration,
* persist everything to ``build/<arch>/data.pkl``.
"""
import os
import pickle
import re
import shutil
import socket
import sys
from copy import deepcopy
from glob import glob
from itertools import chain
from pathlib import Path
from subprocess import STDOUT, CalledProcessError, check_call, check_output

import requests
from kerncraft.incore_model import (
    asm_instrumentation,
    iaca_analyse_instrumented_binary,
    llvm_mca_analyse_instrumented_assembly,
    osaca_analyse_instrumented_assembly,
    parse_asm,
)
from kerncraft.models import benchmark
from osaca.osaca import reduce_to_section

# Scaling of inner dimension for 1D, 2D and 3D kernels
# * consider kernels to be compiled with multiple compilers and different options
# * find best performing run (min cy/it over all runs)
# * statistics on performance overall (cy/it over inner length)
# * validate that L2 traffic is neglegible
# * measure other performance metrics, such as port utilization (optionally)
# * scale to highlevel iterations
# Collect inner loop body assembly for each kernel/compiler/options combination
# * analyze with OSACA, IACA and LLVM-MCA

# Hostname (regex) -> micro-architecture key in arch_info.
hosts_arch_map = {
    r"skylakesp2": "SKX",
    r"ivyep1": "IVB",
    r"naples1": "ZEN",
    r"rome1": "ZEN2",
    r"warmup": "TX2",
    r"qp4-node-[0-9]+": "A64FX",
}

# Per-architecture configuration: frequency-pinning commands, analyzer
# model names (None = analyzer unsupported), ISA, and per-compiler cflags.
arch_info = {
    "SKX": {
        "prepare": ["likwid-setFrequencies -f 2.4 -t 0".split()],
        "IACA": "SKX",
        "OSACA": "SKX",
        "LLVM-MCA": "-mcpu=skylake-avx512",
        "Ithemal": "skl",
        "isa": "x86",
        "perfevents": [],
        "cflags": {
            "icc": {
                "Ofast": (
                    "-Ofast -fno-alias -xCORE-AVX512 -qopt-zmm-usage=high -nolib-inline "
                    "-ffreestanding -falign-loops"
                ).split(),
                "O3": (
                    "-O3 -fno-alias -xCORE-AVX512 -qopt-zmm-usage=high -nolib-inline "
                    "-ffreestanding -falign-loops"
                ).split(),
                "O2": (
                    "-O2 -fno-alias -xCORE-AVX512 -qopt-zmm-usage=high -nolib-inline "
                    "-ffreestanding -falign-loops"
                ).split(),
                "O1": (
                    "-O1 -fno-alias -xCORE-AVX512 -qopt-zmm-usage=high -nolib-inline "
                    "-ffreestanding -falign-loops"
                ).split(),
            },
            "clang": {
                "Ofast": "-Ofast -march=skylake-avx512 -ffreestanding".split(),
                "O3": "-O3 -march=skylake-avx512 -ffreestanding".split(),
                "O2": "-O2 -march=skylake-avx512 -ffreestanding".split(),
                "O1": "-O1 -march=skylake-avx512 -ffreestanding".split(),
            },
            "gcc": {
                "Ofast": "-Ofast -march=skylake-avx512 -lm -ffreestanding -falign-loops=16".split(),
                "O3": "-O3 -march=skylake-avx512 -lm -ffreestanding -falign-loops=16".split(),
                "O2": "-O2 -march=skylake-avx512 -lm -ffreestanding -falign-loops=16".split(),
                "O1": "-O1 -march=skylake-avx512 -lm -ffreestanding -falign-loops=16".split(),
            },
        },
    },
    "IVB": {
        "prepare": ["likwid-setFrequencies -f 3.0 -t 0".split()],
        "IACA": "IVB",
        "OSACA": "IVB",
        "LLVM-MCA": "-mcpu=ivybridge",
        "Ithemal": "ivb",
        "isa": "x86",
        "perfevents": [],
        "cflags": {
            "icc": {
                "Ofast": (
                    "-Ofast -xAVX -fno-alias -nolib-inline -ffreestanding -falign-loops"
                ).split(),
                "O3": "-O3 -xAVX -fno-alias -nolib-inline -ffreestanding -falign-loops".split(),
                "O2": "-O2 -xAVX -fno-alias -nolib-inline -ffreestanding -falign-loops".split(),
                "O1": "-O1 -xAVX -fno-alias -nolib-inline -ffreestanding -falign-loops".split(),
            },
            "clang": {
                "Ofast": "-Ofast -mavx -ffreestanding".split(),
                "O3": "-O3 -mavx -ffreestanding".split(),
                "O2": "-O2 -mavx -ffreestanding".split(),
                "O1": "-O1 -mavx -ffreestanding".split(),
            },
            "gcc": {
                "Ofast": "-Ofast -march=corei7-avx -lm -ffreestanding -falign-loops=16".split(),
                "O3": "-O3 -march=corei7-avx -lm -ffreestanding -falign-loops=16".split(),
                "O2": "-O2 -march=corei7-avx -lm -ffreestanding -falign-loops=16".split(),
                "O1": "-O1 -march=corei7-avx -lm -ffreestanding -falign-loops=16".split(),
            },
        },
    },
    "ZEN": {
        "prepare": ["likwid-setFrequencies -f 2.3 -t 0".split()],
        "IACA": None,
        "OSACA": "ZEN1",
        "LLVM-MCA": "-mcpu=znver1",
        "Ithemal": None,
        "isa": "x86",
        "perfevents": [],
        "cflags": {
            "clang": {
                "Ofast": "-Ofast -march=znver1 -ffreestanding".split(),
                "O3": "-O3 -march=znver1 -ffreestanding".split(),
                "O2": "-O2 -march=znver1 -ffreestanding".split(),
                "O1": "-O1 -march=znver1 -ffreestanding".split(),
            },
            "gcc": {
                "Ofast": "-Ofast -march=znver1 -ffreestanding -falign-loops=16".split(),
                "O3": "-O3 -march=znver1 -ffreestanding -falign-loops=16".split(),
                "O2": "-O2 -march=znver1 -ffreestanding -falign-loops=16".split(),
                "O1": "-O1 -march=znver1 -ffreestanding -falign-loops=16".split(),
            },
            "icc": {
                "Ofast": (
                    "-Ofast -xAVX2 -fno-alias -nolib-inline -ffreestanding -falign-loops"
                ).split(),
                "O3": "-O3 -xAVX2 -fno-alias -nolib-inline -ffreestanding -falign-loops".split(),
                "O2": "-O2 -xAVX2 -fno-alias -nolib-inline -ffreestanding -falign-loops".split(),
                "O1": "-O1 -xAVX2 -fno-alias -nolib-inline -ffreestanding -falign-loops".split(),
            },
        },
    },
    "ZEN2": {
        "prepare": ["likwid-setFrequencies -f 2.35 -t 0".split()],
        "IACA": None,
        "OSACA": "ZEN2",
        "LLVM-MCA": "-mcpu=znver2",
        "Ithemal": None,
        "isa": "x86",
        "perfevents": [],
        "cflags": {
            "clang": {
                "Ofast": "-Ofast -march=znver2 -ffreestanding".split(),
                "O3": "-O3 -march=znver2 -ffreestanding".split(),
                "O2": "-O2 -march=znver2 -ffreestanding".split(),
                "O1": "-O1 -march=znver2 -ffreestanding".split(),
            },
            "gcc": {
                "Ofast": "-Ofast -march=znver2 -ffreestanding -falign-loops=16".split(),
                "O3": "-O3 -march=znver2 -ffreestanding -falign-loops=16".split(),
                "O2": "-O2 -march=znver2 -ffreestanding -falign-loops=16".split(),
                "O1": "-O1 -march=znver2 -ffreestanding -falign-loops=16".split(),
            },
            "icc": {
                "Ofast": (
                    "-Ofast -xAVX2 -fno-alias -nolib-inline -ffreestanding -falign-loops"
                ).split(),
                "O3": "-O3 -xAVX2 -fno-alias -nolib-inline -ffreestanding -falign-loops".split(),
                "O2": "-O2 -xAVX2 -fno-alias -nolib-inline -ffreestanding -falign-loops".split(),
                "O1": "-O1 -xAVX2 -fno-alias -nolib-inline -ffreestanding -falign-loops".split(),
            },
        },
    },
    "TX2": {
        "Clock [MHz]": 2200,  # reading out via perf. counters is not supported
        "IACA": None,
        "OSACA": "TX2",
        "assign_optimal_throughput": True,
        "LLVM-MCA": "-mcpu=thunderx2t99 -march=aarch64",
        "Ithemal": None,
        "isa": "aarch64",
        "perfevents": [],
        "cflags": {
            "clang": {
                "Ofast": "-Ofast -target aarch64-unknown-linux-gnu -ffreestanding".split(),
                "O3": "-O3 -target aarch64-unknown-linux-gnu -ffreestanding".split(),
                "O2": "-O2 -target aarch64-unknown-linux-gnu -ffreestanding".split(),
                "O1": "-O1 -target aarch64-unknown-linux-gnu -ffreestanding".split(),
            },
            "gcc": {
                "Ofast": "-Ofast -march=armv8.1-a -ffreestanding".split(),
                "O3": "-O3 -march=armv8.1-a -ffreestanding".split(),
                "O2": "-O2 -march=armv8.1-a -ffreestanding".split(),
                "O1": "-O1 -march=armv8.1-a -ffreestanding".split(),
            },
        },
    },
    "A64FX": {
        "Clock [MHz]": 1800,  # reading out via perf. counters is not supported
        "L2_volume_metric": "L1<->L2 data volume [GBytes]",
        "IACA": None,
        "OSACA": "A64FX",
        "assign_optimal_throughput": False,
        "LLVM-MCA": "-mcpu=a64fx -march=aarch64",
        "Ithemal": None,
        "isa": "aarch64",
        "perfevents": [],
        "cflags": {
            "gcc": {
                "Ofast": "-Ofast -msve-vector-bits=512 -march=armv8.2-a+sve -ffreestanding".split(),
                "O3": "-O3 -msve-vector-bits=512 -march=armv8.2-a+sve -ffreestanding".split(),
                "O2": "-O2 -msve-vector-bits=512 -march=armv8.2-a+sve -ffreestanding".split(),
                "O1": "-O1 -msve-vector-bits=512 -march=armv8.2-a+sve -ffreestanding".split(),
            },
            "clang": {
                "Ofast": "-Ofast -target aarch64-unknown-linux-gnu -ffreestanding".split(),
                "O3": "-O3 -target aarch64-unknown-linux-gnu -ffreestanding".split(),
                "O2": "-O2 -target aarch64-unknown-linux-gnu -ffreestanding".split(),
                "O1": "-O1 -target aarch64-unknown-linux-gnu -ffreestanding".split(),
            },
        },
    },
}


def get_current_arch():
    """Map the current hostname to an arch_info key, or None if unknown."""
    hostname = socket.gethostname()
    if hostname in hosts_arch_map:
        return hosts_arch_map[hostname]
    for matchstr, arch in hosts_arch_map.items():
        if re.match(matchstr, hostname):
            return arch
    # raise KeyError(f"{hostname} not matched in hosts_arch_map.")
    return None


def get_kernels(kernels=None):
    """Return kernel names from ``kernels/*.c``, excluding the dummy kernel.

    If *kernels* is given, discovered names are appended to it.
    """
    if kernels is None:
        kernels = []
    for f in glob("kernels/*.c"):
        f = f.rsplit(".", 1)[0].split("/", 1)[1]
        if f == "dummy":
            continue
        kernels.append(f)
    return kernels


# Columns:
# arch
# kernel
# compiler
# cflags_name
# element_size
# pointer_increment
# IACA_raw
# IACA_scaled [dict with cy/it]
# IACA_scaled_max [float with cy/it]
# OSACA_raw
# OSACA_scaled [dict with cy/it]
# OSACA_scaled_max [float with cy/it]
# LLVM-MCA_raw
# LLVM-MCA_scaled [dict with cy/it]
# LLVM-MCA_scaled_max [float with cy/it]
# best_length
# best_runtime [cy/it]
# L2_traffic [B/it]
# allruns [list (length, repetitions, cy/it, L2 B/it)]
# perfevents [dict event: counter/it]
def build_mark_run_all_kernels(measurements=True, osaca=True, iaca=True, llvm_mca=True):
    """Build, instrument, analyze, and (locally) measure every kernel.

    Results are cached and incrementally re-saved to
    ``build/<arch>/data.pkl``; rows already holding a result are skipped.
    """
    arch = get_current_arch()
    if arch is None:
        arches = arch_info.keys()
        islocal = False
    else:
        islocal = True
        arches = [arch]
        ainfo = arch_info.get(arch)
        if "prepare" in ainfo:
            for cmd in ainfo["prepare"]:
                check_call(cmd)
    for arch in arches:
        ainfo = arch_info.get(arch)
        print(arch)
        data_path = Path(f"build/{arch}/data.pkl")
        if data_path.exists():
            with data_path.open("rb") as f:
                data = pickle.load(f)
        else:
            data = []
        data_lastsaved = deepcopy(data)
        for compiler, compiler_cflags in ainfo["cflags"].items():
            if not shutil.which(compiler) and islocal:
                print(compiler, "not found in path! Skipping...")
                continue
            for cflags_name, cflags in compiler_cflags.items():
                for kernel in get_kernels():
                    print(
                        f"{kernel:<15} {arch:>5} {compiler:>5} {cflags_name:>6}",
                        end=": ",
                        flush=True,
                    )
                    # Reuse the existing result row, or start a fresh one.
                    # (The original wrapped this comprehension in a redundant
                    # list() call.)
                    row = [
                        r
                        for r in data
                        if r["arch"] == arch
                        and r["kernel"] == kernel
                        and r["compiler"] == compiler
                        and r["cflags_name"] == cflags_name
                    ]
                    if row:
                        row = row[0]
                    else:
                        row = {
                            "arch": arch,
                            "kernel": kernel,
                            "compiler": compiler,
                            "cflags_name": cflags_name,
                            "element_size": 8,
                        }
                        data.append(row)
                    # Build
                    print("build", end="", flush=True)
                    asm_path, exec_path, overwrite = build_kernel(
                        kernel,
                        arch,
                        compiler,
                        cflags,
                        cflags_name,
                        dontbuild=not islocal,
                    )
                    if overwrite:
                        # clear all measurement information
                        row["best_length"] = None
                        row["best_runtime"] = None
                        row["L2_traffic"] = None
                        row["allruns"] = None
                        row["perfevents"] = None
                    # Mark for IACA, OSACA and LLVM-MCA
                    print("mark", end="", flush=True)
                    try:
                        (
                            marked_asmfile,
                            marked_objfile,
                            row["pointer_increment"],
                            overwrite,
                        ) = mark(
                            asm_path,
                            compiler,
                            cflags,
                            isa=ainfo["isa"],
                            overwrite=overwrite,
                        )
                        row["marking_error"] = None
                    except ValueError as e:
                        row["marking_error"] = str(e)
                        print(":", e)
                        continue
                    if overwrite:
                        # clear all model generated information
                        for model in ["IACA", "OSACA", "LLVM-MCA", "Ithemal"]:
                            for k in ["ports", "prediction", "throughput", "cp", "lcd", "raw"]:
                                row[model + "_" + k] = None
                    for model in ["IACA", "OSACA", "LLVM-MCA", "Ithemal"]:
                        for k in ["ports", "prediction", "throughput", "cp", "lcd", "raw"]:
                            if model + "_" + k not in row:
                                row[model + "_" + k] = None
                    # Analyze with IACA, if requested and configured
                    if iaca and ainfo["IACA"] is not None:
                        print("IACA", end="", flush=True)
                        if not row.get("IACA_ports"):
                            row["IACA_raw"] = iaca_analyse_instrumented_binary(
                                marked_objfile, micro_architecture=ainfo["IACA"]
                            )
                            row["IACA_ports"] = {
                                k: v / (row["pointer_increment"] / row["element_size"])
                                for k, v in row["IACA_raw"]["port cycles"].items()
                            }
                            row["IACA_prediction"] = row["IACA_raw"]["throughput"] / (
                                row["pointer_increment"] / row["element_size"]
                            )
                            row["IACA_throughput"] = max(row["IACA_ports"].values())
                            print(". ", end="", flush=True)
                        else:
                            print("! ", end="", flush=True)
                    # Analyze with OSACA, if requested
                    if osaca:
                        print("OSACA", end="", flush=True)
                        if not row.get("OSACA_ports"):
                            row["OSACA_raw"] = osaca_analyse_instrumented_assembly(
                                marked_asmfile,
                                micro_architecture=ainfo["OSACA"],
                                assign_optimal_throughput=ainfo.get(
                                    "assign_optimal_throughput", True
                                ),
                            )
                            row["OSACA_ports"] = {
                                k: v / (row["pointer_increment"] / row["element_size"])
                                for k, v in row["OSACA_raw"]["port cycles"].items()
                            }
                            row["OSACA_prediction"] = row["OSACA_raw"]["throughput"] / (
                                row["pointer_increment"] / row["element_size"]
                            )
                            row["OSACA_throughput"] = max(row["OSACA_ports"].values())
                            row["OSACA_cp"] = row["OSACA_raw"]["cp_latency"] / (
                                row["pointer_increment"] / row["element_size"]
                            )
                            row["OSACA_lcd"] = row["OSACA_raw"]["lcd"] / (
                                row["pointer_increment"] / row["element_size"]
                            )
                            print(". ", end="", flush=True)
                        else:
                            print("! ", end="", flush=True)
                    # Analyze with LLVM-MCA, if requested and configured
                    if llvm_mca and ainfo["LLVM-MCA"] is not None:
                        print("LLVM-MCA", end="", flush=True)
                        if not row.get("LLVM-MCA_ports"):
                            row["LLVM-MCA_raw"] = llvm_mca_analyse_instrumented_assembly(
                                marked_asmfile,
                                micro_architecture=ainfo["LLVM-MCA"],
                                isa=ainfo["isa"],
                            )
                            row["LLVM-MCA_ports"] = {
                                k: v / (row["pointer_increment"] / row["element_size"])
                                for k, v in row["LLVM-MCA_raw"]["port cycles"].items()
                            }
                            row["LLVM-MCA_prediction"] = row["LLVM-MCA_raw"]["throughput"] / (
                                row["pointer_increment"] / row["element_size"]
                            )
                            row["LLVM-MCA_throughput"] = max(row["LLVM-MCA_ports"].values())
                            row["LLVM-MCA_cp"] = row["LLVM-MCA_raw"]["cp_latency"] / (
                                row["pointer_increment"] / row["element_size"]
                            )
                            row["LLVM-MCA_lcd"] = row["LLVM-MCA_raw"]["lcd"] / (
                                row["pointer_increment"] / row["element_size"]
                            )
                            print(". ", end="", flush=True)
                        else:
                            print("! ", end="", flush=True)
                    # Analyze with Ithemal, if not running local and configured
                    if ainfo["Ithemal"] is not None and not islocal:
                        print("Ithemal", end="", flush=True)
                        if not row.get("Ithemal_prediction"):
                            with open(marked_asmfile) as f:
                                parsed_code = parse_asm(f.read(), ainfo["isa"])
                            # BUG FIX: the original assigned this to `kernel`,
                            # shadowing the loop variable. The value is
                            # currently unused — TODO confirm intent.
                            marked_section = reduce_to_section(parsed_code, ainfo["isa"])
                            row["Ithemal_prediction"] = get_ithemal_prediction(
                                get_intel_style_code(marked_objfile),
                                model=ainfo["Ithemal"],
                            )
                            print(". ", end="", flush=True)
                        else:
                            print("! ", end="", flush=True)
                    if measurements and islocal:
                        # run measurements if on same hardware
                        print("scale", end="", flush=True)
                        if not row.get("allruns"):
                            # find best length with concurrent L2 measurement
                            scaling_runs, best = scalingrun(exec_path)
                            row["best_length"] = best[0]
                            row["best_runtime"] = best[2]
                            row["L2_traffic"] = best[3]
                            row["allruns"] = scaling_runs
                            print(f"({best[0]}). ", end="", flush=True)
                        else:
                            print(
                                f"({row.get('best_length', None)})! ",
                                end="",
                                flush=True,
                            )
                    print()
                    # dump to file
                    if data != data_lastsaved:
                        print("saving... ", end="", flush=True)
                        with data_path.open("wb") as f:
                            try:
                                pickle.dump(data, f)
                                data_lastsaved = deepcopy(data)
                                print("saved!")
                            except KeyboardInterrupt:
                                # Retry the dump uninterrupted, then exit.
                                f.seek(0)
                                f.truncate()  # added: avoid stale trailing bytes
                                pickle.dump(data, f)
                                print("saved!")
                                sys.exit()


def scalingrun(kernel_exec, total_iterations=25000000, lengths=range(8, 1 * 1024 + 1)):
    """Run *kernel_exec* over all inner *lengths* and find the fastest.

    Returns ``(results, best)`` where results is a list of
    ``(length, repetitions, cy/it, L2 B/it)`` and best is the entry with
    minimal cy/it.
    """
    # print('{:>8} {:>10} {:>10}'.format("x", "cy/it", "L2 B/it"))
    parameters = chain(*[[total_iterations // i, i] for i in lengths])
    # TODO use arch specific events and grooup
    r, o = perfctr(chain([kernel_exec], map(str, parameters)), 1, group="L2")
    global_infos = {}
    # NOTE(review): `(:?` at the start of this pattern looks like a typo for
    # `(?:`, but changing it would renumber the groups used below — kept as-is.
    for m in [re.match(r"(:?([a-z_\-0-9]+):)?([a-z]+): ([a-z\_\-0-9]+)", line) for line in o]:
        if m is not None:
            try:
                v = int(m.group(4))
            except ValueError:
                v = m.group(4)
            if m.group(1) is None:
                global_infos[m.group(3)] = v
            else:
                r[m.group(2)][m.group(3)] = v
    results = []
    best = (float("inf"), None)
    for markername, mmetrics in r.items():
        kernelname, repetitions, *_, xlength = markername.split("_")
        repetitions = int(repetitions)
        xlength = int(xlength)
        total_iterations = mmetrics["repetitions"] * mmetrics["iterations"]
        if "Clock [MHz]" in mmetrics:
            clock_hz = mmetrics["Clock [MHz]"] * 1e6
        else:
            # fall back to the statically configured clock for this arch
            clock_hz = arch_info[get_current_arch()]["Clock [MHz]"] * 1e6
        cyperit = mmetrics["Runtime (RDTSC) [s]"] * clock_hz / total_iterations
        # TODO use arch specific events and grooup
        if "L2D load data volume [GBytes]" in mmetrics:
            l2perit = (
                (
                    mmetrics["L2D load data volume [GBytes]"]
                    + mmetrics.get("L2D evict data volume [GBytes]", 0)
                )
                * 1e9
                / total_iterations
            )
        else:
            l2perit = (
                mmetrics[arch_info[get_current_arch()]["L2_volume_metric"]]
                * 1e9
                / total_iterations
            )
        results.append((xlength, repetitions, cyperit, l2perit))
        if cyperit < best[0]:
            best = cyperit, results[-1]
    return results, best[1]


def mark(asm_path, compiler, cflags, isa, overwrite=False):
    """Instrument the assembly at *asm_path* for IACA/OSACA/LLVM-MCA.

    NOTE(review): the original source is truncated partway through this
    function; only the visible portion is reproduced below. The caller
    expects it to return ``(marked_asmfile, marked_objfile,
    pointer_increment, overwrite)`` — the missing tail must supply that.
    """
    # Mark assembly for IACA, OSACA and LLVM-MCA
    marked_asm_path = Path(asm_path).with_suffix(".marked.s")
    if not marked_asm_path.exists() or overwrite:
        overwrite = True
        with open(asm_path) as fa, open(marked_asm_path, "w") as fm:
            try:
                _, pointer_increment = asm_instrumentation(fa, fm, isa=isa)
            except KeyboardInterrupt:
                # NOTE(review): swallows the interrupt and leaves
                # pointer_increment unbound — presumably the truncated tail
                # handles this; confirm against the full file.
                fm.close()
                marked_asm_path.unlink()
        print(". ", end="", flush=True)
    else:
        # use marked assembly and extract asm_block and pointer_increment
        with open(marked_asm_path) as f:
            marked_asm = f.read()
        m = re.search(r"pointer_increment=([0-9]+)", marked_asm)
        if m:
            pointer_increment = int(m.group(1))
        else:
            os.unlink(marked_asm_path)
            raise ValueError(
                "Could not find `pointer_increment=<byte increment>`. Please place into file."
            )
        print("! ", end="", flush=True)
    # ... original source truncated here ...
", end="", flush=True)568 # Compile marked assembly to object for IACA569 marked_obj = Path(asm_path).with_suffix(".marked.o")570 if not marked_obj.exists():571 check_call([compiler] + ["-c", str(marked_asm_path), "-o", str(marked_obj)])572 return str(marked_asm_path), str(marked_obj), pointer_increment, overwrite573def build_kernel(574 kernel,575 architecture,576 compiler,577 cflags,578 cflags_name,579 overwrite=False,580 dontbuild=False,581):582 build_path = f"build/{architecture}/{compiler}/{cflags_name}"583 kernel_assembly = f"{build_path}/{kernel}.s"584 kernel_object = f"{build_path}/{kernel}.o"585 executable = f"{build_path}/{kernel}"586 Path(build_path).mkdir(parents=True, exist_ok=True)587 if not overwrite:588 # Overwrite if any kernel specific file is missing589 overwrite = (590 not os.path.exists(kernel_object)591 or not os.path.exists(kernel_assembly)592 or not os.path.exists(executable)593 )594 if dontbuild and overwrite:595 raise ValueError("Must build, but not allowed.")596 if not Path(f"{build_path}/dummy.o").exists():597 check_call([compiler] + cflags + ["-c", "kernels/dummy.c", "-o", f"{build_path}/dummy.o"])598 if not Path(f"{build_path}/compiler_version").exists():599 # Document compiler version600 with open(f"{build_path}/compiler_version", "w") as f:601 f.write(check_output([compiler, "-v"], encoding="utf8", stderr=STDOUT))602 if overwrite:603 # build object + assembly604 check_call([compiler] + cflags + ["-c", f"kernels/{kernel}.c", "-o", kernel_object])605 check_call(606 [compiler] + cflags + ["-c", f"kernels/{kernel}.c", "-S", "-o", kernel_assembly]607 )608 # build main and link executable609 executable_cflags = [610 os.environ["LIKWID_DEFINES"],611 os.environ["LIKWID_INC"],612 os.environ["LIKWID_LIB"],613 ] + ["-Ofast"]614 check_call(615 [compiler]616 + executable_cflags617 + [618 f"{build_path}/dummy.o",619 kernel_object,620 "-DMAIN",621 f"kernels/{kernel}.c",622 "-llikwid",623 "-o",624 executable,625 ]626 )627 print(". 
", end="", flush=True)628 else:629 print("! ", end="", flush=True)630 return kernel_assembly, executable, overwrite631def perfctr(cmd, cores, group="MEM", code_markers=True, verbose=0):632 """633 Run *cmd* with likwid-perfctr and returns result as dict.634 *group* may be a performance group known to likwid-perfctr or an event string.635 if CLI argument cores > 1, running with multi-core, otherwise single-core636 """637 # Making sure likwid-perfctr is available:638 if benchmark.find_executable("likwid-perfctr") is None:639 print(640 "likwid-perfctr was not found. Make sure likwid is installed and found in PATH.",641 file=sys.stderr,642 )643 sys.exit(1)644 # FIXME currently only single core measurements support!645 perf_cmd = ["likwid-perfctr", "-f", "-O", "-g", group]646 cpu = "S0:0"647 if cores > 1:648 cpu += "-" + str(cores - 1)649 # Pinned and measured on cpu650 perf_cmd += ["-C", cpu]651 # code must be marked using likwid markers652 perf_cmd.append("-m")653 perf_cmd += cmd654 if verbose > 1:655 print(" ".join(perf_cmd))656 try:657 with benchmark.fix_env_variable("OMP_NUM_THREADS", None):658 output = check_output(perf_cmd).decode("utf-8").split("\n")659 except CalledProcessError as e:660 print("Executing benchmark failed: {!s}".format(e), file=sys.stderr)661 sys.exit(1)662 # TODO multicore output is different and needs to be considered here!663 results = {}664 cur_region_name = None665 cur_region_data = {}666 for line in output:667 if line == "STRUCT,Info,3" and cur_region_name is not None:668 results[cur_region_name] = cur_region_data669 cur_region_name = None670 cur_region_data = {}671 m = re.match(r"TABLE,Region ([a-z\-0-9_]+),", line)672 if m:673 cur_region_name = m.group(1)674 line = line.split(",")675 try:676 # Metrics677 cur_region_data[line[0]] = float(line[1])678 continue679 except ValueError:680 # Would not convert to float681 pass682 except IndexError:683 # Not a parable line (did not contain any commas)684 continue685 try:686 # Event counters687 if 
line[2] == "-" or line[2] == "nan":688 counter_value = 0689 else:690 counter_value = int(line[2])691 if re.fullmatch(r"[A-Z0-9_]+", line[0]) and re.fullmatch(692 r"[A-Z0-9]+(:[A-Z0-9]+=[0-9A-Fa-fx]+)*", line[1]693 ):694 cur_region_data.setdefault(line[0], {})695 cur_region_data[line[0]][line[1]] = counter_value696 continue697 except (IndexError, ValueError):698 pass699 if line[0].endswith(":") and len(line) == 3 and line[2] == "":700 # CPU information strings701 cur_region_data[line[0]] = line[1]702 continue703 results[cur_region_name] = cur_region_data704 return results, output705def remove_html_tags(text):706 return re.sub("<.*?>", "", text)707def get_intel_style_code(marked_objfile):708 # Disassembl with Intel syntax709 cmd = (710 "objdump -d --demangle --no-leading-addr --no-leading-headers --no-show-raw-insn "711 "--x86-asm-syntax=intel"712 ).split(" ") + [marked_objfile]713 asm_raw = check_output(cmd).decode()714 asm_raw = "\n".join([line.strip() for line in asm_raw.split("\n")])715 kernel_raw = asm_raw[716 asm_raw.index("mov\tebx, 111\nnop")717 + len("mov\tebx, 111\nnop") : asm_raw.index("mov\tebx, 222\nnop")718 ]719 kernel_lines = kernel_raw.split("\n")720 # Ignore label and jump721 return "\n".join(kernel_lines[:-2])722def get_ithemal_prediction(code, model="skl"):723 url = "http://3.18.198.23/predict"724 assert model in ["skl", "hsw", "ivb"]725 r = requests.post(url, {"code": code, "model": model})726 raw_text = remove_html_tags(r.text)727 m = re.search("Could not generate a prediction: (.*)", raw_text)728 if m:729 print(" error:", m.group(1).strip(), end=" ")730 return float("nan")731 m = re.search("Prediction: ([0-9.]+) cycles per iteration", raw_text)732 if m:733 return float(m.group(1))734 else:735 return float("nan")736def main():737 # Check for correct LLVM-MCA version738 try:739 llvm_mca = "LLVM version 12.0.0" in check_output(["llvm-mca", "-version"]).decode()740 except FileNotFoundError:741 llvm_mca = False742 
build_mark_run_all_kernels(measurements="--no-measurements" not in sys.argv, llvm_mca=llvm_mca)743 sys.exit()744if __name__ == "__main__":...

Full Screen

Full Screen

main.py

Source:main.py Github

copy

Full Screen

'''
some classes
'''
import json
from collections import deque


class DemoEntityLinking(object):
    """Render entity-linking results as annotated HTML for one page span."""

    def __init__(self, span, input=None, result=None):
        self.raw = input
        self.beg, self.end = span
        if result:
            self.entities = result['entities']
            self.entity_idxs = result['idx']
            self.entity_types = result['type']
            self.entity_spans = result['spans']
        self.__bad_entity = -1  # sentinel index: mention without a linked entity
        self.statistics = {}

    def decorate_entity(self, html_id, idx, entity_text):
        """Wrap one mention in a styled <span>, superscripted with its entity index."""
        eidx = self.entity_idxs[idx]
        e_type = self.entity_types[idx].lower()
        html_id = 'e_%d' % html_id
        is_new_entity = self._is_new_entity(idx)
        sup = eidx
        if not is_new_entity:
            e_class = 'marked_span olde'
        else:
            e_class = 'marked_span newe'
            if eidx == -1:
                sup = ''  # unlinked mentions get no superscript
        decorated_text = '<span id="%s" class="%s et_%s">%s<sup>%s</sup></span>' \
            % (html_id, e_class, e_type, entity_text, sup)
        return decorated_text

    def _format_desc(self, desc, name, id, type):
        """Format a knowledge-base description span; omit name/id when absent."""
        if not name or not id:
            return ('<span class="%s">%s'
                    '[<b>%s</b>]</span>') % (type.lower(), desc, type)
        return ('<span class="%s">%s'
                '[<i>%s</i> (%s), <b>%s</b>]. </span>') % (type.lower(), desc, name, id, type)

    def build_entity_infobox(self, html_id, eidx):
        """Build the hover/click infobox div (descriptions, synonyms, MeSH tree)."""
        box_id = 'bub_%d' % html_id
        infobox = '<div id="%s" class="bub_div">' % box_id
        hint = 'Click to edit.'
        chebi_desc_info = ''
        mesh_desc_info = ''
        wiki_desc_info = ''
        chebi_syno_info = ''
        mesh_syno_info = ''
        wiki_link_info = ''
        tree_info = {'mesh': [], 'wiki': []}
        tree_info = json.dumps(tree_info)
        if eidx != self.__bad_entity:
            entity = self.entities[str(eidx)]
            mesh_id = entity.get('mesh-id', None)
            chebi_id = entity.get('chebi-id', None)
            if chebi_id:
                chebi_name = entity['chebi-name']
                chebi_desc = entity['chebi-description']
                chebi_desc_info = self._format_desc(chebi_desc, chebi_name, chebi_id, 'ChEBI')
                chebi_syno_info = self._build_list_group(entity['chebi-synonyms'])
            if mesh_id:
                mesh_name = entity['mesh-name']
                mesh_desc = entity['mesh-description']
                mesh_desc_info = self._format_desc(mesh_desc, mesh_name, mesh_id, 'MeSH')
                if entity['mesh-synonyms']:
                    mesh_syno_info = self._build_list_group(entity['mesh-synonyms'])
                # Build one subtree per MeSH tree number.
                mesh_tn = entity['mesh-tn']
                tree_info = []
                for tn in mesh_tn:
                    mesh_parents = entity['mesh-parents'][tn]
                    leaf = [[mesh_id, mesh_name, tn]]
                    subtree = self._build_tree(leaf, mesh_parents)
                    tree_info += subtree
                # Extra tree information (from the Wikipedia is-a rule).
                # NOTE(review): original indentation lost in scrape — this
                # span is assumed to sit inside `if mesh_id:`; confirm.
                extra_parents = entity.get('extra-parent', [])
                extra_trees = []
                if extra_parents:
                    for e_parents in extra_parents:
                        if e_parents:
                            extra_tree = self._build_tree([], e_parents)
                            head = extra_tree[0]['text']
                            extra_tree[0].update({'text': head})
                            extra_trees += extra_tree
                tree_info = {'mesh': tree_info, 'wiki': extra_trees}
                tree_info = json.dumps(tree_info)
            # Wikipedia description and outgoing links.
            wiki_id = entity.get('wid', None)
            if wiki_id:
                wiki_title = entity.get('wiki-title', None)
                if wiki_title:
                    wiki_url = 'https://en.wikipedia.org/w/index.php?curid=%s' % wiki_id
                    wiki_desc = entity.get('wiki-text', None)
                    wiki_name = '<a href="%s"><i>%s</i></a>' % (wiki_url, wiki_title)
                    wiki_desc_info = self._format_desc(wiki_desc, wiki_name, wiki_id, 'Wikipedia')
                wiki_link_list = []
                for l in entity['wiki-links']:
                    s = '_'.join(l.split())
                    wiki_link_list.append('https://en.wikipedia.org/wiki/%s' % s)
                wiki_link_info = self._build_list_group(entity['wiki-links'], wiki_link_list)
        mesh_syno_info = '<div id="mesh">%s</div>' % mesh_syno_info
        chebi_syno_info = '<div id="chebi">%s</div>' % chebi_syno_info
        wiki_link_info = '<div id="wiki">%s</div>' % wiki_link_info
        all_desc_info = '%s%s%s' % (mesh_desc_info, chebi_desc_info, wiki_desc_info)
        if all_desc_info == '':
            all_desc_info = '<span>Click to edit</span>'
        all_desc_info = ('<div id="edesc"><p class="editable" data-type="textarea">'
                        '%s</p><hr></div>') % (all_desc_info,)
        all_syno_info = '<div id="esyno">%s%s%s</div>' % (mesh_syno_info, chebi_syno_info, wiki_link_info)
        tree_info = '<div id="etree">%s<hr></div>' % tree_info
        infobox += all_desc_info + all_syno_info + tree_info + '</div>'
        return infobox

    def _build_list_group(self, text, link=None):
        """Render *text* items as a Bootstrap list-group (anchors when *link* given)."""
        links = ''
        for i, t in enumerate(text):
            if not link:
                href = '<li class="list-group-item">%s</li>' % t
            else:
                href = '<a class="list-group-item" href="%s">%s</a>' % (link[i], t)
            links += href
        if not link:
            list_group = '<ul class="list-group">%s</ul>' % links
        else:
            list_group = '<div class="list-group">%s</div>' % links
        return list_group

    def _build_tree(self, leafs, nodes):
        """Assemble nested tree-node dicts from flat [id, name, tree-number] rows."""
        all_nodes = nodes + leafs
        fake_tree = {}
        tree_nodes = {}
        for node in all_nodes:
            fake_tree.update({node[2]: []})
            real_node = {'text': node[1],
                         'href': '#%s (%s)' % (node[2], node[0]),
                         'nodes': []}
            if node in leafs:
                # Leaf entries are highlighted red and carry no children.
                real_node = {'text': node[1],
                             'href': '#%s (%s)' % (node[2], node[0]),
                             'color': '#D9534F'}
            tree_nodes.update({node[2]: real_node})
        roots = []
        for node in all_nodes:
            steps = node[2].split('.')
            if len(steps) == 1:
                roots.append(node[2])
                continue
            parent_key = '.'.join(steps[0:-1])
            children = fake_tree.get(parent_key, [])
            children.append(node[2])
        # Bottom-up pass: attach childless nodes to their parents.
        leafs = deque()
        for root, children in fake_tree.items():
            if not len(children):
                leafs.append(root)
        while len(leafs):
            leaf = leafs.popleft()
            steps = leaf.split('.')
            if len(steps) == 1:
                continue
            parent_key = '.'.join(steps[0:-1])
            leaf_node = tree_nodes[leaf]
            children = tree_nodes[parent_key]['nodes']
            children.append(leaf_node)
            tree_nodes[parent_key].update({'nodes': children})
            fake_tree[parent_key].remove(leaf)
            if not len(fake_tree[parent_key]):
                leafs.append(parent_key)
        tree_info = []
        for root in roots:
            tree_nodes[root].update({'color': '#489cdf'})
            tree_info.append(tree_nodes[root])
        return tree_info

    def _is_span_inpage(self, w_beg, w_end):
        """True when the span begins inside the current page window."""
        if w_beg >= self.beg and w_beg < self.end:
            return True
        return False

    def _is_new_entity(self, i):
        """True when mention *i* is unlinked or links to neither MeSH nor ChEBI."""
        if self.entity_idxs[i] == self.__bad_entity:
            return True
        eidx = str(self.entity_idxs[i])
        mesh_id = self.entities[eidx].get('mesh-id', None)
        chebi_id = self.entities[eidx].get('chebi-id', None)
        has_mesh_link = False
        has_chebi_link = False
        if mesh_id: has_mesh_link = True
        if chebi_id: has_chebi_link = True
        if not has_mesh_link and not has_chebi_link:
            return True
        return False

    def do_stat(self):
        """Count (new, total) mentions per entity type into self.statistics."""
        total_len = len(self.entity_idxs)
        for i in xrange(total_len):  # NOTE(review): Python-2 code (xrange)
            is_new_entity = self._is_new_entity(i)
            e_type = self.entity_types[i].lower()
            new, total = self.statistics.get(e_type, (0, 0))
            if is_new_entity: new += 1
            total += 1
            self.statistics.update({e_type: (new, total)})
        return self.statistics

    def do_demo(self):
        """Interleave raw page text with decorated mentions and their infoboxes."""
        demo_text = ''
        pointer = 0
        html_id = 0
        for i, span in enumerate(self.entity_spans):
            if not self._is_span_inpage(*span):
                continue
            # Skip GPE mentions entirely.
            if self.entity_types[i].lower() == 'gpe':
                continue
            entity_beg = span[0] - self.beg
            entity_end = span[1] - self.beg
            demo_text += self.raw[pointer : entity_beg]
            entity_text = self.raw[entity_beg : entity_end + 1]
            decorated_text = self.decorate_entity(html_id, i,
                                                  entity_text)
            demo_text += decorated_text
            infobox_text = self.build_entity_infobox(html_id, self.entity_idxs[i])
            if isinstance(infobox_text, str):
                # NOTE(review): Python-2 str.decode — fails on Python 3.
                infobox_text = infobox_text.decode('utf-8')
            demo_text += infobox_text
            pointer = entity_end + 1
            html_id += 1
        demo_text += self.raw[pointer:]
        return demo_text


class DemoRelatinDiscovery(object):
    """Render relation-discovery sentences with subject/object highlighting."""

    def __init__(self, span, input=None, result=None):
        self.beg, self.end = span
        self.raw = input
        self.sf_sents = result

    def _is_span_inpage(self, w_beg, w_end):
        if w_beg >= self.beg and w_end < self.end:
            return True
        return False

    def decorate_sent(self, idx, spans, sent_text):
        """Wrap subject/object spans and the whole sentence in styled <span>s."""
        subj_span, obj_span = spans
        sx, sy = subj_span
        ox, oy = obj_span
        subj_text = sent_text[sx: sy]
        obj_text = sent_text[ox: oy]
        subj_text = '<span id="subj_%s" class="%s">%s</span>' % (idx, 'marked_subj', subj_text)
        obj_text = '<span id="obj_%s" class="%s">%s</span>' % (idx, 'marked_obj', obj_text)
        if sx < ox:
            demo_sent = sent_text[:sx]
            demo_sent += subj_text + sent_text[sy:ox]
            demo_sent += obj_text + sent_text[oy:]
        else:
            # NOTE(review): this branch also emits subj_text first even though
            # the object span precedes it — looks like a label swap; confirm.
            demo_sent = sent_text[:ox]
            demo_sent += subj_text + sent_text[oy:sx]
            demo_sent += obj_text + sent_text[sy:]
        demo_sent = '<span id="sf_sent_%s" class="%s">%s</span>' % (idx, 'marked_sf_sent', demo_sent)
        return demo_sent

    def _correct_span(self, span):
        """Shift an absolute span to page-local coordinates."""
        return (span[0] - self.beg, span[1] - self.beg)

    def do_demo(self):
        """Interleave raw text with decorated relation sentences."""
        demo_sf_text = ''
        pointer = 0
        idx = 0
        for sent in self.sf_sents:
            sent_span = sent['sentences']
            if not self._is_span_inpage(*sent_span):
                continue
            subj_span = self._correct_span(sent['chemical'])
            obj_span = self._correct_span(sent['disease'])
            sent_span = self._correct_span(sent_span)
            b, e = sent_span
            demo_sf_text += self.raw[pointer: b]
            sent_text = self.raw[b: e]
            xs, ys = subj_span
            xo, yo = obj_span
            subj_span = xs - b, ys - b
            obj_span = xo - b, yo - b
            spans = (subj_span, obj_span)
            d_sent_text = self.decorate_sent(idx,
                                             spans,
                                             sent_text)
            demo_sf_text += d_sent_text
            pointer = e
            idx += 1
        demo_sf_text += self.raw[pointer:]
        # NOTE(review): source truncated here ("..."); presumably
        # `return demo_sf_text` follows in the original file.

Full Screen

Full Screen

test_object_marker.py

Source:test_object_marker.py Github

copy

Full Screen

...46 def func(self):47 pass48 return Blap.func49@pytest.fixture(params=mark_factories)50def marked_obj(request, mark_name, mark_value):51 returned = request.param(mark_name, mark_value)52 return returned53@pytest.fixture(params=['mark_name'])54def mark_name(request):55 return request.param56@pytest.fixture(params=['mark_value', 1, True, 1.0])57def mark_value(request):...

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub — right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, i.e. Selenium, Cypress, TestNG, etc.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run Slash automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful