diff --git a/app/dependencies/__init__.py b/app/dependencies/__init__.py new file mode 100644 index 0000000..8fdda1c --- /dev/null +++ b/app/dependencies/__init__.py @@ -0,0 +1,11 @@ +# app/dependencies/__init__.py + +from datetime import datetime + +# 글로벌 변수 초기화 +globals_dict = {} +model_initializing = False # 모델 초기화 상태를 추적하는 전역 변수 +last_initialization_attempt = None # 마지막 초기화 시도 시간 + +def get_globals_dict(): + return globals_dict \ No newline at end of file diff --git a/app/router/evaluation.py b/app/router/evaluation.py new file mode 100644 index 0000000..edda854 --- /dev/null +++ b/app/router/evaluation.py @@ -0,0 +1,166 @@ +# app/routers/evaluation.py + +from fastapi import APIRouter, Depends, HTTPException +from app.schema.recommendation_schema import RecommendationEvaluationResponse +from app.services.evaluation.evaluator import evaluate_recommendation_model, evaluate_with_cross_validation +from app.services.evaluation.diversity_metrics import evaluate_recommendation_diversity +from app.services.model_trainer.hyperparameter_tuning import optimize_recommendation_parameters +from app.dependencies import get_globals_dict +from app.services.model_trainer import evaluate_model, optimize_recommendation_parameters + +# 각 알고리즘별 결과 평가 +from app.services.model_trainer.recommenation.basic import generate_recommendations +from app.services.model_trainer.recommenation.hybrid import generate_hybrid_recommendations + +import json +import logging +import numpy as np + +router = APIRouter( + prefix="/evaluate", + tags=["evaluation"], + responses={404: {"description": "Not found"}}, +) + +logger = logging.getLogger(__name__) + +@router.get("/basic", response_model=RecommendationEvaluationResponse) +async def evaluate_basic(globals_dict=Depends(get_globals_dict)): + """기본 추천 시스템 평가 수행""" + try: + metrics = evaluate_recommendation_model(globals_dict) + + # 추가 다양성 지표 계산 + df_model = globals_dict.get("df_model") + + if "test_interactions" in globals_dict and df_model is not None: 
+ test_interactions = globals_dict.get("test_interactions") + + # 추천 결과를 사용자-식당 매핑 딕셔너리로 변환 + recommendations_dict = {} + for user_id, rec_info in globals_dict.get("recommendations_results", {}).items(): + recommendations_dict[user_id] = [r['restaurant_id'] for r in rec_info.get('recommendations', [])] + + diversity_metrics = evaluate_recommendation_diversity( + recommendations_dict, + user_history=test_interactions, + restaurant_data=df_model + ) + + # 기본 지표와 다양성 지표 병합 + metrics.update(diversity_metrics) + + return {"metrics": metrics, "status": "success"} + + except Exception as e: + logger.error(f"평가 중 오류 발생: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.get("/cross-validation", response_model=RecommendationEvaluationResponse) +async def evaluate_with_cross_val(n_splits: int = 5, globals_dict=Depends(get_globals_dict)): + """교차 검증 평가 수행""" + try: + metrics = evaluate_with_cross_validation(globals_dict, n_splits=n_splits) + return {"metrics": metrics, "status": "success"} + + except Exception as e: + logger.error(f"교차 검증 평가 중 오류 발생: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.post("/optimize-params", response_model=dict) +async def optimize_parameters(n_trials: int = 30, timeout: int = 300, globals_dict=Depends(get_globals_dict)): + """추천 시스템 파라미터 최적화""" + try: + df_model = globals_dict.get("df_model") + user_features_df = globals_dict.get("user_features_df") + model_features = globals_dict.get("model_features") + + if df_model is None or user_features_df is None or model_features is None: + raise HTTPException(status_code=400, detail="필요한 데이터가 로드되지 않았습니다.") + + best_params = optimize_recommendation_parameters( + df_model, + user_features_df, + model_features, + n_trials=n_trials, + timeout=timeout + ) + + return {"best_parameters": best_params, "status": "success"} + + except Exception as e: + logger.error(f"파라미터 최적화 중 오류 발생: {e}", exc_info=True) + raise
HTTPException(status_code=500, detail=str(e)) + +@router.get("/compare-algorithms", response_model=dict) +async def compare_algorithms(globals_dict=Depends(get_globals_dict)): + """서로 다른 추천 알고리즘 비교""" + try: + df_model = globals_dict.get("df_model") + user_features_df = globals_dict.get("user_features_df") + + if df_model is None or user_features_df is None: + raise HTTPException(status_code=400, detail="필요한 데이터가 로드되지 않았습니다.") + + # 샘플 사용자 선택 + sample_users = df_model['user_id'].sample(min(20, df_model['user_id'].nunique())).unique() + + basic_metrics = {} + hybrid_metrics = {} + + for user_id in sample_users: + # 기본 추천 알고리즘 평가 + stacking_reg = globals_dict.get("stacking_reg") + scaler = globals_dict.get("scaler") + model_features = globals_dict.get("model_features") + + if all([stacking_reg, scaler, model_features]): + # 기본 추천 생성 + basic_result = json.loads(generate_recommendations( + df_model.copy(), + stacking_reg, + model_features, + user_id, + scaler, + user_features=user_features_df + )) + + # 하이브리드 추천 생성 + hybrid_result = generate_hybrid_recommendations( + df_model.copy(), + df_model.copy(), + user_id, + n=15, + alpha=0.7 + ) + + # 평가 지표 계산 및 저장 (여기서는 간소화를 위해 추천된 식당 수만 계산) + basic_metrics[user_id] = { + 'num_recommendations': len(basic_result.get('recommendations', [])), + 'is_new_user': basic_result.get('is_new_user', True) + } + + hybrid_metrics[user_id] = { + 'num_recommendations': len(hybrid_result.get('recommendations', [])), + 'is_new_user': hybrid_result.get('is_new_user', True) + } + + # 결과 정리 + result = { + 'basic_algorithm': { + 'avg_recommendations': np.mean([m['num_recommendations'] for m in basic_metrics.values()]), + 'new_user_ratio': np.mean([m['is_new_user'] for m in basic_metrics.values()]), + 'coverage': len(basic_metrics) / len(sample_users) if sample_users.size > 0 else 0 + }, + 'hybrid_algorithm': { + 'avg_recommendations': np.mean([m['num_recommendations'] for m in hybrid_metrics.values()]), + 'new_user_ratio': np.mean([m['is_new_user'] 
for m in hybrid_metrics.values()]), + 'coverage': len(hybrid_metrics) / len(sample_users) if sample_users.size > 0 else 0 + } + } + + return {"comparison_results": result, "status": "success"} + + except Exception as e: + logger.error(f"알고리즘 비교 중 오류 발생: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) \ No newline at end of file diff --git a/app/router/recommendation_api.py b/app/router/recommendation_api.py index 268c878..6536030 100644 --- a/app/router/recommendation_api.py +++ b/app/router/recommendation_api.py @@ -11,8 +11,11 @@ from app.services.preprocess.restaurant.data_loader import load_restaurant_json_files, load_user_json_files from app.services.preprocess.restaurant.preprocessor import preprocess_data from app.services.model_trainer import train_model -from app.services.model_trainer.recommendation import generate_recommendations +from app.services.model_trainer.recommenation.basic import generate_recommendations from app.services.preprocess.user.user_preprocess import user_preprocess_data # 사용자 데이터 전처리 모듈 추가 +from app.services.evaluation.evaluator import evaluate_recommendation_model +from app.services.model_trainer import train_model, optimize_recommendation_parameters +from app.dependencies import globals_dict, model_initializing, last_initialization_attempt from typing import List, Dict, Any from datetime import datetime @@ -20,11 +23,6 @@ logger = logging.getLogger("recommendation_api") router = APIRouter() -# 글로벌 변수 초기화 -globals_dict = {} -model_initializing = False # 모델 초기화 상태를 추적하는 전역 변수 -last_initialization_attempt = None # 마지막 초기화 시도 시간 - # 초기 데이터 로딩 및 모델 학습 def initialize_model(force=False): global globals_dict, model_initializing, last_initialization_attempt @@ -173,6 +171,32 @@ async def check_model_status(): return status +# 평가 지표 확인 엔드포인트 추가 +@router.get("/evaluate", response_model=Dict[str, Any]) +async def evaluate_model(): + """현재 추천 모델의 성능 지표를 계산합니다.""" + global globals_dict, model_initializing + + try: + # 
모델 초기화 상태 확인 + if not globals_dict or "stacking_reg" not in globals_dict or "df_model" not in globals_dict: + raise HTTPException( + status_code=503, + detail="모델이 초기화되지 않았습니다. 먼저 모델을 초기화하세요.", + headers={"Retry-After": "30"} + ) + + # 모델 평가 + metrics = evaluate_recommendation_model(globals_dict) + + return metrics + + except HTTPException: + raise + except Exception as e: + logger.error(f"모델 평가 중 오류 발생: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + # recommend 함수 내부 수정 @router.post("", response_model=Dict[str, Any], diff --git a/app/schema/recommendation_schema.py b/app/schema/recommendation_schema.py index 4983d21..6734de1 100644 --- a/app/schema/recommendation_schema.py +++ b/app/schema/recommendation_schema.py @@ -1,7 +1,7 @@ # app/models/recommendation_schema.py from pydantic import BaseModel, Field -from typing import List, Annotated +from typing import Dict, Any, List, Optional, Annotated class RecommendationItem(BaseModel): category_id: int @@ -30,3 +30,15 @@ class UserData(BaseModel): user_id: str # 선호 카테고리는 최소 1개, 최대 3개 (Pydantic v2 방식) preferred_categories: Annotated[List[str], Field(min_length=1, max_length=3)] + +class RecommendationEvaluationResponse(BaseModel): + metrics: Dict[str, Any] + status: str + +class HyperparameterOptimizationRequest(BaseModel): + n_trials: Optional[int] = 30 + timeout: Optional[int] = 300 + +class HyperparameterOptimizationResponse(BaseModel): + best_parameters: Dict[str, Any] + status: str \ No newline at end of file diff --git a/app/services/evaluation/__init__.py b/app/services/evaluation/__init__.py new file mode 100644 index 0000000..7f50127 --- /dev/null +++ b/app/services/evaluation/__init__.py @@ -0,0 +1,19 @@ +# app/services/evaluation/__init__.py + +from app.services.evaluation.evaluator import ( + evaluate_recommendation_model, + evaluate_with_cross_validation +) +from app.services.evaluation.data_generation import ( + create_test_interactions, + create_stratified_train_test_split 
+) +from app.services.evaluation.utils import default_empty_metrics + +__all__ = [ + 'evaluate_recommendation_model', + 'evaluate_with_cross_validation', + 'create_test_interactions', + 'create_stratified_train_test_split', + 'default_empty_metrics' +] \ No newline at end of file diff --git a/app/services/evaluation/data_generation.py b/app/services/evaluation/data_generation.py new file mode 100644 index 0000000..1a0c194 --- /dev/null +++ b/app/services/evaluation/data_generation.py @@ -0,0 +1,220 @@ +# app/services/evaluation/data_generation.py + +import logging +import numpy as np +import pandas as pd + +logger = logging.getLogger(__name__) + +def create_test_interactions(globals_dict): + """ + 더 현실적인 사용자-식당 상호작용 테스트 데이터 생성 + + Args: + globals_dict: 전역 변수 딕셔너리 + + Returns: + pd.DataFrame: 상호작용 테스트 데이터 + """ + try: + df_model = globals_dict.get("df_model") + user_features_df = globals_dict.get("user_features_df") + + if df_model is None or user_features_df is None: + logger.warning("테스트 데이터 생성에 필요한 데이터가 부족합니다.") + return pd.DataFrame(columns=['user_id', 'restaurant_id', 'score']) + + # 모든 사용자와 식당 데이터 가져오기 + all_users = user_features_df['user_id'].tolist() + all_restaurants = df_model['restaurant_id'].unique().tolist() + + # 사용자별 프로필 특성 (있다면) 활용 + user_preferences = {} + + if 'preferred_category' in user_features_df.columns: + for _, user_row in user_features_df.iterrows(): + user_id = user_row['user_id'] + preferred_category = user_row.get('preferred_category') + user_preferences[user_id] = preferred_category + + # 테스트 상호작용 생성 + test_interactions = [] + + for user_id in all_users[:50]: # 테스트를 위해 50명 사용자만 선택 + # 사용자별 특성에 맞는 식당 선택 + user_preferred_category = user_preferences.get(user_id) + + if user_preferred_category: + # 사용자 선호 카테고리가 있으면 해당 카테고리의 식당들 중에서 선택 + category_restaurants = df_model[ + df_model['category_id'] == user_preferred_category + ]['restaurant_id'].tolist() + + if category_restaurants: + # 선호 카테고리 식당 선택 (60%) + preferred_count = max(1, 
int(np.random.randint(3, 6) * 0.6)) + preferred_restaurants = np.random.choice( + category_restaurants, + size=min(preferred_count, len(category_restaurants)), + replace=False + ) + + # 기타 랜덤 식당 선택 (40%) + other_restaurants = list(set(all_restaurants) - set(category_restaurants)) + other_count = np.random.randint(1, 3) + other_selected = np.random.choice( + other_restaurants, + size=min(other_count, len(other_restaurants)), + replace=False + ) + + selected_restaurants = np.concatenate([preferred_restaurants, other_selected]) + else: + # 선호 카테고리 식당이 없으면 랜덤 선택 + selected_restaurants = np.random.choice( + all_restaurants, + size=np.random.randint(3, 6), + replace=False + ) + else: + # 사용자 선호도 정보가 없으면 랜덤 선택 + selected_restaurants = np.random.choice( + all_restaurants, + size=np.random.randint(3, 6), + replace=False + ) + + # 선택된 식당에 대한 가상 평점 생성 (3.0 ~ 5.0) + for restaurant_id in selected_restaurants: + # 선호 카테고리 식당은 더 높은 평점 확률 + if user_preferred_category and restaurant_id in category_restaurants: + score = np.random.uniform(4.0, 5.0) # 선호 카테고리 식당은 평점 높게 + else: + score = np.random.uniform(3.0, 5.0) # 기타 식당은 일반적인 분포 + + test_interactions.append({ + 'user_id': user_id, + 'restaurant_id': restaurant_id, + 'score': round(score, 1) # 소수점 첫째 자리까지 + }) + + return pd.DataFrame(test_interactions) + + except Exception as e: + logger.error(f"테스트 데이터 생성 중 오류: {e}", exc_info=True) + return pd.DataFrame(columns=['user_id', 'restaurant_id', 'score']) + +def create_stratified_train_test_split(df_model, test_ratio=0.2, random_state=42): + """ + 사용자와 식당 분포를 보존하는 층화 훈련/테스트 분할 + + Args: + df_model (DataFrame): 분할할 데이터프레임 + test_ratio (float): 테스트 데이터 비율 (0~1) + random_state (int): 랜덤 시드 + + Returns: + tuple: (훈련 데이터프레임, 테스트 데이터프레임) + """ + try: + from sklearn.model_selection import GroupShuffleSplit + + logger.debug("층화 데이터 분할 시작...") + + # 필수 컬럼 확인 + required_cols = ['user_id', 'restaurant_id'] + for col in required_cols: + if col not in df_model.columns: + logger.error(f"데이터에 필수 컬럼이 
없습니다: {col}") + raise ValueError(f"데이터에 필수 컬럼이 없습니다: {col}") + + # 사용자별 데이터 분할 + users = df_model['user_id'].unique() + logger.info(f"총 {len(users)}명의 사용자 데이터에 대해 분할 수행") + + # 사용자별 활동 수준 파악 (상호작용 횟수 기준) + user_activity = df_model.groupby('user_id').size() + low_activity = user_activity[user_activity <= 5].index.tolist() + medium_activity = user_activity[(user_activity > 5) & (user_activity <= 20)].index.tolist() + high_activity = user_activity[user_activity > 20].index.tolist() + + logger.debug(f"저활동 사용자: {len(low_activity)}명, 중활동: {len(medium_activity)}명, 고활동: {len(high_activity)}명") + + # 각 활동 그룹별로 층화 분할 + train_indices = [] + test_indices = [] + + for activity_group in [low_activity, medium_activity, high_activity]: + if not activity_group: + continue + + # 해당 활동 그룹의 사용자 데이터만 선택 + group_data = df_model[df_model['user_id'].isin(activity_group)] + group_users = group_data['user_id'].unique() + + # 사용자를 훈련/테스트로 분할 + np.random.seed(random_state) + test_size = int(len(group_users) * test_ratio) + test_users = np.random.choice(group_users, size=test_size, replace=False) + train_users = np.array([u for u in group_users if u not in test_users]) + + # 각 사용자별 데이터 인덱스 수집 + for user in train_users: + user_indices = group_data[group_data['user_id'] == user].index.tolist() + + # 활동량이 많은 사용자는 일부 데이터만 훈련에 사용 + if user in high_activity and len(user_indices) > 30: + np.random.shuffle(user_indices) + train_indices.extend(user_indices[:30]) # 최대 30개 상호작용만 사용 + else: + train_indices.extend(user_indices) + + for user in test_users: + user_indices = group_data[group_data['user_id'] == user].index.tolist() + + # 테스트용 사용자도 최대 10개 상호작용만 테스트에 사용 + if len(user_indices) > 10: + np.random.shuffle(user_indices) + test_indices.extend(user_indices[:10]) + else: + test_indices.extend(user_indices) + + # 훈련/테스트 데이터프레임 생성 + train_df = df_model.loc[train_indices].copy() + test_df = df_model.loc[test_indices].copy() + + # 균형 확인 + logger.info(f"훈련 데이터: {len(train_df)} 상호작용, 
{train_df['user_id'].nunique()} 사용자") + logger.info(f"테스트 데이터: {len(test_df)} 상호작용, {test_df['user_id'].nunique()} 사용자") + + # 점수(score) 컬럼이 없는 경우 가상의 점수 생성 (평가를 위해) + if 'score' not in test_df.columns: + logger.warning("테스트 데이터에 'score' 컬럼이 없어 가상 점수를 생성합니다") + + # 카테고리별 평균 점수 계산 (카테고리 정보가 있는 경우) + if 'category_id' in test_df.columns and 'category_id' in train_df.columns: + category_avg_scores = train_df.groupby('category_id')['score'].mean().to_dict() + default_score = train_df['score'].mean() + + # 카테고리 기반 가상 점수 할당 + test_df['score'] = test_df['category_id'].map( + lambda x: category_avg_scores.get(x, default_score) + ) + + # 약간의 무작위성 추가 (±0.5) + np.random.seed(random_state) + test_df['score'] += np.random.uniform(-0.5, 0.5, size=len(test_df)) + test_df['score'] = test_df['score'].clip(1, 5) # 1~5 범위로 제한 + else: + # 카테고리 정보가 없으면 3~5 사이 무작위 점수 + test_df['score'] = np.random.uniform(3, 5, size=len(test_df)) + + logger.debug("층화 데이터 분할 완료") + return train_df, test_df + + except Exception as e: + logger.error(f"층화 데이터 분할 중 오류 발생: {e}", exc_info=True) + # 기본 분할 방식 적용 + from sklearn.model_selection import train_test_split + train_df, test_df = train_test_split(df_model, test_size=test_ratio, random_state=random_state) + return train_df, test_df \ No newline at end of file diff --git a/app/services/evaluation/diversity_metrics.py b/app/services/evaluation/diversity_metrics.py new file mode 100644 index 0000000..f9ef416 --- /dev/null +++ b/app/services/evaluation/diversity_metrics.py @@ -0,0 +1,201 @@ +# app/services/evaluation/diversity_metrics.py + +import numpy as np +import pandas as pd +import scipy.stats +import logging + +logger = logging.getLogger(__name__) + +def evaluate_recommendation_diversity(recommendations, user_history=None, restaurant_data=None): + """ + 추천 결과의 다양성을 평가하는 함수 + + Args: + recommendations: 사용자별 추천 결과 딕셔너리 {user_id: [restaurant_ids]} + user_history: 사용자 과거 상호작용 데이터 (옵션) + restaurant_data: 식당 메타데이터 (옵션) + + Returns: + dict: 다양성 평가 결과 + """ + 
try: + diversity_metrics = {} + + # 추천 결과를 DataFrame으로 변환 + recommendations_list = [] + for user_id, rest_ids in recommendations.items(): + for rank, rest_id in enumerate(rest_ids): + recommendations_list.append({ + 'user_id': user_id, + 'restaurant_id': rest_id, + 'rank': rank + 1 + }) + + recs_df = pd.DataFrame(recommendations_list) + if recs_df.empty: + logger.warning("추천 결과가 없어 다양성 평가를 진행할 수 없습니다.") + return { + "category_diversity": 0, + "user_coverage": 0, + "item_coverage": 0, + "novelty": 0, + "serendipity": 0, + "intra_list_similarity": 0 + } + + # 1. 카테고리 다양성 평가 + if restaurant_data is not None and 'category_id' in restaurant_data.columns: + # 식당별 카테고리 매핑 + restaurant_categories = restaurant_data.set_index('restaurant_id')['category_id'].to_dict() + + # 추천된 식당에 카테고리 매핑 + recs_df['category_id'] = recs_df['restaurant_id'].map(restaurant_categories) + + # 카테고리 엔트로피 계산 + if 'category_id' in recs_df.columns and not recs_df['category_id'].isna().all(): + category_counts = recs_df['category_id'].value_counts(normalize=True) + category_entropy = scipy.stats.entropy(category_counts) + category_diversity = 1 - (1 / (1 + category_entropy)) # 0~1 범위로 정규화 + diversity_metrics['category_diversity'] = category_diversity + else: + diversity_metrics['category_diversity'] = 0 + else: + diversity_metrics['category_diversity'] = 0 + + # 2. 사용자 커버리지 (전체 사용자 중 추천을 받은 사용자 비율) + if user_history is not None: + all_users = user_history['user_id'].unique() + recommended_users = set(recs_df['user_id'].unique()) + user_coverage = len(recommended_users) / len(all_users) if len(all_users) > 0 else 0 + diversity_metrics['user_coverage'] = user_coverage + else: + diversity_metrics['user_coverage'] = 1.0 # 사용자 히스토리가 없으면 100%로 가정 + + # 3. 
아이템 커버리지 (전체 식당 중 추천된 식당 비율) + if restaurant_data is not None: + all_restaurants = restaurant_data['restaurant_id'].unique() + recommended_restaurants = set(recs_df['restaurant_id'].unique()) + item_coverage = len(recommended_restaurants) / len(all_restaurants) if len(all_restaurants) > 0 else 0 + diversity_metrics['item_coverage'] = item_coverage + else: + diversity_metrics['item_coverage'] = 0 + + # 4. 새로움 (Novelty) - 인기 있는 아이템보다 덜 알려진 아이템 추천 정도 + if user_history is not None and restaurant_data is not None: + # 식당별 인기도 (상호작용 횟수) 계산 + restaurant_popularity = user_history['restaurant_id'].value_counts() + total_interactions = len(user_history) + + # 각 식당의 희소성 = -log(인기도) + restaurant_rarity = restaurant_popularity.map( + lambda x: -np.log(x / total_interactions) if x > 0 else 0 + ) + + # 추천된 식당의 평균 희소성 (높을수록 새로운 항목을 추천) + recommended_rarity = [] + for rest_id in recs_df['restaurant_id'].unique(): + if rest_id in restaurant_rarity: + recommended_rarity.append(restaurant_rarity[rest_id]) + + novelty = np.mean(recommended_rarity) if recommended_rarity else 0 + + # 0~1 범위로 정규화 (높을수록 새로움이 큼) + max_possible_novelty = -np.log(1 / total_interactions) if total_interactions > 0 else 1 + normalized_novelty = novelty / max_possible_novelty if max_possible_novelty > 0 else 0 + + diversity_metrics['novelty'] = normalized_novelty + else: + diversity_metrics['novelty'] = 0 + + # 5. 
세렌디피티 (Serendipity) - 의외성, 예상치 못한 발견 + if user_history is not None and restaurant_data is not None and 'category_id' in restaurant_data.columns: + # 사용자별 과거 상호작용 카테고리 + user_categories = {} + for user_id in recs_df['user_id'].unique(): + user_hist = user_history[user_history['user_id'] == user_id] + user_rest_ids = user_hist['restaurant_id'].tolist() + + # 해당 식당들의 카테고리 추출 + user_rest_categories = [ + restaurant_categories.get(rest_id) + for rest_id in user_rest_ids + if rest_id in restaurant_categories + ] + + user_categories[user_id] = set(user_rest_categories) + + # 각 사용자의 추천 중 새로운 카테고리 비율 계산 + serendipity_scores = [] + + for user_id in recs_df['user_id'].unique(): + if user_id not in user_categories: + continue + + user_recs = recs_df[recs_df['user_id'] == user_id] + + # 추천된 식당들의 카테고리 + rec_categories = [ + restaurant_categories.get(rest_id) + for rest_id in user_recs['restaurant_id'] + if rest_id in restaurant_categories + ] + + # 사용자가 과거에 경험하지 않은 카테고리 비율 + new_categories = [cat for cat in rec_categories if cat not in user_categories[user_id]] + serendipity = len(new_categories) / len(rec_categories) if rec_categories else 0 + + serendipity_scores.append(serendipity) + + # 전체 사용자의 평균 세렌디피티 + diversity_metrics['serendipity'] = np.mean(serendipity_scores) if serendipity_scores else 0 + else: + diversity_metrics['serendipity'] = 0 + + # 6. 
추천 목록 내 유사성 (낮을수록 다양한 추천) + if restaurant_data is not None and 'category_id' in restaurant_data.columns: + intra_similarities = [] + + for user_id in recs_df['user_id'].unique(): + user_recs = recs_df[recs_df['user_id'] == user_id] + + # 사용자에게 추천된 식당의 카테고리 목록 + rec_categories = [ + restaurant_categories.get(rest_id) + for rest_id in user_recs['restaurant_id'] + if rest_id in restaurant_categories + ] + + # 카테고리 쌍 간의 유사성 계산 (동일 카테고리면 1, 다르면 0) + similarities = [] + for i in range(len(rec_categories)): + for j in range(i+1, len(rec_categories)): + if rec_categories[i] is not None and rec_categories[j] is not None: + sim = 1 if rec_categories[i] == rec_categories[j] else 0 + similarities.append(sim) + + # 사용자별 평균 유사성 + user_similarity = np.mean(similarities) if similarities else 0 + intra_similarities.append(user_similarity) + + # 전체 사용자의 평균 유사성 (낮을수록 다양함) + avg_intra_similarity = np.mean(intra_similarities) if intra_similarities else 0 + + # 다양성 지표로 변환 (1 - 유사성) + diversity_metrics['intra_list_similarity'] = 1 - avg_intra_similarity + else: + diversity_metrics['intra_list_similarity'] = 0 + + logger.info(f"추천 다양성 평가 완료: {diversity_metrics}") + return diversity_metrics + + except Exception as e: + logger.error(f"추천 다양성 평가 중 오류: {e}", exc_info=True) + return { + "category_diversity": 0, + "user_coverage": 0, + "item_coverage": 0, + "novelty": 0, + "serendipity": 0, + "intra_list_similarity": 0 + } \ No newline at end of file diff --git a/app/services/evaluation/evaluator.py b/app/services/evaluation/evaluator.py new file mode 100644 index 0000000..e4a759f --- /dev/null +++ b/app/services/evaluation/evaluator.py @@ -0,0 +1,247 @@ +# app/services/evaluation/evaluator.py + +import logging +import json +import pandas as pd +import numpy as np +from app.services.model_trainer.recommenation.basic import generate_recommendations +from app.services.evaluation.metrics import calculate_ranking_metrics +from app.services.evaluation.data_generation import ( + 
create_test_interactions, + create_stratified_train_test_split +) +from app.services.evaluation.utils import ( + validate_required_objects, + default_empty_metrics +) + +logger = logging.getLogger(__name__) + +def evaluate_recommendation_model(globals_dict, df_model=None, user_features_df=None): + """ + 추천 모델 평가 함수 + + Args: + globals_dict: 전역 변수 딕셔너리 + df_model: 식당 데이터 (옵션) + user_features_df: 사용자 특성 데이터 (옵션) + + Returns: + dict: 추천 시스템 평가 지표 + """ + try: + # 필요 객체 유효성 검증 + is_valid, missing_objects = validate_required_objects(globals_dict, df_model, user_features_df) + + if not is_valid: + logger.error(f"모델 평가에 필요한 객체가 없습니다: {', '.join(missing_objects)}") + return default_empty_metrics() + + # 인자로 전달된 값 우선 사용, 없으면 globals_dict에서 가져오기 + df_model = df_model if df_model is not None else globals_dict.get("df_model") + user_features_df = user_features_df if user_features_df is not None else globals_dict.get("user_features_df") + stacking_reg = globals_dict.get("stacking_reg") + scaler = globals_dict.get("scaler") + model_features = globals_dict.get("model_features") + + # 테스트 데이터 생성 방식 선택 + # 1. 층화 분할 방식 (실제 데이터 기반) + if 'user_id' in df_model.columns: + _, test_interactions = create_stratified_train_test_split(df_model) + # 2. 
랜덤 생성 방식 (완전 새 데이터) + else: + test_interactions = create_test_interactions(globals_dict) + + if isinstance(test_interactions, pd.DataFrame) and test_interactions.empty: + logger.warning("상호작용 테스트 데이터가 없어 평가를 수행할 수 없습니다.") + return default_empty_metrics() + + # 데이터 검증 + logger.info(f"생성된 테스트 데이터: {len(test_interactions)}개 상호작용") + logger.info(f"고유 사용자 수: {test_interactions['user_id'].nunique()}") + logger.info(f"고유 식당 수: {test_interactions['restaurant_id'].nunique()}") + + # 추천 결과 생성 + recommendations_dict = generate_recommendations_for_users( + df_model, stacking_reg, model_features, + test_interactions['user_id'].unique(), scaler, user_features_df + ) + + # 추천 결과 검증 + if not recommendations_dict: + logger.error("모든 사용자에 대한 추천 생성 실패") + return default_empty_metrics() + + logger.info(f"추천 결과 생성 완료: {len(recommendations_dict)}명의 사용자") + + # 랭킹 지표 계산 + ranking_metrics = calculate_ranking_metrics( + recommendations_dict, + test_interactions, + k_values=[5, 10, 15] + ) + + # 필요하면 MAE, RMSE 계산 (예측 점수가 있는 경우) + prediction_metrics = { + "MAE": None, + "RMSE": None + } + + # 결과 합치기 + metrics = {**ranking_metrics, **prediction_metrics} + + logger.info(f"평가 지표: {metrics}") + return metrics + + except Exception as e: + logger.error(f"모델 평가 중 오류 발생: {e}", exc_info=True) + return default_empty_metrics() + +def generate_recommendations_for_users(df_model, stacking_reg, model_features, sample_users, scaler, user_features_df): + """ + 여러 사용자에 대한 추천 결과 생성 + + Args: + df_model: 식당 데이터 + stacking_reg: 학습된 스태킹 회귀 모델 + model_features: 모델 특성 리스트 + sample_users: 추천 대상 사용자 리스트 + scaler: 특성 스케일러 + user_features_df: 사용자 특성 데이터 + + Returns: + dict: 사용자별 추천 식당 ID 목록 + """ + recommendations_dict = {} + + for user_id in sample_users: + try: + # 추천 결과 생성 + # 주의: user_id가 문자열이면 정수로 변환 + user_id_for_rec = int(user_id) if isinstance(user_id, str) else user_id + + result_json = generate_recommendations( + df_model.copy(), + stacking_reg, + model_features, + user_id_for_rec, + scaler, + 
user_features=user_features_df + ) + + # JSON 파싱 + try: + result_data = json.loads(result_json) + except json.JSONDecodeError: + logger.error(f"JSON 파싱 오류: {result_json[:100]}...") + continue + + # 추천 식당 ID 리스트 추출 + recommended_items = [item.get('restaurant_id') for item in result_data.get('recommendations', [])] + + # ID 타입 일관성 확인 + recommended_items = [int(item) if not isinstance(item, int) else item for item in recommended_items] + + # 유효한 추천 결과만 저장 + if recommended_items: + recommendations_dict[user_id] = recommended_items + else: + logger.warning(f"사용자 {user_id}에 대한 추천 결과가 없습니다.") + + except Exception as e: + logger.error(f"사용자 {user_id} 추천 생성 중 오류: {e}", exc_info=True) + + return recommendations_dict + +def evaluate_with_cross_validation(globals_dict, n_splits=5, k_values=[5, 10, 15]): + """ + 교차 검증을 통한 더 견고한 평가 + + Args: + globals_dict: 전역 변수 딕셔너리 + n_splits: 폴드 수 + k_values: 평가할 k 값 리스트 + + Returns: + dict: 평균 평가 지표 + """ + try: + from sklearn.model_selection import KFold + + # 필요 객체 유효성 검증 + is_valid, missing_objects = validate_required_objects(globals_dict) + + if not is_valid: + logger.error(f"평가에 필요한 객체가 없습니다: {', '.join(missing_objects)}") + return default_empty_metrics() + + df_model = globals_dict.get("df_model") + user_features_df = globals_dict.get("user_features_df") + stacking_reg = globals_dict.get("stacking_reg") + scaler = globals_dict.get("scaler") + model_features = globals_dict.get("model_features") + + # 사용자별 데이터 그룹화 + if 'user_id' not in df_model.columns: + logger.error("교차 검증을 위한 user_id 컬럼이 없습니다.") + return default_empty_metrics() + + user_groups = df_model.groupby('user_id').apply(lambda x: x.index.tolist()) + users = list(user_groups.index) + + # 사용자를 기준으로 폴드 분할 + kf = KFold(n_splits=n_splits, shuffle=True, random_state=42) + + # 평가 지표 저장 + all_metrics = [] + + fold_idx = 1 + for train_users_idx, test_users_idx in kf.split(users): + logger.info(f"폴드 {fold_idx}/{n_splits} 평가 중...") + + # 훈련/테스트 사용자 분할 + train_users = [users[i] for i in 
train_users_idx] + test_users = [users[i] for i in test_users_idx] + + # 훈련/테스트 데이터 분할 + train_indices = [idx for user in train_users for idx in user_groups.get(user, [])] + test_indices = [idx for user in test_users for idx in user_groups.get(user, [])] + + train_df = df_model.iloc[train_indices] + test_df = df_model.iloc[test_indices] + + # 추천 생성 및 평가 + recommendations_dict = generate_recommendations_for_users( + df_model, stacking_reg, model_features, test_users, scaler, user_features_df + ) + + # 평가 지표 계산 + if recommendations_dict: + metrics = calculate_ranking_metrics(recommendations_dict, test_df, k_values=k_values) + all_metrics.append(metrics) + logger.info(f"폴드 {fold_idx} 평가 결과: {metrics}") + else: + logger.warning(f"폴드 {fold_idx}에서 추천 결과가 생성되지 않았습니다.") + + fold_idx += 1 + + # 평균 지표 계산 + if not all_metrics: + logger.error("모든 폴드에서 평가 지표 계산에 실패했습니다.") + return default_empty_metrics() + + avg_metrics = {} + for metric in all_metrics[0].keys(): + avg_metrics[metric] = np.mean([m[metric] for m in all_metrics]) + + avg_metrics.update({ + "MAE": None, + "RMSE": None + }) + + logger.info(f"교차 검증 평균 평가 지표: {avg_metrics}") + return avg_metrics + + except Exception as e: + logger.error(f"교차 검증 평가 중 오류 발생: {e}", exc_info=True) + return default_empty_metrics() \ No newline at end of file diff --git a/app/services/evaluation/metrics.py b/app/services/evaluation/metrics.py new file mode 100644 index 0000000..dd13022 --- /dev/null +++ b/app/services/evaluation/metrics.py @@ -0,0 +1,285 @@ +# app/services/evaluation/metrics.py + +import numpy as np +import pandas as pd +from sklearn.metrics import mean_absolute_error, mean_squared_error + +# 1. 평점 예측 기반 지표: MAE, RMSE +def calculate_rating_metrics(y_true, y_pred): + """ + 평점 예측 성능 지표 계산 + + Args: + y_true: 실제 평점 값 + y_pred: 예측 평점 값 + + Returns: + dict: MAE, RMSE 지표 + """ + mae = mean_absolute_error(y_true, y_pred) + rmse = np.sqrt(mean_squared_error(y_true, y_pred)) + + return { + 'MAE': mae, + 'RMSE': rmse + } + +# 2. 
# 2. Ranking metrics: Precision@K, Recall@K, NDCG@K, Hit Rate@K
def precision_at_k(recommended_items, relevant_items, k):
    """Precision@K: share of the top-K recommendations that are relevant.

    Args:
        recommended_items: ordered list of recommended item ids (best first).
        relevant_items: items the user actually interacted with.
        k: cut-off rank.

    Returns:
        float: hits / min(k, #recommended); 0.0 when nothing was recommended.
    """
    if len(recommended_items) == 0:
        return 0.0
    top_k = recommended_items[:k]
    hits = len(set(top_k) & set(relevant_items))
    # Cap the denominator at the list length so a short recommendation list
    # is not penalized for slots it could not fill.
    return hits / min(k, len(recommended_items))


def recall_at_k(recommended_items, relevant_items, k):
    """Recall@K: share of the relevant items found in the top-K recommendations.

    Args:
        recommended_items: ordered list of recommended item ids.
        relevant_items: items the user actually interacted with.
        k: cut-off rank.

    Returns:
        float: hits / len(relevant_items); 0.0 when there are no relevant items.
    """
    if len(relevant_items) == 0:
        return 0.0
    top_k = recommended_items[:k]
    hits = len(set(top_k) & set(relevant_items))
    return hits / len(relevant_items)


def ndcg_at_k(recommended_items, relevant_items, k):
    """NDCG@K (Normalized Discounted Cumulative Gain) with binary relevance.

    DCG discounts each hit by log2(rank + 1). The ideal DCG used for
    normalization is the best reordering of the SAME top-K list (hits first),
    so the score measures ranking quality within the returned items.

    Args:
        recommended_items: ordered list of recommended item ids.
        relevant_items: items the user actually interacted with.
        k: cut-off rank.

    Returns:
        float: NDCG@K in [0, 1]; 0.0 when there are no relevant items.
    """
    if len(relevant_items) == 0:
        return 0.0
    top_k = recommended_items[:k]
    relevant_set = set(relevant_items)

    # Binary gain per rank; rank is 0-based, hence log2(rank + 2).
    gains = [1 if item in relevant_set else 0 for item in top_k]
    dcg = sum(g / np.log2(rank + 2) for rank, g in enumerate(gains))

    # Ideal DCG: the same gains sorted best-first.
    idcg = sum(g / np.log2(rank + 2)
               for rank, g in enumerate(sorted(gains, reverse=True)))

    return dcg / idcg if idcg > 0 else 0.0


def hit_rate_at_k(recommended_items, relevant_items, k):
    """Hit Rate@K: 1.0 if any relevant item appears in the top-K, else 0.0."""
    top_k = recommended_items[:k]
    return 1.0 if set(top_k) & set(relevant_items) else 0.0


# Aggregate all ranking metrics over every evaluated user.
def calculate_ranking_metrics(recommendations_dict, test_interactions, k_values=(5, 10)):
    """Average ranking metrics over all users with held-out interactions.

    Args:
        recommendations_dict: {user_id: [restaurant_id, ...]} per-user
            recommendations, ordered best first.
        test_interactions: DataFrame with 'user_id' and 'restaurant_id'
            columns holding held-out interactions.
        k_values: cut-off ranks to evaluate. (FIX: default is a tuple now —
            the previous mutable-list default was a Python anti-pattern.)

    Returns:
        dict: Precision@k / Recall@k / NDCG@k / Hit_Rate@k for each k.
    """
    # FIX: build {user_id: [relevant ids]} with one groupby instead of a
    # per-row iterrows loop.
    relevant_by_user = (
        test_interactions.groupby('user_id')['restaurant_id'].apply(list).to_dict()
    )

    results = {}
    for k in k_values:
        sums = {'Precision': 0.0, 'Recall': 0.0, 'NDCG': 0.0, 'Hit_Rate': 0.0}
        evaluated_users = 0

        for user_id, recommended in recommendations_dict.items():
            relevant = relevant_by_user.get(user_id)
            if relevant is None:
                # Users without test interactions cannot be scored.
                continue
            sums['Precision'] += precision_at_k(recommended, relevant, k)
            sums['Recall'] += recall_at_k(recommended, relevant, k)
            sums['NDCG'] += ndcg_at_k(recommended, relevant, k)
            sums['Hit_Rate'] += hit_rate_at_k(recommended, relevant, k)
            evaluated_users += 1

        for name, total in sums.items():
            results[f'{name}@{k}'] = total / evaluated_users if evaluated_users else 0.0

    return results


def calculate_segment_performance(recommendations_dict, test_interactions, k_values=(5, 10, 15)):
    """Ranking metrics broken down by user activity segment.

    Segments: 'new' (no test interactions), 'active' (> 10 interactions),
    'inactive' (1-10 interactions).

    Args:
        recommendations_dict: {user_id: [restaurant_id, ...]}.
        test_interactions: DataFrame with 'user_id'/'restaurant_id' columns.
        k_values: cut-off ranks to evaluate (tuple default — see
            calculate_ranking_metrics).

    Returns:
        dict: {segment: {metric_name: mean value}}; 0.0 for empty segments.
    """
    # FIX: interaction counts computed once — the original re-filtered the
    # whole frame for every single user.
    interactions_per_user = test_interactions['user_id'].value_counts().to_dict()

    def _segment_of(user_id):
        # Classify a user by held-out interaction volume.
        n = interactions_per_user.get(user_id, 0)
        if n == 0:
            return 'new'
        return 'active' if n > 10 else 'inactive'

    relevant_by_user = (
        test_interactions.groupby('user_id')['restaurant_id'].apply(list).to_dict()
    )

    metric_names = [
        f'{name}@{k}'
        for k in k_values
        for name in ('Precision', 'Recall', 'NDCG', 'Hit_Rate')
    ]
    segment_metrics = {
        segment: {name: [] for name in metric_names}
        for segment in ('new', 'active', 'inactive')
    }

    metric_funcs = {
        'Precision': precision_at_k,
        'Recall': recall_at_k,
        'NDCG': ndcg_at_k,
        'Hit_Rate': hit_rate_at_k,
    }

    for user_id, recommended in recommendations_dict.items():
        segment = _segment_of(user_id)

        # NOTE(review): users without test interactions are skipped, so the
        # 'new' segment can only ever average to 0.0 — preserved from the
        # original logic; confirm whether 'new' users should be scored at all.
        if user_id not in relevant_by_user:
            continue

        relevant = relevant_by_user[user_id]
        for k in k_values:
            for name, func in metric_funcs.items():
                segment_metrics[segment][f'{name}@{k}'].append(
                    func(recommended, relevant, k)
                )

    # Average each metric per segment (0.0 where no user contributed).
    return {
        segment: {
            name: (np.mean(values) if values else 0.0)
            for name, values in metrics.items()
        }
        for segment, metrics in segment_metrics.items()
    }
def default_empty_metrics():
    """Return the all-empty metrics dict used when evaluation cannot run."""
    metrics = {"MAE": None, "RMSE": None}
    for k in (5, 10, 15):
        for name in ("Precision", "Recall", "NDCG", "Hit_Rate"):
            metrics[f"{name}@{k}"] = 0
    return metrics


def validate_required_objects(globals_dict, df_model=None, user_features_df=None):
    """Validate that every object needed for model evaluation is present.

    Args:
        globals_dict: application-global object registry.
        df_model: restaurant DataFrame; falls back to globals_dict when None.
        user_features_df: user feature DataFrame; falls back likewise.

    Returns:
        tuple: (bool all_present, list[str] names of missing objects).
    """
    # BUG FIX: the previous `df_model or globals_dict.get(...)` raised
    # ValueError whenever a DataFrame was passed (DataFrame truthiness is
    # ambiguous). Fall back only when the argument is actually None.
    if df_model is None:
        df_model = globals_dict.get("df_model")
    if user_features_df is None:
        user_features_df = globals_dict.get("user_features_df")

    # Keep insertion order so the missing-object list is stable.
    required = {
        "stacking_reg": globals_dict.get("stacking_reg"),
        "scaler": globals_dict.get("scaler"),
        "model_features": globals_dict.get("model_features"),
        "df_model": df_model,
        "user_features_df": user_features_df,
    }

    def _is_missing(name, obj):
        # An object is missing when absent, or (for the two frames) empty.
        if obj is None:
            return True
        if name in ("df_model", "user_features_df"):
            return isinstance(obj, pd.DataFrame) and obj.empty
        return False

    missing_objects = [name for name, obj in required.items() if _is_missing(name, obj)]
    return len(missing_objects) == 0, missing_objects
def prepare_data(df: pd.DataFrame, required_cols: list) -> pd.DataFrame:
    """Drop rows missing required columns and derive basic quality features.

    Args:
        df: raw input frame; must contain 'score', 'review' and
            'duration_hours' columns in addition to required_cols.
        required_cols: columns that must be non-null for a row to survive.

    Returns:
        pd.DataFrame: cleaned copy with clipped values, log-transformed
        'log_review'/'log_score' columns and a 'review_duration'
        interaction feature.

    Raises:
        Exception: re-raises anything that fails during preparation.
    """
    try:
        cleaned = df.dropna(subset=required_cols).copy()

        # Clamp obvious out-of-range values before deriving features.
        cleaned['review'] = cleaned['review'].clip(lower=0)
        cleaned['score'] = cleaned['score'].clip(lower=0, upper=5)

        # log1p keeps zero counts finite and tames heavy-tailed distributions.
        cleaned['log_review'] = np.log1p(cleaned['review'])
        cleaned['log_score'] = np.log1p(cleaned['score'])

        # Interaction feature: exposure proxy (review count x opening hours).
        cleaned['review_duration'] = cleaned['review'] * cleaned['duration_hours']

        return cleaned
    except Exception as e:
        logger.error(f"prepare_data 오류: {e}", exc_info=True)
        raise e


def impute_and_clip(df: pd.DataFrame, impute_cols: list, min_values=None, max_values=None) -> pd.DataFrame:
    """Impute missing values with IterativeImputer, then clip to given bounds.

    Args:
        df: input frame (imputed columns are written back in place).
        impute_cols: columns whose missing values should be imputed.
        min_values: per-column lower bounds, e.g. {'score': 0}. Pass an empty
            dict to disable lower clipping. Defaults to {'score': 0}.
        max_values: per-column upper bounds, e.g. {'score': 5}. Pass an empty
            dict to disable upper clipping. Defaults to {'score': 5.0}.

    Returns:
        pd.DataFrame: the imputed and clipped frame.

    Raises:
        Exception: re-raises anything that fails during imputation.
    """
    try:
        # BUG FIX: `min_values or {...}` treated an explicitly passed empty
        # dict as "use defaults" (falsy-dict pitfall); substitute the
        # defaults only when the argument is actually None.
        if min_values is None:
            min_values = {'score': 0}
        if max_values is None:
            max_values = {'score': 5.0}

        imputer = IterativeImputer(estimator=BayesianRidge(), random_state=42,
                                   max_iter=10, initial_strategy='median')
        df[impute_cols] = imputer.fit_transform(df[impute_cols])

        # Apply per-column clipping bounds.
        for col, lower in min_values.items():
            if col in df.columns:
                df[col] = df[col].clip(lower=lower)
        for col, upper in max_values.items():
            if col in df.columns:
                df[col] = df[col].clip(upper=upper)

        return df
    except Exception as e:
        logger.error(f"impute_and_clip 오류: {e}", exc_info=True)
        raise e
def optimize_recommendation_parameters(df_model, user_features_df, model_features,
                                       n_trials=50, timeout=600, checkpoint_file=None):
    """Search for good recommendation-system parameters with Optuna.

    Args:
        df_model: training data (restaurants + ratings; needs 'user_id', 'score').
        user_features_df: user feature frame (currently unused by the objective).
        model_features: feature column names used to fit the evaluation model.
        n_trials: maximum number of optimization trials.
        timeout: optimization time budget in seconds.
        checkpoint_file: optional sqlite path for a resumable study.

    Returns:
        dict: best parameter set, or a hand-tuned default set on failure.
    """
    try:
        logger.info(f"추천 시스템 파라미터 최적화 시작 (최대 시도: {n_trials}, 제한 시간: {timeout}초)")

        # FIX: imports hoisted — GradientBoostingRegressor was previously
        # re-imported inside the per-user loop on every iteration.
        from sklearn.model_selection import train_test_split
        from sklearn.ensemble import GradientBoostingRegressor

        train_df, valid_df = train_test_split(df_model, test_size=0.2, random_state=42)
        all_users = train_df['user_id'].unique()

        def objective(trial):
            # NOTE(review): these suggested params are not fed into the
            # GradientBoosting evaluation below, so the study currently scores
            # the data split rather than the parameters themselves — confirm
            # how they should be wired into recommendation generation.
            params = {
                'diversity_weight': trial.suggest_float('diversity_weight', 0.05, 0.5),
                'review_weight': trial.suggest_float('review_weight', 0.1, 0.9),
                'popularity_log_base': trial.suggest_float('popularity_log_base', 1.5, 10.0),
                'category_similarity_weight': trial.suggest_float('category_similarity_weight', 0.1, 0.8),
                'sigmoid_a': trial.suggest_float('sigmoid_a', 0.5, 5.0),
                'sigmoid_b': trial.suggest_float('sigmoid_b', 0.0, 3.0),
                'cold_start_diversity_weight': trial.suggest_float('cold_start_diversity_weight', 0.1, 0.4),
                'cold_start_popularity_weight': trial.suggest_float('cold_start_popularity_weight', 0.1, 0.5),
                'hybrid_cf_weight': trial.suggest_float('hybrid_cf_weight', 0.4, 0.9),
            }

            # BUG FIX: the user sample was drawn from the unseeded global RNG,
            # making every trial (and the whole study) non-reproducible; seed
            # a local generator per trial instead.
            rng = np.random.default_rng(trial.number)
            eval_users = rng.choice(all_users, min(50, len(all_users)), replace=False)

            user_rmse = []
            user_precision = []
            user_diversity = []

            for user_id in eval_users:
                user_train = train_df[train_df['user_id'] == user_id]
                user_valid = valid_df[valid_df['user_id'] == user_id]

                if len(user_train) < 5 or len(user_valid) < 2:
                    continue  # too little data to evaluate this user

                try:
                    model = GradientBoostingRegressor(
                        n_estimators=100,
                        learning_rate=0.1,
                        random_state=42
                    )
                    model.fit(user_train[model_features], user_train['score'])

                    y_valid = user_valid['score']
                    y_pred = model.predict(user_valid[model_features])

                    rmse = np.sqrt(mean_squared_error(y_valid, y_pred))
                    user_rmse.append(rmse)

                    # Accuracy proxy: normalized inverse of the prediction
                    # error (rating scale assumed to span a width of 5).
                    max_possible_error = 5.0
                    user_precision.append(1 - (rmse / max_possible_error))

                    # Diversity proxy: entropy of the category distribution,
                    # squashed to the 0-1 range.
                    if 'category_id' in user_valid.columns:
                        category_share = user_valid['category_id'].value_counts(normalize=True)
                        category_entropy = scipy.stats.entropy(category_share)
                        user_diversity.append(1 - (1 / (1 + category_entropy)))

                except Exception as inner_e:
                    logger.debug(f"사용자 {user_id} 평가 중 오류: {inner_e}")
                    continue

            avg_precision = np.mean(user_precision) if user_precision else 0.0
            avg_diversity = np.mean(user_diversity) if user_diversity else 0.0

            # Weighted objective: accuracy dominates, diversity contributes.
            objective_score = (0.7 * avg_precision) + (0.3 * avg_diversity)

            logger.debug(f"Trial {trial.number}: Params={params}, Score={objective_score:.4f} "
                         f"(Precision={avg_precision:.4f}, Diversity={avg_diversity:.4f})")
            return objective_score

        if checkpoint_file:
            # Resumable study persisted to sqlite.
            study = optuna.create_study(direction='maximize',
                                        storage=f'sqlite:///{checkpoint_file}',
                                        study_name='recommendation_params',
                                        load_if_exists=True)
        else:
            study = optuna.create_study(direction='maximize')

        def gc_after_trial(study, trial):
            # Periodically reclaim memory from per-trial model fits.
            if trial.number % 5 == 0:
                import gc
                gc.collect()

        study.optimize(objective, n_trials=n_trials, timeout=timeout,
                       callbacks=[gc_after_trial])

        logger.info(f"파라미터 최적화 완료: 최적 점수={study.best_value:.4f}")
        logger.info(f"최적 파라미터: {study.best_params}")
        return study.best_params

    except Exception as e:
        logger.error(f"파라미터 최적화 중 오류 발생: {e}", exc_info=True)
        # Hand-tuned fallback parameters used when optimization fails.
        return {
            'diversity_weight': 0.15,
            'review_weight': 0.5,
            'popularity_log_base': 5.0,
            'category_similarity_weight': 0.3,
            'sigmoid_a': 2.0,
            'sigmoid_b': 1.5,
            'cold_start_diversity_weight': 0.2,
            'cold_start_popularity_weight': 0.3,
            'hybrid_cf_weight': 0.7
        }
def train_xgb(X, y, n_jobs=-1):
    """Tune and fit an XGBoost regressor via randomized search.

    Args:
        X: feature matrix.
        y: target vector.
        n_jobs: parallelism for the search (default -1 = all cores), added
            for consistency with the other train_* helpers.

    Returns:
        The best fitted XGBRegressor found by the search.

    Raises:
        Exception: re-raises anything that fails during training.
    """
    try:
        # BUG FIX: the search space was built from unseeded np.random arrays,
        # so the candidate grid silently changed on every call even though
        # the search itself used random_state=42. Proper scipy distributions
        # make the whole search reproducible.
        from scipy.stats import randint, uniform

        param_distributions = {
            'n_estimators': randint(50, 300),
            'max_depth': randint(3, 10),
            # uniform(loc, scale) samples from [loc, loc + scale].
            'learning_rate': uniform(0.01, 0.29),
            'subsample': uniform(0.6, 0.4),
            'colsample_bytree': uniform(0.6, 0.4),
            'min_child_weight': randint(1, 7),
        }

        xgb = XGBRegressor(
            objective='reg:squarederror',
            random_state=42
        )

        random_search = RandomizedSearchCV(
            estimator=xgb,
            param_distributions=param_distributions,
            n_iter=50,          # number of sampled parameter combinations
            cv=3,               # 3-fold cross validation
            scoring='r2',
            n_jobs=n_jobs,
            random_state=42
        )

        random_search.fit(X, y)
        return random_search.best_estimator_

    except Exception as e:
        logger.error(f"train_xgb 오류: {e}", exc_info=True)
        raise e


def train_lgb(X, y, n_jobs=-1):
    """Tune and fit a LightGBM regressor via grid search.

    Args:
        X: feature matrix.
        y: target vector.
        n_jobs: parallelism for the grid search (default -1 = all cores).

    Returns:
        The best fitted LGBMRegressor found by the search.

    Raises:
        Exception: re-raises anything that fails during training.
    """
    try:
        param_grid = {'n_estimators': [50, 100],
                      'max_depth': [3, 5, 7, -1],
                      'learning_rate': [0.01, 0.1]}

        lgb_model = lgb.LGBMRegressor(
            random_state=42,
            verbose=-1,
            min_split_gain=0
        )

        grid = GridSearchCV(lgb_model, param_grid, cv=3, scoring='r2', n_jobs=n_jobs)
        grid.fit(X, y)
        return grid.best_estimator_
    except Exception as e:
        logger.error(f"train_lgb 오류: {e}", exc_info=True)
        raise e
def sigmoid_transform(x, a, b):
    """Map a raw score onto the 0-5 range with a logistic curve.

    Args:
        x: raw input value.
        a: steepness of the logistic curve.
        b: midpoint (the x value that maps to 2.5).

    Returns:
        float: 5 / (1 + exp(-a * (x - b))).

    Raises:
        Exception: re-raises anything that fails during the transform.
    """
    try:
        decay = np.exp(-a * (x - b))
        return 5 * (1 / (1 + decay))
    except Exception as e:
        logger.error(f"sigmoid_transform 오류: {e}", exc_info=True)
        raise e
logger.debug(f"사용자 ID 타입: {type(user_features['user_id'].iloc[0])}") # 모든 ID를 문자열로 변환하여 비교 user_features['user_id_str'] = user_features['user_id'].astype(str) @@ -74,13 +97,11 @@ def generate_recommendations(data_filtered: pd.DataFrame, stacking_reg, model_fe if not matching_rows.empty: is_new_user = False user_row = matching_rows.iloc[0:1] # 첫 번째 일치 행만 사용 - logger.info(f"ID {user_id_str}의 사용자 데이터 찾음 - 개인화 추천 생성") # 가격 필터링 (기존 사용자만) if 'max_price' in user_row.columns and 'price' in data_filtered.columns: max_price = user_row['max_price'].values[0] if max_price > 0: - logger.debug(f"사용자 최대 가격 {max_price}원 이하의 식당으로 필터링") data_filtered = data_filtered[data_filtered['price'] <= max_price].copy() else: logger.info(f"ID {user_id_str}의 사용자 데이터를 찾을 수 없음 - 카테고리 기반 기본 추천 생성") @@ -97,17 +118,20 @@ def generate_recommendations(data_filtered: pd.DataFrame, stacking_reg, model_fe category_col = f"category_{i}" if category_col in user_row.columns and user_row[category_col].values[0] == 1: data_filtered.loc[data_filtered['category_id'] == i, 'category_bonus'] = 0.3 - logger.debug(f"카테고리 {i}에 기본 보너스 0.3 적용") if i in [4, 7, 9, 10]: # 중요 카테고리 data_filtered.loc[data_filtered['category_id'] == i, 'category_bonus'] += 0.2 - logger.debug(f"중요 카테고리 {i}에 추가 보너스 0.2 적용") else: # 신규 사용자: 필터링된 모든 식당은 사용자가 선택한 카테고리에 해당 # 모든 식당에 동일한 카테고리 보너스 부여 - logger.info(f"신규 사용자용 카테고리 기반 추천 생성") data_filtered['category_bonus'] = 0.3 + # 카테고리 다양성 보너스 추가 + data_filtered = calculate_category_diversity_bonus(data_filtered) + + # 카테고리 다양성 보너스 통합 (10% 가중) + data_filtered['category_bonus'] += data_filtered.get('category_diversity_bonus', 0) * 0.1 + # 모델 예측을 위한 피처 준비 (기존/신규 사용자 모두 동일) for feature in model_features: if feature not in data_filtered.columns: @@ -141,23 +165,27 @@ def generate_recommendations(data_filtered: pd.DataFrame, stacking_reg, model_fe if not is_new_user and 'completed_reservations' in user_row.columns: completed_reservations = user_row['completed_reservations'].values[0] if 
logger = logging.getLogger(__name__)

def enhance_cold_start_recommendations(data_filtered, user_id, user_features_df=None):
    """Boost recommendation scores for cold-start (new) users.

    Combines category-diversity, preferred-category, popularity,
    opening-hours and convenience bonuses into a 'cold_start_bonus' column
    and adds it to 'composite_score' (which must already exist on
    `data_filtered`).

    Args:
        data_filtered: candidate restaurant frame; modified in place and
            returned. Needs 'category_id', 'review' and 'composite_score'
            columns ('duration_hours' and 'conv_*' columns are optional).
        user_id: user identifier (compared as a string).
        user_features_df: optional user feature frame with 'user_id' and
            'preferred_category*' columns.

    Returns:
        pd.DataFrame: `data_filtered` with bonus columns added; returned
        unchanged on any error (best-effort enhancement).
    """
    try:
        logger.debug(f"신규 사용자 {user_id}를 위한 강화된 추천 로직 적용")

        # 1. Category diversity: restaurants in rare categories get a larger
        # bonus so recommendations are not dominated by one category.
        category_counts = data_filtered['category_id'].value_counts()
        total_restaurants = len(data_filtered)
        rarity = (1 - category_counts / total_restaurants).to_dict()
        data_filtered['enhanced_diversity_bonus'] = (
            data_filtered['category_id'].map(rarity).fillna(0) * 0.15
        )

        # 2. Preferred category from user features, if available.
        user_preferred_category = None
        if user_features_df is not None:
            # BUG FIX: previously a 'user_id_str' helper column was written
            # onto the CALLER's frame; build the comparison Series locally
            # so the shared frame is left untouched.
            id_match = user_features_df['user_id'].astype(str) == str(user_id)
            user_data = user_features_df[id_match]

            if not user_data.empty:
                # Take the first non-null preferred_category* value.
                preferred_cols = [col for col in user_data.columns
                                  if col.startswith('preferred_category')]
                for col in preferred_cols:
                    value = user_data[col].iloc[0]
                    if not pd.isna(value):
                        user_preferred_category = value
                        break

            if user_preferred_category is not None:
                data_filtered['preferred_category_bonus'] = 0.0
                data_filtered.loc[
                    data_filtered['category_id'] == user_preferred_category,
                    'preferred_category_bonus'
                ] = 0.4

        # 3. Popularity bonus: log scaling softens extreme review counts.
        max_review = data_filtered['review'].max()
        if max_review > 0:
            data_filtered['popularity_bonus'] = (
                np.log1p(data_filtered['review']) / np.log1p(max_review)
            ) * 0.2
        else:
            data_filtered['popularity_bonus'] = 0

        # 4. Opening-hours bonus: longer-open restaurants score higher.
        if 'duration_hours' in data_filtered.columns:
            max_duration = data_filtered['duration_hours'].max()
            if max_duration > 0:
                data_filtered['duration_bonus'] = (
                    data_filtered['duration_hours'] / max_duration
                ) * 0.1
            else:
                data_filtered['duration_bonus'] = 0
        else:
            data_filtered['duration_bonus'] = 0

        # 5. Convenience bonus: count of amenity flags (the "no amenity info"
        # placeholder column is excluded).
        convenience_cols = [col for col in data_filtered.columns
                            if col.startswith('conv_')
                            and col != 'conv_편의시설 정보 없음']
        if convenience_cols:
            data_filtered['convenience_bonus'] = (
                data_filtered[convenience_cols].sum(axis=1) * 0.05
            )
        else:
            data_filtered['convenience_bonus'] = 0

        # 6. Aggregate all bonuses into the final score.
        cold_start_bonus = (
            data_filtered.get('enhanced_diversity_bonus', 0)
            + data_filtered.get('preferred_category_bonus', 0)
            + data_filtered.get('popularity_bonus', 0)
            + data_filtered.get('duration_bonus', 0)
            + data_filtered.get('convenience_bonus', 0)
        )
        data_filtered['cold_start_bonus'] = cold_start_bonus
        data_filtered['composite_score'] += cold_start_bonus

        logger.debug(f"신규 사용자 추천 강화 완료: 평균 보너스 점수 {cold_start_bonus.mean():.4f}")
        return data_filtered

    except Exception as e:
        logger.error(f"신규 사용자 추천 강화 중 오류: {e}", exc_info=True)
        return data_filtered
def calculate_category_diversity_bonus(data_filtered):
    """Attach a 'category_diversity_bonus' column favouring rare categories.

    The bonus for a restaurant is 1 - (its category's share of the data), so
    restaurants in sparsely represented categories score higher.

    Args:
        data_filtered (pd.DataFrame): restaurant data with a 'category_id'
            column.

    Returns:
        pd.DataFrame: the same frame with the bonus column added; returned
        unchanged if anything goes wrong.
    """
    try:
        counts_per_category = data_filtered['category_id'].value_counts()
        bonus_by_category = (1 - counts_per_category / len(data_filtered)).to_dict()

        # Unknown/missing categories fall back to a zero bonus.
        data_filtered['category_diversity_bonus'] = (
            data_filtered['category_id'].map(bonus_by_category).fillna(0)
        )
        return data_filtered

    except Exception as e:
        logger.error(f"calculate_category_diversity_bonus 오류: {e}", exc_info=True)
        return data_filtered
협업 필터링 (사용자-아이템 매트릭스 기반) + logger.debug("협업 필터링 모델 구축 중...") + # 평점 데이터 확인 + if df_ratings.empty or 'score' not in df_ratings.columns: + raise ValueError("평점 데이터가 비어있거나 필수 컬럼이 없습니다") + + # 사용자-아이템 평점 매트릭스 생성 + user_item_matrix = df_ratings.pivot_table( + index='user_id', + columns='restaurant_id', + values='score', + fill_value=0 + ) + + # 협업 필터링 유사도 계산 + # 메모리 관리를 위해 실제 구현 시 이 부분을 최적화할 수 있음 + cf_user_similarity = cosine_similarity(user_item_matrix) + cf_user_similarity = pd.DataFrame( + cf_user_similarity, + index=user_item_matrix.index, + columns=user_item_matrix.index + ) + + # 아이템 유사도 계산 + cf_item_similarity = cosine_similarity(user_item_matrix.T) + cf_item_similarity = pd.DataFrame( + cf_item_similarity, + index=user_item_matrix.columns, + columns=user_item_matrix.columns + ) + + logger.debug(f"협업 필터링 모델 구축 완료: {user_item_matrix.shape[0]}명의 사용자, {user_item_matrix.shape[1]}개의 식당") + + # 2. 콘텐츠 기반 필터링 + logger.debug("콘텐츠 기반 필터링 모델 구축 중...") + # 식당 메타 데이터 준비 + if df_restaurants.empty: + raise ValueError("식당 메타데이터가 비어있습니다") + + # 콘텐츠 기반 필터링을 위한 특성 선택 + content_features = ['category_id'] + + # 편의 시설, 주의사항 등의 특성 추가 + convenience_cols = [col for col in df_restaurants.columns if col.startswith('conv_')] + caution_cols = [col for col in df_restaurants.columns if col.startswith('caution_')] + + content_features.extend(convenience_cols) + content_features.extend(caution_cols) + + # 모든 특성이 존재하는지 확인 + valid_features = [f for f in content_features if f in df_restaurants.columns] + + # 유효한 특성이 없으면 카테고리만 사용 + if not valid_features: + if 'category_id' in df_restaurants.columns: + valid_features = ['category_id'] + else: + raise ValueError("콘텐츠 기반 필터링에 사용할 특성이 없습니다") + + # 중복 제거된 식당 데이터 준비 + restaurant_features = df_restaurants.drop_duplicates('restaurant_id') + restaurant_features = restaurant_features.set_index('restaurant_id') + + # 범주형 변수 원-핫 인코딩 + categorical_features = ['category_id'] + for feature in categorical_features: + if feature in restaurant_features.columns: + # 
원-핫 인코딩 + dummies = pd.get_dummies(restaurant_features[feature], prefix=feature) + restaurant_features = pd.concat([ + restaurant_features.drop(feature, axis=1), + dummies + ], axis=1) + + # 콘텐츠 기반 유사도 계산 + # 콘텐츠 특성 선택 + content_cols = [col for col in restaurant_features.columns + if any(col.startswith(f"{feature}_") for feature in categorical_features) + or col in valid_features] + + if not content_cols: + logger.warning("콘텐츠 특성이 없어 기본 특성 사용") + content_cols = restaurant_features.columns[:5] # 첫 5개 컬럼 사용 + + # 유사도 계산 + content_similarity = cosine_similarity(restaurant_features[content_cols]) + content_similarity = pd.DataFrame( + content_similarity, + index=restaurant_features.index, + columns=restaurant_features.index + ) + + logger.debug(f"콘텐츠 기반 모델 구축 완료: {len(restaurant_features)}개 식당, {len(content_cols)}개 특성") + + # 3. 하이브리드 추천 함수 + def hybrid_recommend(user_id, n=15, alpha=0.7): + """ + 하이브리드 방식으로 식당 추천 + + Args: + user_id: 사용자 ID + n: 추천할 식당 수 + alpha: 협업 필터링 가중치 (0~1), 1-alpha는 콘텐츠 기반 가중치 + + Returns: + list: 추천된 식당 ID 리스트 + """ + try: + # 사용자 ID가 문자열이면 정수로 변환 시도 + if isinstance(user_id, str): + try: + user_id = int(user_id) + except ValueError: + pass + + # A. 협업 필터링 점수 계산 + cf_scores = {} + + # 사용자가 평점 매트릭스에 있는 경우 (기존 사용자) + if user_id in cf_user_similarity.index: + # 1. 유사 사용자 기반 추천 + similar_users = cf_user_similarity[user_id].sort_values(ascending=False).index[1:11] # 자신 제외 상위 10명 + + # 2. 
유사 사용자들의 평점 가중 평균 계산 + user_ratings = user_item_matrix.loc[user_id] + similar_users_ratings = user_item_matrix.loc[similar_users] + user_similarities = cf_user_similarity[user_id].loc[similar_users] + + # 아직 평가하지 않은 식당만 추천 대상 + unrated_items = user_ratings[user_ratings == 0].index + + for item in unrated_items: + # 유사 사용자들 중 해당 식당을 평가한 사용자들만 사용 + item_ratings = similar_users_ratings[item] + relevant_users = item_ratings[item_ratings > 0].index + + if len(relevant_users) > 0: + # 유사도 가중 평균 계산 + relevant_similarities = user_similarities.loc[relevant_users] + relevant_ratings = item_ratings.loc[relevant_users] + + if relevant_similarities.sum() > 0: + cf_scores[item] = (relevant_similarities * relevant_ratings).sum() / relevant_similarities.sum() + else: + cf_scores[item] = relevant_ratings.mean() + + # 3. 아이템 기반 협업 필터링 추가 + # 사용자가 이미 평가한 식당 + rated_items = user_ratings[user_ratings > 0].index + + for item in unrated_items: + if item not in cf_scores and item in cf_item_similarity.columns: + # 이미 평가한 식당과의 유사성 기반 점수 계산 + item_similarities = cf_item_similarity[item].loc[rated_items] + item_ratings = user_ratings.loc[rated_items] + + if item_similarities.sum() > 0: + cf_scores[item] = (item_similarities * item_ratings).sum() / item_similarities.sum() + else: + # 신규 사용자는 협업 필터링 점수 없음 + logger.debug(f"사용자 {user_id}는 협업 필터링 데이터가 없습니다") + + # B. 콘텐츠 기반 점수 계산 + cb_scores = {} + + # 1. 
사용자가 이미 평가한 식당이 있는 경우 + user_data = df_ratings[df_ratings['user_id'] == user_id] + + if not user_data.empty: + # 평점이 높은 순으로 사용자가 평가한 식당 정렬 + user_favorites = user_data.sort_values('score', ascending=False) + top_restaurants = user_favorites.head(5)['restaurant_id'].tolist() + + # 이미 평가한 식당과 유사한 식당 추천 + for rest_id in top_restaurants: + if rest_id in content_similarity.index: + similar_restaurants = content_similarity[rest_id].sort_values(ascending=False) + + for similar_id, similarity in similar_restaurants.items(): + if similar_id != rest_id: # 자기 자신 제외 + if similar_id not in cb_scores: + cb_scores[similar_id] = 0 + + # 평가한 식당의 평점과 유사도를 곱하여 점수 계산 + rest_score = user_data[user_data['restaurant_id'] == rest_id]['score'].iloc[0] + cb_scores[similar_id] += similarity * rest_score + else: + # 2. 사용자 평가 데이터가 없는 경우 (신규 사용자) + # 전체 평균 평점으로 인기 식당 추천 + popular_restaurants = df_ratings.groupby('restaurant_id')['score'].agg(['mean', 'count']) + popular_restaurants = popular_restaurants[popular_restaurants['count'] >= 5] # 최소 5개 이상 평가 + popular_restaurants['popularity'] = popular_restaurants['mean'] * np.log1p(popular_restaurants['count']) + + for rest_id, row in popular_restaurants.sort_values('popularity', ascending=False).head(20).iterrows(): + cb_scores[rest_id] = row['popularity'] + + # C. 
하이브리드 점수 계산 + hybrid_scores = {} + + # 모든 식당 ID 수집 + all_restaurant_ids = set(list(cf_scores.keys()) + list(cb_scores.keys())) + + for rest_id in all_restaurant_ids: + # 협업 필터링 점수 (없으면 0) + cf_score = cf_scores.get(rest_id, 0) + + # 콘텐츠 기반 점수 (없으면 0) + cb_score = cb_scores.get(rest_id, 0) + + # 하이브리드 점수 계산 (알파 가중 평균) + if cf_score > 0 and cb_score > 0: + # 둘 다 점수가 있으면 가중 평균 + hybrid_scores[rest_id] = alpha * cf_score + (1 - alpha) * cb_score + elif cf_score > 0: + # 협업 필터링 점수만 있으면 그대로 사용 + hybrid_scores[rest_id] = cf_score + elif cb_score > 0: + # 콘텐츠 기반 점수만 있으면 그대로 사용 + hybrid_scores[rest_id] = cb_score + + # 이미 평가한 식당 제외 + rated_items = df_ratings[df_ratings['user_id'] == user_id]['restaurant_id'].tolist() + for item in rated_items: + if item in hybrid_scores: + del hybrid_scores[item] + + # 점수 기준 상위 n개 식당 추천 + recommended_items = sorted( + hybrid_scores.items(), + key=lambda x: x[1], + reverse=True + )[:n] + + # 식당 ID만 추출 + recommended_ids = [rest_id for rest_id, _ in recommended_items] + + # 추천 결과가 부족하면 인기 식당으로 보충 + if len(recommended_ids) < n: + needed = n - len(recommended_ids) + + # 인기 식당 계산 + popular_rest = df_ratings.groupby('restaurant_id')['score'].mean().sort_values(ascending=False) + + # 이미 추천한 식당과 평가한 식당 제외 + excluded_ids = set(recommended_ids + rated_items) + additional_ids = [ + rest_id for rest_id in popular_rest.index + if rest_id not in excluded_ids + ][:needed] + + recommended_ids.extend(additional_ids) + + logger.debug(f"사용자 {user_id}에게 {len(recommended_ids)}개 식당 하이브리드 추천 생성") + return recommended_ids + + except Exception as e: + logger.error(f"하이브리드 추천 생성 중 오류: {e}", exc_info=True) + + # 오류 발생 시 인기 식당 기반 추천으로 대체 + popular_rest = df_ratings.groupby('restaurant_id')['score'].mean().sort_values(ascending=False) + return popular_rest.head(n).index.tolist() + + logger.info("하이브리드 추천 모델 구축 완료") + return hybrid_recommend + + except Exception as e: + logger.error(f"하이브리드 추천 모델 구축 중 오류: {e}", exc_info=True) + # 오류 발생 시 기본 추천 함수 반환 + def 
fallback_recommend(user_id, n=15, **kwargs): + # 평점 기준 인기 식당 추천 + popular_rest = df_ratings.groupby('restaurant_id')['score'].mean().sort_values(ascending=False) + return popular_rest.head(n).index.tolist() + + return fallback_recommend + + +def generate_hybrid_recommendations(df_ratings, df_restaurants, user_id, n=15, alpha=0.7): + """ + 하이브리드 추천 모델을 사용하여 추천 생성 + + Args: + df_ratings: 사용자-식당 평점 데이터 + df_restaurants: 식당 메타데이터 + user_id: 추천 대상 사용자 ID + n: 추천할 식당 수 + alpha: 협업 필터링 가중치 (0~1) + + Returns: + dict: 추천 결과 딕셔너리 + """ + try: + # 하이브리드 추천 모델 구축 + hybrid_recommend = build_hybrid_recommender(df_ratings, df_restaurants) + + # 추천 생성 + recommended_items = hybrid_recommend(user_id, n=n, alpha=alpha) + + # 추천 식당 정보 수집 + recommendations = [] + + for i, rest_id in enumerate(recommended_items): + # 식당 정보 추출 + rest_data = df_restaurants[df_restaurants['restaurant_id'] == rest_id] + if rest_data.empty: + continue + + # 카테고리 ID + category_id = int(rest_data['category_id'].iloc[0]) if 'category_id' in rest_data.columns else -1 + + # 점수 정보 + if 'score' in rest_data.columns: + score = float(rest_data['score'].mean()) + else: + # 평점 데이터에서 점수 가져오기 + rest_ratings = df_ratings[df_ratings['restaurant_id'] == rest_id] + score = float(rest_ratings['score'].mean()) if not rest_ratings.empty else 4.0 + + # 추천 식당 정보 + recommendations.append({ + "category_id": category_id, + "restaurant_id": int(rest_id), + "score": score, + "predicted_score": score, + "composite_score": 5.0 - (i * 0.15) # 순위에 따라 점수 부여 (5.0~2.75) + }) + + # 결과 구성 + is_new_user = len(df_ratings[df_ratings['user_id'] == user_id]) == 0 + + result_dict = { + "user": int(user_id) if isinstance(user_id, (int, float)) else user_id, + "is_new_user": is_new_user, + "recommendations": recommendations + } + + return result_dict + + except Exception as e: + logger.error(f"하이브리드 추천 생성 중 오류: {e}", exc_info=True) + # 기본 결과 반환 + return { + "user": int(user_id) if isinstance(user_id, (int, float)) else user_id, + "is_new_user": True, 
+ "recommendations": [] + } \ No newline at end of file diff --git a/app/services/model_trainer/train_model.py b/app/services/model_trainer/train_model.py index 7ac619e..ac3991a 100644 --- a/app/services/model_trainer/train_model.py +++ b/app/services/model_trainer/train_model.py @@ -18,6 +18,100 @@ logger = logging.getLogger(__name__) +def enhance_feature_engineering(df_prepared): + """ + 향상된 특성 엔지니어링 함수 + + Args: + df_prepared: 기본 전처리가 완료된 데이터프레임 + + Returns: + df_prepared: 향상된 특성이 추가된 데이터프레임 + """ + try: + logger.debug("향상된 특성 엔지니어링 시작...") + + # 1. 카테고리별 희소성 계산 + category_counts = df_prepared['category_id'].value_counts() + total_restaurants = len(df_prepared) + category_sparsity = 1 - (category_counts / total_restaurants) + + # 2. 카테고리 다양성 특성 추가 + df_prepared['category_diversity_score'] = df_prepared['category_id'].map( + category_sparsity.to_dict() + ).fillna(0) + + # 3. 카테고리 인기도 측정 + category_avg_rating = df_prepared.groupby('category_id')['score'].mean() + category_avg_reviews = df_prepared.groupby('category_id')['review'].mean() + + df_prepared['category_avg_rating'] = df_prepared['category_id'].map( + category_avg_rating.to_dict() + ).fillna(df_prepared['score'].mean()) + + df_prepared['category_avg_reviews'] = df_prepared['category_id'].map( + category_avg_reviews.to_dict() + ).fillna(df_prepared['review'].mean()) + + # 4. 식당 인기도 점수 + df_prepared['popularity_score'] = ( + df_prepared['score'] * 0.6 + + np.log1p(df_prepared['review']) * 0.4 + ) + + # 5. 리뷰 기반 상호작용 강도 - 가중치 최적화 + df_prepared['interaction_intensity'] = ( + df_prepared['review'] * 0.4 + + df_prepared['duration_hours'] * 0.25 + + np.log1p(df_prepared['review']) * 0.35 + ) + + # 6. 식당 대비 카테고리 성능 (식당이 해당 카테고리 내에서 얼마나 좋은지) + df_prepared['rating_vs_category'] = df_prepared['score'] - df_prepared['category_avg_rating'] + df_prepared['reviews_vs_category'] = df_prepared['review'] / (df_prepared['category_avg_reviews'] + 1) + + # 7. 
복합 특성들 + # 복합 평점: 카테고리 다양성과 평점 결합 + df_prepared['composite_rating'] = ( + df_prepared['score'] * 0.7 + + df_prepared['category_diversity_score'] * 0.2 + + df_prepared['rating_vs_category'] * 0.1 + ) + + # 복합 인기도: 리뷰 수와 운영 시간 결합 + df_prepared['engagement_score'] = ( + np.log1p(df_prepared['review']) * 0.7 + + (df_prepared['duration_hours'] / 24) * 0.3 + ) + + # 8. 식당 특성과 카테고리 인기도의 상호작용 + df_prepared['category_quality_interaction'] = ( + df_prepared['score'] * df_prepared['category_avg_rating'] + ) + + # 9. 리뷰 밀도 (시간당 리뷰 수) + df_prepared['review_density'] = df_prepared['review'] / (df_prepared['duration_hours'] + 1) + + # 10. 편의 시설 복합 점수 + convenience_cols = [col for col in df_prepared.columns if col.startswith('conv_')] + if convenience_cols: + df_prepared['convenience_score'] = df_prepared[convenience_cols].sum(axis=1) + + # 11. 리뷰 영향력 비율 + global_avg_rating = df_prepared['score'].mean() + df_prepared['bayesian_rating'] = ( + (df_prepared['review'] * df_prepared['score'] + 10 * global_avg_rating) / + (df_prepared['review'] + 10) + ) + + logger.debug("향상된 특성 엔지니어링 완료") + return df_prepared + + except Exception as e: + logger.error(f"향상된 특성 엔지니어링 중 오류 발생: {e}", exc_info=True) + # 오류가 발생해도 원본 데이터프레임 반환 + return df_prepared + def train_model(df_final): """ 전처리된 DataFrame을 입력받아 모델 학습과 평가, 앙상블 모델 학습까지 수행합니다. @@ -62,24 +156,40 @@ def extract_hours_diff(time_str): df_prepared['duration_hours'] = df_prepared['duration_hours'].apply(extract_hours_diff) - # 이제 숫자 연산 수행 + # 기본 피처 생성 df_prepared['log_review'] = np.log(df_prepared['review'] + 1) df_prepared['review_duration'] = df_prepared['review'] * df_prepared['duration_hours'] - logger.debug("Feature 계산이 완료되었습니다.") + + # 향상된 특성 엔지니어링 적용 + df_prepared = enhance_feature_engineering(df_prepared) + + logger.debug("특성 엔지니어링이 완료되었습니다.") except Exception as e: logger.error(f"train_model - 피처 생성 오류: {e}", exc_info=True) raise e - # 3. (이미 전처리 과정에서 로그 변환, 상호작용 등 피처 엔지니어링이 이루어졌다고 가정) - # 모델 학습에 사용할 피처와 타깃을 설정합니다. 
+ # 이미 전처리 과정에서 로그 변환, 상호작용 등 피처 엔지니어링이 이루어졌다고 가정 + # 3. 모델 학습에 사용할 피처와 타깃을 설정합니다. try: - model_features = ['review', 'duration_hours', 'conv_WIFI', 'conv_주차', 'caution_예약가능', 'log_review', 'review_duration'] + # 추가된 특성들을 모델 피처에 포함 + model_features = [ + 'review', 'duration_hours', 'conv_WIFI', 'conv_주차', + 'caution_예약가능', 'log_review', 'review_duration', + 'category_diversity_score', 'interaction_intensity', + 'composite_rating', 'popularity_score', 'rating_vs_category', + 'reviews_vs_category', 'engagement_score', 'category_quality_interaction', + 'review_density', 'bayesian_rating' + ] + + # 존재하는 피처만 선택 (일부 특성이 생성되지 않을 수 있음) + model_features = [f for f in model_features if f in df_prepared.columns] + target = 'score' X = df_prepared[model_features] y = df_prepared[target] - logger.debug(f"Featuer 및 타깃 설정이 완료되었습니다: {model_features}") + logger.debug(f"Feature 및 타깃 설정이 완료되었습니다: {model_features}") except Exception as e: - logger.error(f"train_model - Featuer 및 타깃 설정 오류: {e}", exc_info=True) + logger.error(f"train_model - Feature 및 타깃 설정 오류: {e}", exc_info=True) raise e # 4. 
특성 스케일링 및 데이터 분할 @@ -128,6 +238,4 @@ def extract_hours_diff(time_str): "stacking_reg": stacking_reg, "model_features": model_features, "df_model": df_prepared - } - - + } \ No newline at end of file diff --git a/catboost_info/catboost_training.json b/catboost_info/catboost_training.json index 1d027b8..ea59c2d 100644 --- a/catboost_info/catboost_training.json +++ b/catboost_info/catboost_training.json @@ -1,104 +1,104 @@ { "meta":{"test_sets":[],"test_metrics":[],"learn_metrics":[{"best_value":"Min","name":"RMSE"}],"launch_mode":"Train","parameters":"","iteration_count":100,"learn_sets":["learn"],"name":"experiment"}, "iterations":[ -{"learn":[1.324011774],"iteration":0,"passed_time":0.0003156691663,"remaining_time":0.03125124746}, -{"learn":[1.318770956],"iteration":1,"passed_time":0.0005841296254,"remaining_time":0.02862235164}, -{"learn":[1.314911341],"iteration":2,"passed_time":0.0008645901795,"remaining_time":0.02795508247}, -{"learn":[1.310107525],"iteration":3,"passed_time":0.001136175663,"remaining_time":0.02726821592}, -{"learn":[1.308045214],"iteration":4,"passed_time":0.001408469486,"remaining_time":0.02676092024}, -{"learn":[1.303543505],"iteration":5,"passed_time":0.001724013651,"remaining_time":0.02700954721}, -{"learn":[1.29858226],"iteration":6,"passed_time":0.002018182647,"remaining_time":0.02681299803}, -{"learn":[1.295985705],"iteration":7,"passed_time":0.002328310103,"remaining_time":0.02677556619}, -{"learn":[1.290906783],"iteration":8,"passed_time":0.00261456237,"remaining_time":0.02643613063}, -{"learn":[1.289213614],"iteration":9,"passed_time":0.002927314846,"remaining_time":0.02634583362}, -{"learn":[1.284789474],"iteration":10,"passed_time":0.003275734272,"remaining_time":0.0265036682}, -{"learn":[1.282708152],"iteration":11,"passed_time":0.003557694838,"remaining_time":0.02608976214}, -{"learn":[1.277639874],"iteration":12,"passed_time":0.003853780516,"remaining_time":0.02579068499}, 
-{"learn":[1.275459893],"iteration":13,"passed_time":0.004104990838,"remaining_time":0.02521637229}, -{"learn":[1.272543606],"iteration":14,"passed_time":0.004427785061,"remaining_time":0.02509078201}, -{"learn":[1.270517239],"iteration":15,"passed_time":0.004726120757,"remaining_time":0.02481213397}, -{"learn":[1.269278443],"iteration":16,"passed_time":0.005064373435,"remaining_time":0.02472605854}, -{"learn":[1.267240011],"iteration":17,"passed_time":0.00537812592,"remaining_time":0.02450035141}, -{"learn":[1.265042127],"iteration":18,"passed_time":0.00578946251,"remaining_time":0.02468139281}, -{"learn":[1.261067572],"iteration":19,"passed_time":0.00612617351,"remaining_time":0.02450469404}, -{"learn":[1.25911975],"iteration":20,"passed_time":0.006357425341,"remaining_time":0.02391602866}, -{"learn":[1.258276958],"iteration":21,"passed_time":0.006695053014,"remaining_time":0.02373700614}, -{"learn":[1.254438859],"iteration":22,"passed_time":0.007047305803,"remaining_time":0.02359315421}, -{"learn":[1.252924764],"iteration":23,"passed_time":0.007409100335,"remaining_time":0.02346215106}, -{"learn":[1.251282056],"iteration":24,"passed_time":0.007788770008,"remaining_time":0.02336631002}, -{"learn":[1.249582714],"iteration":25,"passed_time":0.008070813908,"remaining_time":0.02297077805}, -{"learn":[1.248499677],"iteration":26,"passed_time":0.00841998334,"remaining_time":0.02276514014}, -{"learn":[1.245991818],"iteration":27,"passed_time":0.008713777333,"remaining_time":0.022406856}, -{"learn":[1.244752658],"iteration":28,"passed_time":0.009056113377,"remaining_time":0.02217186378}, -{"learn":[1.243751963],"iteration":29,"passed_time":0.009342157308,"remaining_time":0.02179836705}, -{"learn":[1.241413151],"iteration":30,"passed_time":0.009744243826,"remaining_time":0.02168880077}, -{"learn":[1.240646738],"iteration":31,"passed_time":0.009961495546,"remaining_time":0.02116817803}, 
-{"learn":[1.23939423],"iteration":32,"passed_time":0.01025116451,"remaining_time":0.02081297036}, -{"learn":[1.237386083],"iteration":33,"passed_time":0.01054266681,"remaining_time":0.02046517676}, -{"learn":[1.236070362],"iteration":34,"passed_time":0.0109619618,"remaining_time":0.02035792906}, -{"learn":[1.234747547],"iteration":35,"passed_time":0.01133700644,"remaining_time":0.02015467811}, -{"learn":[1.234073397],"iteration":36,"passed_time":0.01159509181,"remaining_time":0.01974299417}, -{"learn":[1.233597872],"iteration":37,"passed_time":0.01198376156,"remaining_time":0.01955245307}, -{"learn":[1.233102534],"iteration":38,"passed_time":0.01230684745,"remaining_time":0.01924917165}, -{"learn":[1.232415025],"iteration":39,"passed_time":0.01259622474,"remaining_time":0.01889433711}, -{"learn":[1.231473234],"iteration":40,"passed_time":0.01285226844,"remaining_time":0.01849472775}, -{"learn":[1.229097391],"iteration":41,"passed_time":0.01314943746,"remaining_time":0.01815874696}, -{"learn":[1.228026433],"iteration":42,"passed_time":0.01343098135,"remaining_time":0.017803859}, -{"learn":[1.227075197],"iteration":43,"passed_time":0.01377973411,"remaining_time":0.01753784342}, -{"learn":[1.225780409],"iteration":44,"passed_time":0.01407469478,"remaining_time":0.01720240473}, -{"learn":[1.223620062],"iteration":45,"passed_time":0.01441244746,"remaining_time":0.01691896006}, -{"learn":[1.220786981],"iteration":46,"passed_time":0.01471240817,"remaining_time":0.01659058793}, -{"learn":[1.219726538],"iteration":47,"passed_time":0.01507036933,"remaining_time":0.01632623344}, -{"learn":[1.217873408],"iteration":48,"passed_time":0.01539220522,"remaining_time":0.01602045849}, -{"learn":[1.216068233],"iteration":49,"passed_time":0.01576716652,"remaining_time":0.01576716652}, -{"learn":[1.215757099],"iteration":50,"passed_time":0.01611721096,"remaining_time":0.01548516347}, -{"learn":[1.214778519],"iteration":51,"passed_time":0.01639875485,"remaining_time":0.01513731217}, 
-{"learn":[1.213082793],"iteration":52,"passed_time":0.01672500744,"remaining_time":0.01483161037}, -{"learn":[1.211568954],"iteration":53,"passed_time":0.01704196828,"remaining_time":0.01451723224}, -{"learn":[1.210382972],"iteration":54,"passed_time":0.01732117882,"remaining_time":0.01417187358}, -{"learn":[1.209873654],"iteration":55,"passed_time":0.01772214033,"remaining_time":0.01392453883}, -{"learn":[1.208082599],"iteration":56,"passed_time":0.01801064262,"remaining_time":0.01358697601}, -{"learn":[1.206636397],"iteration":57,"passed_time":0.01834822862,"remaining_time":0.01328664831}, -{"learn":[1.206520698],"iteration":58,"passed_time":0.01855931363,"remaining_time":0.01289715015}, -{"learn":[1.205728868],"iteration":59,"passed_time":0.01889173293,"remaining_time":0.01259448862}, -{"learn":[1.204050605],"iteration":60,"passed_time":0.01911448469,"remaining_time":0.01222073611}, -{"learn":[1.203257493],"iteration":61,"passed_time":0.01943373722,"remaining_time":0.01191100023}, -{"learn":[1.202396195],"iteration":62,"passed_time":0.01968336419,"remaining_time":0.01156007103}, -{"learn":[1.202033885],"iteration":63,"passed_time":0.01996394975,"remaining_time":0.01122972173}, -{"learn":[1.199952786],"iteration":64,"passed_time":0.02024957701,"remaining_time":0.01090361839}, -{"learn":[1.19783942],"iteration":65,"passed_time":0.02054374601,"remaining_time":0.01058314188}, -{"learn":[1.196319706],"iteration":66,"passed_time":0.02084899842,"remaining_time":0.01026890967}, -{"learn":[1.19468497],"iteration":67,"passed_time":0.02119933453,"remaining_time":0.009976157427}, -{"learn":[1.192566538],"iteration":68,"passed_time":0.02150842031,"remaining_time":0.009663203329}, -{"learn":[1.190202735],"iteration":69,"passed_time":0.02180404765,"remaining_time":0.009344591851}, -{"learn":[1.189682748],"iteration":70,"passed_time":0.02233030182,"remaining_time":0.009120827504}, 
-{"learn":[1.188012636],"iteration":71,"passed_time":0.02271763822,"remaining_time":0.008834637086}, -{"learn":[1.185914252],"iteration":72,"passed_time":0.0230816411,"remaining_time":0.00853704534}, -{"learn":[1.182600258],"iteration":73,"passed_time":0.02338201848,"remaining_time":0.008215303791}, -{"learn":[1.181108745],"iteration":74,"passed_time":0.02377660494,"remaining_time":0.00792553498}, -{"learn":[1.180217364],"iteration":75,"passed_time":0.02406423222,"remaining_time":0.007599231226}, -{"learn":[1.179438417],"iteration":76,"passed_time":0.02434298442,"remaining_time":0.007271281062}, -{"learn":[1.178125443],"iteration":77,"passed_time":0.02461852827,"remaining_time":0.006943687462}, -{"learn":[1.177448465],"iteration":78,"passed_time":0.02494382252,"remaining_time":0.006630636365}, -{"learn":[1.176569161],"iteration":79,"passed_time":0.0253042837,"remaining_time":0.006326070926}, -{"learn":[1.175583817],"iteration":80,"passed_time":0.02569699515,"remaining_time":0.006027690219}, -{"learn":[1.173624315],"iteration":81,"passed_time":0.02601645601,"remaining_time":0.005710929368}, -{"learn":[1.171706155],"iteration":82,"passed_time":0.02628099977,"remaining_time":0.005382855375}, -{"learn":[1.17146971],"iteration":83,"passed_time":0.02651687664,"remaining_time":0.005050833645}, -{"learn":[1.171243396],"iteration":84,"passed_time":0.02680742061,"remaining_time":0.004730721283}, -{"learn":[1.170032635],"iteration":85,"passed_time":0.0271057563,"remaining_time":0.004412564979}, -{"learn":[1.169602335],"iteration":86,"passed_time":0.02735659162,"remaining_time":0.004087766564}, -{"learn":[1.168072114],"iteration":87,"passed_time":0.02767821917,"remaining_time":0.003774302614}, -{"learn":[1.165599366],"iteration":88,"passed_time":0.02798942997,"remaining_time":0.003459367748}, -{"learn":[1.163380104],"iteration":89,"passed_time":0.02832626597,"remaining_time":0.003147362885}, 
-{"learn":[1.162831988],"iteration":90,"passed_time":0.02868643548,"remaining_time":0.002837119993}, -{"learn":[1.160561351],"iteration":91,"passed_time":0.02899589627,"remaining_time":0.002521382284}, -{"learn":[1.15833905],"iteration":92,"passed_time":0.02925627333,"remaining_time":0.002202085089}, -{"learn":[1.15798798],"iteration":93,"passed_time":0.02953198385,"remaining_time":0.001885020246}, -{"learn":[1.155140531],"iteration":94,"passed_time":0.02986669483,"remaining_time":0.001571931307}, -{"learn":[1.153470092],"iteration":95,"passed_time":0.03017623895,"remaining_time":0.001257343289}, -{"learn":[1.151745456],"iteration":96,"passed_time":0.0305324501,"remaining_time":0.0009443025805}, -{"learn":[1.149529507],"iteration":97,"passed_time":0.03079882721,"remaining_time":0.0006285474941}, -{"learn":[1.148420976],"iteration":98,"passed_time":0.03103949578,"remaining_time":0.0003135302604}, -{"learn":[1.146656423],"iteration":99,"passed_time":0.03130078952,"remaining_time":0} +{"learn":[1.650496921],"iteration":0,"passed_time":0.000585375,"remaining_time":0.057952125}, +{"learn":[1.495186773],"iteration":1,"passed_time":0.001094375,"remaining_time":0.053624375}, +{"learn":[1.358413987],"iteration":2,"passed_time":0.0017725,"remaining_time":0.05731083333}, +{"learn":[1.233487253],"iteration":3,"passed_time":0.00236275,"remaining_time":0.056706}, +{"learn":[1.124112687],"iteration":4,"passed_time":0.00285175,"remaining_time":0.05418325}, +{"learn":[1.02381614],"iteration":5,"passed_time":0.003362208333,"remaining_time":0.05267459722}, +{"learn":[0.9351898961],"iteration":6,"passed_time":0.0038975,"remaining_time":0.05178107143}, +{"learn":[0.8496860633],"iteration":7,"passed_time":0.0044785,"remaining_time":0.05150275}, +{"learn":[0.7715666554],"iteration":8,"passed_time":0.004971291667,"remaining_time":0.05026528241}, +{"learn":[0.7070868453],"iteration":9,"passed_time":0.005509708333,"remaining_time":0.049587375}, 
+{"learn":[0.6413852062],"iteration":10,"passed_time":0.006080166667,"remaining_time":0.04919407576}, +{"learn":[0.5828741401],"iteration":11,"passed_time":0.006761208333,"remaining_time":0.04958219444}, +{"learn":[0.5352337907],"iteration":12,"passed_time":0.007279291667,"remaining_time":0.04871525962}, +{"learn":[0.4876699391],"iteration":13,"passed_time":0.007864625,"remaining_time":0.04831126786}, +{"learn":[0.4445052327],"iteration":14,"passed_time":0.008414291667,"remaining_time":0.04768098611}, +{"learn":[0.4055076559],"iteration":15,"passed_time":0.009047541667,"remaining_time":0.04749959375}, +{"learn":[0.3693713888],"iteration":16,"passed_time":0.009585375,"remaining_time":0.04679918382}, +{"learn":[0.3380597355],"iteration":17,"passed_time":0.01011441667,"remaining_time":0.04607678704}, +{"learn":[0.3078712775],"iteration":18,"passed_time":0.01064295833,"remaining_time":0.04537261184}, +{"learn":[0.2800964107],"iteration":19,"passed_time":0.01206854167,"remaining_time":0.04827416667}, +{"learn":[0.2549881315],"iteration":20,"passed_time":0.012693625,"remaining_time":0.04775220833}, +{"learn":[0.2342813964],"iteration":21,"passed_time":0.0132385,"remaining_time":0.0469365}, +{"learn":[0.2161958694],"iteration":22,"passed_time":0.01381395833,"remaining_time":0.04624673007}, +{"learn":[0.2006357872],"iteration":23,"passed_time":0.01427745833,"remaining_time":0.04521195139}, +{"learn":[0.1852029236],"iteration":24,"passed_time":0.014818,"remaining_time":0.044454}, +{"learn":[0.1703962701],"iteration":25,"passed_time":0.01549825,"remaining_time":0.04411040385}, +{"learn":[0.1578703417],"iteration":26,"passed_time":0.01603504167,"remaining_time":0.04335400154}, +{"learn":[0.1476085942],"iteration":27,"passed_time":0.01638329167,"remaining_time":0.04212846429}, +{"learn":[0.1371151507],"iteration":28,"passed_time":0.01691591667,"remaining_time":0.04141483046}, +{"learn":[0.1284097114],"iteration":29,"passed_time":0.01745375,"remaining_time":0.04072541667}, 
+{"learn":[0.1216206464],"iteration":30,"passed_time":0.01790954167,"remaining_time":0.03986317339}, +{"learn":[0.1144941719],"iteration":31,"passed_time":0.01850208333,"remaining_time":0.03931692708}, +{"learn":[0.1087080675],"iteration":32,"passed_time":0.01908133333,"remaining_time":0.03874088889}, +{"learn":[0.1033776181],"iteration":33,"passed_time":0.01968458333,"remaining_time":0.03821125}, +{"learn":[0.09800300117],"iteration":34,"passed_time":0.020254625,"remaining_time":0.03761573214}, +{"learn":[0.0937472692],"iteration":35,"passed_time":0.020939875,"remaining_time":0.03722644444}, +{"learn":[0.09010022625],"iteration":36,"passed_time":0.02147908333,"remaining_time":0.03657249324}, +{"learn":[0.0867498541],"iteration":37,"passed_time":0.02197175,"remaining_time":0.03584864474}, +{"learn":[0.08358724771],"iteration":38,"passed_time":0.02317225,"remaining_time":0.03624377564}, +{"learn":[0.08079846693],"iteration":39,"passed_time":0.023688,"remaining_time":0.035532}, +{"learn":[0.07863304944],"iteration":40,"passed_time":0.024101875,"remaining_time":0.03468318598}, +{"learn":[0.07578669984],"iteration":41,"passed_time":0.02462454167,"remaining_time":0.03400531944}, +{"learn":[0.07407045015],"iteration":42,"passed_time":0.02519775,"remaining_time":0.0334016686}, +{"learn":[0.07275321707],"iteration":43,"passed_time":0.025731125,"remaining_time":0.03274870455}, +{"learn":[0.07039885397],"iteration":44,"passed_time":0.02626691667,"remaining_time":0.03210400926}, +{"learn":[0.06940318451],"iteration":45,"passed_time":0.02678016667,"remaining_time":0.03143758696}, +{"learn":[0.06769944378],"iteration":46,"passed_time":0.02729791667,"remaining_time":0.03078275709}, +{"learn":[0.06616467555],"iteration":47,"passed_time":0.02784183333,"remaining_time":0.03016198611}, +{"learn":[0.06460918412],"iteration":48,"passed_time":0.02847808333,"remaining_time":0.02964045408}, 
+{"learn":[0.06349674561],"iteration":49,"passed_time":0.02898320833,"remaining_time":0.02898320833}, +{"learn":[0.06229578978],"iteration":50,"passed_time":0.02960495833,"remaining_time":0.02844397958}, +{"learn":[0.06111169092],"iteration":51,"passed_time":0.0301135,"remaining_time":0.02779707692}, +{"learn":[0.05980226581],"iteration":52,"passed_time":0.030669625,"remaining_time":0.02719759198}, +{"learn":[0.05873204242],"iteration":53,"passed_time":0.03112779167,"remaining_time":0.02651626698}, +{"learn":[0.05759644413],"iteration":54,"passed_time":0.03160404167,"remaining_time":0.02585785227}, +{"learn":[0.05669917182],"iteration":55,"passed_time":0.03217691667,"remaining_time":0.0252818631}, +{"learn":[0.05581603591],"iteration":56,"passed_time":0.03269308333,"remaining_time":0.02466320322}, +{"learn":[0.05476807591],"iteration":57,"passed_time":0.03330170833,"remaining_time":0.02411503017}, +{"learn":[0.05415835748],"iteration":58,"passed_time":0.033879875,"remaining_time":0.02354364195}, +{"learn":[0.05366246959],"iteration":59,"passed_time":0.03442254167,"remaining_time":0.02294836111}, +{"learn":[0.05299984626],"iteration":60,"passed_time":0.03494520833,"remaining_time":0.02234201844}, +{"learn":[0.05238463406],"iteration":61,"passed_time":0.03558795833,"remaining_time":0.02181197446}, +{"learn":[0.05163428672],"iteration":62,"passed_time":0.03619683333,"remaining_time":0.02125845767}, +{"learn":[0.05093795504],"iteration":63,"passed_time":0.03680295833,"remaining_time":0.02070166406}, +{"learn":[0.05033265541],"iteration":64,"passed_time":0.03738604167,"remaining_time":0.02013094551}, +{"learn":[0.0495926567],"iteration":65,"passed_time":0.03796895833,"remaining_time":0.01955976641}, +{"learn":[0.04904267037],"iteration":66,"passed_time":0.038538125,"remaining_time":0.01898146455}, +{"learn":[0.04853541042],"iteration":67,"passed_time":0.039066,"remaining_time":0.018384}, 
+{"learn":[0.04836143154],"iteration":68,"passed_time":0.039558875,"remaining_time":0.0177728279}, +{"learn":[0.04783057962],"iteration":69,"passed_time":0.04013070833,"remaining_time":0.017198875}, +{"learn":[0.04764459246],"iteration":70,"passed_time":0.04063929167,"remaining_time":0.0165991473}, +{"learn":[0.04730315287],"iteration":71,"passed_time":0.04114604167,"remaining_time":0.01600123843}, +{"learn":[0.04693227517],"iteration":72,"passed_time":0.04172491667,"remaining_time":0.01543250342}, +{"learn":[0.04634447701],"iteration":73,"passed_time":0.04236679167,"remaining_time":0.0148856295}, +{"learn":[0.04565981491],"iteration":74,"passed_time":0.04297770833,"remaining_time":0.01432590278}, +{"learn":[0.04528190729],"iteration":75,"passed_time":0.04346216667,"remaining_time":0.01372489474}, +{"learn":[0.04486598408],"iteration":76,"passed_time":0.04404758333,"remaining_time":0.01315707035}, +{"learn":[0.04442669194],"iteration":77,"passed_time":0.04460308333,"remaining_time":0.01258035684}, +{"learn":[0.04395922486],"iteration":78,"passed_time":0.045198875,"remaining_time":0.01201489082}, +{"learn":[0.04369273201],"iteration":79,"passed_time":0.045768,"remaining_time":0.011442}, +{"learn":[0.04348321459],"iteration":80,"passed_time":0.04633795833,"remaining_time":0.01086939763}, +{"learn":[0.04321093871],"iteration":81,"passed_time":0.04691329167,"remaining_time":0.01029803963}, +{"learn":[0.04295930351],"iteration":82,"passed_time":0.04742016667,"remaining_time":0.009712564257}, +{"learn":[0.04256577847],"iteration":83,"passed_time":0.04794904167,"remaining_time":0.009133150794}, +{"learn":[0.04231569496],"iteration":84,"passed_time":0.04841616667,"remaining_time":0.008544029412}, +{"learn":[0.04202583115],"iteration":85,"passed_time":0.04898829167,"remaining_time":0.007974838178}, +{"learn":[0.04139549667],"iteration":86,"passed_time":0.04944875,"remaining_time":0.007388893678}, 
+{"learn":[0.04134324962],"iteration":87,"passed_time":0.049964625,"remaining_time":0.006813357955}, +{"learn":[0.04083942929],"iteration":88,"passed_time":0.050576375,"remaining_time":0.00625101264}, +{"learn":[0.04077225011],"iteration":89,"passed_time":0.05111304167,"remaining_time":0.005679226852}, +{"learn":[0.04040815038],"iteration":90,"passed_time":0.051729,"remaining_time":0.005116054945}, +{"learn":[0.04003502673],"iteration":91,"passed_time":0.05231016667,"remaining_time":0.004548710145}, +{"learn":[0.03974815226],"iteration":92,"passed_time":0.05283333333,"remaining_time":0.003976702509}, +{"learn":[0.03955683456],"iteration":93,"passed_time":0.05334316667,"remaining_time":0.003404882979}, +{"learn":[0.0393602484],"iteration":94,"passed_time":0.05451804167,"remaining_time":0.002869370614}, +{"learn":[0.03901075876],"iteration":95,"passed_time":0.05502095833,"remaining_time":0.002292539931}, +{"learn":[0.038886721],"iteration":96,"passed_time":0.05560320833,"remaining_time":0.001719686856}, +{"learn":[0.03857550898],"iteration":97,"passed_time":0.05610129167,"remaining_time":0.00114492432}, +{"learn":[0.03832786438],"iteration":98,"passed_time":0.05657883333,"remaining_time":0.000571503367}, +{"learn":[0.03811578664],"iteration":99,"passed_time":0.057209375,"remaining_time":0} ]} \ No newline at end of file diff --git a/catboost_info/learn/events.out.tfevents b/catboost_info/learn/events.out.tfevents index a0fd12f..9ee1256 100644 Binary files a/catboost_info/learn/events.out.tfevents and b/catboost_info/learn/events.out.tfevents differ diff --git a/catboost_info/learn_error.tsv b/catboost_info/learn_error.tsv index 4735f22..30b8d55 100644 --- a/catboost_info/learn_error.tsv +++ b/catboost_info/learn_error.tsv @@ -1,101 +1,101 @@ iter RMSE -0 1.324011774 -1 1.318770956 -2 1.314911341 -3 1.310107525 -4 1.308045214 -5 1.303543505 -6 1.29858226 -7 1.295985705 -8 1.290906783 -9 1.289213614 -10 1.284789474 -11 1.282708152 -12 1.277639874 -13 1.275459893 -14 
1.272543606 -15 1.270517239 -16 1.269278443 -17 1.267240011 -18 1.265042127 -19 1.261067572 -20 1.25911975 -21 1.258276958 -22 1.254438859 -23 1.252924764 -24 1.251282056 -25 1.249582714 -26 1.248499677 -27 1.245991818 -28 1.244752658 -29 1.243751963 -30 1.241413151 -31 1.240646738 -32 1.23939423 -33 1.237386083 -34 1.236070362 -35 1.234747547 -36 1.234073397 -37 1.233597872 -38 1.233102534 -39 1.232415025 -40 1.231473234 -41 1.229097391 -42 1.228026433 -43 1.227075197 -44 1.225780409 -45 1.223620062 -46 1.220786981 -47 1.219726538 -48 1.217873408 -49 1.216068233 -50 1.215757099 -51 1.214778519 -52 1.213082793 -53 1.211568954 -54 1.210382972 -55 1.209873654 -56 1.208082599 -57 1.206636397 -58 1.206520698 -59 1.205728868 -60 1.204050605 -61 1.203257493 -62 1.202396195 -63 1.202033885 -64 1.199952786 -65 1.19783942 -66 1.196319706 -67 1.19468497 -68 1.192566538 -69 1.190202735 -70 1.189682748 -71 1.188012636 -72 1.185914252 -73 1.182600258 -74 1.181108745 -75 1.180217364 -76 1.179438417 -77 1.178125443 -78 1.177448465 -79 1.176569161 -80 1.175583817 -81 1.173624315 -82 1.171706155 -83 1.17146971 -84 1.171243396 -85 1.170032635 -86 1.169602335 -87 1.168072114 -88 1.165599366 -89 1.163380104 -90 1.162831988 -91 1.160561351 -92 1.15833905 -93 1.15798798 -94 1.155140531 -95 1.153470092 -96 1.151745456 -97 1.149529507 -98 1.148420976 -99 1.146656423 +0 1.650496921 +1 1.495186773 +2 1.358413987 +3 1.233487253 +4 1.124112687 +5 1.02381614 +6 0.9351898961 +7 0.8496860633 +8 0.7715666554 +9 0.7070868453 +10 0.6413852062 +11 0.5828741401 +12 0.5352337907 +13 0.4876699391 +14 0.4445052327 +15 0.4055076559 +16 0.3693713888 +17 0.3380597355 +18 0.3078712775 +19 0.2800964107 +20 0.2549881315 +21 0.2342813964 +22 0.2161958694 +23 0.2006357872 +24 0.1852029236 +25 0.1703962701 +26 0.1578703417 +27 0.1476085942 +28 0.1371151507 +29 0.1284097114 +30 0.1216206464 +31 0.1144941719 +32 0.1087080675 +33 0.1033776181 +34 0.09800300117 +35 0.0937472692 +36 0.09010022625 +37 0.0867498541 +38 
0.08358724771 +39 0.08079846693 +40 0.07863304944 +41 0.07578669984 +42 0.07407045015 +43 0.07275321707 +44 0.07039885397 +45 0.06940318451 +46 0.06769944378 +47 0.06616467555 +48 0.06460918412 +49 0.06349674561 +50 0.06229578978 +51 0.06111169092 +52 0.05980226581 +53 0.05873204242 +54 0.05759644413 +55 0.05669917182 +56 0.05581603591 +57 0.05476807591 +58 0.05415835748 +59 0.05366246959 +60 0.05299984626 +61 0.05238463406 +62 0.05163428672 +63 0.05093795504 +64 0.05033265541 +65 0.0495926567 +66 0.04904267037 +67 0.04853541042 +68 0.04836143154 +69 0.04783057962 +70 0.04764459246 +71 0.04730315287 +72 0.04693227517 +73 0.04634447701 +74 0.04565981491 +75 0.04528190729 +76 0.04486598408 +77 0.04442669194 +78 0.04395922486 +79 0.04369273201 +80 0.04348321459 +81 0.04321093871 +82 0.04295930351 +83 0.04256577847 +84 0.04231569496 +85 0.04202583115 +86 0.04139549667 +87 0.04134324962 +88 0.04083942929 +89 0.04077225011 +90 0.04040815038 +91 0.04003502673 +92 0.03974815226 +93 0.03955683456 +94 0.0393602484 +95 0.03901075876 +96 0.038886721 +97 0.03857550898 +98 0.03832786438 +99 0.03811578664 diff --git a/catboost_info/time_left.tsv b/catboost_info/time_left.tsv index 10778e5..fe804e3 100644 --- a/catboost_info/time_left.tsv +++ b/catboost_info/time_left.tsv @@ -1,101 +1,101 @@ iter Passed Remaining -0 0 31 -1 0 28 -2 0 27 -3 1 27 -4 1 26 -5 1 27 -6 2 26 -7 2 26 -8 2 26 -9 2 26 -10 3 26 -11 3 26 -12 3 25 -13 4 25 -14 4 25 -15 4 24 -16 5 24 -17 5 24 -18 5 24 -19 6 24 -20 6 23 -21 6 23 -22 7 23 -23 7 23 -24 7 23 -25 8 22 -26 8 22 -27 8 22 -28 9 22 -29 9 21 -30 9 21 -31 9 21 -32 10 20 -33 10 20 -34 10 20 -35 11 20 -36 11 19 -37 11 19 -38 12 19 -39 12 18 -40 12 18 -41 13 18 -42 13 17 -43 13 17 -44 14 17 -45 14 16 -46 14 16 -47 15 16 -48 15 16 -49 15 15 -50 16 15 -51 16 15 -52 16 14 -53 17 14 -54 17 14 -55 17 13 -56 18 13 -57 18 13 -58 18 12 -59 18 12 -60 19 12 -61 19 11 -62 19 11 -63 19 11 -64 20 10 -65 20 10 -66 20 10 -67 21 9 -68 21 9 -69 21 9 -70 22 9 -71 22 8 -72 23 
8 -73 23 8 -74 23 7 -75 24 7 -76 24 7 -77 24 6 -78 24 6 -79 25 6 -80 25 6 -81 26 5 -82 26 5 -83 26 5 -84 26 4 -85 27 4 -86 27 4 -87 27 3 -88 27 3 -89 28 3 -90 28 2 -91 28 2 -92 29 2 -93 29 1 -94 29 1 -95 30 1 -96 30 0 -97 30 0 -98 31 0 -99 31 0 +0 0 57 +1 1 53 +2 1 57 +3 2 56 +4 2 54 +5 3 52 +6 3 51 +7 4 51 +8 4 50 +9 5 49 +10 6 49 +11 6 49 +12 7 48 +13 7 48 +14 8 47 +15 9 47 +16 9 46 +17 10 46 +18 10 45 +19 12 48 +20 12 47 +21 13 46 +22 13 46 +23 14 45 +24 14 44 +25 15 44 +26 16 43 +27 16 42 +28 16 41 +29 17 40 +30 17 39 +31 18 39 +32 19 38 +33 19 38 +34 20 37 +35 20 37 +36 21 36 +37 21 35 +38 23 36 +39 23 35 +40 24 34 +41 24 34 +42 25 33 +43 25 32 +44 26 32 +45 26 31 +46 27 30 +47 27 30 +48 28 29 +49 28 28 +50 29 28 +51 30 27 +52 30 27 +53 31 26 +54 31 25 +55 32 25 +56 32 24 +57 33 24 +58 33 23 +59 34 22 +60 34 22 +61 35 21 +62 36 21 +63 36 20 +64 37 20 +65 37 19 +66 38 18 +67 39 18 +68 39 17 +69 40 17 +70 40 16 +71 41 16 +72 41 15 +73 42 14 +74 42 14 +75 43 13 +76 44 13 +77 44 12 +78 45 12 +79 45 11 +80 46 10 +81 46 10 +82 47 9 +83 47 9 +84 48 8 +85 48 7 +86 49 7 +87 49 6 +88 50 6 +89 51 5 +90 51 5 +91 52 4 +92 52 3 +93 53 3 +94 54 2 +95 55 2 +96 55 1 +97 56 1 +98 56 0 +99 57 0 diff --git a/requirements.txt b/requirements.txt index 95b07bc..2bcba3b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,4 +13,5 @@ pymysql catboost==1.2.7 asyncio sshtunnel -pymongo \ No newline at end of file +pymongo +optuna \ No newline at end of file diff --git a/storage/input_json/user/preprocessed_user_features.csv b/storage/input_json/user/preprocessed_user_features.csv index 0771c14..ec4be24 100644 --- a/storage/input_json/user/preprocessed_user_features.csv +++ b/storage/input_json/user/preprocessed_user_features.csv @@ -1,1005 +1,1005 @@ 
user_id,max_price,min_price,category_1,category_2,category_3,category_4,category_5,category_6,category_7,category_8,category_9,category_10,category_11,category_12,total_reservations,completed_reservations,reservation_completion_rate,total_likes,like_to_reservation_ratio -1,286141,183646,0,1,1,0,0,0,0,0,0,0,1,0,8.0,8,1.0,6,0.75 -2,444065,170838,0,0,0,0,0,1,1,0,0,1,0,0,4.0,4,1.0,6,1.5 -3,395779,79971,0,0,1,1,0,0,1,0,0,0,0,0,2.0,2,1.0,4,2.0 -4,200419,82759,1,0,0,0,0,0,1,0,0,0,1,0,8.0,8,1.0,6,0.75 -5,341399,147700,0,0,1,0,0,1,0,0,1,0,0,0,2.0,2,1.0,4,2.0 -6,304138,159625,0,1,1,0,1,0,0,0,0,0,0,0,10.0,10,1.0,0,0.0 -7,338378,151460,1,0,0,0,0,1,0,0,0,0,0,1,2.0,2,1.0,12,6.0 -8,376584,66889,0,0,0,0,0,0,1,1,0,1,0,0,4.0,4,1.0,2,0.5 -9,393014,91158,0,0,0,1,0,0,0,0,1,0,1,0,2.0,2,1.0,4,2.0 -10,267487,170163,0,0,1,1,0,0,0,1,0,0,0,0,,0,0.0,8,5.0 -11,470524,29163,1,0,0,0,1,0,0,0,0,0,1,0,6.0,6,1.0,4,0.67 -12,446869,142652,0,1,0,0,0,0,0,1,0,0,1,0,6.0,6,1.0,4,0.67 -13,351840,185816,0,1,1,0,0,0,0,0,0,0,0,1,8.0,8,1.0,6,0.75 -14,332509,38329,0,0,0,0,0,0,1,1,1,0,0,0,2.0,2,1.0,6,3.0 -15,266941,59156,0,1,0,0,0,0,1,0,0,0,0,1,12.0,12,1.0,10,0.83 -16,337197,194701,1,1,0,0,0,0,0,0,1,0,0,0,2.0,2,1.0,8,4.0 -17,481858,85248,0,0,1,1,0,0,0,0,0,1,0,0,2.0,2,1.0,6,3.0 -18,282926,36257,1,0,0,0,1,0,0,0,0,0,1,0,2.0,2,1.0,10,5.0 -19,327603,197901,1,0,0,0,0,0,0,0,1,0,0,1,8.0,8,1.0,4,0.5 -20,292092,166283,0,1,0,0,0,0,0,1,0,1,0,0,10.0,10,1.0,8,0.8 -21,353557,28934,0,0,1,0,0,0,0,1,0,0,0,1,2.0,2,1.0,2,1.0 -22,319083,167451,0,1,1,0,0,0,0,0,0,1,0,0,6.0,6,1.0,8,1.33 -23,295629,72379,0,0,1,0,0,0,0,1,0,0,0,1,2.0,2,1.0,8,4.0 -24,279131,95305,0,0,0,1,0,0,0,0,0,1,0,1,4.0,4,1.0,16,4.0 -25,233947,106565,0,0,1,0,0,1,0,0,0,0,0,1,6.0,6,1.0,4,0.67 -26,288981,180902,1,0,0,0,0,1,0,0,0,1,0,0,2.0,2,1.0,6,3.0 -27,254815,187129,1,1,0,0,0,0,0,0,1,0,0,0,2.0,2,1.0,8,4.0 -28,403490,169312,0,0,0,1,0,0,1,0,0,0,0,1,10.0,10,1.0,26,2.6 -29,403053,151914,0,0,0,0,0,0,1,0,1,0,0,1,4.0,4,1.0,6,1.5 
-30,227881,133174,0,0,0,1,0,1,0,0,0,0,1,0,6.0,6,1.0,4,0.67 -31,207906,88090,0,0,1,0,0,0,0,0,1,1,0,0,2.0,2,1.0,10,5.0 -32,270292,97183,0,1,1,0,0,0,0,0,0,1,0,0,4.0,4,1.0,6,1.5 -33,403876,89280,0,0,0,0,0,0,0,1,1,0,1,0,6.0,6,1.0,4,0.67 -34,312422,61746,0,0,1,1,0,0,0,0,0,1,0,0,6.0,6,1.0,8,1.33 -35,369064,109942,0,0,0,0,0,1,0,0,0,1,1,0,6.0,6,1.0,8,1.33 -36,493736,110323,0,1,1,0,0,0,1,0,0,0,0,0,2.0,2,1.0,2,1.0 -37,469280,35055,1,0,0,1,0,0,0,0,0,0,0,1,2.0,2,1.0,4,2.0 -38,484502,61187,1,0,0,0,0,0,0,0,1,1,0,0,4.0,4,1.0,8,2.0 -39,311737,164628,0,0,0,1,0,0,0,0,0,1,1,0,2.0,2,1.0,12,6.0 -40,238578,199747,0,0,0,1,0,0,1,0,1,0,0,0,8.0,8,1.0,0,0.0 -41,492507,124228,0,0,0,1,0,1,1,0,0,0,0,0,14.0,14,1.0,4,0.29 -42,274639,87199,0,1,0,1,0,0,0,0,0,0,0,1,6.0,6,1.0,8,1.33 -43,380147,35046,0,0,0,1,0,0,1,0,0,0,1,0,4.0,4,1.0,8,2.0 -44,422839,141179,0,0,0,0,0,0,0,1,1,0,1,0,4.0,4,1.0,4,1.0 -45,473654,131248,0,0,0,1,0,1,1,0,0,0,0,0,4.0,4,1.0,8,2.0 -46,410500,54614,0,0,0,1,0,1,0,0,1,0,0,0,,0,0.0,8,5.0 -47,406337,96377,0,0,1,0,0,0,0,0,0,0,1,1,2.0,2,1.0,2,1.0 -48,489105,173326,0,0,1,1,0,0,0,0,0,0,0,1,10.0,10,1.0,8,0.8 -49,459629,48247,0,0,1,0,1,0,0,1,0,0,0,0,10.0,10,1.0,4,0.4 -50,364108,174482,0,0,1,1,0,0,0,1,0,0,0,0,4.0,4,1.0,6,1.5 -51,259108,89491,0,1,1,0,0,0,1,0,0,0,0,0,2.0,2,1.0,4,2.0 -52,200179,105429,0,1,0,0,1,0,0,1,0,0,0,0,4.0,4,1.0,2,0.5 -53,236409,108492,0,0,1,0,0,0,0,0,1,1,0,0,4.0,4,1.0,14,3.5 -54,422377,81914,1,0,0,0,0,0,1,0,1,0,0,0,6.0,6,1.0,10,1.67 -55,388247,16187,0,0,1,0,0,0,0,0,0,1,0,1,2.0,2,1.0,8,4.0 -56,309257,67266,0,0,1,0,1,0,0,0,0,0,1,0,2.0,2,1.0,10,5.0 -57,203384,55275,1,0,0,0,0,0,1,1,0,0,0,0,10.0,10,1.0,6,0.6 -58,329780,83254,0,0,1,0,0,0,1,0,0,0,0,1,,0,0.0,8,5.0 -59,401574,129489,0,0,1,0,0,1,0,1,0,0,0,0,2.0,2,1.0,10,5.0 -60,271891,43058,0,0,0,0,0,1,0,1,0,0,0,1,12.0,12,1.0,16,1.33 -61,256791,187570,1,0,1,0,0,0,0,1,0,0,0,0,10.0,10,1.0,10,1.0 -62,419351,21516,0,0,0,0,0,0,0,0,1,1,1,0,8.0,8,1.0,4,0.5 -63,235396,92464,0,0,1,0,0,0,1,0,0,0,1,0,8.0,8,1.0,8,1.0 
-64,246214,150863,0,0,0,1,0,0,0,1,1,0,0,0,6.0,6,1.0,10,1.67 -65,485892,155450,0,0,0,1,0,1,0,0,0,0,1,0,2.0,2,1.0,2,1.0 -66,234185,79818,0,0,0,1,0,0,1,0,0,0,1,0,10.0,10,1.0,8,0.8 -67,472937,181198,0,0,0,0,0,0,1,1,0,1,0,0,6.0,6,1.0,4,0.67 -68,401695,121572,0,1,1,0,0,0,0,1,0,0,0,0,2.0,2,1.0,4,2.0 -69,492640,53616,0,1,0,0,0,0,1,1,0,0,0,0,6.0,6,1.0,2,0.33 -70,375454,119576,0,0,1,0,0,0,0,0,0,1,1,0,10.0,10,1.0,4,0.4 -71,354216,13768,0,1,0,0,0,0,0,1,0,1,0,0,2.0,2,1.0,2,1.0 -72,424256,97033,0,1,0,0,0,0,0,1,1,0,0,0,2.0,2,1.0,10,5.0 -73,345640,58444,0,0,1,0,0,0,1,0,0,0,1,0,10.0,10,1.0,2,0.2 -74,248236,120737,1,0,0,0,0,1,1,0,0,0,0,0,,0,0.0,12,5.0 -75,267707,123521,0,1,0,0,1,0,0,0,1,0,0,0,4.0,4,1.0,2,0.5 -76,470071,172019,0,0,0,0,0,1,1,1,0,0,0,0,2.0,2,1.0,2,1.0 -77,399068,90198,0,0,0,0,0,1,1,0,1,0,0,0,8.0,8,1.0,4,0.5 -78,308313,65073,0,0,1,0,0,0,0,0,1,0,0,1,4.0,4,1.0,6,1.5 -79,234246,149171,0,0,0,0,1,1,0,0,0,0,1,0,4.0,4,1.0,10,2.5 -80,267787,79665,1,1,0,0,0,0,1,0,0,0,0,0,4.0,4,1.0,4,1.0 -81,393826,22004,0,0,1,1,0,0,0,0,0,1,0,0,4.0,4,1.0,6,1.5 -82,229961,94945,0,0,0,0,0,0,0,1,1,1,0,0,8.0,8,1.0,4,0.5 -83,444105,167915,0,1,0,0,0,0,0,1,1,0,0,0,8.0,8,1.0,4,0.5 -84,382730,132551,0,0,0,1,1,0,0,1,0,0,0,0,4.0,4,1.0,6,1.5 -85,205497,46979,1,1,0,1,0,0,0,0,0,0,0,0,,0,0.0,6,5.0 -86,398628,185050,0,1,0,0,1,0,0,1,0,0,0,0,8.0,8,1.0,4,0.5 -87,378633,135979,0,1,0,1,1,0,0,0,0,0,0,0,4.0,4,1.0,2,0.5 -88,470318,148192,0,0,0,1,1,0,0,0,1,0,0,0,6.0,6,1.0,10,1.67 -89,238334,185320,0,0,0,0,0,0,0,1,0,0,1,1,12.0,12,1.0,6,0.5 -90,421438,26856,0,1,0,1,0,1,0,0,0,0,0,0,2.0,2,1.0,4,2.0 -91,486531,50526,0,0,1,0,1,0,0,1,0,0,0,0,4.0,4,1.0,6,1.5 -92,364554,82141,0,0,0,0,0,1,0,0,0,0,1,1,6.0,6,1.0,4,0.67 -93,345162,66823,0,0,0,0,1,1,0,0,0,0,0,1,2.0,2,1.0,12,6.0 -94,401582,85499,1,0,0,0,0,0,0,0,1,1,0,0,6.0,6,1.0,6,1.0 -95,231653,96497,0,0,0,1,1,0,1,0,0,0,0,0,10.0,10,1.0,4,0.4 -96,452322,29299,1,0,0,0,0,0,0,1,0,1,0,0,4.0,4,1.0,8,2.0 -97,246133,27202,1,0,0,0,0,0,1,0,1,0,0,0,4.0,4,1.0,2,0.5 
-98,330349,129763,0,0,1,0,1,0,0,0,1,0,0,0,8.0,8,1.0,2,0.25 -99,303372,93365,0,0,0,0,1,0,0,1,0,0,0,1,4.0,4,1.0,0,0.0 -100,437106,79353,1,0,0,1,0,0,0,1,0,0,0,0,4.0,4,1.0,6,1.5 -101,223500,140727,0,0,0,0,0,0,1,0,0,1,0,1,2.0,2,1.0,0,0.0 -102,354243,126610,0,0,0,0,0,0,0,0,1,0,1,1,12.0,12,1.0,8,0.67 -103,426530,12059,0,0,1,0,0,0,0,1,0,1,0,0,,0,0.0,6,5.0 -104,332202,132141,0,0,0,1,0,0,0,0,1,0,0,1,,0,0.0,10,5.0 -105,499973,37922,0,0,1,0,1,0,0,1,0,0,0,0,,0,0.0,6,5.0 -106,230682,64249,0,0,0,0,1,1,0,1,0,0,0,0,10.0,10,1.0,4,0.4 -107,467101,65313,0,0,0,0,1,1,0,0,1,0,0,0,8.0,8,1.0,0,0.0 -108,244275,169377,0,0,1,1,0,0,0,0,0,0,1,0,8.0,8,1.0,0,0.0 -109,258548,15342,0,0,0,1,0,1,0,1,0,0,0,0,10.0,10,1.0,12,1.2 -110,355408,97494,0,0,0,0,1,1,0,0,0,0,1,0,6.0,6,1.0,6,1.0 -111,490070,36258,0,0,1,0,0,0,1,0,0,0,0,1,,0,0.0,8,5.0 -112,263578,115332,0,1,0,0,0,0,1,0,0,1,0,0,6.0,6,1.0,10,1.67 -113,312245,127054,0,0,0,1,0,0,0,0,1,0,0,1,4.0,4,1.0,8,2.0 -114,327001,166239,0,0,1,1,1,0,0,0,0,0,0,0,4.0,4,1.0,2,0.5 -115,285426,113164,0,0,0,0,0,0,1,1,0,0,1,0,8.0,8,1.0,8,1.0 -116,294168,127969,0,1,1,0,0,0,0,0,0,0,0,1,4.0,4,1.0,4,1.0 -117,391158,176237,1,0,0,1,0,0,0,0,1,0,0,0,4.0,4,1.0,6,1.5 -118,407527,71949,0,0,0,0,0,1,0,0,1,1,0,0,4.0,4,1.0,0,0.0 -119,357736,67831,0,0,0,0,1,0,0,0,1,0,0,1,8.0,8,1.0,6,0.75 -120,404101,144428,1,0,0,1,0,1,0,0,0,0,0,0,4.0,4,1.0,6,1.5 -121,219612,41318,0,0,0,1,0,1,0,0,1,0,0,0,8.0,8,1.0,4,0.5 -122,318302,30258,0,0,1,0,0,1,0,0,0,1,0,0,4.0,4,1.0,8,2.0 -123,288533,44469,0,0,0,1,0,0,1,1,0,0,0,0,10.0,10,1.0,10,1.0 -124,207023,75965,1,0,0,1,0,1,0,0,0,0,0,0,2.0,2,1.0,2,1.0 -125,429278,87887,1,1,0,0,0,0,0,0,0,1,0,0,4.0,4,1.0,2,0.5 -126,409992,121431,1,0,1,0,0,0,0,1,0,0,0,0,8.0,8,1.0,2,0.25 -127,451142,137504,0,0,0,1,0,1,0,0,1,0,0,0,10.0,10,1.0,12,1.2 -128,375827,105166,0,1,1,0,0,0,1,0,0,0,0,0,,0,0.0,10,5.0 -129,495343,120288,0,1,1,0,0,1,0,0,0,0,0,0,6.0,6,1.0,6,1.0 -130,328338,20029,0,0,0,0,1,0,0,0,0,1,1,0,6.0,6,1.0,6,1.0 -131,299643,168222,0,0,0,0,0,0,1,0,0,1,1,0,6.0,6,1.0,4,0.67 
-132,366391,27774,0,0,1,0,0,0,1,0,0,0,0,1,8.0,8,1.0,8,1.0 -133,433053,53942,0,0,1,0,0,0,1,0,0,0,0,1,2.0,2,1.0,10,5.0 -134,275331,94983,0,0,0,0,0,0,1,0,1,0,0,1,2.0,2,1.0,8,4.0 -135,379968,11980,0,0,0,0,1,0,0,0,0,1,1,0,4.0,4,1.0,6,1.5 -136,349473,160707,0,0,0,0,1,0,1,0,0,0,1,0,4.0,4,1.0,8,2.0 -137,464334,84683,0,0,0,0,0,0,1,0,0,1,0,1,6.0,6,1.0,10,1.67 -138,359174,18420,0,0,0,0,1,1,0,0,0,0,1,0,2.0,2,1.0,2,1.0 -139,479372,50259,0,0,1,0,0,0,0,0,1,1,0,0,4.0,4,1.0,4,1.0 -140,276422,164299,0,0,0,0,1,0,0,0,0,0,1,1,8.0,8,1.0,10,1.25 -141,268704,176375,1,0,0,0,0,0,0,1,0,0,0,1,8.0,8,1.0,12,1.5 -142,321766,196059,1,0,0,0,0,1,1,0,0,0,0,0,6.0,6,1.0,6,1.0 -143,244480,182013,0,0,1,0,1,0,0,0,1,0,0,0,2.0,2,1.0,4,2.0 -144,491347,45546,0,0,1,1,0,0,0,0,0,1,0,0,4.0,4,1.0,6,1.5 -145,479164,31723,0,0,1,0,0,0,0,0,1,0,0,1,4.0,4,1.0,6,1.5 -146,450496,180807,0,0,0,0,0,1,0,0,0,1,1,0,4.0,4,1.0,2,0.5 -147,473203,178811,0,0,0,0,1,0,0,1,0,1,0,0,8.0,8,1.0,12,1.5 -148,379234,139834,0,1,1,0,0,0,0,0,0,0,0,1,2.0,2,1.0,2,1.0 -149,463183,100745,1,0,0,0,0,1,0,0,0,0,1,0,6.0,6,1.0,10,1.67 -150,268605,39040,0,1,0,0,1,0,0,0,0,0,0,1,2.0,2,1.0,4,2.0 -151,201272,29406,0,1,0,0,1,1,0,0,0,0,0,0,2.0,2,1.0,6,3.0 -152,260646,123073,0,0,0,1,0,1,0,0,0,0,1,0,8.0,8,1.0,6,0.75 -153,432589,168024,0,0,0,1,1,0,0,0,0,0,0,1,6.0,6,1.0,8,1.33 -154,441903,167510,1,0,0,0,0,0,0,0,1,0,1,0,2.0,2,1.0,12,6.0 -155,407162,40023,1,0,0,0,0,0,0,0,0,0,1,1,8.0,8,1.0,10,1.25 -156,277219,159091,0,0,1,0,1,0,1,0,0,0,0,0,2.0,2,1.0,14,7.0 -157,434645,48399,0,0,0,0,0,0,0,1,0,1,1,0,8.0,8,1.0,6,0.75 -158,330608,144690,0,0,1,0,1,0,0,0,0,1,0,0,2.0,2,1.0,2,1.0 -159,301470,90552,0,0,0,0,0,0,0,1,1,0,1,0,2.0,2,1.0,4,2.0 -160,231090,32498,0,1,0,0,1,0,0,0,1,0,0,0,6.0,6,1.0,10,1.67 -161,230042,159215,0,0,1,1,0,1,0,0,0,0,0,0,10.0,10,1.0,10,1.0 -162,335301,149189,0,0,0,1,0,1,0,0,0,0,1,0,2.0,2,1.0,4,2.0 -163,268901,85454,1,0,1,0,0,0,0,0,0,1,0,0,6.0,6,1.0,12,2.0 -164,431844,31263,0,0,1,0,1,0,1,0,0,0,0,0,,0,0.0,6,5.0 
-165,305315,66125,0,0,0,0,1,0,1,1,0,0,0,0,8.0,8,1.0,8,1.0 -166,461726,158451,1,0,1,0,0,1,0,0,0,0,0,0,8.0,8,1.0,2,0.25 -167,291481,19032,1,0,1,0,0,0,0,0,0,1,0,0,6.0,6,1.0,0,0.0 -168,345816,78571,1,1,0,1,0,0,0,0,0,0,0,0,4.0,4,1.0,6,1.5 -169,309393,198909,1,0,0,1,0,0,0,0,1,0,0,0,,0,0.0,8,5.0 -170,260505,94505,0,1,0,0,0,0,0,0,1,1,0,0,2.0,2,1.0,2,1.0 -171,278640,74242,1,0,0,0,1,0,1,0,0,0,0,0,2.0,2,1.0,4,2.0 -172,448128,59574,1,0,0,0,0,0,0,1,0,0,1,0,8.0,8,1.0,16,2.0 -173,431000,146506,0,0,0,1,0,0,1,1,0,0,0,0,4.0,4,1.0,2,0.5 -174,364180,61310,1,0,0,0,0,0,1,0,0,1,0,0,6.0,6,1.0,8,1.33 -175,356124,41448,0,0,1,0,0,1,0,0,0,1,0,0,2.0,2,1.0,6,3.0 -176,468564,162622,0,0,0,1,1,0,0,1,0,0,0,0,2.0,2,1.0,6,3.0 -177,220994,107493,1,0,1,0,0,0,0,1,0,0,0,0,12.0,12,1.0,6,0.5 -178,290759,196399,0,1,0,0,0,0,0,0,1,1,0,0,,0,0.0,6,5.0 -179,400526,193715,1,0,1,0,1,0,0,0,0,0,0,0,2.0,2,1.0,16,8.0 -180,413880,79311,0,1,0,1,1,0,0,0,0,0,0,0,6.0,6,1.0,2,0.33 -181,321989,11008,1,0,0,0,0,0,1,0,0,0,1,0,10.0,10,1.0,4,0.4 -182,345544,170913,0,0,0,1,1,0,0,0,1,0,0,0,8.0,8,1.0,4,0.5 -183,221962,183937,1,1,0,0,0,0,0,0,1,0,0,0,6.0,6,1.0,6,1.0 -184,311028,161327,1,0,0,0,0,1,0,1,0,0,0,0,10.0,10,1.0,10,1.0 -185,248252,183519,1,0,1,0,0,0,0,0,0,0,1,0,2.0,2,1.0,2,1.0 -186,451106,122923,0,0,0,0,0,0,1,0,1,0,1,0,2.0,2,1.0,10,5.0 -187,405380,147113,1,0,0,0,0,0,0,0,1,0,1,0,12.0,12,1.0,2,0.17 -188,203839,28523,0,1,0,0,0,0,0,0,1,0,0,1,6.0,6,1.0,4,0.67 -189,333806,76186,0,0,1,0,0,1,1,0,0,0,0,0,4.0,4,1.0,4,1.0 -190,432627,92873,0,0,0,0,1,0,0,0,1,0,0,1,6.0,6,1.0,18,3.0 -191,215347,95798,1,0,0,1,0,0,0,0,0,0,1,0,4.0,4,1.0,6,1.5 -192,404671,118625,0,0,0,1,0,1,0,0,0,1,0,0,2.0,2,1.0,12,6.0 -193,363349,82141,1,0,0,0,1,0,1,0,0,0,0,0,6.0,6,1.0,2,0.33 -194,278162,176609,0,1,1,0,0,1,0,0,0,0,0,0,2.0,2,1.0,10,5.0 -195,384349,89445,0,0,1,0,0,0,0,1,0,0,1,0,6.0,6,1.0,0,0.0 -196,422863,69355,0,0,0,0,0,0,1,0,1,0,1,0,4.0,4,1.0,12,3.0 -197,203902,149506,1,0,0,0,0,1,0,0,0,1,0,0,2.0,2,1.0,14,7.0 
-198,474910,51413,0,0,1,0,0,0,1,0,1,0,0,0,,0,0.0,12,5.0 -199,476345,79957,1,0,0,0,0,0,0,0,1,0,1,0,6.0,6,1.0,12,2.0 -200,220967,15234,0,0,0,0,1,1,0,0,0,0,1,0,4.0,4,1.0,12,3.0 -201,355305,72795,0,1,0,0,0,0,1,0,1,0,0,0,6.0,6,1.0,6,1.0 -202,374021,44532,0,1,0,1,0,0,0,1,0,0,0,0,6.0,6,1.0,4,0.67 -203,279504,51617,0,0,0,1,0,1,0,0,1,0,0,0,6.0,6,1.0,12,2.0 -204,443296,160288,0,1,1,0,1,0,0,0,0,0,0,0,2.0,2,1.0,4,2.0 -205,375556,180085,0,1,0,0,0,0,1,0,0,1,0,0,4.0,4,1.0,6,1.5 -206,251345,176851,0,0,0,1,0,0,1,0,0,0,0,1,4.0,4,1.0,6,1.5 -207,494189,172931,0,1,0,0,0,0,0,1,0,0,0,1,8.0,8,1.0,8,1.0 -208,380802,94459,0,0,0,1,1,0,0,0,0,0,0,1,6.0,6,1.0,8,1.33 -209,237485,15499,1,1,0,0,0,0,0,0,0,0,1,0,2.0,2,1.0,4,2.0 -210,389579,74857,0,0,0,1,0,0,0,0,1,0,0,1,4.0,4,1.0,4,1.0 -211,360769,92737,0,0,0,1,0,0,1,0,0,0,0,1,2.0,2,1.0,6,3.0 -212,433681,66872,0,1,1,0,0,0,0,0,0,0,0,1,10.0,10,1.0,2,0.2 -213,244503,153773,0,0,1,1,0,0,0,0,0,0,0,1,,0,0.0,4,5.0 -214,282906,138552,0,1,0,0,0,1,0,0,0,1,0,0,6.0,6,1.0,6,1.0 -215,309651,27015,0,0,1,1,1,0,0,0,0,0,0,0,4.0,4,1.0,2,0.5 -216,235826,35438,0,0,0,0,0,1,0,1,0,0,0,1,10.0,10,1.0,8,0.8 -217,348160,50165,0,1,0,1,0,0,0,0,0,0,1,0,4.0,4,1.0,10,2.5 -218,252461,37506,1,0,0,0,1,1,0,0,0,0,0,0,8.0,8,1.0,10,1.25 -219,425014,120768,0,1,0,0,1,0,1,0,0,0,0,0,12.0,12,1.0,4,0.33 -220,484657,54360,0,0,0,0,1,0,1,0,0,0,1,0,6.0,6,1.0,4,0.67 -221,357157,115437,0,0,0,0,0,0,1,1,1,0,0,0,12.0,12,1.0,2,0.17 -222,409313,88353,0,1,0,0,1,0,0,0,0,1,0,0,4.0,4,1.0,4,1.0 -223,295721,179055,1,0,0,0,0,0,0,0,0,1,1,0,10.0,10,1.0,8,0.8 -224,211633,26505,1,0,0,0,1,1,0,0,0,0,0,0,8.0,8,1.0,8,1.0 -225,301350,99891,0,0,0,0,1,0,0,0,1,1,0,0,6.0,6,1.0,8,1.33 -226,262495,70481,0,0,0,1,0,0,0,1,0,0,0,1,10.0,10,1.0,6,0.6 -227,210992,35595,0,0,0,0,0,1,0,0,1,0,1,0,4.0,4,1.0,4,1.0 -228,266644,79859,0,1,1,0,0,0,1,0,0,0,0,0,8.0,8,1.0,8,1.0 -229,279378,93433,1,0,1,0,0,1,0,0,0,0,0,0,2.0,2,1.0,10,5.0 -230,358083,49661,1,0,0,0,1,1,0,0,0,0,0,0,4.0,4,1.0,2,0.5 
-231,417867,196091,0,1,0,0,0,0,1,0,0,1,0,0,6.0,6,1.0,8,1.33 -232,307433,54841,1,0,1,0,0,0,0,0,0,0,0,1,8.0,8,1.0,6,0.75 -233,266947,171934,0,0,0,0,1,0,1,0,0,0,0,1,2.0,2,1.0,12,6.0 -234,444798,121560,0,1,0,0,0,0,0,0,1,0,0,1,2.0,2,1.0,2,1.0 -235,457960,22034,0,0,0,1,0,0,0,0,1,0,1,0,6.0,6,1.0,2,0.33 -236,213562,185489,0,1,0,0,0,1,0,0,0,1,0,0,2.0,2,1.0,4,2.0 -237,292293,188625,1,0,0,0,0,0,1,1,0,0,0,0,4.0,4,1.0,2,0.5 -238,425475,63185,1,0,0,0,0,1,0,0,1,0,0,0,2.0,2,1.0,6,3.0 -239,299594,160250,0,0,0,0,0,0,1,0,1,1,0,0,6.0,6,1.0,2,0.33 -240,499985,97669,0,1,0,0,1,0,1,0,0,0,0,0,6.0,6,1.0,6,1.0 -241,467802,71172,0,0,0,0,1,0,0,0,1,1,0,0,4.0,4,1.0,2,0.5 -242,294060,122809,1,0,1,0,0,0,1,0,0,0,0,0,10.0,10,1.0,2,0.2 -243,295196,97857,0,0,0,0,1,1,1,0,0,0,0,0,4.0,4,1.0,4,1.0 -244,341689,162736,0,0,0,1,0,1,0,0,1,0,0,0,6.0,6,1.0,6,1.0 -245,446327,42277,0,0,0,1,0,0,1,1,0,0,0,0,4.0,4,1.0,4,1.0 -246,362016,85719,0,0,0,1,0,0,0,0,0,1,0,1,4.0,4,1.0,8,2.0 -247,292418,17470,0,1,0,0,0,0,0,1,0,0,0,1,8.0,8,1.0,8,1.0 -248,402010,28515,0,1,0,1,0,0,0,0,0,0,0,1,,0,0.0,8,5.0 -249,297665,147497,0,1,0,0,0,0,1,0,0,0,0,1,6.0,6,1.0,0,0.0 -250,344575,169672,0,1,0,0,0,1,1,0,0,0,0,0,4.0,4,1.0,4,1.0 -251,494766,34199,0,0,0,1,0,0,0,1,0,0,0,1,8.0,8,1.0,4,0.5 -252,455465,99429,0,0,0,1,0,1,0,0,0,1,0,0,8.0,8,1.0,2,0.25 -253,468129,19544,0,0,0,0,0,0,1,0,1,0,1,0,2.0,2,1.0,2,1.0 -254,285542,168275,0,0,1,0,0,0,1,0,1,0,0,0,6.0,6,1.0,4,0.67 -255,401087,92771,0,0,0,0,0,0,1,0,0,1,0,1,6.0,6,1.0,8,1.33 -256,300457,182506,0,0,0,1,0,0,0,1,0,0,1,0,,0,0.0,2,5.0 -257,225116,127431,1,0,0,0,1,0,0,1,0,0,0,0,6.0,6,1.0,8,1.33 -258,341865,89645,0,1,0,1,0,1,0,0,0,0,0,0,,0,0.0,6,5.0 -259,329782,138901,0,0,0,0,0,0,0,0,1,0,1,1,,0,0.0,8,5.0 -260,260576,199473,0,0,0,0,0,1,1,0,0,0,1,0,6.0,6,1.0,6,1.0 -261,455435,17795,1,0,0,1,0,0,0,0,0,1,0,0,2.0,2,1.0,4,2.0 -262,332828,107041,1,0,0,0,0,0,0,0,0,1,0,1,8.0,8,1.0,4,0.5 -263,312057,35945,0,0,0,0,0,1,1,0,1,0,0,0,4.0,4,1.0,4,1.0 -264,470692,92096,1,1,0,0,0,0,0,1,0,0,0,0,8.0,8,1.0,6,0.75 
-265,230964,80257,0,1,0,0,0,1,0,0,0,1,0,0,10.0,10,1.0,6,0.6 -266,423501,78904,0,0,0,1,1,0,0,0,0,1,0,0,10.0,10,1.0,4,0.4 -267,373241,33338,0,0,1,0,1,0,0,1,0,0,0,0,6.0,6,1.0,2,0.33 -268,476092,101934,1,0,1,0,0,0,0,0,0,0,0,1,6.0,6,1.0,2,0.33 -269,494248,186835,0,0,1,0,0,0,0,1,0,1,0,0,6.0,6,1.0,6,1.0 -270,426884,194172,0,0,0,0,0,0,0,1,0,1,0,1,,0,0.0,2,5.0 -271,202738,87671,0,0,0,0,0,0,1,0,1,0,1,0,4.0,4,1.0,6,1.5 -272,393215,36585,1,0,0,0,0,0,0,1,0,0,1,0,8.0,8,1.0,6,0.75 -273,227536,64387,0,1,0,0,0,0,0,1,0,0,1,0,12.0,12,1.0,0,0.0 -274,408448,106251,1,0,0,1,0,0,0,0,1,0,0,0,4.0,4,1.0,0,0.0 -275,397164,56350,0,0,0,1,0,0,0,0,1,1,0,0,,0,0.0,8,5.0 -276,401375,190889,0,1,0,0,0,0,0,1,1,0,0,0,6.0,6,1.0,4,0.67 -277,243472,52240,0,0,0,0,0,0,1,0,0,1,1,0,8.0,8,1.0,8,1.0 -278,390850,94391,0,0,0,0,0,1,0,1,0,0,0,1,2.0,2,1.0,8,4.0 -279,486146,63673,0,0,0,1,0,1,0,0,0,0,1,0,,0,0.0,2,5.0 -280,316122,137715,0,0,1,0,0,0,0,0,0,1,1,0,6.0,6,1.0,6,1.0 -281,417758,81087,1,1,0,0,1,0,0,0,0,0,0,0,6.0,6,1.0,6,1.0 -282,200066,52917,0,0,0,0,0,0,0,1,0,1,1,0,10.0,10,1.0,8,0.8 -283,466157,77291,0,0,1,0,0,0,0,0,1,0,0,1,2.0,2,1.0,6,3.0 -284,311745,117818,0,0,0,0,0,1,0,1,0,0,0,1,6.0,6,1.0,2,0.33 -285,388810,106468,0,1,0,0,0,0,1,0,0,0,1,0,2.0,2,1.0,6,3.0 -286,295784,96831,0,1,0,0,0,0,1,0,0,0,0,1,2.0,2,1.0,4,2.0 -287,239957,80359,0,0,0,1,0,0,1,0,1,0,0,0,6.0,6,1.0,4,0.67 -288,259571,96779,0,0,0,1,0,0,1,0,0,0,1,0,,0,0.0,8,5.0 -289,470592,57685,1,1,0,0,0,0,0,1,0,0,0,0,4.0,4,1.0,0,0.0 -290,360940,92218,0,1,0,0,0,1,0,1,0,0,0,0,4.0,4,1.0,16,4.0 -291,459336,78436,1,0,0,0,0,0,0,0,0,1,1,0,4.0,4,1.0,4,1.0 -292,296446,82822,1,0,0,0,1,1,0,0,0,0,0,0,6.0,6,1.0,0,0.0 -293,285626,67263,0,1,0,0,1,0,0,0,0,0,1,0,,0,0.0,4,5.0 -294,464099,170208,0,0,0,1,0,0,0,1,0,1,0,0,4.0,4,1.0,6,1.5 -295,263105,164140,1,0,0,0,1,0,0,1,0,0,0,0,6.0,6,1.0,4,0.67 -296,302474,167473,0,1,0,0,0,0,0,0,0,0,1,1,2.0,2,1.0,4,2.0 -297,372056,174677,0,1,1,0,0,0,0,0,0,0,0,1,6.0,6,1.0,4,0.67 -298,352449,52010,0,0,0,1,0,1,0,1,0,0,0,0,6.0,6,1.0,4,0.67 
-299,419186,171727,0,0,0,0,1,1,0,0,0,0,1,0,10.0,10,1.0,2,0.2 -300,336617,82806,1,1,0,0,0,0,0,0,1,0,0,0,4.0,4,1.0,10,2.5 -301,400878,153139,0,0,1,0,1,0,0,0,0,0,0,1,4.0,4,1.0,4,1.0 -302,376532,58763,0,0,0,1,0,0,0,0,0,0,1,1,4.0,4,1.0,12,3.0 -303,215966,114816,1,0,0,0,0,0,0,0,1,0,0,1,4.0,4,1.0,4,1.0 -304,298739,78236,0,0,0,0,0,1,0,1,0,0,1,0,2.0,2,1.0,10,5.0 -305,324152,168504,0,1,0,0,1,0,0,0,1,0,0,0,6.0,6,1.0,4,0.67 -306,225832,114959,0,0,0,0,1,0,0,0,0,0,1,1,2.0,2,1.0,8,4.0 -307,218247,97153,0,0,1,1,0,0,0,0,0,0,1,0,2.0,2,1.0,4,2.0 -308,442559,186433,0,0,1,1,0,0,0,1,0,0,0,0,4.0,4,1.0,8,2.0 -309,400541,29388,0,0,0,0,1,1,0,1,0,0,0,0,4.0,4,1.0,6,1.5 -310,322029,114911,1,1,0,0,0,0,0,0,0,0,0,1,4.0,4,1.0,0,0.0 -311,233292,162186,0,1,0,0,0,0,0,0,0,1,0,1,2.0,2,1.0,2,1.0 -312,313899,35635,0,1,0,0,0,1,1,0,0,0,0,0,10.0,10,1.0,6,0.6 -313,403970,11492,1,0,0,0,0,0,0,1,0,0,1,0,6.0,6,1.0,0,0.0 -314,292531,147251,0,1,0,0,0,0,0,1,0,0,0,1,8.0,8,1.0,16,2.0 -315,499030,65865,0,1,0,0,1,0,0,0,0,0,1,0,2.0,2,1.0,8,4.0 -316,345372,25792,0,0,0,0,1,0,0,0,0,0,1,1,8.0,8,1.0,6,0.75 -317,279212,121179,0,1,1,1,0,0,0,0,0,0,0,0,8.0,8,1.0,8,1.0 -318,434460,169288,0,0,0,0,0,0,0,0,1,1,0,1,4.0,4,1.0,2,0.5 -319,461196,108275,1,0,0,0,0,0,1,0,0,0,1,0,,0,0.0,6,5.0 -320,214434,163733,1,0,0,0,0,0,0,1,1,0,0,0,8.0,8,1.0,2,0.25 -321,380690,119490,0,0,0,1,0,1,1,0,0,0,0,0,2.0,2,1.0,10,5.0 -322,207430,146046,1,0,1,0,0,0,0,1,0,0,0,0,6.0,6,1.0,4,0.67 -323,291430,197852,0,1,0,0,0,0,1,0,0,0,1,0,2.0,2,1.0,0,0.0 -324,281509,145993,0,0,0,0,0,0,1,0,0,1,1,0,4.0,4,1.0,10,2.5 -325,450296,105489,0,0,0,1,0,0,1,0,0,0,1,0,8.0,8,1.0,4,0.5 -326,304679,130659,1,0,0,1,0,0,0,1,0,0,0,0,10.0,10,1.0,8,0.8 -327,383569,138762,0,1,0,0,0,0,0,0,1,0,1,0,4.0,4,1.0,0,0.0 -328,353518,60651,0,0,1,0,1,0,0,0,0,1,0,0,6.0,6,1.0,10,1.67 -329,274717,155240,0,0,0,1,0,0,0,1,0,0,1,0,,0,0.0,8,5.0 -330,384499,155236,0,0,0,0,0,0,0,1,1,0,0,1,6.0,6,1.0,4,0.67 -331,465116,153987,0,0,0,0,0,0,0,0,1,1,1,0,4.0,4,1.0,4,1.0 
-332,422611,142253,0,0,0,1,0,0,0,1,0,0,1,0,2.0,2,1.0,6,3.0 -333,273142,46673,1,0,1,0,0,0,1,0,0,0,0,0,6.0,6,1.0,6,1.0 -334,276436,53260,0,1,0,1,0,0,0,0,0,1,0,0,10.0,10,1.0,14,1.4 -335,202608,38139,1,0,1,0,1,0,0,0,0,0,0,0,6.0,6,1.0,8,1.33 -336,243929,179707,0,0,0,0,0,0,1,1,1,0,0,0,2.0,2,1.0,6,3.0 -337,258763,141289,1,0,1,0,0,0,0,0,0,0,0,1,4.0,4,1.0,2,0.5 -338,322834,58273,0,1,0,0,0,0,1,0,0,1,0,0,8.0,8,1.0,8,1.0 -339,211616,151350,0,1,0,0,0,0,1,0,0,1,0,0,8.0,8,1.0,4,0.5 -340,417342,43433,0,0,1,0,0,0,0,1,0,1,0,0,2.0,2,1.0,10,5.0 -341,333428,53017,0,0,0,0,0,1,0,0,0,1,0,1,6.0,6,1.0,12,2.0 -342,453483,112882,0,0,0,1,1,0,0,0,0,0,0,1,6.0,6,1.0,8,1.33 -343,266412,108033,1,1,0,0,0,0,0,1,0,0,0,0,,0,0.0,6,5.0 -344,408051,140955,0,0,0,0,1,0,1,0,0,0,1,0,6.0,6,1.0,2,0.33 -345,456345,94717,0,0,0,0,0,0,1,0,0,0,1,1,10.0,10,1.0,14,1.4 -346,457822,28648,1,0,1,0,0,1,0,0,0,0,0,0,6.0,6,1.0,10,1.67 -347,255709,92725,0,0,0,0,0,0,1,0,1,0,1,0,4.0,4,1.0,4,1.0 -348,451104,192438,1,0,0,0,0,1,0,1,0,0,0,0,4.0,4,1.0,10,2.5 -349,420645,79899,1,0,0,0,0,0,1,0,0,0,0,1,10.0,10,1.0,6,0.6 -350,392445,125863,0,1,0,0,0,0,0,1,0,0,1,0,6.0,6,1.0,6,1.0 -351,207515,78961,0,1,0,0,1,0,0,0,1,0,0,0,4.0,4,1.0,4,1.0 -352,317523,53884,0,1,0,0,0,0,1,0,0,0,1,0,2.0,2,1.0,4,2.0 -353,444435,197545,0,0,0,1,0,0,0,1,1,0,0,0,14.0,14,1.0,6,0.43 -354,316633,42407,0,0,0,0,0,0,1,0,1,0,1,0,14.0,14,1.0,2,0.14 -355,258931,173204,0,0,0,0,1,0,0,1,0,0,1,0,,0,0.0,2,5.0 -356,372692,190010,0,1,0,1,0,0,0,1,0,0,0,0,2.0,2,1.0,4,2.0 -357,343398,37837,0,0,1,1,0,0,0,0,0,0,1,0,2.0,2,1.0,6,3.0 -358,281317,59623,1,0,0,1,0,0,0,1,0,0,0,0,6.0,6,1.0,6,1.0 -359,332501,26790,0,0,0,1,0,1,0,0,1,0,0,0,8.0,8,1.0,4,0.5 -360,384262,158042,1,0,0,0,0,0,0,1,0,1,0,0,6.0,6,1.0,6,1.0 -361,428215,93593,1,0,1,0,0,0,0,0,1,0,0,0,6.0,6,1.0,14,2.33 -362,249481,163969,1,0,0,1,1,0,0,0,0,0,0,0,6.0,6,1.0,10,1.67 -363,414483,50161,0,1,0,0,0,0,0,1,0,0,0,1,8.0,8,1.0,4,0.5 -364,464670,118855,1,0,0,0,0,0,0,0,0,0,1,1,4.0,4,1.0,6,1.5 
-365,474311,172110,1,0,0,0,0,0,1,0,0,0,0,1,2.0,2,1.0,0,0.0 -366,348959,114042,0,0,0,0,0,0,0,0,1,1,1,0,6.0,6,1.0,8,1.33 -367,234548,101587,0,0,0,0,0,0,0,1,0,1,0,1,4.0,4,1.0,10,2.5 -368,219163,174151,1,0,0,0,1,0,1,0,0,0,0,0,4.0,4,1.0,0,0.0 -369,302195,65234,0,1,1,1,0,0,0,0,0,0,0,0,2.0,2,1.0,10,5.0 -370,422472,147660,0,0,0,0,0,1,0,0,1,0,0,1,8.0,8,1.0,8,1.0 -371,227784,146399,0,0,0,0,1,1,0,1,0,0,0,0,2.0,2,1.0,6,3.0 -372,498482,171040,0,0,0,1,0,1,1,0,0,0,0,0,8.0,8,1.0,4,0.5 -373,373709,25938,0,0,0,1,0,0,0,1,1,0,0,0,8.0,8,1.0,6,0.75 -374,470459,35826,1,0,1,0,0,1,0,0,0,0,0,0,8.0,8,1.0,12,1.5 -375,354110,120657,0,0,0,0,0,0,1,1,0,0,0,1,4.0,4,1.0,10,2.5 -376,206024,117159,0,0,0,1,0,0,1,0,1,0,0,0,10.0,10,1.0,0,0.0 -377,358946,83301,0,0,0,0,0,0,1,1,1,0,0,0,4.0,4,1.0,4,1.0 -378,308803,59281,1,1,0,0,0,0,0,0,1,0,0,0,,0,0.0,6,5.0 -379,474517,31884,0,0,0,0,0,0,1,0,0,1,1,0,2.0,2,1.0,8,4.0 -380,242390,142871,0,0,0,1,0,0,1,0,0,0,1,0,4.0,4,1.0,0,0.0 -381,340548,187457,0,0,0,1,1,0,0,0,1,0,0,0,6.0,6,1.0,2,0.33 -382,299177,177984,0,1,0,0,0,0,0,0,1,0,1,0,6.0,6,1.0,6,1.0 -383,270532,82671,1,1,1,0,0,0,0,0,0,0,0,0,8.0,8,1.0,6,0.75 -384,268992,88647,0,0,0,0,0,0,0,1,1,1,0,0,6.0,6,1.0,6,1.0 -385,259627,71976,0,1,0,0,0,0,0,1,0,0,1,0,2.0,2,1.0,6,3.0 -386,266860,39850,1,1,0,0,0,0,1,0,0,0,0,0,12.0,12,1.0,8,0.67 -387,202576,112778,0,1,0,0,0,1,0,1,0,0,0,0,6.0,6,1.0,4,0.67 -388,289778,148181,0,0,0,1,1,0,0,0,1,0,0,0,6.0,6,1.0,6,1.0 -389,318503,98397,1,1,0,0,0,0,0,0,0,1,0,0,12.0,12,1.0,4,0.33 -390,307917,126264,0,0,0,0,0,0,0,0,0,1,1,1,6.0,6,1.0,10,1.67 -391,369358,93056,0,0,0,0,1,0,0,0,1,0,0,1,6.0,6,1.0,4,0.67 -392,373343,59530,0,0,0,0,0,1,0,0,1,0,1,0,6.0,6,1.0,4,0.67 -393,278415,191858,0,0,0,0,0,0,1,0,1,0,0,1,8.0,8,1.0,8,1.0 -394,254809,78233,0,1,0,0,0,0,0,0,0,0,1,1,8.0,8,1.0,6,0.75 -395,358742,41600,0,0,0,1,0,1,0,0,0,1,0,0,8.0,8,1.0,14,1.75 -396,435593,136937,0,1,1,0,0,0,0,0,0,0,0,1,8.0,8,1.0,8,1.0 -397,444664,79308,0,1,0,0,1,0,0,1,0,0,0,0,2.0,2,1.0,8,4.0 
-398,257040,189799,0,0,0,0,1,0,0,0,0,1,0,1,4.0,4,1.0,6,1.5 -399,471971,107481,0,1,0,0,1,1,0,0,0,0,0,0,8.0,8,1.0,4,0.5 -400,285534,69440,0,1,0,1,0,0,1,0,0,0,0,0,4.0,4,1.0,8,2.0 -401,399355,28851,0,1,0,1,0,1,0,0,0,0,0,0,2.0,2,1.0,6,3.0 -402,267282,130354,0,0,0,0,0,0,0,0,1,0,1,1,4.0,4,1.0,10,2.5 -403,376411,90246,1,1,0,0,0,1,0,0,0,0,0,0,6.0,6,1.0,2,0.33 -404,417066,68397,0,0,1,0,0,0,0,0,0,1,0,1,8.0,8,1.0,6,0.75 -405,312499,50643,1,0,0,0,1,0,0,0,0,0,1,0,4.0,4,1.0,8,2.0 -406,329969,70706,0,1,0,0,0,0,0,1,0,0,1,0,8.0,8,1.0,8,1.0 -407,468220,70210,0,0,1,0,0,0,0,0,1,0,0,1,2.0,2,1.0,8,4.0 -408,414297,162311,1,1,0,0,1,0,0,0,0,0,0,0,8.0,8,1.0,8,1.0 -409,219953,187847,0,0,1,1,1,0,0,0,0,0,0,0,2.0,2,1.0,2,1.0 -410,413257,192908,0,1,0,0,1,0,0,0,0,0,1,0,4.0,4,1.0,2,0.5 -411,334666,192762,0,0,1,1,0,0,0,1,0,0,0,0,4.0,4,1.0,10,2.5 -412,222514,50456,0,0,0,0,0,0,0,1,1,0,1,0,8.0,8,1.0,0,0.0 -413,259408,123579,1,0,1,0,0,0,0,0,1,0,0,0,14.0,14,1.0,6,0.43 -414,319363,16753,0,1,0,1,0,0,0,0,0,0,0,1,4.0,4,1.0,8,2.0 -415,398450,95097,0,0,1,0,0,1,0,1,0,0,0,0,6.0,6,1.0,2,0.33 -416,388838,107138,0,0,1,0,1,0,0,1,0,0,0,0,2.0,2,1.0,2,1.0 -417,256776,19251,0,1,0,0,0,0,0,0,0,1,0,1,2.0,2,1.0,4,2.0 -418,383184,82298,0,0,0,0,0,1,1,1,0,0,0,0,4.0,4,1.0,6,1.5 -419,262198,77538,0,1,1,0,0,0,1,0,0,0,0,0,4.0,4,1.0,8,2.0 -420,276703,139996,1,1,0,0,1,0,0,0,0,0,0,0,8.0,8,1.0,2,0.25 -421,276962,38350,1,0,0,0,0,1,0,0,0,0,1,0,14.0,14,1.0,4,0.29 -422,461187,140548,0,0,0,1,0,0,0,1,0,1,0,0,12.0,12,1.0,10,0.83 -423,480325,45818,0,1,1,0,0,0,0,0,0,0,0,1,8.0,8,1.0,10,1.25 -424,394427,83641,0,0,1,0,0,0,0,1,0,0,1,0,2.0,2,1.0,8,4.0 -425,435603,99934,0,0,1,0,1,0,0,0,0,1,0,0,2.0,2,1.0,6,3.0 -426,279346,67709,0,0,0,1,0,0,1,0,0,0,0,1,,0,0.0,12,5.0 -427,306633,93222,0,0,1,0,0,0,0,0,1,0,1,0,8.0,8,1.0,6,0.75 -428,483980,14466,1,0,0,0,0,0,0,1,0,0,0,1,4.0,4,1.0,4,1.0 -429,214878,173055,1,0,1,0,0,0,0,0,1,0,0,0,4.0,4,1.0,6,1.5 -430,263423,160431,0,0,0,1,0,0,1,0,1,0,0,0,10.0,10,1.0,6,0.6 
-431,396807,194270,0,0,0,0,1,1,0,0,0,0,0,1,8.0,8,1.0,2,0.25 -432,468006,139244,0,0,0,0,1,0,0,1,0,0,1,0,4.0,4,1.0,8,2.0 -433,386017,54184,0,0,0,1,0,0,1,0,0,0,1,0,8.0,8,1.0,6,0.75 -434,237391,115716,0,0,1,1,0,0,0,1,0,0,0,0,12.0,12,1.0,6,0.5 -435,415828,29926,0,0,0,0,0,0,0,1,0,1,1,0,4.0,4,1.0,4,1.0 -436,208006,56098,0,0,0,1,0,0,0,1,1,0,0,0,6.0,6,1.0,4,0.67 -437,282798,92380,0,0,0,0,0,0,1,1,0,0,1,0,10.0,10,1.0,12,1.2 -438,322382,139215,0,0,0,0,0,0,1,1,1,0,0,0,4.0,4,1.0,12,3.0 -439,495656,24595,0,0,0,0,1,0,0,0,0,1,0,1,6.0,6,1.0,8,1.33 -440,487736,156629,0,0,0,1,0,1,0,1,0,0,0,0,6.0,6,1.0,6,1.0 -441,348167,162775,0,1,0,0,0,0,0,0,1,1,0,0,8.0,8,1.0,2,0.25 -442,413534,160420,0,1,0,0,0,0,0,1,1,0,0,0,4.0,4,1.0,4,1.0 -443,292531,155651,1,0,0,0,0,1,1,0,0,0,0,0,4.0,4,1.0,4,1.0 -444,405225,64763,0,0,0,0,1,1,1,0,0,0,0,0,4.0,4,1.0,10,2.5 -445,388734,172721,1,0,1,0,0,0,0,0,1,0,0,0,8.0,8,1.0,10,1.25 -446,200028,69617,0,0,0,1,0,1,0,0,0,0,1,0,8.0,8,1.0,4,0.5 -447,368086,150465,0,0,1,0,0,0,0,0,0,1,0,1,8.0,8,1.0,10,1.25 -448,453598,88914,0,0,0,0,1,0,1,0,0,0,1,0,8.0,8,1.0,6,0.75 -449,490092,13516,0,0,1,0,1,0,0,0,0,1,0,0,6.0,6,1.0,2,0.33 -450,260490,195083,0,0,0,1,1,0,0,0,0,0,1,0,6.0,6,1.0,6,1.0 -451,455598,27274,1,0,1,0,0,0,0,1,0,0,0,0,10.0,10,1.0,4,0.4 -452,494976,161982,0,0,0,1,0,0,0,0,0,1,0,1,4.0,4,1.0,8,2.0 -453,396066,28764,0,1,1,0,0,0,0,0,0,0,1,0,8.0,8,1.0,8,1.0 -454,455579,88243,0,1,0,0,1,0,1,0,0,0,0,0,,0,0.0,8,5.0 -455,409085,70502,0,0,0,0,1,0,1,0,0,1,0,0,4.0,4,1.0,8,2.0 -456,371029,72053,0,0,0,1,1,1,0,0,0,0,0,0,4.0,4,1.0,2,0.5 -457,295670,196249,1,0,1,0,0,0,1,0,0,0,0,0,10.0,10,1.0,2,0.2 -458,361928,150430,1,0,0,0,1,0,0,0,0,0,0,1,2.0,2,1.0,6,3.0 -459,451024,173729,1,0,0,0,0,0,0,0,1,0,0,1,4.0,4,1.0,2,0.5 -460,490748,98179,0,0,0,0,0,1,0,0,1,0,0,1,6.0,6,1.0,4,0.67 -461,491945,30336,0,0,0,0,0,1,0,0,1,1,0,0,6.0,6,1.0,8,1.33 -462,241640,70420,0,1,0,0,1,0,0,0,0,0,0,1,2.0,2,1.0,8,4.0 -463,223055,35627,0,0,1,0,0,0,1,1,0,0,0,0,2.0,2,1.0,4,2.0 
-464,467233,172942,0,0,0,0,0,1,0,0,1,1,0,0,4.0,4,1.0,4,1.0 -465,209404,90757,0,0,0,0,0,1,0,0,0,1,0,1,2.0,2,1.0,6,3.0 -466,235052,63309,0,0,0,1,0,0,1,0,0,0,0,1,8.0,8,1.0,10,1.25 -467,352043,122542,0,0,0,0,0,1,0,1,0,0,1,0,,0,0.0,2,5.0 -468,464655,71188,0,0,1,0,1,0,0,0,0,0,1,0,,0,0.0,4,5.0 -469,452949,99119,0,0,0,1,0,1,0,0,1,0,0,0,6.0,6,1.0,6,1.0 -470,426276,83280,0,1,1,0,0,0,1,0,0,0,0,0,4.0,4,1.0,10,2.5 -471,216417,72177,1,0,1,0,1,0,0,0,0,0,0,0,8.0,8,1.0,6,0.75 -472,475969,71538,0,1,0,1,0,0,0,0,0,0,0,1,8.0,8,1.0,8,1.0 -473,470315,16907,0,1,0,0,1,0,1,0,0,0,0,0,2.0,2,1.0,8,4.0 -474,278020,110948,0,0,0,1,0,1,0,0,0,0,1,0,14.0,14,1.0,8,0.57 -475,440036,30121,0,0,0,0,0,1,0,0,1,0,0,1,2.0,2,1.0,4,2.0 -476,314142,30943,0,1,0,0,0,0,1,0,0,1,0,0,2.0,2,1.0,0,0.0 -477,386746,20699,0,0,0,0,1,1,0,0,1,0,0,0,4.0,4,1.0,4,1.0 -478,261137,14474,1,0,0,0,0,0,0,0,0,1,1,0,4.0,4,1.0,0,0.0 -479,331372,39433,0,1,0,1,0,0,1,0,0,0,0,0,,0,0.0,2,5.0 -480,405773,32680,0,0,0,0,0,0,0,1,1,1,0,0,4.0,4,1.0,4,1.0 -481,429580,132313,0,1,0,0,1,0,0,0,0,0,1,0,2.0,2,1.0,2,1.0 -482,356059,157741,0,0,0,0,1,0,0,0,1,0,0,1,,0,0.0,4,5.0 -483,271684,55449,0,0,0,0,1,1,0,0,0,1,0,0,10.0,10,1.0,16,1.6 -484,347716,95631,0,0,0,0,1,1,1,0,0,0,0,0,2.0,2,1.0,6,3.0 -485,411171,62933,0,0,0,1,0,0,0,1,0,0,0,1,,0,0.0,8,5.0 -486,428977,75683,0,0,0,0,1,0,0,0,1,1,0,0,4.0,4,1.0,14,3.5 -487,466398,187686,0,0,1,0,0,0,0,1,0,0,0,1,8.0,8,1.0,6,0.75 -488,270081,33315,0,0,0,0,0,0,0,0,1,0,1,1,10.0,10,1.0,6,0.6 -489,485029,51880,1,0,0,1,0,0,0,0,0,0,0,1,8.0,8,1.0,6,0.75 -490,261276,108198,1,0,0,0,0,0,0,0,1,0,1,0,2.0,2,1.0,10,5.0 -491,407979,92566,1,0,0,0,0,0,1,0,0,0,0,1,8.0,8,1.0,4,0.5 -492,243348,173882,0,0,0,1,0,1,0,0,0,0,0,1,6.0,6,1.0,6,1.0 -493,411586,174035,0,0,0,0,1,0,0,1,0,0,0,1,6.0,6,1.0,8,1.33 -494,297625,144010,1,0,0,1,0,0,1,0,0,0,0,0,8.0,8,1.0,6,0.75 -495,402601,92174,0,0,0,0,0,0,1,1,0,1,0,0,2.0,2,1.0,0,0.0 -496,266498,69504,0,0,0,1,0,1,1,0,0,0,0,0,8.0,8,1.0,4,0.5 -497,303917,41341,0,0,0,0,1,1,0,0,0,0,1,0,4.0,4,1.0,2,0.5 
-498,205307,26146,1,0,0,0,0,1,1,0,0,0,0,0,6.0,6,1.0,6,1.0 -499,471466,124691,1,0,1,0,0,0,0,1,0,0,0,0,10.0,10,1.0,6,0.6 -500,363805,196676,0,1,0,0,0,0,0,1,1,0,0,0,4.0,4,1.0,4,1.0 -501,328308,62536,0,0,1,1,0,0,0,1,0,0,0,0,,0,0.0,2,5.0 -502,267523,85770,0,0,1,0,0,0,1,0,0,0,1,0,8.0,8,1.0,4,0.5 -503,332979,84112,1,0,1,0,0,0,0,0,0,0,1,0,2.0,2,1.0,0,0.0 -504,309464,180538,0,0,0,0,1,1,0,0,0,1,0,0,2.0,2,1.0,8,4.0 -505,488292,185347,1,0,0,0,0,0,0,1,0,1,0,0,2.0,2,1.0,4,2.0 -506,361471,165007,0,0,0,0,0,0,1,1,0,0,0,1,6.0,6,1.0,4,0.67 -507,208687,105248,0,0,1,0,0,0,1,1,0,0,0,0,8.0,8,1.0,4,0.5 -508,232684,161404,0,1,0,0,0,1,0,1,0,0,0,0,2.0,2,1.0,6,3.0 -509,442554,16172,0,0,0,0,1,0,0,0,0,0,1,1,4.0,4,1.0,12,3.0 -510,204657,170199,0,0,1,0,0,0,0,0,0,0,1,1,2.0,2,1.0,4,2.0 -511,239389,70377,0,0,1,0,1,0,0,0,1,0,0,0,10.0,10,1.0,6,0.6 -512,218008,101701,1,0,0,0,1,0,0,1,0,0,0,0,4.0,4,1.0,10,2.5 -513,383759,194651,0,0,1,0,0,0,0,0,1,0,1,0,2.0,2,1.0,6,3.0 -514,306213,61447,0,0,0,1,1,0,1,0,0,0,0,0,4.0,4,1.0,2,0.5 -515,312123,48279,0,0,1,0,1,0,1,0,0,0,0,0,2.0,2,1.0,4,2.0 -516,361275,116248,0,0,0,1,0,0,0,0,0,0,1,1,,0,0.0,6,5.0 -517,388396,90908,0,1,0,0,0,0,0,1,0,1,0,0,2.0,2,1.0,6,3.0 -518,288128,133729,0,0,0,0,1,1,0,0,0,0,1,0,4.0,4,1.0,4,1.0 -519,425896,76334,0,0,0,1,0,0,1,0,0,1,0,0,2.0,2,1.0,12,6.0 -520,227571,49234,0,0,1,0,0,0,1,0,0,1,0,0,4.0,4,1.0,8,2.0 -521,479020,137397,0,0,0,0,1,1,0,0,1,0,0,0,2.0,2,1.0,4,2.0 -522,204399,43095,0,0,1,0,0,0,0,1,0,0,0,1,8.0,8,1.0,10,1.25 -523,338325,154358,0,0,0,1,1,0,0,1,0,0,0,0,8.0,8,1.0,4,0.5 -524,336982,132921,0,0,1,1,0,0,0,0,1,0,0,0,14.0,14,1.0,8,0.57 -525,341459,91124,0,0,1,0,0,0,0,0,0,1,0,1,8.0,8,1.0,0,0.0 -526,406510,63478,0,0,0,1,0,0,0,0,1,0,0,1,4.0,4,1.0,4,1.0 -527,332700,64958,0,1,0,0,0,1,0,0,0,0,1,0,6.0,6,1.0,2,0.33 -528,254230,86953,0,0,1,0,0,0,1,1,0,0,0,0,,0,0.0,6,5.0 -529,216574,171859,0,0,0,1,0,0,0,0,0,1,1,0,4.0,4,1.0,6,1.5 -530,482990,41795,0,0,0,1,0,1,0,0,0,0,0,1,10.0,10,1.0,6,0.6 +1,286141,183646,0,1,1,0,0,0,0,0,0,0,1,0,16.0,16,1.0,12,0.75 
+2,444065,170838,0,0,0,0,0,1,1,0,0,1,0,0,8.0,8,1.0,12,1.5 +3,395779,79971,0,0,1,1,0,0,1,0,0,0,0,0,4.0,4,1.0,8,2.0 +4,200419,82759,1,0,0,0,0,0,1,0,0,0,1,0,16.0,16,1.0,12,0.75 +5,341399,147700,0,0,1,0,0,1,0,0,1,0,0,0,4.0,4,1.0,8,2.0 +6,304138,159625,0,1,1,0,1,0,0,0,0,0,0,0,20.0,20,1.0,0,0.0 +7,338378,151460,1,0,0,0,0,1,0,0,0,0,0,1,4.0,4,1.0,24,6.0 +8,376584,66889,0,0,0,0,0,0,1,1,0,1,0,0,8.0,8,1.0,4,0.5 +9,393014,91158,0,0,0,1,0,0,0,0,1,0,1,0,4.0,4,1.0,8,2.0 +10,267487,170163,0,0,1,1,0,0,0,1,0,0,0,0,,0,0.0,16,5.0 +11,470524,29163,1,0,0,0,1,0,0,0,0,0,1,0,12.0,12,1.0,8,0.67 +12,446869,142652,0,1,0,0,0,0,0,1,0,0,1,0,12.0,12,1.0,8,0.67 +13,351840,185816,0,1,1,0,0,0,0,0,0,0,0,1,16.0,16,1.0,12,0.75 +14,332509,38329,0,0,0,0,0,0,1,1,1,0,0,0,4.0,4,1.0,12,3.0 +15,266941,59156,0,1,0,0,0,0,1,0,0,0,0,1,24.0,24,1.0,20,0.83 +16,337197,194701,1,1,0,0,0,0,0,0,1,0,0,0,4.0,4,1.0,16,4.0 +17,481858,85248,0,0,1,1,0,0,0,0,0,1,0,0,4.0,4,1.0,12,3.0 +18,282926,36257,1,0,0,0,1,0,0,0,0,0,1,0,4.0,4,1.0,20,5.0 +19,327603,197901,1,0,0,0,0,0,0,0,1,0,0,1,16.0,16,1.0,8,0.5 +20,292092,166283,0,1,0,0,0,0,0,1,0,1,0,0,20.0,20,1.0,16,0.8 +21,353557,28934,0,0,1,0,0,0,0,1,0,0,0,1,4.0,4,1.0,4,1.0 +22,319083,167451,0,1,1,0,0,0,0,0,0,1,0,0,12.0,12,1.0,16,1.33 +23,295629,72379,0,0,1,0,0,0,0,1,0,0,0,1,4.0,4,1.0,16,4.0 +24,279131,95305,0,0,0,1,0,0,0,0,0,1,0,1,8.0,8,1.0,32,4.0 +25,233947,106565,0,0,1,0,0,1,0,0,0,0,0,1,12.0,12,1.0,8,0.67 +26,288981,180902,1,0,0,0,0,1,0,0,0,1,0,0,4.0,4,1.0,12,3.0 +27,254815,187129,1,1,0,0,0,0,0,0,1,0,0,0,4.0,4,1.0,16,4.0 +28,403490,169312,0,0,0,1,0,0,1,0,0,0,0,1,20.0,20,1.0,52,2.6 +29,403053,151914,0,0,0,0,0,0,1,0,1,0,0,1,8.0,8,1.0,12,1.5 +30,227881,133174,0,0,0,1,0,1,0,0,0,0,1,0,12.0,12,1.0,8,0.67 +31,207906,88090,0,0,1,0,0,0,0,0,1,1,0,0,4.0,4,1.0,20,5.0 +32,270292,97183,0,1,1,0,0,0,0,0,0,1,0,0,8.0,8,1.0,12,1.5 +33,403876,89280,0,0,0,0,0,0,0,1,1,0,1,0,12.0,12,1.0,8,0.67 +34,312422,61746,0,0,1,1,0,0,0,0,0,1,0,0,12.0,12,1.0,16,1.33 
+35,369064,109942,0,0,0,0,0,1,0,0,0,1,1,0,12.0,12,1.0,16,1.33 +36,493736,110323,0,1,1,0,0,0,1,0,0,0,0,0,4.0,4,1.0,4,1.0 +37,469280,35055,1,0,0,1,0,0,0,0,0,0,0,1,4.0,4,1.0,8,2.0 +38,484502,61187,1,0,0,0,0,0,0,0,1,1,0,0,8.0,8,1.0,16,2.0 +39,311737,164628,0,0,0,1,0,0,0,0,0,1,1,0,4.0,4,1.0,24,6.0 +40,238578,199747,0,0,0,1,0,0,1,0,1,0,0,0,16.0,16,1.0,0,0.0 +41,492507,124228,0,0,0,1,0,1,1,0,0,0,0,0,28.0,28,1.0,8,0.29 +42,274639,87199,0,1,0,1,0,0,0,0,0,0,0,1,12.0,12,1.0,16,1.33 +43,380147,35046,0,0,0,1,0,0,1,0,0,0,1,0,8.0,8,1.0,16,2.0 +44,422839,141179,0,0,0,0,0,0,0,1,1,0,1,0,8.0,8,1.0,8,1.0 +45,473654,131248,0,0,0,1,0,1,1,0,0,0,0,0,8.0,8,1.0,16,2.0 +46,410500,54614,0,0,0,1,0,1,0,0,1,0,0,0,,0,0.0,16,5.0 +47,406337,96377,0,0,1,0,0,0,0,0,0,0,1,1,4.0,4,1.0,4,1.0 +48,489105,173326,0,0,1,1,0,0,0,0,0,0,0,1,20.0,20,1.0,16,0.8 +49,459629,48247,0,0,1,0,1,0,0,1,0,0,0,0,20.0,20,1.0,8,0.4 +50,364108,174482,0,0,1,1,0,0,0,1,0,0,0,0,8.0,8,1.0,12,1.5 +51,259108,89491,0,1,1,0,0,0,1,0,0,0,0,0,4.0,4,1.0,8,2.0 +52,200179,105429,0,1,0,0,1,0,0,1,0,0,0,0,8.0,8,1.0,4,0.5 +53,236409,108492,0,0,1,0,0,0,0,0,1,1,0,0,8.0,8,1.0,28,3.5 +54,422377,81914,1,0,0,0,0,0,1,0,1,0,0,0,12.0,12,1.0,20,1.67 +55,388247,16187,0,0,1,0,0,0,0,0,0,1,0,1,4.0,4,1.0,16,4.0 +56,309257,67266,0,0,1,0,1,0,0,0,0,0,1,0,4.0,4,1.0,20,5.0 +57,203384,55275,1,0,0,0,0,0,1,1,0,0,0,0,20.0,20,1.0,12,0.6 +58,329780,83254,0,0,1,0,0,0,1,0,0,0,0,1,,0,0.0,16,5.0 +59,401574,129489,0,0,1,0,0,1,0,1,0,0,0,0,4.0,4,1.0,20,5.0 +60,271891,43058,0,0,0,0,0,1,0,1,0,0,0,1,24.0,24,1.0,32,1.33 +61,256791,187570,1,0,1,0,0,0,0,1,0,0,0,0,20.0,20,1.0,20,1.0 +62,419351,21516,0,0,0,0,0,0,0,0,1,1,1,0,16.0,16,1.0,8,0.5 +63,235396,92464,0,0,1,0,0,0,1,0,0,0,1,0,16.0,16,1.0,16,1.0 +64,246214,150863,0,0,0,1,0,0,0,1,1,0,0,0,12.0,12,1.0,20,1.67 +65,485892,155450,0,0,0,1,0,1,0,0,0,0,1,0,4.0,4,1.0,4,1.0 +66,234185,79818,0,0,0,1,0,0,1,0,0,0,1,0,20.0,20,1.0,16,0.8 +67,472937,181198,0,0,0,0,0,0,1,1,0,1,0,0,12.0,12,1.0,8,0.67 
+68,401695,121572,0,1,1,0,0,0,0,1,0,0,0,0,4.0,4,1.0,8,2.0 +69,492640,53616,0,1,0,0,0,0,1,1,0,0,0,0,12.0,12,1.0,4,0.33 +70,375454,119576,0,0,1,0,0,0,0,0,0,1,1,0,20.0,20,1.0,8,0.4 +71,354216,13768,0,1,0,0,0,0,0,1,0,1,0,0,4.0,4,1.0,4,1.0 +72,424256,97033,0,1,0,0,0,0,0,1,1,0,0,0,4.0,4,1.0,20,5.0 +73,345640,58444,0,0,1,0,0,0,1,0,0,0,1,0,20.0,20,1.0,4,0.2 +74,248236,120737,1,0,0,0,0,1,1,0,0,0,0,0,,0,0.0,24,5.0 +75,267707,123521,0,1,0,0,1,0,0,0,1,0,0,0,8.0,8,1.0,4,0.5 +76,470071,172019,0,0,0,0,0,1,1,1,0,0,0,0,4.0,4,1.0,4,1.0 +77,399068,90198,0,0,0,0,0,1,1,0,1,0,0,0,16.0,16,1.0,8,0.5 +78,308313,65073,0,0,1,0,0,0,0,0,1,0,0,1,8.0,8,1.0,12,1.5 +79,234246,149171,0,0,0,0,1,1,0,0,0,0,1,0,8.0,8,1.0,20,2.5 +80,267787,79665,1,1,0,0,0,0,1,0,0,0,0,0,8.0,8,1.0,8,1.0 +81,393826,22004,0,0,1,1,0,0,0,0,0,1,0,0,8.0,8,1.0,12,1.5 +82,229961,94945,0,0,0,0,0,0,0,1,1,1,0,0,16.0,16,1.0,8,0.5 +83,444105,167915,0,1,0,0,0,0,0,1,1,0,0,0,16.0,16,1.0,8,0.5 +84,382730,132551,0,0,0,1,1,0,0,1,0,0,0,0,8.0,8,1.0,12,1.5 +85,205497,46979,1,1,0,1,0,0,0,0,0,0,0,0,,0,0.0,12,5.0 +86,398628,185050,0,1,0,0,1,0,0,1,0,0,0,0,16.0,16,1.0,8,0.5 +87,378633,135979,0,1,0,1,1,0,0,0,0,0,0,0,8.0,8,1.0,4,0.5 +88,470318,148192,0,0,0,1,1,0,0,0,1,0,0,0,12.0,12,1.0,20,1.67 +89,238334,185320,0,0,0,0,0,0,0,1,0,0,1,1,24.0,24,1.0,12,0.5 +90,421438,26856,0,1,0,1,0,1,0,0,0,0,0,0,4.0,4,1.0,8,2.0 +91,486531,50526,0,0,1,0,1,0,0,1,0,0,0,0,8.0,8,1.0,12,1.5 +92,364554,82141,0,0,0,0,0,1,0,0,0,0,1,1,12.0,12,1.0,8,0.67 +93,345162,66823,0,0,0,0,1,1,0,0,0,0,0,1,4.0,4,1.0,24,6.0 +94,401582,85499,1,0,0,0,0,0,0,0,1,1,0,0,12.0,12,1.0,12,1.0 +95,231653,96497,0,0,0,1,1,0,1,0,0,0,0,0,20.0,20,1.0,8,0.4 +96,452322,29299,1,0,0,0,0,0,0,1,0,1,0,0,8.0,8,1.0,16,2.0 +97,246133,27202,1,0,0,0,0,0,1,0,1,0,0,0,8.0,8,1.0,4,0.5 +98,330349,129763,0,0,1,0,1,0,0,0,1,0,0,0,16.0,16,1.0,4,0.25 +99,303372,93365,0,0,0,0,1,0,0,1,0,0,0,1,8.0,8,1.0,0,0.0 +100,437106,79353,1,0,0,1,0,0,0,1,0,0,0,0,8.0,8,1.0,12,1.5 +101,223500,140727,0,0,0,0,0,0,1,0,0,1,0,1,4.0,4,1.0,0,0.0 
+102,354243,126610,0,0,0,0,0,0,0,0,1,0,1,1,24.0,24,1.0,16,0.67 +103,426530,12059,0,0,1,0,0,0,0,1,0,1,0,0,,0,0.0,12,5.0 +104,332202,132141,0,0,0,1,0,0,0,0,1,0,0,1,,0,0.0,20,5.0 +105,499973,37922,0,0,1,0,1,0,0,1,0,0,0,0,,0,0.0,12,5.0 +106,230682,64249,0,0,0,0,1,1,0,1,0,0,0,0,20.0,20,1.0,8,0.4 +107,467101,65313,0,0,0,0,1,1,0,0,1,0,0,0,16.0,16,1.0,0,0.0 +108,244275,169377,0,0,1,1,0,0,0,0,0,0,1,0,16.0,16,1.0,0,0.0 +109,258548,15342,0,0,0,1,0,1,0,1,0,0,0,0,20.0,20,1.0,24,1.2 +110,355408,97494,0,0,0,0,1,1,0,0,0,0,1,0,12.0,12,1.0,12,1.0 +111,490070,36258,0,0,1,0,0,0,1,0,0,0,0,1,,0,0.0,16,5.0 +112,263578,115332,0,1,0,0,0,0,1,0,0,1,0,0,12.0,12,1.0,20,1.67 +113,312245,127054,0,0,0,1,0,0,0,0,1,0,0,1,8.0,8,1.0,16,2.0 +114,327001,166239,0,0,1,1,1,0,0,0,0,0,0,0,8.0,8,1.0,4,0.5 +115,285426,113164,0,0,0,0,0,0,1,1,0,0,1,0,16.0,16,1.0,16,1.0 +116,294168,127969,0,1,1,0,0,0,0,0,0,0,0,1,8.0,8,1.0,8,1.0 +117,391158,176237,1,0,0,1,0,0,0,0,1,0,0,0,8.0,8,1.0,12,1.5 +118,407527,71949,0,0,0,0,0,1,0,0,1,1,0,0,8.0,8,1.0,0,0.0 +119,357736,67831,0,0,0,0,1,0,0,0,1,0,0,1,16.0,16,1.0,12,0.75 +120,404101,144428,1,0,0,1,0,1,0,0,0,0,0,0,8.0,8,1.0,12,1.5 +121,219612,41318,0,0,0,1,0,1,0,0,1,0,0,0,16.0,16,1.0,8,0.5 +122,318302,30258,0,0,1,0,0,1,0,0,0,1,0,0,8.0,8,1.0,16,2.0 +123,288533,44469,0,0,0,1,0,0,1,1,0,0,0,0,20.0,20,1.0,20,1.0 +124,207023,75965,1,0,0,1,0,1,0,0,0,0,0,0,4.0,4,1.0,4,1.0 +125,429278,87887,1,1,0,0,0,0,0,0,0,1,0,0,8.0,8,1.0,4,0.5 +126,409992,121431,1,0,1,0,0,0,0,1,0,0,0,0,16.0,16,1.0,4,0.25 +127,451142,137504,0,0,0,1,0,1,0,0,1,0,0,0,20.0,20,1.0,24,1.2 +128,375827,105166,0,1,1,0,0,0,1,0,0,0,0,0,,0,0.0,20,5.0 +129,495343,120288,0,1,1,0,0,1,0,0,0,0,0,0,12.0,12,1.0,12,1.0 +130,328338,20029,0,0,0,0,1,0,0,0,0,1,1,0,12.0,12,1.0,12,1.0 +131,299643,168222,0,0,0,0,0,0,1,0,0,1,1,0,12.0,12,1.0,8,0.67 +132,366391,27774,0,0,1,0,0,0,1,0,0,0,0,1,16.0,16,1.0,16,1.0 +133,433053,53942,0,0,1,0,0,0,1,0,0,0,0,1,4.0,4,1.0,20,5.0 +134,275331,94983,0,0,0,0,0,0,1,0,1,0,0,1,4.0,4,1.0,16,4.0 
+135,379968,11980,0,0,0,0,1,0,0,0,0,1,1,0,8.0,8,1.0,12,1.5 +136,349473,160707,0,0,0,0,1,0,1,0,0,0,1,0,8.0,8,1.0,16,2.0 +137,464334,84683,0,0,0,0,0,0,1,0,0,1,0,1,12.0,12,1.0,20,1.67 +138,359174,18420,0,0,0,0,1,1,0,0,0,0,1,0,4.0,4,1.0,4,1.0 +139,479372,50259,0,0,1,0,0,0,0,0,1,1,0,0,8.0,8,1.0,8,1.0 +140,276422,164299,0,0,0,0,1,0,0,0,0,0,1,1,16.0,16,1.0,20,1.25 +141,268704,176375,1,0,0,0,0,0,0,1,0,0,0,1,16.0,16,1.0,24,1.5 +142,321766,196059,1,0,0,0,0,1,1,0,0,0,0,0,12.0,12,1.0,12,1.0 +143,244480,182013,0,0,1,0,1,0,0,0,1,0,0,0,4.0,4,1.0,8,2.0 +144,491347,45546,0,0,1,1,0,0,0,0,0,1,0,0,8.0,8,1.0,12,1.5 +145,479164,31723,0,0,1,0,0,0,0,0,1,0,0,1,8.0,8,1.0,12,1.5 +146,450496,180807,0,0,0,0,0,1,0,0,0,1,1,0,8.0,8,1.0,4,0.5 +147,473203,178811,0,0,0,0,1,0,0,1,0,1,0,0,16.0,16,1.0,24,1.5 +148,379234,139834,0,1,1,0,0,0,0,0,0,0,0,1,4.0,4,1.0,4,1.0 +149,463183,100745,1,0,0,0,0,1,0,0,0,0,1,0,12.0,12,1.0,20,1.67 +150,268605,39040,0,1,0,0,1,0,0,0,0,0,0,1,4.0,4,1.0,8,2.0 +151,201272,29406,0,1,0,0,1,1,0,0,0,0,0,0,4.0,4,1.0,12,3.0 +152,260646,123073,0,0,0,1,0,1,0,0,0,0,1,0,16.0,16,1.0,12,0.75 +153,432589,168024,0,0,0,1,1,0,0,0,0,0,0,1,12.0,12,1.0,16,1.33 +154,441903,167510,1,0,0,0,0,0,0,0,1,0,1,0,4.0,4,1.0,24,6.0 +155,407162,40023,1,0,0,0,0,0,0,0,0,0,1,1,16.0,16,1.0,20,1.25 +156,277219,159091,0,0,1,0,1,0,1,0,0,0,0,0,4.0,4,1.0,28,7.0 +157,434645,48399,0,0,0,0,0,0,0,1,0,1,1,0,16.0,16,1.0,12,0.75 +158,330608,144690,0,0,1,0,1,0,0,0,0,1,0,0,4.0,4,1.0,4,1.0 +159,301470,90552,0,0,0,0,0,0,0,1,1,0,1,0,4.0,4,1.0,8,2.0 +160,231090,32498,0,1,0,0,1,0,0,0,1,0,0,0,12.0,12,1.0,20,1.67 +161,230042,159215,0,0,1,1,0,1,0,0,0,0,0,0,20.0,20,1.0,20,1.0 +162,335301,149189,0,0,0,1,0,1,0,0,0,0,1,0,4.0,4,1.0,8,2.0 +163,268901,85454,1,0,1,0,0,0,0,0,0,1,0,0,12.0,12,1.0,24,2.0 +164,431844,31263,0,0,1,0,1,0,1,0,0,0,0,0,,0,0.0,12,5.0 +165,305315,66125,0,0,0,0,1,0,1,1,0,0,0,0,16.0,16,1.0,16,1.0 +166,461726,158451,1,0,1,0,0,1,0,0,0,0,0,0,16.0,16,1.0,4,0.25 +167,291481,19032,1,0,1,0,0,0,0,0,0,1,0,0,12.0,12,1.0,0,0.0 
+168,345816,78571,1,1,0,1,0,0,0,0,0,0,0,0,8.0,8,1.0,12,1.5 +169,309393,198909,1,0,0,1,0,0,0,0,1,0,0,0,,0,0.0,16,5.0 +170,260505,94505,0,1,0,0,0,0,0,0,1,1,0,0,4.0,4,1.0,4,1.0 +171,278640,74242,1,0,0,0,1,0,1,0,0,0,0,0,4.0,4,1.0,8,2.0 +172,448128,59574,1,0,0,0,0,0,0,1,0,0,1,0,16.0,16,1.0,32,2.0 +173,431000,146506,0,0,0,1,0,0,1,1,0,0,0,0,8.0,8,1.0,4,0.5 +174,364180,61310,1,0,0,0,0,0,1,0,0,1,0,0,12.0,12,1.0,16,1.33 +175,356124,41448,0,0,1,0,0,1,0,0,0,1,0,0,4.0,4,1.0,12,3.0 +176,468564,162622,0,0,0,1,1,0,0,1,0,0,0,0,4.0,4,1.0,12,3.0 +177,220994,107493,1,0,1,0,0,0,0,1,0,0,0,0,24.0,24,1.0,12,0.5 +178,290759,196399,0,1,0,0,0,0,0,0,1,1,0,0,,0,0.0,12,5.0 +179,400526,193715,1,0,1,0,1,0,0,0,0,0,0,0,4.0,4,1.0,32,8.0 +180,413880,79311,0,1,0,1,1,0,0,0,0,0,0,0,12.0,12,1.0,4,0.33 +181,321989,11008,1,0,0,0,0,0,1,0,0,0,1,0,20.0,20,1.0,8,0.4 +182,345544,170913,0,0,0,1,1,0,0,0,1,0,0,0,16.0,16,1.0,8,0.5 +183,221962,183937,1,1,0,0,0,0,0,0,1,0,0,0,12.0,12,1.0,12,1.0 +184,311028,161327,1,0,0,0,0,1,0,1,0,0,0,0,20.0,20,1.0,20,1.0 +185,248252,183519,1,0,1,0,0,0,0,0,0,0,1,0,4.0,4,1.0,4,1.0 +186,451106,122923,0,0,0,0,0,0,1,0,1,0,1,0,4.0,4,1.0,20,5.0 +187,405380,147113,1,0,0,0,0,0,0,0,1,0,1,0,24.0,24,1.0,4,0.17 +188,203839,28523,0,1,0,0,0,0,0,0,1,0,0,1,12.0,12,1.0,8,0.67 +189,333806,76186,0,0,1,0,0,1,1,0,0,0,0,0,8.0,8,1.0,8,1.0 +190,432627,92873,0,0,0,0,1,0,0,0,1,0,0,1,12.0,12,1.0,36,3.0 +191,215347,95798,1,0,0,1,0,0,0,0,0,0,1,0,8.0,8,1.0,12,1.5 +192,404671,118625,0,0,0,1,0,1,0,0,0,1,0,0,4.0,4,1.0,24,6.0 +193,363349,82141,1,0,0,0,1,0,1,0,0,0,0,0,12.0,12,1.0,4,0.33 +194,278162,176609,0,1,1,0,0,1,0,0,0,0,0,0,4.0,4,1.0,20,5.0 +195,384349,89445,0,0,1,0,0,0,0,1,0,0,1,0,12.0,12,1.0,0,0.0 +196,422863,69355,0,0,0,0,0,0,1,0,1,0,1,0,8.0,8,1.0,24,3.0 +197,203902,149506,1,0,0,0,0,1,0,0,0,1,0,0,4.0,4,1.0,28,7.0 +198,474910,51413,0,0,1,0,0,0,1,0,1,0,0,0,,0,0.0,24,5.0 +199,476345,79957,1,0,0,0,0,0,0,0,1,0,1,0,12.0,12,1.0,24,2.0 +200,220967,15234,0,0,0,0,1,1,0,0,0,0,1,0,8.0,8,1.0,24,3.0 
+201,355305,72795,0,1,0,0,0,0,1,0,1,0,0,0,12.0,12,1.0,12,1.0 +202,374021,44532,0,1,0,1,0,0,0,1,0,0,0,0,12.0,12,1.0,8,0.67 +203,279504,51617,0,0,0,1,0,1,0,0,1,0,0,0,12.0,12,1.0,24,2.0 +204,443296,160288,0,1,1,0,1,0,0,0,0,0,0,0,4.0,4,1.0,8,2.0 +205,375556,180085,0,1,0,0,0,0,1,0,0,1,0,0,8.0,8,1.0,12,1.5 +206,251345,176851,0,0,0,1,0,0,1,0,0,0,0,1,8.0,8,1.0,12,1.5 +207,494189,172931,0,1,0,0,0,0,0,1,0,0,0,1,16.0,16,1.0,16,1.0 +208,380802,94459,0,0,0,1,1,0,0,0,0,0,0,1,12.0,12,1.0,16,1.33 +209,237485,15499,1,1,0,0,0,0,0,0,0,0,1,0,4.0,4,1.0,8,2.0 +210,389579,74857,0,0,0,1,0,0,0,0,1,0,0,1,8.0,8,1.0,8,1.0 +211,360769,92737,0,0,0,1,0,0,1,0,0,0,0,1,4.0,4,1.0,12,3.0 +212,433681,66872,0,1,1,0,0,0,0,0,0,0,0,1,20.0,20,1.0,4,0.2 +213,244503,153773,0,0,1,1,0,0,0,0,0,0,0,1,,0,0.0,8,5.0 +214,282906,138552,0,1,0,0,0,1,0,0,0,1,0,0,12.0,12,1.0,12,1.0 +215,309651,27015,0,0,1,1,1,0,0,0,0,0,0,0,8.0,8,1.0,4,0.5 +216,235826,35438,0,0,0,0,0,1,0,1,0,0,0,1,20.0,20,1.0,16,0.8 +217,348160,50165,0,1,0,1,0,0,0,0,0,0,1,0,8.0,8,1.0,20,2.5 +218,252461,37506,1,0,0,0,1,1,0,0,0,0,0,0,16.0,16,1.0,20,1.25 +219,425014,120768,0,1,0,0,1,0,1,0,0,0,0,0,24.0,24,1.0,8,0.33 +220,484657,54360,0,0,0,0,1,0,1,0,0,0,1,0,12.0,12,1.0,8,0.67 +221,357157,115437,0,0,0,0,0,0,1,1,1,0,0,0,24.0,24,1.0,4,0.17 +222,409313,88353,0,1,0,0,1,0,0,0,0,1,0,0,8.0,8,1.0,8,1.0 +223,295721,179055,1,0,0,0,0,0,0,0,0,1,1,0,20.0,20,1.0,16,0.8 +224,211633,26505,1,0,0,0,1,1,0,0,0,0,0,0,16.0,16,1.0,16,1.0 +225,301350,99891,0,0,0,0,1,0,0,0,1,1,0,0,12.0,12,1.0,16,1.33 +226,262495,70481,0,0,0,1,0,0,0,1,0,0,0,1,20.0,20,1.0,12,0.6 +227,210992,35595,0,0,0,0,0,1,0,0,1,0,1,0,8.0,8,1.0,8,1.0 +228,266644,79859,0,1,1,0,0,0,1,0,0,0,0,0,16.0,16,1.0,16,1.0 +229,279378,93433,1,0,1,0,0,1,0,0,0,0,0,0,4.0,4,1.0,20,5.0 +230,358083,49661,1,0,0,0,1,1,0,0,0,0,0,0,8.0,8,1.0,4,0.5 +231,417867,196091,0,1,0,0,0,0,1,0,0,1,0,0,12.0,12,1.0,16,1.33 +232,307433,54841,1,0,1,0,0,0,0,0,0,0,0,1,16.0,16,1.0,12,0.75 +233,266947,171934,0,0,0,0,1,0,1,0,0,0,0,1,4.0,4,1.0,24,6.0 
+234,444798,121560,0,1,0,0,0,0,0,0,1,0,0,1,4.0,4,1.0,4,1.0 +235,457960,22034,0,0,0,1,0,0,0,0,1,0,1,0,12.0,12,1.0,4,0.33 +236,213562,185489,0,1,0,0,0,1,0,0,0,1,0,0,4.0,4,1.0,8,2.0 +237,292293,188625,1,0,0,0,0,0,1,1,0,0,0,0,8.0,8,1.0,4,0.5 +238,425475,63185,1,0,0,0,0,1,0,0,1,0,0,0,4.0,4,1.0,12,3.0 +239,299594,160250,0,0,0,0,0,0,1,0,1,1,0,0,12.0,12,1.0,4,0.33 +240,499985,97669,0,1,0,0,1,0,1,0,0,0,0,0,12.0,12,1.0,12,1.0 +241,467802,71172,0,0,0,0,1,0,0,0,1,1,0,0,8.0,8,1.0,4,0.5 +242,294060,122809,1,0,1,0,0,0,1,0,0,0,0,0,20.0,20,1.0,4,0.2 +243,295196,97857,0,0,0,0,1,1,1,0,0,0,0,0,8.0,8,1.0,8,1.0 +244,341689,162736,0,0,0,1,0,1,0,0,1,0,0,0,12.0,12,1.0,12,1.0 +245,446327,42277,0,0,0,1,0,0,1,1,0,0,0,0,8.0,8,1.0,8,1.0 +246,362016,85719,0,0,0,1,0,0,0,0,0,1,0,1,8.0,8,1.0,16,2.0 +247,292418,17470,0,1,0,0,0,0,0,1,0,0,0,1,16.0,16,1.0,16,1.0 +248,402010,28515,0,1,0,1,0,0,0,0,0,0,0,1,,0,0.0,16,5.0 +249,297665,147497,0,1,0,0,0,0,1,0,0,0,0,1,12.0,12,1.0,0,0.0 +250,344575,169672,0,1,0,0,0,1,1,0,0,0,0,0,8.0,8,1.0,8,1.0 +251,494766,34199,0,0,0,1,0,0,0,1,0,0,0,1,16.0,16,1.0,8,0.5 +252,455465,99429,0,0,0,1,0,1,0,0,0,1,0,0,16.0,16,1.0,4,0.25 +253,468129,19544,0,0,0,0,0,0,1,0,1,0,1,0,4.0,4,1.0,4,1.0 +254,285542,168275,0,0,1,0,0,0,1,0,1,0,0,0,12.0,12,1.0,8,0.67 +255,401087,92771,0,0,0,0,0,0,1,0,0,1,0,1,12.0,12,1.0,16,1.33 +256,300457,182506,0,0,0,1,0,0,0,1,0,0,1,0,,0,0.0,4,5.0 +257,225116,127431,1,0,0,0,1,0,0,1,0,0,0,0,12.0,12,1.0,16,1.33 +258,341865,89645,0,1,0,1,0,1,0,0,0,0,0,0,,0,0.0,12,5.0 +259,329782,138901,0,0,0,0,0,0,0,0,1,0,1,1,,0,0.0,16,5.0 +260,260576,199473,0,0,0,0,0,1,1,0,0,0,1,0,12.0,12,1.0,12,1.0 +261,455435,17795,1,0,0,1,0,0,0,0,0,1,0,0,4.0,4,1.0,8,2.0 +262,332828,107041,1,0,0,0,0,0,0,0,0,1,0,1,16.0,16,1.0,8,0.5 +263,312057,35945,0,0,0,0,0,1,1,0,1,0,0,0,8.0,8,1.0,8,1.0 +264,470692,92096,1,1,0,0,0,0,0,1,0,0,0,0,16.0,16,1.0,12,0.75 +265,230964,80257,0,1,0,0,0,1,0,0,0,1,0,0,20.0,20,1.0,12,0.6 +266,423501,78904,0,0,0,1,1,0,0,0,0,1,0,0,20.0,20,1.0,8,0.4 
+267,373241,33338,0,0,1,0,1,0,0,1,0,0,0,0,12.0,12,1.0,4,0.33 +268,476092,101934,1,0,1,0,0,0,0,0,0,0,0,1,12.0,12,1.0,4,0.33 +269,494248,186835,0,0,1,0,0,0,0,1,0,1,0,0,12.0,12,1.0,12,1.0 +270,426884,194172,0,0,0,0,0,0,0,1,0,1,0,1,,0,0.0,4,5.0 +271,202738,87671,0,0,0,0,0,0,1,0,1,0,1,0,8.0,8,1.0,12,1.5 +272,393215,36585,1,0,0,0,0,0,0,1,0,0,1,0,16.0,16,1.0,12,0.75 +273,227536,64387,0,1,0,0,0,0,0,1,0,0,1,0,24.0,24,1.0,0,0.0 +274,408448,106251,1,0,0,1,0,0,0,0,1,0,0,0,8.0,8,1.0,0,0.0 +275,397164,56350,0,0,0,1,0,0,0,0,1,1,0,0,,0,0.0,16,5.0 +276,401375,190889,0,1,0,0,0,0,0,1,1,0,0,0,12.0,12,1.0,8,0.67 +277,243472,52240,0,0,0,0,0,0,1,0,0,1,1,0,16.0,16,1.0,16,1.0 +278,390850,94391,0,0,0,0,0,1,0,1,0,0,0,1,4.0,4,1.0,16,4.0 +279,486146,63673,0,0,0,1,0,1,0,0,0,0,1,0,,0,0.0,4,5.0 +280,316122,137715,0,0,1,0,0,0,0,0,0,1,1,0,12.0,12,1.0,12,1.0 +281,417758,81087,1,1,0,0,1,0,0,0,0,0,0,0,12.0,12,1.0,12,1.0 +282,200066,52917,0,0,0,0,0,0,0,1,0,1,1,0,20.0,20,1.0,16,0.8 +283,466157,77291,0,0,1,0,0,0,0,0,1,0,0,1,4.0,4,1.0,12,3.0 +284,311745,117818,0,0,0,0,0,1,0,1,0,0,0,1,12.0,12,1.0,4,0.33 +285,388810,106468,0,1,0,0,0,0,1,0,0,0,1,0,4.0,4,1.0,12,3.0 +286,295784,96831,0,1,0,0,0,0,1,0,0,0,0,1,4.0,4,1.0,8,2.0 +287,239957,80359,0,0,0,1,0,0,1,0,1,0,0,0,12.0,12,1.0,8,0.67 +288,259571,96779,0,0,0,1,0,0,1,0,0,0,1,0,,0,0.0,16,5.0 +289,470592,57685,1,1,0,0,0,0,0,1,0,0,0,0,8.0,8,1.0,0,0.0 +290,360940,92218,0,1,0,0,0,1,0,1,0,0,0,0,8.0,8,1.0,32,4.0 +291,459336,78436,1,0,0,0,0,0,0,0,0,1,1,0,8.0,8,1.0,8,1.0 +292,296446,82822,1,0,0,0,1,1,0,0,0,0,0,0,12.0,12,1.0,0,0.0 +293,285626,67263,0,1,0,0,1,0,0,0,0,0,1,0,,0,0.0,8,5.0 +294,464099,170208,0,0,0,1,0,0,0,1,0,1,0,0,8.0,8,1.0,12,1.5 +295,263105,164140,1,0,0,0,1,0,0,1,0,0,0,0,12.0,12,1.0,8,0.67 +296,302474,167473,0,1,0,0,0,0,0,0,0,0,1,1,4.0,4,1.0,8,2.0 +297,372056,174677,0,1,1,0,0,0,0,0,0,0,0,1,12.0,12,1.0,8,0.67 +298,352449,52010,0,0,0,1,0,1,0,1,0,0,0,0,12.0,12,1.0,8,0.67 +299,419186,171727,0,0,0,0,1,1,0,0,0,0,1,0,20.0,20,1.0,4,0.2 
+300,336617,82806,1,1,0,0,0,0,0,0,1,0,0,0,8.0,8,1.0,20,2.5 +301,400878,153139,0,0,1,0,1,0,0,0,0,0,0,1,8.0,8,1.0,8,1.0 +302,376532,58763,0,0,0,1,0,0,0,0,0,0,1,1,8.0,8,1.0,24,3.0 +303,215966,114816,1,0,0,0,0,0,0,0,1,0,0,1,8.0,8,1.0,8,1.0 +304,298739,78236,0,0,0,0,0,1,0,1,0,0,1,0,4.0,4,1.0,20,5.0 +305,324152,168504,0,1,0,0,1,0,0,0,1,0,0,0,12.0,12,1.0,8,0.67 +306,225832,114959,0,0,0,0,1,0,0,0,0,0,1,1,4.0,4,1.0,16,4.0 +307,218247,97153,0,0,1,1,0,0,0,0,0,0,1,0,4.0,4,1.0,8,2.0 +308,442559,186433,0,0,1,1,0,0,0,1,0,0,0,0,8.0,8,1.0,16,2.0 +309,400541,29388,0,0,0,0,1,1,0,1,0,0,0,0,8.0,8,1.0,12,1.5 +310,322029,114911,1,1,0,0,0,0,0,0,0,0,0,1,8.0,8,1.0,0,0.0 +311,233292,162186,0,1,0,0,0,0,0,0,0,1,0,1,4.0,4,1.0,4,1.0 +312,313899,35635,0,1,0,0,0,1,1,0,0,0,0,0,20.0,20,1.0,12,0.6 +313,403970,11492,1,0,0,0,0,0,0,1,0,0,1,0,12.0,12,1.0,0,0.0 +314,292531,147251,0,1,0,0,0,0,0,1,0,0,0,1,16.0,16,1.0,32,2.0 +315,499030,65865,0,1,0,0,1,0,0,0,0,0,1,0,4.0,4,1.0,16,4.0 +316,345372,25792,0,0,0,0,1,0,0,0,0,0,1,1,16.0,16,1.0,12,0.75 +317,279212,121179,0,1,1,1,0,0,0,0,0,0,0,0,16.0,16,1.0,16,1.0 +318,434460,169288,0,0,0,0,0,0,0,0,1,1,0,1,8.0,8,1.0,4,0.5 +319,461196,108275,1,0,0,0,0,0,1,0,0,0,1,0,,0,0.0,12,5.0 +320,214434,163733,1,0,0,0,0,0,0,1,1,0,0,0,16.0,16,1.0,4,0.25 +321,380690,119490,0,0,0,1,0,1,1,0,0,0,0,0,4.0,4,1.0,20,5.0 +322,207430,146046,1,0,1,0,0,0,0,1,0,0,0,0,12.0,12,1.0,8,0.67 +323,291430,197852,0,1,0,0,0,0,1,0,0,0,1,0,4.0,4,1.0,0,0.0 +324,281509,145993,0,0,0,0,0,0,1,0,0,1,1,0,8.0,8,1.0,20,2.5 +325,450296,105489,0,0,0,1,0,0,1,0,0,0,1,0,16.0,16,1.0,8,0.5 +326,304679,130659,1,0,0,1,0,0,0,1,0,0,0,0,20.0,20,1.0,16,0.8 +327,383569,138762,0,1,0,0,0,0,0,0,1,0,1,0,8.0,8,1.0,0,0.0 +328,353518,60651,0,0,1,0,1,0,0,0,0,1,0,0,12.0,12,1.0,20,1.67 +329,274717,155240,0,0,0,1,0,0,0,1,0,0,1,0,,0,0.0,16,5.0 +330,384499,155236,0,0,0,0,0,0,0,1,1,0,0,1,12.0,12,1.0,8,0.67 +331,465116,153987,0,0,0,0,0,0,0,0,1,1,1,0,8.0,8,1.0,8,1.0 +332,422611,142253,0,0,0,1,0,0,0,1,0,0,1,0,4.0,4,1.0,12,3.0 
+333,273142,46673,1,0,1,0,0,0,1,0,0,0,0,0,12.0,12,1.0,12,1.0 +334,276436,53260,0,1,0,1,0,0,0,0,0,1,0,0,20.0,20,1.0,28,1.4 +335,202608,38139,1,0,1,0,1,0,0,0,0,0,0,0,12.0,12,1.0,16,1.33 +336,243929,179707,0,0,0,0,0,0,1,1,1,0,0,0,4.0,4,1.0,12,3.0 +337,258763,141289,1,0,1,0,0,0,0,0,0,0,0,1,8.0,8,1.0,4,0.5 +338,322834,58273,0,1,0,0,0,0,1,0,0,1,0,0,16.0,16,1.0,16,1.0 +339,211616,151350,0,1,0,0,0,0,1,0,0,1,0,0,16.0,16,1.0,8,0.5 +340,417342,43433,0,0,1,0,0,0,0,1,0,1,0,0,4.0,4,1.0,20,5.0 +341,333428,53017,0,0,0,0,0,1,0,0,0,1,0,1,12.0,12,1.0,24,2.0 +342,453483,112882,0,0,0,1,1,0,0,0,0,0,0,1,12.0,12,1.0,16,1.33 +343,266412,108033,1,1,0,0,0,0,0,1,0,0,0,0,,0,0.0,12,5.0 +344,408051,140955,0,0,0,0,1,0,1,0,0,0,1,0,12.0,12,1.0,4,0.33 +345,456345,94717,0,0,0,0,0,0,1,0,0,0,1,1,20.0,20,1.0,28,1.4 +346,457822,28648,1,0,1,0,0,1,0,0,0,0,0,0,12.0,12,1.0,20,1.67 +347,255709,92725,0,0,0,0,0,0,1,0,1,0,1,0,8.0,8,1.0,8,1.0 +348,451104,192438,1,0,0,0,0,1,0,1,0,0,0,0,8.0,8,1.0,20,2.5 +349,420645,79899,1,0,0,0,0,0,1,0,0,0,0,1,20.0,20,1.0,12,0.6 +350,392445,125863,0,1,0,0,0,0,0,1,0,0,1,0,12.0,12,1.0,12,1.0 +351,207515,78961,0,1,0,0,1,0,0,0,1,0,0,0,8.0,8,1.0,8,1.0 +352,317523,53884,0,1,0,0,0,0,1,0,0,0,1,0,4.0,4,1.0,8,2.0 +353,444435,197545,0,0,0,1,0,0,0,1,1,0,0,0,28.0,28,1.0,12,0.43 +354,316633,42407,0,0,0,0,0,0,1,0,1,0,1,0,28.0,28,1.0,4,0.14 +355,258931,173204,0,0,0,0,1,0,0,1,0,0,1,0,,0,0.0,4,5.0 +356,372692,190010,0,1,0,1,0,0,0,1,0,0,0,0,4.0,4,1.0,8,2.0 +357,343398,37837,0,0,1,1,0,0,0,0,0,0,1,0,4.0,4,1.0,12,3.0 +358,281317,59623,1,0,0,1,0,0,0,1,0,0,0,0,12.0,12,1.0,12,1.0 +359,332501,26790,0,0,0,1,0,1,0,0,1,0,0,0,16.0,16,1.0,8,0.5 +360,384262,158042,1,0,0,0,0,0,0,1,0,1,0,0,12.0,12,1.0,12,1.0 +361,428215,93593,1,0,1,0,0,0,0,0,1,0,0,0,12.0,12,1.0,28,2.33 +362,249481,163969,1,0,0,1,1,0,0,0,0,0,0,0,12.0,12,1.0,20,1.67 +363,414483,50161,0,1,0,0,0,0,0,1,0,0,0,1,16.0,16,1.0,8,0.5 +364,464670,118855,1,0,0,0,0,0,0,0,0,0,1,1,8.0,8,1.0,12,1.5 +365,474311,172110,1,0,0,0,0,0,1,0,0,0,0,1,4.0,4,1.0,0,0.0 
+366,348959,114042,0,0,0,0,0,0,0,0,1,1,1,0,12.0,12,1.0,16,1.33 +367,234548,101587,0,0,0,0,0,0,0,1,0,1,0,1,8.0,8,1.0,20,2.5 +368,219163,174151,1,0,0,0,1,0,1,0,0,0,0,0,8.0,8,1.0,0,0.0 +369,302195,65234,0,1,1,1,0,0,0,0,0,0,0,0,4.0,4,1.0,20,5.0 +370,422472,147660,0,0,0,0,0,1,0,0,1,0,0,1,16.0,16,1.0,16,1.0 +371,227784,146399,0,0,0,0,1,1,0,1,0,0,0,0,4.0,4,1.0,12,3.0 +372,498482,171040,0,0,0,1,0,1,1,0,0,0,0,0,16.0,16,1.0,8,0.5 +373,373709,25938,0,0,0,1,0,0,0,1,1,0,0,0,16.0,16,1.0,12,0.75 +374,470459,35826,1,0,1,0,0,1,0,0,0,0,0,0,16.0,16,1.0,24,1.5 +375,354110,120657,0,0,0,0,0,0,1,1,0,0,0,1,8.0,8,1.0,20,2.5 +376,206024,117159,0,0,0,1,0,0,1,0,1,0,0,0,20.0,20,1.0,0,0.0 +377,358946,83301,0,0,0,0,0,0,1,1,1,0,0,0,8.0,8,1.0,8,1.0 +378,308803,59281,1,1,0,0,0,0,0,0,1,0,0,0,,0,0.0,12,5.0 +379,474517,31884,0,0,0,0,0,0,1,0,0,1,1,0,4.0,4,1.0,16,4.0 +380,242390,142871,0,0,0,1,0,0,1,0,0,0,1,0,8.0,8,1.0,0,0.0 +381,340548,187457,0,0,0,1,1,0,0,0,1,0,0,0,12.0,12,1.0,4,0.33 +382,299177,177984,0,1,0,0,0,0,0,0,1,0,1,0,12.0,12,1.0,12,1.0 +383,270532,82671,1,1,1,0,0,0,0,0,0,0,0,0,16.0,16,1.0,12,0.75 +384,268992,88647,0,0,0,0,0,0,0,1,1,1,0,0,12.0,12,1.0,12,1.0 +385,259627,71976,0,1,0,0,0,0,0,1,0,0,1,0,4.0,4,1.0,12,3.0 +386,266860,39850,1,1,0,0,0,0,1,0,0,0,0,0,24.0,24,1.0,16,0.67 +387,202576,112778,0,1,0,0,0,1,0,1,0,0,0,0,12.0,12,1.0,8,0.67 +388,289778,148181,0,0,0,1,1,0,0,0,1,0,0,0,12.0,12,1.0,12,1.0 +389,318503,98397,1,1,0,0,0,0,0,0,0,1,0,0,24.0,24,1.0,8,0.33 +390,307917,126264,0,0,0,0,0,0,0,0,0,1,1,1,12.0,12,1.0,20,1.67 +391,369358,93056,0,0,0,0,1,0,0,0,1,0,0,1,12.0,12,1.0,8,0.67 +392,373343,59530,0,0,0,0,0,1,0,0,1,0,1,0,12.0,12,1.0,8,0.67 +393,278415,191858,0,0,0,0,0,0,1,0,1,0,0,1,16.0,16,1.0,16,1.0 +394,254809,78233,0,1,0,0,0,0,0,0,0,0,1,1,16.0,16,1.0,12,0.75 +395,358742,41600,0,0,0,1,0,1,0,0,0,1,0,0,16.0,16,1.0,28,1.75 +396,435593,136937,0,1,1,0,0,0,0,0,0,0,0,1,16.0,16,1.0,16,1.0 +397,444664,79308,0,1,0,0,1,0,0,1,0,0,0,0,4.0,4,1.0,16,4.0 
+398,257040,189799,0,0,0,0,1,0,0,0,0,1,0,1,8.0,8,1.0,12,1.5 +399,471971,107481,0,1,0,0,1,1,0,0,0,0,0,0,16.0,16,1.0,8,0.5 +400,285534,69440,0,1,0,1,0,0,1,0,0,0,0,0,8.0,8,1.0,16,2.0 +401,399355,28851,0,1,0,1,0,1,0,0,0,0,0,0,4.0,4,1.0,12,3.0 +402,267282,130354,0,0,0,0,0,0,0,0,1,0,1,1,8.0,8,1.0,20,2.5 +403,376411,90246,1,1,0,0,0,1,0,0,0,0,0,0,12.0,12,1.0,4,0.33 +404,417066,68397,0,0,1,0,0,0,0,0,0,1,0,1,16.0,16,1.0,12,0.75 +405,312499,50643,1,0,0,0,1,0,0,0,0,0,1,0,8.0,8,1.0,16,2.0 +406,329969,70706,0,1,0,0,0,0,0,1,0,0,1,0,16.0,16,1.0,16,1.0 +407,468220,70210,0,0,1,0,0,0,0,0,1,0,0,1,4.0,4,1.0,16,4.0 +408,414297,162311,1,1,0,0,1,0,0,0,0,0,0,0,16.0,16,1.0,16,1.0 +409,219953,187847,0,0,1,1,1,0,0,0,0,0,0,0,4.0,4,1.0,4,1.0 +410,413257,192908,0,1,0,0,1,0,0,0,0,0,1,0,8.0,8,1.0,4,0.5 +411,334666,192762,0,0,1,1,0,0,0,1,0,0,0,0,8.0,8,1.0,20,2.5 +412,222514,50456,0,0,0,0,0,0,0,1,1,0,1,0,16.0,16,1.0,0,0.0 +413,259408,123579,1,0,1,0,0,0,0,0,1,0,0,0,28.0,28,1.0,12,0.43 +414,319363,16753,0,1,0,1,0,0,0,0,0,0,0,1,8.0,8,1.0,16,2.0 +415,398450,95097,0,0,1,0,0,1,0,1,0,0,0,0,12.0,12,1.0,4,0.33 +416,388838,107138,0,0,1,0,1,0,0,1,0,0,0,0,4.0,4,1.0,4,1.0 +417,256776,19251,0,1,0,0,0,0,0,0,0,1,0,1,4.0,4,1.0,8,2.0 +418,383184,82298,0,0,0,0,0,1,1,1,0,0,0,0,8.0,8,1.0,12,1.5 +419,262198,77538,0,1,1,0,0,0,1,0,0,0,0,0,8.0,8,1.0,16,2.0 +420,276703,139996,1,1,0,0,1,0,0,0,0,0,0,0,16.0,16,1.0,4,0.25 +421,276962,38350,1,0,0,0,0,1,0,0,0,0,1,0,28.0,28,1.0,8,0.29 +422,461187,140548,0,0,0,1,0,0,0,1,0,1,0,0,24.0,24,1.0,20,0.83 +423,480325,45818,0,1,1,0,0,0,0,0,0,0,0,1,16.0,16,1.0,20,1.25 +424,394427,83641,0,0,1,0,0,0,0,1,0,0,1,0,4.0,4,1.0,16,4.0 +425,435603,99934,0,0,1,0,1,0,0,0,0,1,0,0,4.0,4,1.0,12,3.0 +426,279346,67709,0,0,0,1,0,0,1,0,0,0,0,1,,0,0.0,24,5.0 +427,306633,93222,0,0,1,0,0,0,0,0,1,0,1,0,16.0,16,1.0,12,0.75 +428,483980,14466,1,0,0,0,0,0,0,1,0,0,0,1,8.0,8,1.0,8,1.0 +429,214878,173055,1,0,1,0,0,0,0,0,1,0,0,0,8.0,8,1.0,12,1.5 +430,263423,160431,0,0,0,1,0,0,1,0,1,0,0,0,20.0,20,1.0,12,0.6 
+431,396807,194270,0,0,0,0,1,1,0,0,0,0,0,1,16.0,16,1.0,4,0.25 +432,468006,139244,0,0,0,0,1,0,0,1,0,0,1,0,8.0,8,1.0,16,2.0 +433,386017,54184,0,0,0,1,0,0,1,0,0,0,1,0,16.0,16,1.0,12,0.75 +434,237391,115716,0,0,1,1,0,0,0,1,0,0,0,0,24.0,24,1.0,12,0.5 +435,415828,29926,0,0,0,0,0,0,0,1,0,1,1,0,8.0,8,1.0,8,1.0 +436,208006,56098,0,0,0,1,0,0,0,1,1,0,0,0,12.0,12,1.0,8,0.67 +437,282798,92380,0,0,0,0,0,0,1,1,0,0,1,0,20.0,20,1.0,24,1.2 +438,322382,139215,0,0,0,0,0,0,1,1,1,0,0,0,8.0,8,1.0,24,3.0 +439,495656,24595,0,0,0,0,1,0,0,0,0,1,0,1,12.0,12,1.0,16,1.33 +440,487736,156629,0,0,0,1,0,1,0,1,0,0,0,0,12.0,12,1.0,12,1.0 +441,348167,162775,0,1,0,0,0,0,0,0,1,1,0,0,16.0,16,1.0,4,0.25 +442,413534,160420,0,1,0,0,0,0,0,1,1,0,0,0,8.0,8,1.0,8,1.0 +443,292531,155651,1,0,0,0,0,1,1,0,0,0,0,0,8.0,8,1.0,8,1.0 +444,405225,64763,0,0,0,0,1,1,1,0,0,0,0,0,8.0,8,1.0,20,2.5 +445,388734,172721,1,0,1,0,0,0,0,0,1,0,0,0,16.0,16,1.0,20,1.25 +446,200028,69617,0,0,0,1,0,1,0,0,0,0,1,0,16.0,16,1.0,8,0.5 +447,368086,150465,0,0,1,0,0,0,0,0,0,1,0,1,16.0,16,1.0,20,1.25 +448,453598,88914,0,0,0,0,1,0,1,0,0,0,1,0,16.0,16,1.0,12,0.75 +449,490092,13516,0,0,1,0,1,0,0,0,0,1,0,0,12.0,12,1.0,4,0.33 +450,260490,195083,0,0,0,1,1,0,0,0,0,0,1,0,12.0,12,1.0,12,1.0 +451,455598,27274,1,0,1,0,0,0,0,1,0,0,0,0,20.0,20,1.0,8,0.4 +452,494976,161982,0,0,0,1,0,0,0,0,0,1,0,1,8.0,8,1.0,16,2.0 +453,396066,28764,0,1,1,0,0,0,0,0,0,0,1,0,16.0,16,1.0,16,1.0 +454,455579,88243,0,1,0,0,1,0,1,0,0,0,0,0,,0,0.0,16,5.0 +455,409085,70502,0,0,0,0,1,0,1,0,0,1,0,0,8.0,8,1.0,16,2.0 +456,371029,72053,0,0,0,1,1,1,0,0,0,0,0,0,8.0,8,1.0,4,0.5 +457,295670,196249,1,0,1,0,0,0,1,0,0,0,0,0,20.0,20,1.0,4,0.2 +458,361928,150430,1,0,0,0,1,0,0,0,0,0,0,1,4.0,4,1.0,12,3.0 +459,451024,173729,1,0,0,0,0,0,0,0,1,0,0,1,8.0,8,1.0,4,0.5 +460,490748,98179,0,0,0,0,0,1,0,0,1,0,0,1,12.0,12,1.0,8,0.67 +461,491945,30336,0,0,0,0,0,1,0,0,1,1,0,0,12.0,12,1.0,16,1.33 +462,241640,70420,0,1,0,0,1,0,0,0,0,0,0,1,4.0,4,1.0,16,4.0 +463,223055,35627,0,0,1,0,0,0,1,1,0,0,0,0,4.0,4,1.0,8,2.0 
+464,467233,172942,0,0,0,0,0,1,0,0,1,1,0,0,8.0,8,1.0,8,1.0 +465,209404,90757,0,0,0,0,0,1,0,0,0,1,0,1,4.0,4,1.0,12,3.0 +466,235052,63309,0,0,0,1,0,0,1,0,0,0,0,1,16.0,16,1.0,20,1.25 +467,352043,122542,0,0,0,0,0,1,0,1,0,0,1,0,,0,0.0,4,5.0 +468,464655,71188,0,0,1,0,1,0,0,0,0,0,1,0,,0,0.0,8,5.0 +469,452949,99119,0,0,0,1,0,1,0,0,1,0,0,0,12.0,12,1.0,12,1.0 +470,426276,83280,0,1,1,0,0,0,1,0,0,0,0,0,8.0,8,1.0,20,2.5 +471,216417,72177,1,0,1,0,1,0,0,0,0,0,0,0,16.0,16,1.0,12,0.75 +472,475969,71538,0,1,0,1,0,0,0,0,0,0,0,1,16.0,16,1.0,16,1.0 +473,470315,16907,0,1,0,0,1,0,1,0,0,0,0,0,4.0,4,1.0,16,4.0 +474,278020,110948,0,0,0,1,0,1,0,0,0,0,1,0,28.0,28,1.0,16,0.57 +475,440036,30121,0,0,0,0,0,1,0,0,1,0,0,1,4.0,4,1.0,8,2.0 +476,314142,30943,0,1,0,0,0,0,1,0,0,1,0,0,4.0,4,1.0,0,0.0 +477,386746,20699,0,0,0,0,1,1,0,0,1,0,0,0,8.0,8,1.0,8,1.0 +478,261137,14474,1,0,0,0,0,0,0,0,0,1,1,0,8.0,8,1.0,0,0.0 +479,331372,39433,0,1,0,1,0,0,1,0,0,0,0,0,,0,0.0,4,5.0 +480,405773,32680,0,0,0,0,0,0,0,1,1,1,0,0,8.0,8,1.0,8,1.0 +481,429580,132313,0,1,0,0,1,0,0,0,0,0,1,0,4.0,4,1.0,4,1.0 +482,356059,157741,0,0,0,0,1,0,0,0,1,0,0,1,,0,0.0,8,5.0 +483,271684,55449,0,0,0,0,1,1,0,0,0,1,0,0,20.0,20,1.0,32,1.6 +484,347716,95631,0,0,0,0,1,1,1,0,0,0,0,0,4.0,4,1.0,12,3.0 +485,411171,62933,0,0,0,1,0,0,0,1,0,0,0,1,,0,0.0,16,5.0 +486,428977,75683,0,0,0,0,1,0,0,0,1,1,0,0,8.0,8,1.0,28,3.5 +487,466398,187686,0,0,1,0,0,0,0,1,0,0,0,1,16.0,16,1.0,12,0.75 +488,270081,33315,0,0,0,0,0,0,0,0,1,0,1,1,20.0,20,1.0,12,0.6 +489,485029,51880,1,0,0,1,0,0,0,0,0,0,0,1,16.0,16,1.0,12,0.75 +490,261276,108198,1,0,0,0,0,0,0,0,1,0,1,0,4.0,4,1.0,20,5.0 +491,407979,92566,1,0,0,0,0,0,1,0,0,0,0,1,16.0,16,1.0,8,0.5 +492,243348,173882,0,0,0,1,0,1,0,0,0,0,0,1,12.0,12,1.0,12,1.0 +493,411586,174035,0,0,0,0,1,0,0,1,0,0,0,1,12.0,12,1.0,16,1.33 +494,297625,144010,1,0,0,1,0,0,1,0,0,0,0,0,16.0,16,1.0,12,0.75 +495,402601,92174,0,0,0,0,0,0,1,1,0,1,0,0,4.0,4,1.0,0,0.0 +496,266498,69504,0,0,0,1,0,1,1,0,0,0,0,0,16.0,16,1.0,8,0.5 
+497,303917,41341,0,0,0,0,1,1,0,0,0,0,1,0,8.0,8,1.0,4,0.5 +498,205307,26146,1,0,0,0,0,1,1,0,0,0,0,0,12.0,12,1.0,12,1.0 +499,471466,124691,1,0,1,0,0,0,0,1,0,0,0,0,20.0,20,1.0,12,0.6 +500,363805,196676,0,1,0,0,0,0,0,1,1,0,0,0,8.0,8,1.0,8,1.0 +501,328308,62536,0,0,1,1,0,0,0,1,0,0,0,0,,0,0.0,4,5.0 +502,267523,85770,0,0,1,0,0,0,1,0,0,0,1,0,16.0,16,1.0,8,0.5 +503,332979,84112,1,0,1,0,0,0,0,0,0,0,1,0,4.0,4,1.0,0,0.0 +504,309464,180538,0,0,0,0,1,1,0,0,0,1,0,0,4.0,4,1.0,16,4.0 +505,488292,185347,1,0,0,0,0,0,0,1,0,1,0,0,4.0,4,1.0,8,2.0 +506,361471,165007,0,0,0,0,0,0,1,1,0,0,0,1,12.0,12,1.0,8,0.67 +507,208687,105248,0,0,1,0,0,0,1,1,0,0,0,0,16.0,16,1.0,8,0.5 +508,232684,161404,0,1,0,0,0,1,0,1,0,0,0,0,4.0,4,1.0,12,3.0 +509,442554,16172,0,0,0,0,1,0,0,0,0,0,1,1,8.0,8,1.0,24,3.0 +510,204657,170199,0,0,1,0,0,0,0,0,0,0,1,1,4.0,4,1.0,8,2.0 +511,239389,70377,0,0,1,0,1,0,0,0,1,0,0,0,20.0,20,1.0,12,0.6 +512,218008,101701,1,0,0,0,1,0,0,1,0,0,0,0,8.0,8,1.0,20,2.5 +513,383759,194651,0,0,1,0,0,0,0,0,1,0,1,0,4.0,4,1.0,12,3.0 +514,306213,61447,0,0,0,1,1,0,1,0,0,0,0,0,8.0,8,1.0,4,0.5 +515,312123,48279,0,0,1,0,1,0,1,0,0,0,0,0,4.0,4,1.0,8,2.0 +516,361275,116248,0,0,0,1,0,0,0,0,0,0,1,1,,0,0.0,12,5.0 +517,388396,90908,0,1,0,0,0,0,0,1,0,1,0,0,4.0,4,1.0,12,3.0 +518,288128,133729,0,0,0,0,1,1,0,0,0,0,1,0,8.0,8,1.0,8,1.0 +519,425896,76334,0,0,0,1,0,0,1,0,0,1,0,0,4.0,4,1.0,24,6.0 +520,227571,49234,0,0,1,0,0,0,1,0,0,1,0,0,8.0,8,1.0,16,2.0 +521,479020,137397,0,0,0,0,1,1,0,0,1,0,0,0,4.0,4,1.0,8,2.0 +522,204399,43095,0,0,1,0,0,0,0,1,0,0,0,1,16.0,16,1.0,20,1.25 +523,338325,154358,0,0,0,1,1,0,0,1,0,0,0,0,16.0,16,1.0,8,0.5 +524,336982,132921,0,0,1,1,0,0,0,0,1,0,0,0,28.0,28,1.0,16,0.57 +525,341459,91124,0,0,1,0,0,0,0,0,0,1,0,1,16.0,16,1.0,0,0.0 +526,406510,63478,0,0,0,1,0,0,0,0,1,0,0,1,8.0,8,1.0,8,1.0 +527,332700,64958,0,1,0,0,0,1,0,0,0,0,1,0,12.0,12,1.0,4,0.33 +528,254230,86953,0,0,1,0,0,0,1,1,0,0,0,0,,0,0.0,12,5.0 +529,216574,171859,0,0,0,1,0,0,0,0,0,1,1,0,8.0,8,1.0,12,1.5 
+530,482990,41795,0,0,0,1,0,1,0,0,0,0,0,1,20.0,20,1.0,12,0.6 531,330133,123832,0,0,0,0,1,0,0,0,0,1,1,0,,0,0.0,0,0.0 -532,395228,53435,1,0,0,0,0,0,0,1,0,0,1,0,6.0,6,1.0,14,2.33 -533,219301,83245,1,0,0,0,1,0,0,0,0,0,1,0,2.0,2,1.0,4,2.0 -534,241393,116263,0,1,0,1,0,0,0,0,0,1,0,0,6.0,6,1.0,12,2.0 -535,209948,50931,0,0,0,1,0,0,1,0,0,0,1,0,8.0,8,1.0,4,0.5 -536,434471,63782,1,0,0,1,0,0,0,1,0,0,0,0,4.0,4,1.0,4,1.0 -537,402834,161904,0,0,1,0,1,1,0,0,0,0,0,0,2.0,2,1.0,8,4.0 -538,441114,132616,1,0,0,0,0,1,1,0,0,0,0,0,2.0,2,1.0,12,6.0 -539,467585,31356,0,1,0,1,1,0,0,0,0,0,0,0,8.0,8,1.0,4,0.5 -540,348008,95495,1,0,0,0,0,0,0,1,0,1,0,0,10.0,10,1.0,6,0.6 -541,445390,136281,0,0,1,0,0,0,0,0,1,0,1,0,4.0,4,1.0,8,2.0 -542,323829,25432,0,0,1,0,0,1,0,1,0,0,0,0,2.0,2,1.0,12,6.0 -543,205185,12607,0,0,0,0,0,0,1,0,0,1,0,1,8.0,8,1.0,6,0.75 -544,490248,26420,0,0,0,1,0,1,0,1,0,0,0,0,4.0,4,1.0,8,2.0 -545,463292,161027,0,0,0,0,1,0,0,0,1,0,1,0,4.0,4,1.0,8,2.0 -546,480943,157829,0,1,0,1,0,0,0,1,0,0,0,0,8.0,8,1.0,8,1.0 -547,347856,29055,1,0,1,0,0,0,0,0,0,0,0,1,12.0,12,1.0,0,0.0 -548,239565,96294,0,1,0,0,0,0,0,0,0,0,1,1,6.0,6,1.0,8,1.33 -549,255734,62437,1,0,0,0,0,0,0,1,0,0,1,0,10.0,10,1.0,4,0.4 -550,242008,183474,0,1,0,0,0,0,0,1,0,1,0,0,4.0,4,1.0,2,0.5 -551,278966,28705,0,1,0,1,0,0,0,0,1,0,0,0,10.0,10,1.0,2,0.2 -552,266072,17762,0,0,1,1,0,0,0,0,0,1,0,0,6.0,6,1.0,8,1.33 -553,380708,37376,0,0,0,1,0,0,1,0,0,0,1,0,4.0,4,1.0,4,1.0 -554,207097,109083,0,1,0,1,0,0,0,0,1,0,0,0,14.0,14,1.0,4,0.29 -555,220167,167100,0,1,1,0,0,0,0,1,0,0,0,0,6.0,6,1.0,8,1.33 -556,328447,185659,0,0,0,0,0,0,1,0,0,1,0,1,,0,0.0,2,5.0 -557,263123,158856,0,1,0,1,0,0,0,0,0,1,0,0,4.0,4,1.0,4,1.0 -558,244123,101011,0,0,0,1,1,0,0,0,0,0,0,1,14.0,14,1.0,14,1.0 -559,212049,197338,1,0,0,0,0,1,0,0,0,1,0,0,2.0,2,1.0,8,4.0 -560,244946,29596,1,0,0,0,1,0,0,0,0,1,0,0,4.0,4,1.0,6,1.5 -561,358017,45793,0,0,0,0,0,1,0,1,0,0,1,0,8.0,8,1.0,8,1.0 -562,344701,132333,1,0,1,0,0,0,0,1,0,0,0,0,6.0,6,1.0,12,2.0 
-563,411913,180763,0,0,0,0,1,1,1,0,0,0,0,0,8.0,8,1.0,10,1.25 -564,449587,131493,1,1,0,1,0,0,0,0,0,0,0,0,4.0,4,1.0,10,2.5 -565,486488,50115,0,0,0,1,0,0,0,1,0,1,0,0,16.0,16,1.0,4,0.25 -566,362731,138975,1,0,0,0,0,1,0,1,0,0,0,0,10.0,10,1.0,10,1.0 -567,496717,190331,0,1,1,0,0,0,1,0,0,0,0,0,10.0,10,1.0,6,0.6 -568,306057,157754,0,0,1,0,0,0,0,0,1,1,0,0,4.0,4,1.0,6,1.5 -569,338709,85126,0,0,1,0,0,0,0,0,1,0,0,1,4.0,4,1.0,4,1.0 -570,200001,143481,0,0,0,1,1,0,0,0,0,0,1,0,2.0,2,1.0,0,0.0 -571,223457,100696,0,0,0,1,0,1,0,0,1,0,0,0,8.0,8,1.0,8,1.0 -572,441179,25685,0,0,1,0,0,0,0,0,1,0,1,0,12.0,12,1.0,4,0.33 -573,353594,110800,1,0,0,0,0,0,0,0,1,1,0,0,6.0,6,1.0,8,1.33 -574,393614,13674,0,0,1,0,0,1,1,0,0,0,0,0,4.0,4,1.0,16,4.0 -575,430394,20528,0,0,1,0,1,0,0,0,0,1,0,0,10.0,10,1.0,2,0.2 -576,331831,152774,1,0,0,1,0,0,0,0,1,0,0,0,2.0,2,1.0,10,5.0 -577,398194,120484,0,0,0,0,0,1,1,0,0,1,0,0,8.0,8,1.0,8,1.0 -578,276427,176286,1,0,1,0,0,0,0,1,0,0,0,0,2.0,2,1.0,6,3.0 -579,499987,117675,0,0,0,0,0,1,0,0,1,1,0,0,12.0,12,1.0,12,1.0 -580,290614,113585,0,0,1,1,1,0,0,0,0,0,0,0,6.0,6,1.0,2,0.33 -581,478243,195676,0,0,1,0,0,0,1,0,0,0,0,1,2.0,2,1.0,8,4.0 -582,402493,128166,0,0,0,0,0,1,0,1,0,1,0,0,12.0,12,1.0,8,0.67 -583,393992,143737,0,0,0,1,0,0,1,0,1,0,0,0,6.0,6,1.0,2,0.33 -584,327428,88091,0,1,1,0,0,1,0,0,0,0,0,0,8.0,8,1.0,4,0.5 -585,462940,164467,0,0,0,0,0,1,0,0,1,0,0,1,2.0,2,1.0,12,6.0 -586,265145,97582,0,0,0,0,0,1,0,0,1,1,0,0,4.0,4,1.0,4,1.0 -587,451884,125764,1,0,0,1,0,0,0,0,0,1,0,0,,0,0.0,10,5.0 -588,470082,123085,0,0,0,1,1,0,0,0,0,0,0,1,4.0,4,1.0,4,1.0 -589,302570,55829,0,1,0,1,0,1,0,0,0,0,0,0,4.0,4,1.0,6,1.5 -590,271350,163049,0,0,0,0,0,1,0,0,1,1,0,0,6.0,6,1.0,10,1.67 -591,463176,190070,0,0,1,0,1,0,0,0,0,0,1,0,2.0,2,1.0,10,5.0 -592,239269,115358,0,0,0,0,0,0,1,1,0,0,0,1,2.0,2,1.0,10,5.0 -593,492234,80321,0,0,0,0,0,1,1,1,0,0,0,0,6.0,6,1.0,14,2.33 -594,447098,150300,0,0,1,0,0,0,0,0,1,0,1,0,,0,0.0,4,5.0 -595,289440,191239,0,1,0,1,0,0,1,0,0,0,0,0,4.0,4,1.0,4,1.0 
-596,395653,181703,0,0,0,0,0,1,1,0,1,0,0,0,4.0,4,1.0,6,1.5 -597,413273,39791,0,0,0,1,1,0,1,0,0,0,0,0,,0,0.0,2,5.0 -598,225693,56526,0,1,0,0,0,0,1,0,0,0,0,1,10.0,10,1.0,4,0.4 -599,457079,135393,0,0,1,0,0,0,0,0,1,0,1,0,6.0,6,1.0,6,1.0 -600,406200,92399,0,0,1,0,0,1,0,0,0,0,0,1,2.0,2,1.0,10,5.0 -601,391476,55926,0,0,0,1,0,0,1,0,0,1,0,0,6.0,6,1.0,4,0.67 -602,425438,98860,0,0,0,0,0,1,1,0,1,0,0,0,,0,0.0,12,5.0 -603,334418,120570,1,0,0,0,0,0,0,0,1,0,0,1,2.0,2,1.0,4,2.0 -604,348429,28194,1,1,0,0,0,0,0,1,0,0,0,0,,0,0.0,4,5.0 -605,400528,37541,0,0,0,1,1,0,1,0,0,0,0,0,6.0,6,1.0,10,1.67 -606,319216,42896,0,0,0,0,0,1,0,0,0,1,0,1,4.0,4,1.0,6,1.5 -607,480884,46973,0,0,0,0,1,0,0,0,1,0,0,1,2.0,2,1.0,8,4.0 -608,319510,197788,1,0,0,1,0,0,0,0,0,0,0,1,8.0,8,1.0,0,0.0 -609,311293,74309,0,0,1,1,0,0,0,0,0,1,0,0,4.0,4,1.0,14,3.5 -610,393323,181443,0,0,1,0,0,0,1,0,0,0,1,0,4.0,4,1.0,10,2.5 -611,311966,145582,1,0,0,0,1,0,0,0,0,1,0,0,4.0,4,1.0,6,1.5 -612,406543,90709,0,0,0,0,1,0,0,0,0,1,1,0,6.0,6,1.0,6,1.0 -613,302751,155861,0,1,0,0,0,0,0,0,1,0,1,0,12.0,12,1.0,10,0.83 -614,207585,47241,0,0,0,0,0,1,0,0,1,0,1,0,4.0,4,1.0,4,1.0 -615,275659,21504,0,0,0,1,0,1,0,0,0,0,1,0,10.0,10,1.0,10,1.0 -616,406861,28337,0,0,0,0,1,0,0,1,1,0,0,0,2.0,2,1.0,0,0.0 -617,362463,158731,0,0,0,0,1,0,0,1,1,0,0,0,6.0,6,1.0,8,1.33 -618,411510,30753,0,0,0,0,0,1,0,1,0,1,0,0,4.0,4,1.0,6,1.5 -619,485980,143009,0,0,0,0,1,1,1,0,0,0,0,0,2.0,2,1.0,0,0.0 -620,220964,110716,0,1,0,1,0,0,1,0,0,0,0,0,2.0,2,1.0,10,5.0 -621,315555,151131,0,0,0,0,1,0,1,0,1,0,0,0,,0,0.0,2,5.0 -622,471157,68516,1,0,1,0,1,0,0,0,0,0,0,0,8.0,8,1.0,14,1.75 -623,406859,61140,0,1,0,0,0,1,0,0,0,0,1,0,8.0,8,1.0,2,0.25 -624,464367,152582,0,0,0,0,1,0,0,0,0,1,1,0,6.0,6,1.0,2,0.33 -625,384728,71607,0,1,0,0,0,0,0,1,1,0,0,0,10.0,10,1.0,4,0.4 -626,442739,43642,0,1,0,1,0,0,0,0,0,0,0,1,10.0,10,1.0,2,0.2 -627,345081,39891,0,0,0,0,0,0,0,1,1,1,0,0,8.0,8,1.0,2,0.25 -628,356882,151586,0,0,0,0,0,0,0,0,1,1,1,0,8.0,8,1.0,10,1.25 
-629,467710,51296,0,0,0,0,1,0,0,0,1,0,0,1,6.0,6,1.0,6,1.0 -630,320831,133375,0,0,0,0,0,0,1,1,0,0,1,0,8.0,8,1.0,8,1.0 -631,277376,98064,0,0,0,0,0,0,0,1,0,1,1,0,10.0,10,1.0,2,0.2 -632,336337,92259,0,0,0,1,0,0,0,1,0,0,0,1,6.0,6,1.0,12,2.0 -633,205914,12925,0,1,0,0,0,0,1,0,1,0,0,0,4.0,4,1.0,2,0.5 -634,334891,180467,0,1,1,1,0,0,0,0,0,0,0,0,2.0,2,1.0,0,0.0 -635,293350,93423,1,0,0,0,0,1,0,0,0,0,1,0,8.0,8,1.0,2,0.25 -636,248133,102234,0,0,0,1,0,0,0,0,0,1,1,0,14.0,14,1.0,2,0.14 -637,463279,66610,0,0,0,0,0,1,0,0,0,1,0,1,4.0,4,1.0,4,1.0 -638,233225,144563,0,0,0,0,1,1,0,0,0,1,0,0,4.0,4,1.0,12,3.0 -639,480066,24214,1,0,0,0,1,0,0,0,1,0,0,0,2.0,2,1.0,6,3.0 -640,298201,68870,0,1,0,0,0,0,0,0,1,0,0,1,2.0,2,1.0,8,4.0 -641,316673,22103,0,0,0,0,0,1,0,0,0,1,1,0,6.0,6,1.0,12,2.0 -642,435702,113156,1,0,0,0,0,0,0,0,0,1,0,1,8.0,8,1.0,8,1.0 -643,382302,131574,0,0,0,1,1,1,0,0,0,0,0,0,16.0,16,1.0,8,0.5 -644,308676,103934,1,0,0,0,0,1,0,0,1,0,0,0,2.0,2,1.0,4,2.0 -645,424223,88807,0,1,0,0,0,0,0,1,1,0,0,0,4.0,4,1.0,0,0.0 -646,447228,44269,0,1,0,0,0,1,0,0,0,0,1,0,10.0,10,1.0,6,0.6 -647,277880,55032,1,0,0,0,0,0,1,0,0,0,1,0,6.0,6,1.0,8,1.33 -648,222776,158303,0,0,0,0,0,1,0,0,1,1,0,0,6.0,6,1.0,10,1.67 -649,472568,61872,1,0,0,0,0,1,0,0,0,0,0,1,6.0,6,1.0,6,1.0 -650,326433,106534,1,0,0,1,0,0,0,0,0,0,0,1,4.0,4,1.0,6,1.5 -651,339979,90419,0,1,0,0,1,1,0,0,0,0,0,0,2.0,2,1.0,6,3.0 -652,444103,137728,0,0,1,0,1,0,0,0,0,1,0,0,4.0,4,1.0,2,0.5 -653,453724,109824,0,0,0,1,0,0,0,1,0,0,0,1,6.0,6,1.0,8,1.33 -654,487460,127989,1,1,0,0,0,0,0,1,0,0,0,0,4.0,4,1.0,8,2.0 -655,483102,182886,0,1,0,1,0,0,0,0,0,0,1,0,4.0,4,1.0,10,2.5 -656,249702,127895,0,0,0,0,0,0,1,0,0,1,0,1,6.0,6,1.0,6,1.0 -657,474378,61869,0,0,0,0,0,1,0,0,1,1,0,0,2.0,2,1.0,12,6.0 -658,362653,145831,0,0,0,0,0,0,0,1,0,1,1,0,2.0,2,1.0,6,3.0 -659,370714,71649,0,0,0,0,0,1,0,1,0,1,0,0,8.0,8,1.0,6,0.75 -660,258959,28713,0,1,0,0,0,1,1,0,0,0,0,0,6.0,6,1.0,6,1.0 -661,405274,24301,0,0,0,0,0,0,1,1,1,0,0,0,2.0,2,1.0,6,3.0 
-662,220897,99513,1,0,1,1,0,0,0,0,0,0,0,0,4.0,4,1.0,6,1.5 -663,454203,156314,0,0,0,0,0,0,0,0,1,0,1,1,2.0,2,1.0,4,2.0 -664,462761,164531,0,0,0,1,0,0,0,1,0,0,1,0,6.0,6,1.0,8,1.33 -665,325579,59883,0,0,0,0,0,1,0,0,1,0,1,0,4.0,4,1.0,10,2.5 -666,473233,154694,0,0,1,1,0,0,0,0,1,0,0,0,8.0,8,1.0,4,0.5 -667,420925,199716,1,0,0,0,0,1,0,0,0,1,0,0,6.0,6,1.0,0,0.0 -668,200999,106467,0,1,1,0,0,0,1,0,0,0,0,0,6.0,6,1.0,4,0.67 -669,251803,124949,1,1,0,0,0,1,0,0,0,0,0,0,6.0,6,1.0,6,1.0 -670,498285,29863,0,1,0,0,0,0,0,0,1,0,1,0,6.0,6,1.0,8,1.33 -671,262904,122418,0,1,0,0,0,1,0,1,0,0,0,0,8.0,8,1.0,2,0.25 -672,422921,59595,0,0,0,0,0,1,1,0,0,0,1,0,2.0,2,1.0,2,1.0 -673,420628,177529,1,0,0,0,0,0,0,0,1,0,1,0,2.0,2,1.0,4,2.0 -674,324917,104702,0,0,0,1,0,1,0,0,1,0,0,0,6.0,6,1.0,6,1.0 -675,430855,75453,0,0,0,0,1,0,1,0,0,0,1,0,2.0,2,1.0,6,3.0 -676,285582,74778,1,1,0,0,0,0,1,0,0,0,0,0,8.0,8,1.0,2,0.25 -677,496860,49463,0,1,0,0,0,1,1,0,0,0,0,0,8.0,8,1.0,0,0.0 -678,423904,187806,0,1,0,0,1,0,0,0,0,0,1,0,8.0,8,1.0,6,0.75 -679,204938,36785,1,0,1,1,0,0,0,0,0,0,0,0,8.0,8,1.0,2,0.25 -680,375284,196746,0,0,0,0,1,0,0,1,1,0,0,0,6.0,6,1.0,6,1.0 -681,244556,29179,0,0,0,0,1,0,0,0,0,1,0,1,4.0,4,1.0,4,1.0 -682,229152,127759,0,0,0,0,0,0,1,1,0,0,0,1,,0,0.0,8,5.0 -683,361553,64970,0,0,1,0,1,0,0,0,0,0,1,0,6.0,6,1.0,6,1.0 -684,271352,113220,0,0,1,1,0,0,0,0,0,0,0,1,2.0,2,1.0,6,3.0 -685,295760,74282,1,0,0,0,1,0,0,0,1,0,0,0,,0,0.0,6,5.0 -686,318293,33441,0,0,0,0,0,0,1,1,0,0,1,0,8.0,8,1.0,8,1.0 -687,383619,40056,0,0,1,0,0,0,0,0,1,0,0,1,10.0,10,1.0,2,0.2 -688,314795,136938,0,0,0,0,1,0,1,0,0,0,1,0,10.0,10,1.0,2,0.2 -689,478514,189564,0,0,0,1,1,0,0,0,0,1,0,0,4.0,4,1.0,2,0.5 -690,404950,188127,0,0,0,1,1,1,0,0,0,0,0,0,6.0,6,1.0,8,1.33 -691,445341,106081,1,0,0,1,1,0,0,0,0,0,0,0,4.0,4,1.0,6,1.5 -692,449316,77666,1,0,1,0,0,1,0,0,0,0,0,0,8.0,8,1.0,8,1.0 -693,302075,138096,0,0,0,0,0,1,1,1,0,0,0,0,8.0,8,1.0,14,1.75 -694,456352,36009,0,0,0,1,0,0,1,0,0,0,1,0,4.0,4,1.0,6,1.5 -695,490129,92546,0,1,0,0,0,0,0,0,1,0,1,0,6.0,6,1.0,4,0.67 
-696,433637,55286,0,1,0,0,1,1,0,0,0,0,0,0,6.0,6,1.0,10,1.67 -697,210566,31212,0,0,0,0,0,0,1,0,1,0,0,1,,0,0.0,12,5.0 -698,402223,122169,0,1,1,0,0,0,1,0,0,0,0,0,12.0,12,1.0,2,0.17 -699,244785,198667,1,0,0,0,0,0,0,0,0,1,0,1,10.0,10,1.0,12,1.2 -700,402291,166371,0,0,0,0,0,1,0,0,1,0,0,1,8.0,8,1.0,10,1.25 -701,313478,78519,0,1,0,0,0,0,0,1,0,0,1,0,4.0,4,1.0,14,3.5 -702,438841,134520,1,0,1,0,0,0,0,0,1,0,0,0,8.0,8,1.0,6,0.75 -703,213094,70655,0,0,0,0,1,0,0,0,1,0,0,1,8.0,8,1.0,0,0.0 -704,350978,43948,0,0,1,1,0,0,0,1,0,0,0,0,2.0,2,1.0,6,3.0 -705,254726,172807,0,0,0,0,0,0,1,0,1,1,0,0,8.0,8,1.0,6,0.75 -706,434372,189536,0,0,0,1,0,0,0,0,0,0,1,1,4.0,4,1.0,4,1.0 -707,345034,80288,0,1,0,0,0,0,0,0,1,0,1,0,,0,0.0,10,5.0 -708,232062,187285,0,1,0,0,1,0,0,0,1,0,0,0,6.0,6,1.0,0,0.0 -709,411841,19443,0,0,0,0,0,1,0,1,0,1,0,0,2.0,2,1.0,6,3.0 -710,453578,18206,0,1,0,0,1,0,0,0,0,0,1,0,2.0,2,1.0,10,5.0 -711,241167,176591,0,0,0,0,0,1,0,0,0,0,1,1,2.0,2,1.0,8,4.0 -712,371904,166797,0,0,0,0,0,0,0,0,1,1,0,1,4.0,4,1.0,2,0.5 -713,496726,73973,1,0,0,0,0,0,1,0,0,0,0,1,2.0,2,1.0,6,3.0 -714,285423,39451,1,1,0,0,0,0,0,0,1,0,0,0,6.0,6,1.0,8,1.33 -715,235681,76552,0,1,0,0,0,0,1,0,0,0,1,0,6.0,6,1.0,4,0.67 -716,204840,172449,0,0,0,0,0,0,1,1,1,0,0,0,2.0,2,1.0,8,4.0 -717,405744,175646,1,1,1,0,0,0,0,0,0,0,0,0,10.0,10,1.0,10,1.0 -718,247418,116395,1,0,0,0,0,0,1,0,0,1,0,0,6.0,6,1.0,6,1.0 -719,268489,54245,0,0,0,0,1,0,1,0,0,0,1,0,10.0,10,1.0,4,0.4 -720,328945,165590,0,0,1,0,0,0,0,0,0,1,0,1,2.0,2,1.0,4,2.0 -721,410193,141893,0,0,0,0,0,1,1,0,0,1,0,0,2.0,2,1.0,8,4.0 -722,345034,40830,1,0,1,0,1,0,0,0,0,0,0,0,10.0,10,1.0,2,0.2 -723,364372,64143,0,0,0,1,1,0,0,0,0,0,0,1,4.0,4,1.0,4,1.0 +532,395228,53435,1,0,0,0,0,0,0,1,0,0,1,0,12.0,12,1.0,28,2.33 +533,219301,83245,1,0,0,0,1,0,0,0,0,0,1,0,4.0,4,1.0,8,2.0 +534,241393,116263,0,1,0,1,0,0,0,0,0,1,0,0,12.0,12,1.0,24,2.0 +535,209948,50931,0,0,0,1,0,0,1,0,0,0,1,0,16.0,16,1.0,8,0.5 +536,434471,63782,1,0,0,1,0,0,0,1,0,0,0,0,8.0,8,1.0,8,1.0 
+537,402834,161904,0,0,1,0,1,1,0,0,0,0,0,0,4.0,4,1.0,16,4.0 +538,441114,132616,1,0,0,0,0,1,1,0,0,0,0,0,4.0,4,1.0,24,6.0 +539,467585,31356,0,1,0,1,1,0,0,0,0,0,0,0,16.0,16,1.0,8,0.5 +540,348008,95495,1,0,0,0,0,0,0,1,0,1,0,0,20.0,20,1.0,12,0.6 +541,445390,136281,0,0,1,0,0,0,0,0,1,0,1,0,8.0,8,1.0,16,2.0 +542,323829,25432,0,0,1,0,0,1,0,1,0,0,0,0,4.0,4,1.0,24,6.0 +543,205185,12607,0,0,0,0,0,0,1,0,0,1,0,1,16.0,16,1.0,12,0.75 +544,490248,26420,0,0,0,1,0,1,0,1,0,0,0,0,8.0,8,1.0,16,2.0 +545,463292,161027,0,0,0,0,1,0,0,0,1,0,1,0,8.0,8,1.0,16,2.0 +546,480943,157829,0,1,0,1,0,0,0,1,0,0,0,0,16.0,16,1.0,16,1.0 +547,347856,29055,1,0,1,0,0,0,0,0,0,0,0,1,24.0,24,1.0,0,0.0 +548,239565,96294,0,1,0,0,0,0,0,0,0,0,1,1,12.0,12,1.0,16,1.33 +549,255734,62437,1,0,0,0,0,0,0,1,0,0,1,0,20.0,20,1.0,8,0.4 +550,242008,183474,0,1,0,0,0,0,0,1,0,1,0,0,8.0,8,1.0,4,0.5 +551,278966,28705,0,1,0,1,0,0,0,0,1,0,0,0,20.0,20,1.0,4,0.2 +552,266072,17762,0,0,1,1,0,0,0,0,0,1,0,0,12.0,12,1.0,16,1.33 +553,380708,37376,0,0,0,1,0,0,1,0,0,0,1,0,8.0,8,1.0,8,1.0 +554,207097,109083,0,1,0,1,0,0,0,0,1,0,0,0,28.0,28,1.0,8,0.29 +555,220167,167100,0,1,1,0,0,0,0,1,0,0,0,0,12.0,12,1.0,16,1.33 +556,328447,185659,0,0,0,0,0,0,1,0,0,1,0,1,,0,0.0,4,5.0 +557,263123,158856,0,1,0,1,0,0,0,0,0,1,0,0,8.0,8,1.0,8,1.0 +558,244123,101011,0,0,0,1,1,0,0,0,0,0,0,1,28.0,28,1.0,28,1.0 +559,212049,197338,1,0,0,0,0,1,0,0,0,1,0,0,4.0,4,1.0,16,4.0 +560,244946,29596,1,0,0,0,1,0,0,0,0,1,0,0,8.0,8,1.0,12,1.5 +561,358017,45793,0,0,0,0,0,1,0,1,0,0,1,0,16.0,16,1.0,16,1.0 +562,344701,132333,1,0,1,0,0,0,0,1,0,0,0,0,12.0,12,1.0,24,2.0 +563,411913,180763,0,0,0,0,1,1,1,0,0,0,0,0,16.0,16,1.0,20,1.25 +564,449587,131493,1,1,0,1,0,0,0,0,0,0,0,0,8.0,8,1.0,20,2.5 +565,486488,50115,0,0,0,1,0,0,0,1,0,1,0,0,32.0,32,1.0,8,0.25 +566,362731,138975,1,0,0,0,0,1,0,1,0,0,0,0,20.0,20,1.0,20,1.0 +567,496717,190331,0,1,1,0,0,0,1,0,0,0,0,0,20.0,20,1.0,12,0.6 +568,306057,157754,0,0,1,0,0,0,0,0,1,1,0,0,8.0,8,1.0,12,1.5 +569,338709,85126,0,0,1,0,0,0,0,0,1,0,0,1,8.0,8,1.0,8,1.0 
+570,200001,143481,0,0,0,1,1,0,0,0,0,0,1,0,4.0,4,1.0,0,0.0 +571,223457,100696,0,0,0,1,0,1,0,0,1,0,0,0,16.0,16,1.0,16,1.0 +572,441179,25685,0,0,1,0,0,0,0,0,1,0,1,0,24.0,24,1.0,8,0.33 +573,353594,110800,1,0,0,0,0,0,0,0,1,1,0,0,12.0,12,1.0,16,1.33 +574,393614,13674,0,0,1,0,0,1,1,0,0,0,0,0,8.0,8,1.0,32,4.0 +575,430394,20528,0,0,1,0,1,0,0,0,0,1,0,0,20.0,20,1.0,4,0.2 +576,331831,152774,1,0,0,1,0,0,0,0,1,0,0,0,4.0,4,1.0,20,5.0 +577,398194,120484,0,0,0,0,0,1,1,0,0,1,0,0,16.0,16,1.0,16,1.0 +578,276427,176286,1,0,1,0,0,0,0,1,0,0,0,0,4.0,4,1.0,12,3.0 +579,499987,117675,0,0,0,0,0,1,0,0,1,1,0,0,24.0,24,1.0,24,1.0 +580,290614,113585,0,0,1,1,1,0,0,0,0,0,0,0,12.0,12,1.0,4,0.33 +581,478243,195676,0,0,1,0,0,0,1,0,0,0,0,1,4.0,4,1.0,16,4.0 +582,402493,128166,0,0,0,0,0,1,0,1,0,1,0,0,24.0,24,1.0,16,0.67 +583,393992,143737,0,0,0,1,0,0,1,0,1,0,0,0,12.0,12,1.0,4,0.33 +584,327428,88091,0,1,1,0,0,1,0,0,0,0,0,0,16.0,16,1.0,8,0.5 +585,462940,164467,0,0,0,0,0,1,0,0,1,0,0,1,4.0,4,1.0,24,6.0 +586,265145,97582,0,0,0,0,0,1,0,0,1,1,0,0,8.0,8,1.0,8,1.0 +587,451884,125764,1,0,0,1,0,0,0,0,0,1,0,0,,0,0.0,20,5.0 +588,470082,123085,0,0,0,1,1,0,0,0,0,0,0,1,8.0,8,1.0,8,1.0 +589,302570,55829,0,1,0,1,0,1,0,0,0,0,0,0,8.0,8,1.0,12,1.5 +590,271350,163049,0,0,0,0,0,1,0,0,1,1,0,0,12.0,12,1.0,20,1.67 +591,463176,190070,0,0,1,0,1,0,0,0,0,0,1,0,4.0,4,1.0,20,5.0 +592,239269,115358,0,0,0,0,0,0,1,1,0,0,0,1,4.0,4,1.0,20,5.0 +593,492234,80321,0,0,0,0,0,1,1,1,0,0,0,0,12.0,12,1.0,28,2.33 +594,447098,150300,0,0,1,0,0,0,0,0,1,0,1,0,,0,0.0,8,5.0 +595,289440,191239,0,1,0,1,0,0,1,0,0,0,0,0,8.0,8,1.0,8,1.0 +596,395653,181703,0,0,0,0,0,1,1,0,1,0,0,0,8.0,8,1.0,12,1.5 +597,413273,39791,0,0,0,1,1,0,1,0,0,0,0,0,,0,0.0,4,5.0 +598,225693,56526,0,1,0,0,0,0,1,0,0,0,0,1,20.0,20,1.0,8,0.4 +599,457079,135393,0,0,1,0,0,0,0,0,1,0,1,0,12.0,12,1.0,12,1.0 +600,406200,92399,0,0,1,0,0,1,0,0,0,0,0,1,4.0,4,1.0,20,5.0 +601,391476,55926,0,0,0,1,0,0,1,0,0,1,0,0,12.0,12,1.0,8,0.67 +602,425438,98860,0,0,0,0,0,1,1,0,1,0,0,0,,0,0.0,24,5.0 
+603,334418,120570,1,0,0,0,0,0,0,0,1,0,0,1,4.0,4,1.0,8,2.0 +604,348429,28194,1,1,0,0,0,0,0,1,0,0,0,0,,0,0.0,8,5.0 +605,400528,37541,0,0,0,1,1,0,1,0,0,0,0,0,12.0,12,1.0,20,1.67 +606,319216,42896,0,0,0,0,0,1,0,0,0,1,0,1,8.0,8,1.0,12,1.5 +607,480884,46973,0,0,0,0,1,0,0,0,1,0,0,1,4.0,4,1.0,16,4.0 +608,319510,197788,1,0,0,1,0,0,0,0,0,0,0,1,16.0,16,1.0,0,0.0 +609,311293,74309,0,0,1,1,0,0,0,0,0,1,0,0,8.0,8,1.0,28,3.5 +610,393323,181443,0,0,1,0,0,0,1,0,0,0,1,0,8.0,8,1.0,20,2.5 +611,311966,145582,1,0,0,0,1,0,0,0,0,1,0,0,8.0,8,1.0,12,1.5 +612,406543,90709,0,0,0,0,1,0,0,0,0,1,1,0,12.0,12,1.0,12,1.0 +613,302751,155861,0,1,0,0,0,0,0,0,1,0,1,0,24.0,24,1.0,20,0.83 +614,207585,47241,0,0,0,0,0,1,0,0,1,0,1,0,8.0,8,1.0,8,1.0 +615,275659,21504,0,0,0,1,0,1,0,0,0,0,1,0,20.0,20,1.0,20,1.0 +616,406861,28337,0,0,0,0,1,0,0,1,1,0,0,0,4.0,4,1.0,0,0.0 +617,362463,158731,0,0,0,0,1,0,0,1,1,0,0,0,12.0,12,1.0,16,1.33 +618,411510,30753,0,0,0,0,0,1,0,1,0,1,0,0,8.0,8,1.0,12,1.5 +619,485980,143009,0,0,0,0,1,1,1,0,0,0,0,0,4.0,4,1.0,0,0.0 +620,220964,110716,0,1,0,1,0,0,1,0,0,0,0,0,4.0,4,1.0,20,5.0 +621,315555,151131,0,0,0,0,1,0,1,0,1,0,0,0,,0,0.0,4,5.0 +622,471157,68516,1,0,1,0,1,0,0,0,0,0,0,0,16.0,16,1.0,28,1.75 +623,406859,61140,0,1,0,0,0,1,0,0,0,0,1,0,16.0,16,1.0,4,0.25 +624,464367,152582,0,0,0,0,1,0,0,0,0,1,1,0,12.0,12,1.0,4,0.33 +625,384728,71607,0,1,0,0,0,0,0,1,1,0,0,0,20.0,20,1.0,8,0.4 +626,442739,43642,0,1,0,1,0,0,0,0,0,0,0,1,20.0,20,1.0,4,0.2 +627,345081,39891,0,0,0,0,0,0,0,1,1,1,0,0,16.0,16,1.0,4,0.25 +628,356882,151586,0,0,0,0,0,0,0,0,1,1,1,0,16.0,16,1.0,20,1.25 +629,467710,51296,0,0,0,0,1,0,0,0,1,0,0,1,12.0,12,1.0,12,1.0 +630,320831,133375,0,0,0,0,0,0,1,1,0,0,1,0,16.0,16,1.0,16,1.0 +631,277376,98064,0,0,0,0,0,0,0,1,0,1,1,0,20.0,20,1.0,4,0.2 +632,336337,92259,0,0,0,1,0,0,0,1,0,0,0,1,12.0,12,1.0,24,2.0 +633,205914,12925,0,1,0,0,0,0,1,0,1,0,0,0,8.0,8,1.0,4,0.5 +634,334891,180467,0,1,1,1,0,0,0,0,0,0,0,0,4.0,4,1.0,0,0.0 +635,293350,93423,1,0,0,0,0,1,0,0,0,0,1,0,16.0,16,1.0,4,0.25 
+636,248133,102234,0,0,0,1,0,0,0,0,0,1,1,0,28.0,28,1.0,4,0.14 +637,463279,66610,0,0,0,0,0,1,0,0,0,1,0,1,8.0,8,1.0,8,1.0 +638,233225,144563,0,0,0,0,1,1,0,0,0,1,0,0,8.0,8,1.0,24,3.0 +639,480066,24214,1,0,0,0,1,0,0,0,1,0,0,0,4.0,4,1.0,12,3.0 +640,298201,68870,0,1,0,0,0,0,0,0,1,0,0,1,4.0,4,1.0,16,4.0 +641,316673,22103,0,0,0,0,0,1,0,0,0,1,1,0,12.0,12,1.0,24,2.0 +642,435702,113156,1,0,0,0,0,0,0,0,0,1,0,1,16.0,16,1.0,16,1.0 +643,382302,131574,0,0,0,1,1,1,0,0,0,0,0,0,32.0,32,1.0,16,0.5 +644,308676,103934,1,0,0,0,0,1,0,0,1,0,0,0,4.0,4,1.0,8,2.0 +645,424223,88807,0,1,0,0,0,0,0,1,1,0,0,0,8.0,8,1.0,0,0.0 +646,447228,44269,0,1,0,0,0,1,0,0,0,0,1,0,20.0,20,1.0,12,0.6 +647,277880,55032,1,0,0,0,0,0,1,0,0,0,1,0,12.0,12,1.0,16,1.33 +648,222776,158303,0,0,0,0,0,1,0,0,1,1,0,0,12.0,12,1.0,20,1.67 +649,472568,61872,1,0,0,0,0,1,0,0,0,0,0,1,12.0,12,1.0,12,1.0 +650,326433,106534,1,0,0,1,0,0,0,0,0,0,0,1,8.0,8,1.0,12,1.5 +651,339979,90419,0,1,0,0,1,1,0,0,0,0,0,0,4.0,4,1.0,12,3.0 +652,444103,137728,0,0,1,0,1,0,0,0,0,1,0,0,8.0,8,1.0,4,0.5 +653,453724,109824,0,0,0,1,0,0,0,1,0,0,0,1,12.0,12,1.0,16,1.33 +654,487460,127989,1,1,0,0,0,0,0,1,0,0,0,0,8.0,8,1.0,16,2.0 +655,483102,182886,0,1,0,1,0,0,0,0,0,0,1,0,8.0,8,1.0,20,2.5 +656,249702,127895,0,0,0,0,0,0,1,0,0,1,0,1,12.0,12,1.0,12,1.0 +657,474378,61869,0,0,0,0,0,1,0,0,1,1,0,0,4.0,4,1.0,24,6.0 +658,362653,145831,0,0,0,0,0,0,0,1,0,1,1,0,4.0,4,1.0,12,3.0 +659,370714,71649,0,0,0,0,0,1,0,1,0,1,0,0,16.0,16,1.0,12,0.75 +660,258959,28713,0,1,0,0,0,1,1,0,0,0,0,0,12.0,12,1.0,12,1.0 +661,405274,24301,0,0,0,0,0,0,1,1,1,0,0,0,4.0,4,1.0,12,3.0 +662,220897,99513,1,0,1,1,0,0,0,0,0,0,0,0,8.0,8,1.0,12,1.5 +663,454203,156314,0,0,0,0,0,0,0,0,1,0,1,1,4.0,4,1.0,8,2.0 +664,462761,164531,0,0,0,1,0,0,0,1,0,0,1,0,12.0,12,1.0,16,1.33 +665,325579,59883,0,0,0,0,0,1,0,0,1,0,1,0,8.0,8,1.0,20,2.5 +666,473233,154694,0,0,1,1,0,0,0,0,1,0,0,0,16.0,16,1.0,8,0.5 +667,420925,199716,1,0,0,0,0,1,0,0,0,1,0,0,12.0,12,1.0,0,0.0 +668,200999,106467,0,1,1,0,0,0,1,0,0,0,0,0,12.0,12,1.0,8,0.67 
+669,251803,124949,1,1,0,0,0,1,0,0,0,0,0,0,12.0,12,1.0,12,1.0 +670,498285,29863,0,1,0,0,0,0,0,0,1,0,1,0,12.0,12,1.0,16,1.33 +671,262904,122418,0,1,0,0,0,1,0,1,0,0,0,0,16.0,16,1.0,4,0.25 +672,422921,59595,0,0,0,0,0,1,1,0,0,0,1,0,4.0,4,1.0,4,1.0 +673,420628,177529,1,0,0,0,0,0,0,0,1,0,1,0,4.0,4,1.0,8,2.0 +674,324917,104702,0,0,0,1,0,1,0,0,1,0,0,0,12.0,12,1.0,12,1.0 +675,430855,75453,0,0,0,0,1,0,1,0,0,0,1,0,4.0,4,1.0,12,3.0 +676,285582,74778,1,1,0,0,0,0,1,0,0,0,0,0,16.0,16,1.0,4,0.25 +677,496860,49463,0,1,0,0,0,1,1,0,0,0,0,0,16.0,16,1.0,0,0.0 +678,423904,187806,0,1,0,0,1,0,0,0,0,0,1,0,16.0,16,1.0,12,0.75 +679,204938,36785,1,0,1,1,0,0,0,0,0,0,0,0,16.0,16,1.0,4,0.25 +680,375284,196746,0,0,0,0,1,0,0,1,1,0,0,0,12.0,12,1.0,12,1.0 +681,244556,29179,0,0,0,0,1,0,0,0,0,1,0,1,8.0,8,1.0,8,1.0 +682,229152,127759,0,0,0,0,0,0,1,1,0,0,0,1,,0,0.0,16,5.0 +683,361553,64970,0,0,1,0,1,0,0,0,0,0,1,0,12.0,12,1.0,12,1.0 +684,271352,113220,0,0,1,1,0,0,0,0,0,0,0,1,4.0,4,1.0,12,3.0 +685,295760,74282,1,0,0,0,1,0,0,0,1,0,0,0,,0,0.0,12,5.0 +686,318293,33441,0,0,0,0,0,0,1,1,0,0,1,0,16.0,16,1.0,16,1.0 +687,383619,40056,0,0,1,0,0,0,0,0,1,0,0,1,20.0,20,1.0,4,0.2 +688,314795,136938,0,0,0,0,1,0,1,0,0,0,1,0,20.0,20,1.0,4,0.2 +689,478514,189564,0,0,0,1,1,0,0,0,0,1,0,0,8.0,8,1.0,4,0.5 +690,404950,188127,0,0,0,1,1,1,0,0,0,0,0,0,12.0,12,1.0,16,1.33 +691,445341,106081,1,0,0,1,1,0,0,0,0,0,0,0,8.0,8,1.0,12,1.5 +692,449316,77666,1,0,1,0,0,1,0,0,0,0,0,0,16.0,16,1.0,16,1.0 +693,302075,138096,0,0,0,0,0,1,1,1,0,0,0,0,16.0,16,1.0,28,1.75 +694,456352,36009,0,0,0,1,0,0,1,0,0,0,1,0,8.0,8,1.0,12,1.5 +695,490129,92546,0,1,0,0,0,0,0,0,1,0,1,0,12.0,12,1.0,8,0.67 +696,433637,55286,0,1,0,0,1,1,0,0,0,0,0,0,12.0,12,1.0,20,1.67 +697,210566,31212,0,0,0,0,0,0,1,0,1,0,0,1,,0,0.0,24,5.0 +698,402223,122169,0,1,1,0,0,0,1,0,0,0,0,0,24.0,24,1.0,4,0.17 +699,244785,198667,1,0,0,0,0,0,0,0,0,1,0,1,20.0,20,1.0,24,1.2 +700,402291,166371,0,0,0,0,0,1,0,0,1,0,0,1,16.0,16,1.0,20,1.25 +701,313478,78519,0,1,0,0,0,0,0,1,0,0,1,0,8.0,8,1.0,28,3.5 
+702,438841,134520,1,0,1,0,0,0,0,0,1,0,0,0,16.0,16,1.0,12,0.75 +703,213094,70655,0,0,0,0,1,0,0,0,1,0,0,1,16.0,16,1.0,0,0.0 +704,350978,43948,0,0,1,1,0,0,0,1,0,0,0,0,4.0,4,1.0,12,3.0 +705,254726,172807,0,0,0,0,0,0,1,0,1,1,0,0,16.0,16,1.0,12,0.75 +706,434372,189536,0,0,0,1,0,0,0,0,0,0,1,1,8.0,8,1.0,8,1.0 +707,345034,80288,0,1,0,0,0,0,0,0,1,0,1,0,,0,0.0,20,5.0 +708,232062,187285,0,1,0,0,1,0,0,0,1,0,0,0,12.0,12,1.0,0,0.0 +709,411841,19443,0,0,0,0,0,1,0,1,0,1,0,0,4.0,4,1.0,12,3.0 +710,453578,18206,0,1,0,0,1,0,0,0,0,0,1,0,4.0,4,1.0,20,5.0 +711,241167,176591,0,0,0,0,0,1,0,0,0,0,1,1,4.0,4,1.0,16,4.0 +712,371904,166797,0,0,0,0,0,0,0,0,1,1,0,1,8.0,8,1.0,4,0.5 +713,496726,73973,1,0,0,0,0,0,1,0,0,0,0,1,4.0,4,1.0,12,3.0 +714,285423,39451,1,1,0,0,0,0,0,0,1,0,0,0,12.0,12,1.0,16,1.33 +715,235681,76552,0,1,0,0,0,0,1,0,0,0,1,0,12.0,12,1.0,8,0.67 +716,204840,172449,0,0,0,0,0,0,1,1,1,0,0,0,4.0,4,1.0,16,4.0 +717,405744,175646,1,1,1,0,0,0,0,0,0,0,0,0,20.0,20,1.0,20,1.0 +718,247418,116395,1,0,0,0,0,0,1,0,0,1,0,0,12.0,12,1.0,12,1.0 +719,268489,54245,0,0,0,0,1,0,1,0,0,0,1,0,20.0,20,1.0,8,0.4 +720,328945,165590,0,0,1,0,0,0,0,0,0,1,0,1,4.0,4,1.0,8,2.0 +721,410193,141893,0,0,0,0,0,1,1,0,0,1,0,0,4.0,4,1.0,16,4.0 +722,345034,40830,1,0,1,0,1,0,0,0,0,0,0,0,20.0,20,1.0,4,0.2 +723,364372,64143,0,0,0,1,1,0,0,0,0,0,0,1,8.0,8,1.0,8,1.0 724,290738,129138,0,1,1,0,0,0,0,1,0,0,0,0,,0,0.0,0,0.0 -725,338965,85609,0,1,0,0,0,1,0,1,0,0,0,0,8.0,8,1.0,2,0.25 -726,467121,143254,0,0,0,0,1,0,0,0,0,0,1,1,6.0,6,1.0,2,0.33 -727,389983,27107,0,1,0,0,0,0,1,0,0,1,0,0,2.0,2,1.0,6,3.0 -728,483851,56319,0,0,0,1,1,0,0,0,0,1,0,0,2.0,2,1.0,4,2.0 -729,424730,165939,1,1,0,1,0,0,0,0,0,0,0,0,8.0,8,1.0,6,0.75 -730,249321,48359,0,1,1,0,0,0,1,0,0,0,0,0,4.0,4,1.0,2,0.5 -731,307430,81205,1,1,0,0,0,0,0,0,0,0,1,0,6.0,6,1.0,10,1.67 -732,347301,37884,1,0,1,0,0,0,0,0,1,0,0,0,10.0,10,1.0,12,1.2 -733,203743,110893,0,0,0,0,1,1,0,0,1,0,0,0,4.0,4,1.0,4,1.0 -734,292487,183232,1,0,0,0,1,0,0,0,1,0,0,0,8.0,8,1.0,4,0.5 
-735,322767,39677,1,0,0,0,1,0,0,0,0,0,1,0,4.0,4,1.0,4,1.0 -736,358514,80208,0,0,0,1,0,1,0,1,0,0,0,0,14.0,14,1.0,4,0.29 -737,427105,139153,0,1,0,0,1,0,0,0,1,0,0,0,4.0,4,1.0,4,1.0 -738,357046,63987,0,0,0,1,1,0,0,0,0,0,1,0,12.0,12,1.0,10,0.83 -739,307964,26776,1,0,1,0,0,0,0,0,0,0,0,1,8.0,8,1.0,8,1.0 -740,316623,39023,0,1,0,0,0,0,0,0,1,1,0,0,4.0,4,1.0,4,1.0 -741,228760,62415,1,0,0,0,0,0,0,1,0,1,0,0,,0,0.0,8,5.0 -742,338286,150402,1,1,0,0,0,0,1,0,0,0,0,0,12.0,12,1.0,12,1.0 -743,438728,154962,1,0,0,0,1,0,0,0,0,0,1,0,8.0,8,1.0,2,0.25 -744,404981,192372,0,0,0,0,1,0,0,0,1,0,1,0,6.0,6,1.0,4,0.67 -745,359528,25575,0,1,0,0,0,0,1,0,1,0,0,0,6.0,6,1.0,4,0.67 -746,349781,163587,1,0,1,0,0,0,0,0,1,0,0,0,8.0,8,1.0,6,0.75 -747,385237,59843,0,0,0,0,0,0,0,1,0,1,1,0,12.0,12,1.0,4,0.33 -748,384632,183937,0,0,0,0,1,1,0,0,0,1,0,0,4.0,4,1.0,10,2.5 -749,306961,148491,0,0,0,0,1,1,0,1,0,0,0,0,4.0,4,1.0,8,2.0 -750,396479,30844,0,0,0,0,1,0,0,0,1,1,0,0,10.0,10,1.0,2,0.2 -751,353952,189743,0,0,0,0,0,1,0,0,0,0,1,1,6.0,6,1.0,0,0.0 -752,458229,72535,0,1,0,0,0,0,1,0,0,0,0,1,6.0,6,1.0,14,2.33 -753,423828,162472,0,0,1,0,1,0,0,1,0,0,0,0,6.0,6,1.0,0,0.0 -754,369405,195449,0,0,0,0,0,1,0,0,1,0,1,0,14.0,14,1.0,8,0.57 -755,394880,59505,1,0,0,0,0,0,1,0,0,0,1,0,8.0,8,1.0,10,1.25 -756,216125,75680,0,0,1,1,0,1,0,0,0,0,0,0,4.0,4,1.0,4,1.0 -757,269805,185734,0,1,1,0,0,0,1,0,0,0,0,0,8.0,8,1.0,0,0.0 -758,413799,157617,0,0,0,0,0,1,1,0,1,0,0,0,,0,0.0,6,5.0 -759,302053,181745,0,0,0,0,0,1,1,0,0,0,0,1,2.0,2,1.0,8,4.0 -760,473261,167980,0,0,1,0,0,0,0,1,0,0,0,1,10.0,10,1.0,8,0.8 -761,445051,106289,1,1,1,0,0,0,0,0,0,0,0,0,10.0,10,1.0,2,0.2 -762,212245,132984,0,1,0,0,0,0,0,0,0,1,1,0,10.0,10,1.0,8,0.8 -763,480413,113380,0,0,1,0,0,0,0,0,0,1,0,1,8.0,8,1.0,6,0.75 -764,352346,53256,0,0,0,0,1,1,0,0,0,0,1,0,,0,0.0,2,5.0 -765,449161,26399,0,1,0,0,0,0,0,1,0,1,0,0,4.0,4,1.0,10,2.5 -766,400195,66688,1,0,0,0,1,1,0,0,0,0,0,0,4.0,4,1.0,8,2.0 -767,360809,41649,0,1,1,0,0,0,0,0,0,0,0,1,4.0,4,1.0,8,2.0 
-768,487223,143779,0,0,1,0,1,0,0,0,0,1,0,0,10.0,10,1.0,8,0.8 -769,364632,163079,0,0,0,1,0,0,0,0,1,1,0,0,2.0,2,1.0,4,2.0 -770,332071,116680,1,0,0,0,1,0,1,0,0,0,0,0,4.0,4,1.0,6,1.5 -771,396359,89174,1,0,1,0,0,0,0,1,0,0,0,0,10.0,10,1.0,4,0.4 -772,320385,92456,0,0,1,1,0,1,0,0,0,0,0,0,6.0,6,1.0,8,1.33 -773,380098,93796,0,1,0,0,0,0,0,0,1,1,0,0,8.0,8,1.0,6,0.75 -774,385140,102819,1,1,0,0,0,0,0,0,0,0,1,0,,0,0.0,6,5.0 -775,473604,163744,0,0,0,0,0,1,1,0,0,1,0,0,4.0,4,1.0,8,2.0 -776,296380,178265,0,0,0,0,0,0,1,1,0,0,0,1,8.0,8,1.0,0,0.0 -777,217741,187165,0,1,0,0,0,0,1,0,0,0,1,0,8.0,8,1.0,4,0.5 -778,468179,141762,0,0,1,0,1,0,0,0,0,0,1,0,2.0,2,1.0,4,2.0 -779,265103,157564,1,0,0,0,0,0,1,0,0,1,0,0,4.0,4,1.0,2,0.5 -780,318917,57513,0,0,0,0,1,1,0,0,0,0,0,1,8.0,8,1.0,8,1.0 -781,335389,73388,0,0,1,0,0,0,1,0,0,0,0,1,,0,0.0,8,5.0 -782,344494,41122,0,1,0,0,1,0,0,0,0,0,1,0,4.0,4,1.0,10,2.5 -783,468987,43930,1,0,0,0,0,1,0,0,0,1,0,0,4.0,4,1.0,4,1.0 -784,480547,64279,0,1,0,0,0,0,0,1,1,0,0,0,8.0,8,1.0,6,0.75 -785,375866,122880,0,0,0,0,0,0,0,1,0,1,1,0,4.0,4,1.0,4,1.0 -786,438184,138257,0,1,0,0,0,0,0,0,1,0,0,1,4.0,4,1.0,10,2.5 -787,221519,42324,1,0,0,0,0,1,1,0,0,0,0,0,6.0,6,1.0,4,0.67 -788,207393,114298,1,1,0,0,0,0,0,0,1,0,0,0,4.0,4,1.0,4,1.0 -789,356242,168810,0,0,0,0,0,0,0,0,1,1,1,0,6.0,6,1.0,10,1.67 -790,394532,87084,0,1,0,0,0,0,0,0,0,1,0,1,2.0,2,1.0,8,4.0 -791,322534,160752,0,0,0,0,1,1,0,0,0,1,0,0,4.0,4,1.0,2,0.5 -792,296770,177535,0,1,0,0,0,0,1,1,0,0,0,0,6.0,6,1.0,8,1.33 -793,359411,181579,0,0,0,0,0,0,1,0,1,1,0,0,6.0,6,1.0,8,1.33 -794,215607,147171,0,0,0,0,1,1,0,0,1,0,0,0,8.0,8,1.0,6,0.75 -795,252366,29720,0,0,1,0,0,0,0,1,0,0,0,1,8.0,8,1.0,2,0.25 -796,481102,62941,1,0,1,0,0,1,0,0,0,0,0,0,2.0,2,1.0,2,1.0 -797,264008,165244,0,0,0,0,0,1,0,1,0,0,1,0,4.0,4,1.0,0,0.0 -798,344748,198569,1,0,1,0,0,1,0,0,0,0,0,0,2.0,2,1.0,10,5.0 -799,491171,95359,1,0,0,1,0,1,0,0,0,0,0,0,4.0,4,1.0,20,5.0 -800,213500,95032,0,0,0,0,0,0,0,1,1,0,0,1,6.0,6,1.0,10,1.67 
-801,450193,132018,1,0,0,0,1,1,0,0,0,0,0,0,4.0,4,1.0,4,1.0 -802,341561,154584,0,0,0,0,0,1,0,0,1,0,0,1,8.0,8,1.0,12,1.5 -803,491900,180115,0,0,0,1,0,0,1,0,0,0,0,1,4.0,4,1.0,2,0.5 -804,206970,151088,0,1,0,1,0,0,0,0,1,0,0,0,8.0,8,1.0,4,0.5 -805,316695,25455,0,0,0,0,1,0,0,0,1,0,1,0,6.0,6,1.0,2,0.33 -806,382649,34601,1,1,0,0,0,0,0,0,0,1,0,0,4.0,4,1.0,8,2.0 -807,256997,194583,0,1,0,0,0,1,0,1,0,0,0,0,6.0,6,1.0,10,1.67 -808,365277,184349,0,1,0,0,0,1,0,0,1,0,0,0,8.0,8,1.0,6,0.75 -809,485349,152806,0,1,0,1,0,0,0,0,0,0,1,0,6.0,6,1.0,6,1.0 -810,480262,53863,0,1,0,1,0,1,0,0,0,0,0,0,6.0,6,1.0,4,0.67 -811,486303,95202,0,1,1,0,0,0,0,1,0,0,0,0,,0,0.0,2,5.0 -812,258131,55993,0,0,0,0,0,0,0,1,0,0,1,1,2.0,2,1.0,4,2.0 +725,338965,85609,0,1,0,0,0,1,0,1,0,0,0,0,16.0,16,1.0,4,0.25 +726,467121,143254,0,0,0,0,1,0,0,0,0,0,1,1,12.0,12,1.0,4,0.33 +727,389983,27107,0,1,0,0,0,0,1,0,0,1,0,0,4.0,4,1.0,12,3.0 +728,483851,56319,0,0,0,1,1,0,0,0,0,1,0,0,4.0,4,1.0,8,2.0 +729,424730,165939,1,1,0,1,0,0,0,0,0,0,0,0,16.0,16,1.0,12,0.75 +730,249321,48359,0,1,1,0,0,0,1,0,0,0,0,0,8.0,8,1.0,4,0.5 +731,307430,81205,1,1,0,0,0,0,0,0,0,0,1,0,12.0,12,1.0,20,1.67 +732,347301,37884,1,0,1,0,0,0,0,0,1,0,0,0,20.0,20,1.0,24,1.2 +733,203743,110893,0,0,0,0,1,1,0,0,1,0,0,0,8.0,8,1.0,8,1.0 +734,292487,183232,1,0,0,0,1,0,0,0,1,0,0,0,16.0,16,1.0,8,0.5 +735,322767,39677,1,0,0,0,1,0,0,0,0,0,1,0,8.0,8,1.0,8,1.0 +736,358514,80208,0,0,0,1,0,1,0,1,0,0,0,0,28.0,28,1.0,8,0.29 +737,427105,139153,0,1,0,0,1,0,0,0,1,0,0,0,8.0,8,1.0,8,1.0 +738,357046,63987,0,0,0,1,1,0,0,0,0,0,1,0,24.0,24,1.0,20,0.83 +739,307964,26776,1,0,1,0,0,0,0,0,0,0,0,1,16.0,16,1.0,16,1.0 +740,316623,39023,0,1,0,0,0,0,0,0,1,1,0,0,8.0,8,1.0,8,1.0 +741,228760,62415,1,0,0,0,0,0,0,1,0,1,0,0,,0,0.0,16,5.0 +742,338286,150402,1,1,0,0,0,0,1,0,0,0,0,0,24.0,24,1.0,24,1.0 +743,438728,154962,1,0,0,0,1,0,0,0,0,0,1,0,16.0,16,1.0,4,0.25 +744,404981,192372,0,0,0,0,1,0,0,0,1,0,1,0,12.0,12,1.0,8,0.67 +745,359528,25575,0,1,0,0,0,0,1,0,1,0,0,0,12.0,12,1.0,8,0.67 
+746,349781,163587,1,0,1,0,0,0,0,0,1,0,0,0,16.0,16,1.0,12,0.75 +747,385237,59843,0,0,0,0,0,0,0,1,0,1,1,0,24.0,24,1.0,8,0.33 +748,384632,183937,0,0,0,0,1,1,0,0,0,1,0,0,8.0,8,1.0,20,2.5 +749,306961,148491,0,0,0,0,1,1,0,1,0,0,0,0,8.0,8,1.0,16,2.0 +750,396479,30844,0,0,0,0,1,0,0,0,1,1,0,0,20.0,20,1.0,4,0.2 +751,353952,189743,0,0,0,0,0,1,0,0,0,0,1,1,12.0,12,1.0,0,0.0 +752,458229,72535,0,1,0,0,0,0,1,0,0,0,0,1,12.0,12,1.0,28,2.33 +753,423828,162472,0,0,1,0,1,0,0,1,0,0,0,0,12.0,12,1.0,0,0.0 +754,369405,195449,0,0,0,0,0,1,0,0,1,0,1,0,28.0,28,1.0,16,0.57 +755,394880,59505,1,0,0,0,0,0,1,0,0,0,1,0,16.0,16,1.0,20,1.25 +756,216125,75680,0,0,1,1,0,1,0,0,0,0,0,0,8.0,8,1.0,8,1.0 +757,269805,185734,0,1,1,0,0,0,1,0,0,0,0,0,16.0,16,1.0,0,0.0 +758,413799,157617,0,0,0,0,0,1,1,0,1,0,0,0,,0,0.0,12,5.0 +759,302053,181745,0,0,0,0,0,1,1,0,0,0,0,1,4.0,4,1.0,16,4.0 +760,473261,167980,0,0,1,0,0,0,0,1,0,0,0,1,20.0,20,1.0,16,0.8 +761,445051,106289,1,1,1,0,0,0,0,0,0,0,0,0,20.0,20,1.0,4,0.2 +762,212245,132984,0,1,0,0,0,0,0,0,0,1,1,0,20.0,20,1.0,16,0.8 +763,480413,113380,0,0,1,0,0,0,0,0,0,1,0,1,16.0,16,1.0,12,0.75 +764,352346,53256,0,0,0,0,1,1,0,0,0,0,1,0,,0,0.0,4,5.0 +765,449161,26399,0,1,0,0,0,0,0,1,0,1,0,0,8.0,8,1.0,20,2.5 +766,400195,66688,1,0,0,0,1,1,0,0,0,0,0,0,8.0,8,1.0,16,2.0 +767,360809,41649,0,1,1,0,0,0,0,0,0,0,0,1,8.0,8,1.0,16,2.0 +768,487223,143779,0,0,1,0,1,0,0,0,0,1,0,0,20.0,20,1.0,16,0.8 +769,364632,163079,0,0,0,1,0,0,0,0,1,1,0,0,4.0,4,1.0,8,2.0 +770,332071,116680,1,0,0,0,1,0,1,0,0,0,0,0,8.0,8,1.0,12,1.5 +771,396359,89174,1,0,1,0,0,0,0,1,0,0,0,0,20.0,20,1.0,8,0.4 +772,320385,92456,0,0,1,1,0,1,0,0,0,0,0,0,12.0,12,1.0,16,1.33 +773,380098,93796,0,1,0,0,0,0,0,0,1,1,0,0,16.0,16,1.0,12,0.75 +774,385140,102819,1,1,0,0,0,0,0,0,0,0,1,0,,0,0.0,12,5.0 +775,473604,163744,0,0,0,0,0,1,1,0,0,1,0,0,8.0,8,1.0,16,2.0 +776,296380,178265,0,0,0,0,0,0,1,1,0,0,0,1,16.0,16,1.0,0,0.0 +777,217741,187165,0,1,0,0,0,0,1,0,0,0,1,0,16.0,16,1.0,8,0.5 +778,468179,141762,0,0,1,0,1,0,0,0,0,0,1,0,4.0,4,1.0,8,2.0 
+779,265103,157564,1,0,0,0,0,0,1,0,0,1,0,0,8.0,8,1.0,4,0.5 +780,318917,57513,0,0,0,0,1,1,0,0,0,0,0,1,16.0,16,1.0,16,1.0 +781,335389,73388,0,0,1,0,0,0,1,0,0,0,0,1,,0,0.0,16,5.0 +782,344494,41122,0,1,0,0,1,0,0,0,0,0,1,0,8.0,8,1.0,20,2.5 +783,468987,43930,1,0,0,0,0,1,0,0,0,1,0,0,8.0,8,1.0,8,1.0 +784,480547,64279,0,1,0,0,0,0,0,1,1,0,0,0,16.0,16,1.0,12,0.75 +785,375866,122880,0,0,0,0,0,0,0,1,0,1,1,0,8.0,8,1.0,8,1.0 +786,438184,138257,0,1,0,0,0,0,0,0,1,0,0,1,8.0,8,1.0,20,2.5 +787,221519,42324,1,0,0,0,0,1,1,0,0,0,0,0,12.0,12,1.0,8,0.67 +788,207393,114298,1,1,0,0,0,0,0,0,1,0,0,0,8.0,8,1.0,8,1.0 +789,356242,168810,0,0,0,0,0,0,0,0,1,1,1,0,12.0,12,1.0,20,1.67 +790,394532,87084,0,1,0,0,0,0,0,0,0,1,0,1,4.0,4,1.0,16,4.0 +791,322534,160752,0,0,0,0,1,1,0,0,0,1,0,0,8.0,8,1.0,4,0.5 +792,296770,177535,0,1,0,0,0,0,1,1,0,0,0,0,12.0,12,1.0,16,1.33 +793,359411,181579,0,0,0,0,0,0,1,0,1,1,0,0,12.0,12,1.0,16,1.33 +794,215607,147171,0,0,0,0,1,1,0,0,1,0,0,0,16.0,16,1.0,12,0.75 +795,252366,29720,0,0,1,0,0,0,0,1,0,0,0,1,16.0,16,1.0,4,0.25 +796,481102,62941,1,0,1,0,0,1,0,0,0,0,0,0,4.0,4,1.0,4,1.0 +797,264008,165244,0,0,0,0,0,1,0,1,0,0,1,0,8.0,8,1.0,0,0.0 +798,344748,198569,1,0,1,0,0,1,0,0,0,0,0,0,4.0,4,1.0,20,5.0 +799,491171,95359,1,0,0,1,0,1,0,0,0,0,0,0,8.0,8,1.0,40,5.0 +800,213500,95032,0,0,0,0,0,0,0,1,1,0,0,1,12.0,12,1.0,20,1.67 +801,450193,132018,1,0,0,0,1,1,0,0,0,0,0,0,8.0,8,1.0,8,1.0 +802,341561,154584,0,0,0,0,0,1,0,0,1,0,0,1,16.0,16,1.0,24,1.5 +803,491900,180115,0,0,0,1,0,0,1,0,0,0,0,1,8.0,8,1.0,4,0.5 +804,206970,151088,0,1,0,1,0,0,0,0,1,0,0,0,16.0,16,1.0,8,0.5 +805,316695,25455,0,0,0,0,1,0,0,0,1,0,1,0,12.0,12,1.0,4,0.33 +806,382649,34601,1,1,0,0,0,0,0,0,0,1,0,0,8.0,8,1.0,16,2.0 +807,256997,194583,0,1,0,0,0,1,0,1,0,0,0,0,12.0,12,1.0,20,1.67 +808,365277,184349,0,1,0,0,0,1,0,0,1,0,0,0,16.0,16,1.0,12,0.75 +809,485349,152806,0,1,0,1,0,0,0,0,0,0,1,0,12.0,12,1.0,12,1.0 +810,480262,53863,0,1,0,1,0,1,0,0,0,0,0,0,12.0,12,1.0,8,0.67 +811,486303,95202,0,1,1,0,0,0,0,1,0,0,0,0,,0,0.0,4,5.0 
+812,258131,55993,0,0,0,0,0,0,0,1,0,0,1,1,4.0,4,1.0,8,2.0 813,463050,126936,0,1,0,0,0,1,0,1,0,0,0,0,,0,0.0,0,0.0 -814,270756,29333,0,0,1,0,0,0,1,0,0,0,0,1,4.0,4,1.0,14,3.5 -815,231164,109027,1,0,0,0,0,1,1,0,0,0,0,0,4.0,4,1.0,8,2.0 -816,463899,33207,0,1,1,0,0,0,0,0,0,0,1,0,12.0,12,1.0,6,0.5 -817,399007,123759,0,1,0,0,0,0,1,0,0,0,1,0,8.0,8,1.0,6,0.75 -818,417191,61278,0,1,0,0,0,1,0,0,0,0,1,0,12.0,12,1.0,8,0.67 -819,343102,130956,0,1,0,1,0,0,0,0,0,0,1,0,6.0,6,1.0,12,2.0 -820,444359,117529,0,1,1,0,0,0,0,1,0,0,0,0,4.0,4,1.0,6,1.5 -821,456975,148816,1,0,0,0,0,1,0,0,0,0,0,1,4.0,4,1.0,8,2.0 -822,281006,127452,0,0,0,1,0,0,1,0,0,0,0,1,2.0,2,1.0,4,2.0 -823,397983,100855,0,1,0,0,1,0,0,1,0,0,0,0,6.0,6,1.0,0,0.0 -824,355294,76855,0,1,0,0,0,0,0,1,1,0,0,0,2.0,2,1.0,2,1.0 -825,422055,194612,1,0,0,1,0,0,0,0,1,0,0,0,,0,0.0,12,5.0 -826,214989,97653,0,0,0,0,0,1,0,0,1,0,1,0,4.0,4,1.0,8,2.0 -827,269975,157064,1,0,0,0,1,1,0,0,0,0,0,0,10.0,10,1.0,4,0.4 -828,494117,152366,1,0,0,0,1,0,0,0,1,0,0,0,6.0,6,1.0,8,1.33 -829,231915,65141,0,0,1,0,1,1,0,0,0,0,0,0,2.0,2,1.0,12,6.0 -830,458923,198250,1,0,0,0,0,1,0,0,1,0,0,0,4.0,4,1.0,6,1.5 -831,389130,141296,0,1,1,0,0,1,0,0,0,0,0,0,12.0,12,1.0,8,0.67 -832,470729,42046,0,0,1,0,1,0,0,0,0,0,0,1,8.0,8,1.0,4,0.5 -833,474279,30181,1,0,0,0,0,0,1,0,0,0,1,0,12.0,12,1.0,4,0.33 -834,406649,98575,0,0,0,1,0,0,0,0,0,1,1,0,4.0,4,1.0,4,1.0 -835,267055,184081,1,0,0,0,0,0,1,0,0,0,1,0,6.0,6,1.0,12,2.0 -836,276545,156447,0,0,0,1,1,0,0,0,0,0,0,1,6.0,6,1.0,6,1.0 -837,482190,36190,0,0,0,1,0,0,1,0,1,0,0,0,6.0,6,1.0,4,0.67 -838,370077,199736,0,1,0,0,1,0,0,1,0,0,0,0,14.0,14,1.0,6,0.43 -839,231280,154005,0,1,0,1,0,0,0,0,1,0,0,0,12.0,12,1.0,2,0.17 -840,495127,71108,0,1,0,0,0,1,0,0,1,0,0,0,4.0,4,1.0,6,1.5 -841,379036,151388,0,1,1,0,0,1,0,0,0,0,0,0,4.0,4,1.0,0,0.0 -842,275802,166094,1,0,0,0,0,1,0,0,0,0,1,0,4.0,4,1.0,6,1.5 -843,382652,65851,0,1,1,0,0,0,0,1,0,0,0,0,6.0,6,1.0,12,2.0 -844,442575,19464,0,0,1,1,0,0,0,0,0,1,0,0,4.0,4,1.0,4,1.0 
-845,250858,124543,1,0,0,1,0,0,0,0,0,0,0,1,4.0,4,1.0,4,1.0 -846,337823,103129,0,0,0,0,1,0,0,1,0,0,1,0,10.0,10,1.0,2,0.2 -847,245935,144115,0,0,0,0,0,0,0,1,0,1,0,1,8.0,8,1.0,6,0.75 -848,370596,14192,0,0,0,0,1,1,0,0,1,0,0,0,6.0,6,1.0,8,1.33 -849,399212,20895,0,1,1,0,0,1,0,0,0,0,0,0,6.0,6,1.0,2,0.33 -850,476076,166707,0,0,0,1,0,0,1,0,0,0,0,1,4.0,4,1.0,6,1.5 -851,467415,130374,0,0,0,1,1,0,0,0,1,0,0,0,4.0,4,1.0,6,1.5 -852,367151,159580,1,0,0,0,0,0,1,0,1,0,0,0,12.0,12,1.0,6,0.5 -853,320912,97724,0,0,0,1,0,0,1,0,0,0,0,1,6.0,6,1.0,6,1.0 -854,227404,36661,0,1,0,1,0,0,0,0,0,0,0,1,10.0,10,1.0,6,0.6 -855,216256,178926,0,0,0,0,1,0,0,0,0,1,0,1,,0,0.0,2,5.0 -856,387475,80569,0,0,0,0,0,0,1,0,1,0,1,0,6.0,6,1.0,8,1.33 -857,249253,26385,0,0,1,0,1,0,0,1,0,0,0,0,4.0,4,1.0,12,3.0 -858,375847,176910,0,0,0,0,0,1,1,0,0,1,0,0,8.0,8,1.0,2,0.25 -859,251309,82035,0,0,0,0,0,0,0,1,1,0,0,1,10.0,10,1.0,6,0.6 -860,384041,50345,0,0,0,0,0,1,0,1,1,0,0,0,,0,0.0,4,5.0 -861,477161,106483,0,0,0,0,1,0,1,0,0,1,0,0,2.0,2,1.0,4,2.0 -862,218587,27343,1,0,1,0,0,0,0,0,0,0,0,1,8.0,8,1.0,8,1.0 -863,284722,71235,0,0,1,1,0,0,0,0,0,0,1,0,12.0,12,1.0,8,0.67 -864,223603,138762,1,0,1,0,0,0,1,0,0,0,0,0,2.0,2,1.0,0,0.0 -865,399762,13085,0,0,0,0,1,1,0,0,1,0,0,0,2.0,2,1.0,4,2.0 -866,299484,114094,1,1,0,0,0,0,0,0,0,0,0,1,8.0,8,1.0,10,1.25 -867,407379,70302,0,1,0,0,1,0,0,1,0,0,0,0,6.0,6,1.0,6,1.0 -868,292348,137758,0,0,0,1,0,0,0,1,1,0,0,0,2.0,2,1.0,10,5.0 -869,287417,138395,0,0,0,1,0,0,1,0,1,0,0,0,8.0,8,1.0,6,0.75 -870,200590,66947,0,0,1,0,1,0,0,0,0,0,0,1,,0,0.0,4,5.0 -871,440673,85320,0,1,0,0,0,0,0,1,0,0,1,0,6.0,6,1.0,2,0.33 -872,356709,116973,0,1,0,1,0,0,1,0,0,0,0,0,2.0,2,1.0,6,3.0 -873,386039,186602,1,0,1,1,0,0,0,0,0,0,0,0,4.0,4,1.0,4,1.0 -874,270051,75658,0,0,0,0,1,0,0,1,1,0,0,0,6.0,6,1.0,6,1.0 -875,436580,164864,0,0,0,0,1,1,0,0,0,1,0,0,2.0,2,1.0,4,2.0 -876,202918,113863,1,1,0,0,0,0,0,1,0,0,0,0,,0,0.0,8,5.0 -877,318488,149400,1,0,0,1,0,0,0,1,0,0,0,0,4.0,4,1.0,10,2.5 
-878,315256,140817,0,0,0,1,0,0,0,1,1,0,0,0,10.0,10,1.0,10,1.0 -879,489213,18867,0,1,0,1,0,0,0,1,0,0,0,0,2.0,2,1.0,4,2.0 -880,446056,70527,1,0,0,0,0,0,0,0,0,0,1,1,10.0,10,1.0,6,0.6 -881,409633,106178,0,0,0,0,0,0,0,1,0,0,1,1,2.0,2,1.0,6,3.0 -882,451351,49239,0,0,0,0,0,0,0,0,1,1,0,1,4.0,4,1.0,2,0.5 -883,215427,15383,0,0,0,1,0,0,1,0,0,1,0,0,6.0,6,1.0,6,1.0 -884,406498,31959,0,1,0,0,0,0,0,0,0,1,0,1,2.0,2,1.0,6,3.0 -885,365405,108994,0,1,0,1,0,0,1,0,0,0,0,0,4.0,4,1.0,4,1.0 -886,280593,104025,1,0,1,0,0,0,0,0,0,0,0,1,10.0,10,1.0,4,0.4 -887,275781,182917,1,1,0,0,0,0,0,0,0,1,0,0,12.0,12,1.0,4,0.33 -888,278623,51336,1,0,0,0,0,0,0,1,0,0,0,1,8.0,8,1.0,10,1.25 -889,479862,137118,1,0,0,0,0,0,0,0,1,0,0,1,2.0,2,1.0,8,4.0 -890,371046,109092,0,0,0,1,0,0,0,0,1,0,0,1,6.0,6,1.0,6,1.0 -891,226188,133532,0,1,0,0,0,0,1,1,0,0,0,0,2.0,2,1.0,6,3.0 -892,413603,178951,0,0,0,1,1,1,0,0,0,0,0,0,2.0,2,1.0,6,3.0 -893,330993,191499,0,1,0,0,0,0,1,1,0,0,0,0,8.0,8,1.0,10,1.25 -894,487445,144381,0,1,0,1,0,1,0,0,0,0,0,0,,0,0.0,6,5.0 -895,446419,102496,0,1,0,0,0,0,1,1,0,0,0,0,12.0,12,1.0,0,0.0 -896,411668,159943,0,0,0,0,0,0,0,0,1,1,0,1,,0,0.0,6,5.0 -897,274151,49946,0,0,0,0,0,0,1,0,1,0,1,0,8.0,8,1.0,4,0.5 -898,454904,173407,0,0,0,0,0,1,0,1,0,1,0,0,16.0,16,1.0,4,0.25 -899,468542,42955,0,0,0,0,0,1,0,1,0,1,0,0,6.0,6,1.0,10,1.67 -900,317048,61101,0,0,0,0,0,1,0,1,0,0,0,1,,0,0.0,2,5.0 -901,385289,74675,1,0,0,0,0,1,0,0,0,0,1,0,8.0,8,1.0,2,0.25 -902,266676,72409,0,0,1,0,0,0,1,1,0,0,0,0,14.0,14,1.0,2,0.14 -903,254136,100314,0,1,0,0,1,0,0,0,1,0,0,0,10.0,10,1.0,4,0.4 -904,314605,151225,1,0,0,0,1,0,0,1,0,0,0,0,10.0,10,1.0,6,0.6 -905,459648,45195,0,0,0,0,0,1,0,0,1,1,0,0,4.0,4,1.0,2,0.5 -906,295585,86609,0,1,0,0,0,0,1,0,0,0,1,0,4.0,4,1.0,0,0.0 -907,371041,75883,0,0,1,0,0,0,1,0,1,0,0,0,4.0,4,1.0,4,1.0 -908,371542,57193,0,0,0,1,0,0,0,1,0,1,0,0,2.0,2,1.0,6,3.0 -909,419290,32255,0,0,0,0,1,1,0,0,0,0,0,1,2.0,2,1.0,12,6.0 -910,332905,154274,0,1,0,0,0,0,1,1,0,0,0,0,10.0,10,1.0,4,0.4 
-911,396616,92842,1,1,0,0,0,0,0,1,0,0,0,0,2.0,2,1.0,4,2.0 -912,225868,72568,0,0,0,1,1,0,0,0,0,0,1,0,8.0,8,1.0,4,0.5 -913,483313,158045,0,0,1,0,0,0,0,1,1,0,0,0,4.0,4,1.0,6,1.5 -914,409500,59021,0,0,0,0,0,0,0,1,1,1,0,0,2.0,2,1.0,10,5.0 -915,347719,136188,1,0,1,0,0,1,0,0,0,0,0,0,8.0,8,1.0,2,0.25 -916,300122,70254,0,0,1,0,1,0,1,0,0,0,0,0,6.0,6,1.0,12,2.0 -917,306186,172369,0,0,0,0,0,0,1,0,0,1,1,0,6.0,6,1.0,4,0.67 -918,317909,48222,0,0,0,0,0,0,0,1,1,1,0,0,8.0,8,1.0,2,0.25 -919,243993,51965,0,0,0,0,0,0,1,0,0,0,1,1,6.0,6,1.0,6,1.0 -920,322095,172458,1,1,0,0,0,0,1,0,0,0,0,0,10.0,10,1.0,6,0.6 -921,305860,60419,0,0,1,1,0,0,0,0,0,0,0,1,10.0,10,1.0,2,0.2 -922,399045,24312,1,0,0,1,0,0,0,0,0,0,0,1,12.0,12,1.0,4,0.33 -923,466188,25075,0,0,0,0,1,1,0,0,0,0,0,1,12.0,12,1.0,10,0.83 -924,328821,44907,1,0,0,0,0,0,0,0,1,0,0,1,6.0,6,1.0,6,1.0 -925,392780,194685,0,1,0,0,0,0,0,0,1,1,0,0,2.0,2,1.0,6,3.0 -926,263701,186881,0,0,0,0,1,0,1,0,0,0,1,0,6.0,6,1.0,6,1.0 -927,485278,149804,0,1,1,0,0,0,0,0,1,0,0,0,,0,0.0,4,5.0 -928,265979,25740,0,0,0,0,1,0,1,0,0,0,0,1,6.0,6,1.0,12,2.0 -929,296060,13802,0,1,1,0,1,0,0,0,0,0,0,0,,0,0.0,4,5.0 -930,314827,183938,0,1,1,0,0,0,0,0,0,1,0,0,4.0,4,1.0,2,0.5 -931,327857,75977,1,0,0,1,0,1,0,0,0,0,0,0,4.0,4,1.0,4,1.0 -932,336696,195946,0,0,1,0,0,0,0,0,1,1,0,0,10.0,10,1.0,12,1.2 -933,381827,61627,0,0,0,0,1,0,0,0,0,1,0,1,,0,0.0,6,5.0 -934,402772,37800,0,0,0,0,1,0,0,0,0,1,1,0,2.0,2,1.0,8,4.0 -935,419875,103666,0,0,0,1,1,0,0,0,0,1,0,0,4.0,4,1.0,12,3.0 -936,370256,147215,0,0,1,0,0,0,0,1,0,1,0,0,8.0,8,1.0,6,0.75 -937,499515,81170,0,1,0,1,0,0,0,0,1,0,0,0,14.0,14,1.0,4,0.29 -938,489669,123869,0,0,1,1,0,0,0,0,1,0,0,0,10.0,10,1.0,8,0.8 -939,318548,157598,0,0,0,0,0,0,0,1,1,1,0,0,10.0,10,1.0,0,0.0 -940,378058,73800,0,0,1,0,0,0,0,1,0,0,0,1,8.0,8,1.0,8,1.0 -941,295093,155387,0,0,0,1,0,0,1,0,0,1,0,0,10.0,10,1.0,4,0.4 -942,255137,77953,0,0,0,1,0,0,0,0,0,1,0,1,2.0,2,1.0,4,2.0 -943,460003,175620,0,0,1,0,0,0,1,1,0,0,0,0,6.0,6,1.0,2,0.33 
-944,435210,67087,0,0,0,0,0,0,1,0,1,1,0,0,4.0,4,1.0,6,1.5 -945,408719,21594,0,0,0,0,0,0,0,0,1,0,1,1,10.0,10,1.0,0,0.0 -946,442983,64387,1,0,0,0,1,0,0,0,1,0,0,0,8.0,8,1.0,6,0.75 -947,417183,170553,1,0,1,1,0,0,0,0,0,0,0,0,14.0,14,1.0,0,0.0 -948,245504,60047,0,0,0,0,0,1,0,1,1,0,0,0,8.0,8,1.0,8,1.0 -949,253896,99657,1,1,0,0,0,0,0,1,0,0,0,0,2.0,2,1.0,4,2.0 -950,360681,78417,0,0,0,0,0,1,0,0,1,0,0,1,6.0,6,1.0,6,1.0 -951,217587,140616,0,1,1,0,0,0,0,0,1,0,0,0,6.0,6,1.0,10,1.67 -952,399434,26457,0,1,0,0,0,1,1,0,0,0,0,0,12.0,12,1.0,2,0.17 -953,311115,142051,1,0,0,0,1,0,1,0,0,0,0,0,10.0,10,1.0,2,0.2 -954,468873,130072,0,0,0,0,0,0,0,1,1,0,1,0,4.0,4,1.0,14,3.5 -955,387628,54418,0,0,1,0,0,1,0,0,0,1,0,0,4.0,4,1.0,4,1.0 -956,311418,79285,0,0,1,0,0,1,0,0,0,0,0,1,6.0,6,1.0,10,1.67 -957,282189,153537,1,0,0,0,0,1,0,0,1,0,0,0,6.0,6,1.0,8,1.33 -958,242619,113714,0,0,0,0,0,0,1,0,1,0,0,1,8.0,8,1.0,10,1.25 -959,326593,187624,0,0,0,0,1,0,1,0,0,0,1,0,8.0,8,1.0,8,1.0 -960,219358,105388,1,0,0,0,0,0,1,1,0,0,0,0,6.0,6,1.0,12,2.0 -961,212481,34643,0,0,0,0,0,0,1,0,1,1,0,0,4.0,4,1.0,14,3.5 -962,493117,142931,0,0,0,0,0,1,0,1,0,1,0,0,4.0,4,1.0,4,1.0 -963,256384,79402,0,0,1,0,0,0,0,0,0,1,1,0,4.0,4,1.0,6,1.5 -964,496916,99297,1,0,0,0,0,1,0,0,0,0,1,0,,0,0.0,2,5.0 -965,423393,174792,0,1,0,0,0,0,1,1,0,0,0,0,,0,0.0,14,5.0 -966,473484,177801,1,0,0,0,1,1,0,0,0,0,0,0,,0,0.0,4,5.0 -967,371779,80404,0,0,0,0,1,0,0,0,0,1,1,0,2.0,2,1.0,6,3.0 -968,384400,71324,0,0,1,0,0,0,0,0,1,0,0,1,4.0,4,1.0,2,0.5 -969,217937,143787,1,0,0,0,1,0,0,0,0,1,0,0,2.0,2,1.0,8,4.0 -970,213566,98135,1,0,0,0,0,1,0,0,0,1,0,0,2.0,2,1.0,4,2.0 -971,289192,56182,0,1,0,0,0,0,1,1,0,0,0,0,6.0,6,1.0,0,0.0 -972,403598,73657,0,0,1,0,0,1,0,1,0,0,0,0,2.0,2,1.0,8,4.0 -973,284210,130990,1,0,0,1,0,0,0,1,0,0,0,0,6.0,6,1.0,4,0.67 -974,401410,40083,0,1,0,0,0,0,1,0,0,0,1,0,8.0,8,1.0,12,1.5 -975,337305,22859,0,0,0,1,0,0,0,0,0,1,0,1,6.0,6,1.0,2,0.33 -976,226419,158974,0,0,0,1,0,1,0,0,0,0,0,1,,0,0.0,2,5.0 -977,316248,105993,0,0,0,1,0,0,1,0,1,0,0,0,4.0,4,1.0,2,0.5 
-978,319823,180018,0,0,0,0,0,1,0,1,0,1,0,0,8.0,8,1.0,4,0.5 -979,468990,123010,0,1,0,1,0,0,0,0,0,0,0,1,4.0,4,1.0,4,1.0 -980,403627,147220,0,0,0,0,0,0,1,0,1,0,1,0,10.0,10,1.0,4,0.4 -981,317225,57958,0,0,0,0,1,1,0,0,0,0,1,0,8.0,8,1.0,0,0.0 -982,229734,168922,0,0,0,1,0,0,0,1,0,1,0,0,4.0,4,1.0,6,1.5 -983,468070,155375,0,0,0,1,0,0,1,1,0,0,0,0,10.0,10,1.0,6,0.6 -984,252884,174360,0,0,0,1,0,0,0,0,1,0,0,1,2.0,2,1.0,10,5.0 -985,466428,100568,0,0,0,0,0,1,0,0,0,1,0,1,4.0,4,1.0,4,1.0 -986,452565,72172,0,0,0,0,0,1,1,0,0,0,0,1,4.0,4,1.0,14,3.5 -987,220362,170895,0,0,0,1,0,0,1,0,0,0,1,0,4.0,4,1.0,10,2.5 -988,270701,101907,0,0,0,0,0,1,0,1,0,0,1,0,8.0,8,1.0,6,0.75 -989,391764,45376,0,0,1,1,1,0,0,0,0,0,0,0,4.0,4,1.0,6,1.5 -990,305302,145908,0,0,0,0,0,1,0,0,1,1,0,0,10.0,10,1.0,6,0.6 -991,356465,130869,1,0,0,1,0,0,0,0,1,0,0,0,2.0,2,1.0,4,2.0 -992,260584,156699,0,0,0,0,0,1,0,1,0,1,0,0,6.0,6,1.0,10,1.67 -993,264824,101853,0,0,0,1,0,0,1,0,1,0,0,0,6.0,6,1.0,8,1.33 -994,229223,55028,1,0,0,0,1,0,0,0,1,0,0,0,10.0,10,1.0,4,0.4 -995,226950,14602,0,0,0,1,0,0,0,0,0,1,1,0,14.0,14,1.0,6,0.43 -996,280148,36439,0,0,0,0,1,0,0,0,1,0,0,1,2.0,2,1.0,8,4.0 -997,254227,51432,0,0,0,0,0,0,1,0,0,1,0,1,,0,0.0,4,5.0 -998,404381,168755,1,0,0,0,0,0,0,0,1,0,0,1,6.0,6,1.0,4,0.67 -999,265544,17476,0,1,0,1,0,1,0,0,0,0,0,0,8.0,8,1.0,6,0.75 -1000,348762,67260,0,0,0,1,1,0,0,0,0,0,0,1,8.0,8,1.0,4,0.5 -1004,90000,10000,1,1,1,0,0,0,0,0,0,0,0,0,,0,0.0,0,0.0 -1005,500000,10000,0,1,0,0,1,0,0,1,0,0,0,0,2.0,2,1.0,0,0.0 -1006,140000,70000,1,0,0,1,0,0,0,1,0,0,0,0,,0,0.0,0,0.0 -1007,500000,10000,0,1,0,0,1,1,0,0,0,0,0,0,,0,0.0,0,0.0 +814,270756,29333,0,0,1,0,0,0,1,0,0,0,0,1,8.0,8,1.0,28,3.5 +815,231164,109027,1,0,0,0,0,1,1,0,0,0,0,0,8.0,8,1.0,16,2.0 +816,463899,33207,0,1,1,0,0,0,0,0,0,0,1,0,24.0,24,1.0,12,0.5 +817,399007,123759,0,1,0,0,0,0,1,0,0,0,1,0,16.0,16,1.0,12,0.75 +818,417191,61278,0,1,0,0,0,1,0,0,0,0,1,0,24.0,24,1.0,16,0.67 +819,343102,130956,0,1,0,1,0,0,0,0,0,0,1,0,12.0,12,1.0,24,2.0 
+820,444359,117529,0,1,1,0,0,0,0,1,0,0,0,0,8.0,8,1.0,12,1.5 +821,456975,148816,1,0,0,0,0,1,0,0,0,0,0,1,8.0,8,1.0,16,2.0 +822,281006,127452,0,0,0,1,0,0,1,0,0,0,0,1,4.0,4,1.0,8,2.0 +823,397983,100855,0,1,0,0,1,0,0,1,0,0,0,0,12.0,12,1.0,0,0.0 +824,355294,76855,0,1,0,0,0,0,0,1,1,0,0,0,4.0,4,1.0,4,1.0 +825,422055,194612,1,0,0,1,0,0,0,0,1,0,0,0,,0,0.0,24,5.0 +826,214989,97653,0,0,0,0,0,1,0,0,1,0,1,0,8.0,8,1.0,16,2.0 +827,269975,157064,1,0,0,0,1,1,0,0,0,0,0,0,20.0,20,1.0,8,0.4 +828,494117,152366,1,0,0,0,1,0,0,0,1,0,0,0,12.0,12,1.0,16,1.33 +829,231915,65141,0,0,1,0,1,1,0,0,0,0,0,0,4.0,4,1.0,24,6.0 +830,458923,198250,1,0,0,0,0,1,0,0,1,0,0,0,8.0,8,1.0,12,1.5 +831,389130,141296,0,1,1,0,0,1,0,0,0,0,0,0,24.0,24,1.0,16,0.67 +832,470729,42046,0,0,1,0,1,0,0,0,0,0,0,1,16.0,16,1.0,8,0.5 +833,474279,30181,1,0,0,0,0,0,1,0,0,0,1,0,24.0,24,1.0,8,0.33 +834,406649,98575,0,0,0,1,0,0,0,0,0,1,1,0,8.0,8,1.0,8,1.0 +835,267055,184081,1,0,0,0,0,0,1,0,0,0,1,0,12.0,12,1.0,24,2.0 +836,276545,156447,0,0,0,1,1,0,0,0,0,0,0,1,12.0,12,1.0,12,1.0 +837,482190,36190,0,0,0,1,0,0,1,0,1,0,0,0,12.0,12,1.0,8,0.67 +838,370077,199736,0,1,0,0,1,0,0,1,0,0,0,0,28.0,28,1.0,12,0.43 +839,231280,154005,0,1,0,1,0,0,0,0,1,0,0,0,24.0,24,1.0,4,0.17 +840,495127,71108,0,1,0,0,0,1,0,0,1,0,0,0,8.0,8,1.0,12,1.5 +841,379036,151388,0,1,1,0,0,1,0,0,0,0,0,0,8.0,8,1.0,0,0.0 +842,275802,166094,1,0,0,0,0,1,0,0,0,0,1,0,8.0,8,1.0,12,1.5 +843,382652,65851,0,1,1,0,0,0,0,1,0,0,0,0,12.0,12,1.0,24,2.0 +844,442575,19464,0,0,1,1,0,0,0,0,0,1,0,0,8.0,8,1.0,8,1.0 +845,250858,124543,1,0,0,1,0,0,0,0,0,0,0,1,8.0,8,1.0,8,1.0 +846,337823,103129,0,0,0,0,1,0,0,1,0,0,1,0,20.0,20,1.0,4,0.2 +847,245935,144115,0,0,0,0,0,0,0,1,0,1,0,1,16.0,16,1.0,12,0.75 +848,370596,14192,0,0,0,0,1,1,0,0,1,0,0,0,12.0,12,1.0,16,1.33 +849,399212,20895,0,1,1,0,0,1,0,0,0,0,0,0,12.0,12,1.0,4,0.33 +850,476076,166707,0,0,0,1,0,0,1,0,0,0,0,1,8.0,8,1.0,12,1.5 +851,467415,130374,0,0,0,1,1,0,0,0,1,0,0,0,8.0,8,1.0,12,1.5 +852,367151,159580,1,0,0,0,0,0,1,0,1,0,0,0,24.0,24,1.0,12,0.5 
+853,320912,97724,0,0,0,1,0,0,1,0,0,0,0,1,12.0,12,1.0,12,1.0 +854,227404,36661,0,1,0,1,0,0,0,0,0,0,0,1,20.0,20,1.0,12,0.6 +855,216256,178926,0,0,0,0,1,0,0,0,0,1,0,1,,0,0.0,4,5.0 +856,387475,80569,0,0,0,0,0,0,1,0,1,0,1,0,12.0,12,1.0,16,1.33 +857,249253,26385,0,0,1,0,1,0,0,1,0,0,0,0,8.0,8,1.0,24,3.0 +858,375847,176910,0,0,0,0,0,1,1,0,0,1,0,0,16.0,16,1.0,4,0.25 +859,251309,82035,0,0,0,0,0,0,0,1,1,0,0,1,20.0,20,1.0,12,0.6 +860,384041,50345,0,0,0,0,0,1,0,1,1,0,0,0,,0,0.0,8,5.0 +861,477161,106483,0,0,0,0,1,0,1,0,0,1,0,0,4.0,4,1.0,8,2.0 +862,218587,27343,1,0,1,0,0,0,0,0,0,0,0,1,16.0,16,1.0,16,1.0 +863,284722,71235,0,0,1,1,0,0,0,0,0,0,1,0,24.0,24,1.0,16,0.67 +864,223603,138762,1,0,1,0,0,0,1,0,0,0,0,0,4.0,4,1.0,0,0.0 +865,399762,13085,0,0,0,0,1,1,0,0,1,0,0,0,4.0,4,1.0,8,2.0 +866,299484,114094,1,1,0,0,0,0,0,0,0,0,0,1,16.0,16,1.0,20,1.25 +867,407379,70302,0,1,0,0,1,0,0,1,0,0,0,0,12.0,12,1.0,12,1.0 +868,292348,137758,0,0,0,1,0,0,0,1,1,0,0,0,4.0,4,1.0,20,5.0 +869,287417,138395,0,0,0,1,0,0,1,0,1,0,0,0,16.0,16,1.0,12,0.75 +870,200590,66947,0,0,1,0,1,0,0,0,0,0,0,1,,0,0.0,8,5.0 +871,440673,85320,0,1,0,0,0,0,0,1,0,0,1,0,12.0,12,1.0,4,0.33 +872,356709,116973,0,1,0,1,0,0,1,0,0,0,0,0,4.0,4,1.0,12,3.0 +873,386039,186602,1,0,1,1,0,0,0,0,0,0,0,0,8.0,8,1.0,8,1.0 +874,270051,75658,0,0,0,0,1,0,0,1,1,0,0,0,12.0,12,1.0,12,1.0 +875,436580,164864,0,0,0,0,1,1,0,0,0,1,0,0,4.0,4,1.0,8,2.0 +876,202918,113863,1,1,0,0,0,0,0,1,0,0,0,0,,0,0.0,16,5.0 +877,318488,149400,1,0,0,1,0,0,0,1,0,0,0,0,8.0,8,1.0,20,2.5 +878,315256,140817,0,0,0,1,0,0,0,1,1,0,0,0,20.0,20,1.0,20,1.0 +879,489213,18867,0,1,0,1,0,0,0,1,0,0,0,0,4.0,4,1.0,8,2.0 +880,446056,70527,1,0,0,0,0,0,0,0,0,0,1,1,20.0,20,1.0,12,0.6 +881,409633,106178,0,0,0,0,0,0,0,1,0,0,1,1,4.0,4,1.0,12,3.0 +882,451351,49239,0,0,0,0,0,0,0,0,1,1,0,1,8.0,8,1.0,4,0.5 +883,215427,15383,0,0,0,1,0,0,1,0,0,1,0,0,12.0,12,1.0,12,1.0 +884,406498,31959,0,1,0,0,0,0,0,0,0,1,0,1,4.0,4,1.0,12,3.0 +885,365405,108994,0,1,0,1,0,0,1,0,0,0,0,0,8.0,8,1.0,8,1.0 
+886,280593,104025,1,0,1,0,0,0,0,0,0,0,0,1,20.0,20,1.0,8,0.4 +887,275781,182917,1,1,0,0,0,0,0,0,0,1,0,0,24.0,24,1.0,8,0.33 +888,278623,51336,1,0,0,0,0,0,0,1,0,0,0,1,16.0,16,1.0,20,1.25 +889,479862,137118,1,0,0,0,0,0,0,0,1,0,0,1,4.0,4,1.0,16,4.0 +890,371046,109092,0,0,0,1,0,0,0,0,1,0,0,1,12.0,12,1.0,12,1.0 +891,226188,133532,0,1,0,0,0,0,1,1,0,0,0,0,4.0,4,1.0,12,3.0 +892,413603,178951,0,0,0,1,1,1,0,0,0,0,0,0,4.0,4,1.0,12,3.0 +893,330993,191499,0,1,0,0,0,0,1,1,0,0,0,0,16.0,16,1.0,20,1.25 +894,487445,144381,0,1,0,1,0,1,0,0,0,0,0,0,,0,0.0,12,5.0 +895,446419,102496,0,1,0,0,0,0,1,1,0,0,0,0,24.0,24,1.0,0,0.0 +896,411668,159943,0,0,0,0,0,0,0,0,1,1,0,1,,0,0.0,12,5.0 +897,274151,49946,0,0,0,0,0,0,1,0,1,0,1,0,16.0,16,1.0,8,0.5 +898,454904,173407,0,0,0,0,0,1,0,1,0,1,0,0,32.0,32,1.0,8,0.25 +899,468542,42955,0,0,0,0,0,1,0,1,0,1,0,0,12.0,12,1.0,20,1.67 +900,317048,61101,0,0,0,0,0,1,0,1,0,0,0,1,,0,0.0,4,5.0 +901,385289,74675,1,0,0,0,0,1,0,0,0,0,1,0,16.0,16,1.0,4,0.25 +902,266676,72409,0,0,1,0,0,0,1,1,0,0,0,0,28.0,28,1.0,4,0.14 +903,254136,100314,0,1,0,0,1,0,0,0,1,0,0,0,20.0,20,1.0,8,0.4 +904,314605,151225,1,0,0,0,1,0,0,1,0,0,0,0,20.0,20,1.0,12,0.6 +905,459648,45195,0,0,0,0,0,1,0,0,1,1,0,0,8.0,8,1.0,4,0.5 +906,295585,86609,0,1,0,0,0,0,1,0,0,0,1,0,8.0,8,1.0,0,0.0 +907,371041,75883,0,0,1,0,0,0,1,0,1,0,0,0,8.0,8,1.0,8,1.0 +908,371542,57193,0,0,0,1,0,0,0,1,0,1,0,0,4.0,4,1.0,12,3.0 +909,419290,32255,0,0,0,0,1,1,0,0,0,0,0,1,4.0,4,1.0,24,6.0 +910,332905,154274,0,1,0,0,0,0,1,1,0,0,0,0,20.0,20,1.0,8,0.4 +911,396616,92842,1,1,0,0,0,0,0,1,0,0,0,0,4.0,4,1.0,8,2.0 +912,225868,72568,0,0,0,1,1,0,0,0,0,0,1,0,16.0,16,1.0,8,0.5 +913,483313,158045,0,0,1,0,0,0,0,1,1,0,0,0,8.0,8,1.0,12,1.5 +914,409500,59021,0,0,0,0,0,0,0,1,1,1,0,0,4.0,4,1.0,20,5.0 +915,347719,136188,1,0,1,0,0,1,0,0,0,0,0,0,16.0,16,1.0,4,0.25 +916,300122,70254,0,0,1,0,1,0,1,0,0,0,0,0,12.0,12,1.0,24,2.0 +917,306186,172369,0,0,0,0,0,0,1,0,0,1,1,0,12.0,12,1.0,8,0.67 +918,317909,48222,0,0,0,0,0,0,0,1,1,1,0,0,16.0,16,1.0,4,0.25 
+919,243993,51965,0,0,0,0,0,0,1,0,0,0,1,1,12.0,12,1.0,12,1.0 +920,322095,172458,1,1,0,0,0,0,1,0,0,0,0,0,20.0,20,1.0,12,0.6 +921,305860,60419,0,0,1,1,0,0,0,0,0,0,0,1,20.0,20,1.0,4,0.2 +922,399045,24312,1,0,0,1,0,0,0,0,0,0,0,1,24.0,24,1.0,8,0.33 +923,466188,25075,0,0,0,0,1,1,0,0,0,0,0,1,24.0,24,1.0,20,0.83 +924,328821,44907,1,0,0,0,0,0,0,0,1,0,0,1,12.0,12,1.0,12,1.0 +925,392780,194685,0,1,0,0,0,0,0,0,1,1,0,0,4.0,4,1.0,12,3.0 +926,263701,186881,0,0,0,0,1,0,1,0,0,0,1,0,12.0,12,1.0,12,1.0 +927,485278,149804,0,1,1,0,0,0,0,0,1,0,0,0,,0,0.0,8,5.0 +928,265979,25740,0,0,0,0,1,0,1,0,0,0,0,1,12.0,12,1.0,24,2.0 +929,296060,13802,0,1,1,0,1,0,0,0,0,0,0,0,,0,0.0,8,5.0 +930,314827,183938,0,1,1,0,0,0,0,0,0,1,0,0,8.0,8,1.0,4,0.5 +931,327857,75977,1,0,0,1,0,1,0,0,0,0,0,0,8.0,8,1.0,8,1.0 +932,336696,195946,0,0,1,0,0,0,0,0,1,1,0,0,20.0,20,1.0,24,1.2 +933,381827,61627,0,0,0,0,1,0,0,0,0,1,0,1,,0,0.0,12,5.0 +934,402772,37800,0,0,0,0,1,0,0,0,0,1,1,0,4.0,4,1.0,16,4.0 +935,419875,103666,0,0,0,1,1,0,0,0,0,1,0,0,8.0,8,1.0,24,3.0 +936,370256,147215,0,0,1,0,0,0,0,1,0,1,0,0,16.0,16,1.0,12,0.75 +937,499515,81170,0,1,0,1,0,0,0,0,1,0,0,0,28.0,28,1.0,8,0.29 +938,489669,123869,0,0,1,1,0,0,0,0,1,0,0,0,20.0,20,1.0,16,0.8 +939,318548,157598,0,0,0,0,0,0,0,1,1,1,0,0,20.0,20,1.0,0,0.0 +940,378058,73800,0,0,1,0,0,0,0,1,0,0,0,1,16.0,16,1.0,16,1.0 +941,295093,155387,0,0,0,1,0,0,1,0,0,1,0,0,20.0,20,1.0,8,0.4 +942,255137,77953,0,0,0,1,0,0,0,0,0,1,0,1,4.0,4,1.0,8,2.0 +943,460003,175620,0,0,1,0,0,0,1,1,0,0,0,0,12.0,12,1.0,4,0.33 +944,435210,67087,0,0,0,0,0,0,1,0,1,1,0,0,8.0,8,1.0,12,1.5 +945,408719,21594,0,0,0,0,0,0,0,0,1,0,1,1,20.0,20,1.0,0,0.0 +946,442983,64387,1,0,0,0,1,0,0,0,1,0,0,0,16.0,16,1.0,12,0.75 +947,417183,170553,1,0,1,1,0,0,0,0,0,0,0,0,28.0,28,1.0,0,0.0 +948,245504,60047,0,0,0,0,0,1,0,1,1,0,0,0,16.0,16,1.0,16,1.0 +949,253896,99657,1,1,0,0,0,0,0,1,0,0,0,0,4.0,4,1.0,8,2.0 +950,360681,78417,0,0,0,0,0,1,0,0,1,0,0,1,12.0,12,1.0,12,1.0 +951,217587,140616,0,1,1,0,0,0,0,0,1,0,0,0,12.0,12,1.0,20,1.67 
+952,399434,26457,0,1,0,0,0,1,1,0,0,0,0,0,24.0,24,1.0,4,0.17 +953,311115,142051,1,0,0,0,1,0,1,0,0,0,0,0,20.0,20,1.0,4,0.2 +954,468873,130072,0,0,0,0,0,0,0,1,1,0,1,0,8.0,8,1.0,28,3.5 +955,387628,54418,0,0,1,0,0,1,0,0,0,1,0,0,8.0,8,1.0,8,1.0 +956,311418,79285,0,0,1,0,0,1,0,0,0,0,0,1,12.0,12,1.0,20,1.67 +957,282189,153537,1,0,0,0,0,1,0,0,1,0,0,0,12.0,12,1.0,16,1.33 +958,242619,113714,0,0,0,0,0,0,1,0,1,0,0,1,16.0,16,1.0,20,1.25 +959,326593,187624,0,0,0,0,1,0,1,0,0,0,1,0,16.0,16,1.0,16,1.0 +960,219358,105388,1,0,0,0,0,0,1,1,0,0,0,0,12.0,12,1.0,24,2.0 +961,212481,34643,0,0,0,0,0,0,1,0,1,1,0,0,8.0,8,1.0,28,3.5 +962,493117,142931,0,0,0,0,0,1,0,1,0,1,0,0,8.0,8,1.0,8,1.0 +963,256384,79402,0,0,1,0,0,0,0,0,0,1,1,0,8.0,8,1.0,12,1.5 +964,496916,99297,1,0,0,0,0,1,0,0,0,0,1,0,,0,0.0,4,5.0 +965,423393,174792,0,1,0,0,0,0,1,1,0,0,0,0,,0,0.0,28,5.0 +966,473484,177801,1,0,0,0,1,1,0,0,0,0,0,0,,0,0.0,8,5.0 +967,371779,80404,0,0,0,0,1,0,0,0,0,1,1,0,4.0,4,1.0,12,3.0 +968,384400,71324,0,0,1,0,0,0,0,0,1,0,0,1,8.0,8,1.0,4,0.5 +969,217937,143787,1,0,0,0,1,0,0,0,0,1,0,0,4.0,4,1.0,16,4.0 +970,213566,98135,1,0,0,0,0,1,0,0,0,1,0,0,4.0,4,1.0,8,2.0 +971,289192,56182,0,1,0,0,0,0,1,1,0,0,0,0,12.0,12,1.0,0,0.0 +972,403598,73657,0,0,1,0,0,1,0,1,0,0,0,0,4.0,4,1.0,16,4.0 +973,284210,130990,1,0,0,1,0,0,0,1,0,0,0,0,12.0,12,1.0,8,0.67 +974,401410,40083,0,1,0,0,0,0,1,0,0,0,1,0,16.0,16,1.0,24,1.5 +975,337305,22859,0,0,0,1,0,0,0,0,0,1,0,1,12.0,12,1.0,4,0.33 +976,226419,158974,0,0,0,1,0,1,0,0,0,0,0,1,,0,0.0,4,5.0 +977,316248,105993,0,0,0,1,0,0,1,0,1,0,0,0,8.0,8,1.0,4,0.5 +978,319823,180018,0,0,0,0,0,1,0,1,0,1,0,0,16.0,16,1.0,8,0.5 +979,468990,123010,0,1,0,1,0,0,0,0,0,0,0,1,8.0,8,1.0,8,1.0 +980,403627,147220,0,0,0,0,0,0,1,0,1,0,1,0,20.0,20,1.0,8,0.4 +981,317225,57958,0,0,0,0,1,1,0,0,0,0,1,0,16.0,16,1.0,0,0.0 +982,229734,168922,0,0,0,1,0,0,0,1,0,1,0,0,8.0,8,1.0,12,1.5 +983,468070,155375,0,0,0,1,0,0,1,1,0,0,0,0,20.0,20,1.0,12,0.6 +984,252884,174360,0,0,0,1,0,0,0,0,1,0,0,1,4.0,4,1.0,20,5.0 
+985,466428,100568,0,0,0,0,0,1,0,0,0,1,0,1,8.0,8,1.0,8,1.0 +986,452565,72172,0,0,0,0,0,1,1,0,0,0,0,1,8.0,8,1.0,28,3.5 +987,220362,170895,0,0,0,1,0,0,1,0,0,0,1,0,8.0,8,1.0,20,2.5 +988,270701,101907,0,0,0,0,0,1,0,1,0,0,1,0,16.0,16,1.0,12,0.75 +989,391764,45376,0,0,1,1,1,0,0,0,0,0,0,0,8.0,8,1.0,12,1.5 +990,305302,145908,0,0,0,0,0,1,0,0,1,1,0,0,20.0,20,1.0,12,0.6 +991,356465,130869,1,0,0,1,0,0,0,0,1,0,0,0,4.0,4,1.0,8,2.0 +992,260584,156699,0,0,0,0,0,1,0,1,0,1,0,0,12.0,12,1.0,20,1.67 +993,264824,101853,0,0,0,1,0,0,1,0,1,0,0,0,12.0,12,1.0,16,1.33 +994,229223,55028,1,0,0,0,1,0,0,0,1,0,0,0,20.0,20,1.0,8,0.4 +995,226950,14602,0,0,0,1,0,0,0,0,0,1,1,0,28.0,28,1.0,12,0.43 +996,280148,36439,0,0,0,0,1,0,0,0,1,0,0,1,4.0,4,1.0,16,4.0 +997,254227,51432,0,0,0,0,0,0,1,0,0,1,0,1,,0,0.0,8,5.0 +998,404381,168755,1,0,0,0,0,0,0,0,1,0,0,1,12.0,12,1.0,8,0.67 +999,265544,17476,0,1,0,1,0,1,0,0,0,0,0,0,16.0,16,1.0,12,0.75 +1000,348762,67260,0,0,0,1,1,0,0,0,0,0,0,1,16.0,16,1.0,8,0.5 +1004,90000,10000,1,1,1,0,0,0,0,0,0,0,0,0,4.0,4,1.0,0,0.0 +1005,500000,10000,0,1,0,0,1,0,0,1,0,0,0,0,4.0,4,1.0,0,0.0 +1006,140000,70000,1,0,0,1,0,0,0,1,0,0,0,0,8.0,8,1.0,24,3.0 +1007,500000,10000,0,1,0,0,1,1,0,0,0,0,0,0,4.0,4,1.0,0,0.0