Every line of 'python extract data from json file' code snippets is scanned for vulnerabilities by our powerful machine learning engine that combs millions of open source libraries, ensuring your Python code is secure.
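For context, every snippet below builds on the standard-library json module. A minimal sketch of the basic pattern, assuming a placeholder file data.json with a top-level "records" key (both names are made up for illustration):

import json

# Open the file and decode its contents into Python objects
# (dicts, lists, strings, numbers, booleans, None).
with open('data.json', 'r', encoding='utf-8') as f:
    payload = json.load(f)

# Extract a nested field; .get() avoids a KeyError if the key is missing.
records = payload.get('records', [])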
def return_train_data_from_json(input_json):
    """Returns train data set from input JSON

    Args:
        input_json (dict): "Extraction" dictionary

    Returns:
        X (numpy.ndarray): Features

        y (numpy.ndarray): Labels
    """
    X, y = return_main_data_from_json(input_json)

    if input_json['test_dataset']['method'] == 'split_from_main':
        X, X_test, y, y_test = train_test_split(
            X,
            y,
            test_size=input_json['test_dataset']['split_ratio'],
            random_state=input_json['test_dataset']['split_seed'],
            stratify=y
        )

    if input_json['meta_feature_generation']['method'] == 'holdout_split':
        X, X_test, y, y_test = train_test_split(
            X,
            y,
            test_size=input_json['meta_feature_generation']['split_ratio'],
            random_state=input_json['meta_feature_generation']['seed'],
            stratify=y
        )

    return X, y
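The "Extraction" dictionary consumed above is not shown on this page. A hypothetical input_json illustrating only the keys the function actually reads (all values below are made up):

input_json = {
    'test_dataset': {
        'method': 'split_from_main',   # any other value skips the test split
        'split_ratio': 0.2,
        'split_seed': 42,
    },
    'meta_feature_generation': {
        'method': 'holdout_split',     # any other value skips the holdout split
        'split_ratio': 0.1,
        'seed': 7,
    },
}
# A dict shaped like this can then be passed to return_train_data_from_json(input_json).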
def _get_parsed_data(file_obj, for_page=False):
    raw = file_obj.read().decode('utf-8')
    return json.loads(raw, object_hook=_object_version_data_hook)
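_object_version_data_hook is defined elsewhere in that project. As a rough illustration of the technique, json.loads calls the object_hook once for every decoded JSON object, and the hook's return value replaces the plain dict; the hook below is a made-up example, not the project's:

import json

def _example_hook(obj):
    # Called for each JSON object; convert a "version" string to a tuple of ints.
    if 'version' in obj:
        obj['version'] = tuple(int(part) for part in obj['version'].split('.'))
    return obj

raw = '{"name": "demo", "version": "1.2.3"}'
parsed = json.loads(raw, object_hook=_example_hook)
# parsed['version'] is now (1, 2, 3)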
def read_data(log_file):
    """
    Read, parse and return the JSON data for the given log file name.
    (As a side effect sets the global _last_log_file to the log file name.)
    """
    global _last_log_file
    try:
        with open(log_file, "r") as f:
            # Skip the first line of the log, then parse the remainder as JSON.
            f.readline()
            data = json.load(f)
        # Keep track of the current log file in a global variable in case we need
        # to identify it later if there's a problem. (This works because the files
        # are processed lazily.)
        _last_log_file = log_file
    except IOError:
        sys.exit("Could not read log file %s" % log_file)
    return data
def return_holdout_data_from_json(input_json):
    """Returns holdout data set from input JSON

    Args:
        input_json (dict): "Extraction" dictionary

    Returns:
        X (numpy.ndarray): Features

        y (numpy.ndarray): Labels
    """
    if input_json['meta_feature_generation']['method'] == 'holdout_split':
        X, y = return_main_data_from_json(input_json)

        if input_json['test_dataset']['method'] == 'split_from_main':
            X, X_test, y, y_test = train_test_split(
                X,
                y,
                test_size=input_json['test_dataset']['split_ratio'],
                random_state=input_json['test_dataset']['split_seed'],
                stratify=y
            )

        X, X_holdout, y, y_holdout = train_test_split(
            X,
            y,
            test_size=input_json['meta_feature_generation']['split_ratio'],
            random_state=input_json['meta_feature_generation']['seed'],
            stratify=y
        )

        return X_holdout, y_holdout

    if input_json['meta_feature_generation']['method'] == 'holdout_source':
        if 'source' not in input_json['meta_feature_generation'] or \
                not input_json['meta_feature_generation']['source']:
            raise exceptions.UserError('Source is empty')

        extraction_code = "".join(input_json['meta_feature_generation']["source"])
        extraction_function = import_object_from_string_code(extraction_code,
                                                             "extract_holdout_dataset")
        X_holdout, y_holdout = extraction_function()

        return np.array(X_holdout), np.array(y_holdout)
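import_object_from_string_code is a project helper that is not shown here; conceptually it executes user-supplied source text and returns one named object from it. A simplified sketch of that idea, not the project's actual implementation:

def import_object_from_string_code_sketch(code, name):
    # Execute the source in an isolated namespace and pull out one object by name.
    namespace = {}
    exec(code, namespace)
    try:
        return namespace[name]
    except KeyError:
        raise ImportError('Source code does not define %r' % name)

# Example usage with a throwaway source string:
source = "def extract_holdout_dataset():\n    return [[0.0, 1.0]], [1]\n"
extract = import_object_from_string_code_sketch(source, 'extract_holdout_dataset')
X_holdout, y_holdout = extract()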
def load_and_parse_json(filename):
    try:
        return json.loads(read_file(filename))
    except Exception:
        print('ERROR: Failed to parse %s' % filename)
        raise
def load_file(self):
    with open('datafile.json', 'r') as f:
        return json.loads(f.read())