Every line of the 'how to fetch data from API in Python' code snippets below is scanned for vulnerabilities by our powerful machine-learning engine, which combs millions of open-source libraries to help ensure your Python code is secure.
def fetch_data(self):
    """Return the cleaned results of a Yahoo YQL API query, always as a Python list.

    A single request covers the whole range when it spans at most one year;
    longer ranges are fetched in chunks first.
    """
    span = relativedelta(self.end_date, self.start_date)
    if span.years <= const.ONE_YEAR:
        raw = self.request.send(self.symbol, self.start_date, self.end_date)
    else:
        raw = self.fetch_chunk_data()
    return self.clean(raw)
def get_data():
    """Fetch the game/region listing from GAMES_ADDR, serving a cached copy while fresh."""
    global cached_data, last_data

    # Serve from cache while the previous fetch is still within REQUEST_RATE seconds.
    if last_data is not None and time.time() - last_data < REQUEST_RATE:
        print("Sending cached data...")
        return cached_data

    req = Request(GAMES_ADDR)
    req.add_header('Referer', 'https://airma.sh')
    req.add_header('User-Agent', 'Mozilla/5.0')
    resp = urlopen(req)
    payload = json.loads(resp.read().decode())
    # The 'data' field is itself a JSON-encoded string.
    src_regions = json.loads(payload['data'])

    regions = {
        region['id']: {
            'name': region['name'],
            'games': {
                game['id']: {
                    'type': game['type'],
                    'id': game['id'],
                    'name': game['name'],
                    'nameShort': game['nameShort'],
                    'host': game['host'],
                    'players': game['players'],
                    'url': build_host(game['host'], game['id']),
                }
                for game in region['games']
            },
        }
        for region in src_regions
    }

    cached_data = {
        'protocol': payload['protocol'],
        'regions': regions,
    }
    last_data = time.time()
    return cached_data
@RateLimited(150)
def getData(requestsConnection, url, retry, page, region, structure):
    """Issue a rate-limited async GET for one page and tag the future with request metadata.

    The attributes stashed on the returned future let the response handler
    know which url/page/region/structure the reply belongs to (e.g. for retries).
    """
    full_url = url + str(page)
    future = requestsConnection.get(full_url)
    logging.info('getting {}#{}#{}#{}'.format(retry, page, region, full_url))
    future.url = url
    future.fullurl = full_url
    future.page = page
    future.retry = retry
    future.region = region
    future.structure = structure
    return future
def get_data(region, year, month, day):
    """Download the page for (region, year, month, day), extract the embedded
    ``var jsonval = JSON.parse('...')`` blob, and return it as a DataFrame.

    Raises AssertionError when the request fails or the blob is missing.
    """
    r = requests.get(url.format(region=region,
                                year=year,
                                month=month,
                                day=day))

    # NOTE(review): assert is stripped under ``python -O``; kept as-is so
    # callers that expect AssertionError keep working.
    assert r.status_code == 200, 'Could not get url'

    # Raw string so the backslash escapes are passed to the regex engine
    # unchanged, and the dots in "JSON.parse" are escaped so they can no
    # longer match arbitrary characters.
    jsonval_matches = re.findall(r"(?<=var jsonval = JSON\.parse\(\').*(?=\'\)\n\t)",
                                 r.text)
    assert len(jsonval_matches), 'Data not found. Perhaps the format of the html file has changed, or data for this date is not yet available?'

    return pd.read_json(jsonval_matches[0])
def _fetch_data_from_rapidpi_api(single_date):
    """Request all fixtures for *single_date* (Europe/London timezone) from the
    RapidAPI football API and return the raw ``requests`` response.

    The API key is read from the RAPIDAPI_KEY environment variable.
    """
    date_str = single_date.strftime("%Y-%m-%d")
    endpoint = (
        'https://api-football-v1.p.rapidapi.com/v2/fixtures/date/'
        f'{date_str}?timezone=Europe/London'
    )
    auth_headers = {
        "X-RapidAPI-Host": "api-football-v1.p.rapidapi.com",
        "X-RapidAPI-Key": os.environ.get('RAPIDAPI_KEY'),
    }
    return requests.get(endpoint, headers=auth_headers)
def get_data(url: str, token: str, params: "dict | None" = None) -> dict:
    """GET *url* with Cisco Meraki API auth headers and return the decoded JSON body.

    :param url: full endpoint URL
    :param token: Meraki API key, sent in the X-Cisco-Meraki-API-Key header
    :param params: optional query-string parameters
    :raises HTTPError: re-raised after logging when the response status is an error
    """
    # Fixed mutable-default-argument bug: a shared ``{}`` default would be
    # reused across calls; ``None`` sentinel keeps the interface compatible.
    if params is None:
        params = {}
    headers: dict = {
        "Content-Type": "application/json",
        "Accept": "application/json",
        "X-Cisco-Meraki-API-Key": f"{token}",
    }
    try:
        log.debug(f"Preparing GET: url={url} with params={params}")
        req = requests.get(url, params=params, headers=headers)
        req.raise_for_status()
    except HTTPError as http_err:
        log.error(f"Error GET: url={url}")
        log.error(f"HTTP error occurred: {http_err}")
        raise
    log.debug(req.status_code)
    return req.json()
def data(place):
    """Fetch forecast.io solar forecast JSON for *place*, a (lat, lon) pair.

    Returns the decoded JSON payload as a dict.
    """
    # Local import: the top-of-file import block is not visible from here.
    from contextlib import closing

    lat, lon = place
    url = "https://api.forecast.io/forecast/%s/%s,%s?solar" % (APIKEY, lat, lon)
    # Fixed resource leak: the urllib2 response was never closed.  urllib2
    # responses are not context managers on Python 2, hence closing().
    with closing(urllib2.urlopen(url)) as resp:
        w_data = json.loads(resp.read())
    return w_data
def get_data():
    """Download the FPL bootstrap-static payload and return it as parsed JSON."""
    resp = requests.get("https://fantasy.premierleague.com/api/bootstrap-static/")
    # Anything other than 200 is treated as a hard failure.
    if resp.status_code != 200:
        raise Exception("Response was code " + str(resp.status_code))
    body = resp.text
    return json.loads(body)
def _call_api(self):
    """Fetch Wunderground conditions+forecast for (self.lat, self.lng), at most once.

    Subsequent calls are no-ops; the parsed JSON is stored on ``self.forecast``.
    """
    # Guard clause: only the first call performs the request.
    if self._called:
        return
    self._called = True

    template = "http://api.wunderground.com/api/%s/conditions/forecast/q/%f,%f.json"
    endpoint = template % (WUI_KEY, self.lat, self.lng)
    self.forecast = requests.get(endpoint).json()
def get_data():
    """Crawl trade/open-interest data for each commodity-option code in ``fd``."""

    # fd = {'i': '2013/10/18', 'jm': '2013/03/22', 'j': '2011/04/15'}
    fd = {'m': '2017/08/28'}

    # fd = {'v': '2009/05/25', 'b': '2004/12/22', 'm': ' 2000/07/17', 'a': '1999/01/04', 'y': '2006/01/09',
    #       'jd': '2013/11/08', 'bb': '2013/12/06', 'jm': '2013/03/22', 'j': '2011/04/15', 'pp': '2014/02/28',
    #       'l': '2007/07/31', 'i': '2013/10/18', 'fb': '2013/12/06', 'c': '2004/09/22', 'cs': '2014/12/19',
    #       'p': '2007/10/29'}

    # Ignore "already exists" so re-runs are idempotent.
    try:
        os.mkdir('commodity_options')
    except FileExistsError:
        pass

    for code, start_date in fd.items():
        print("正在爬取 " + code + ' 成交持仓数据……')
        spider(code, start_date)