Every 'pandas read csv from url' code snippet is scanned line by line for vulnerabilities by our machine learning engine, which combs millions of open source libraries to help keep your Python code secure.
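Before the snippets below, note that pandas can read a CSV directly from a URL with no manual download step. A minimal sketch (the URL is only a placeholder):

import pandas as pd

# pd.read_csv accepts http(s) URLs as well as local file paths
df = pd.read_csv("https://example.com/data.csv")  # placeholder URL
print(df.head())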
from urllib import request

def download_stock_data(csv_url):
    # fetch the CSV over HTTP and decode the raw bytes to text
    response = request.urlopen(csv_url)
    csv_str = response.read().decode("utf-8")
    lines = csv_str.split("\n")
    dest_url = "goog.csv"
    with open(dest_url, "w") as fx:
        for line in lines:
            fx.write(line + "\n")
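A possible way to use this helper is to download the file once and then load the local copy with pandas; the URL below is only a placeholder:

import pandas as pd

download_stock_data("https://example.com/goog.csv")  # placeholder URL
df = pd.read_csv("goog.csv")
print(df.head())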
def download_csv():
    post_and_check('/gtfs/download/')
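The post_and_check helper is not shown in this snippet. A minimal sketch of what such a helper might do, using the requests library (the helper body, base URL, and status check are all assumptions for illustration):

import requests

def post_and_check(path, base_url="http://localhost:8000"):
    # hypothetical stand-in: POST to the endpoint and fail loudly on a non-2xx status
    response = requests.post(base_url + path)
    response.raise_for_status()
    return response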
import pandas as pd

def readcsv(filename, header=True):
    # header=False reads the file without treating the first row as column names
    return pd.read_csv(filename) if header else pd.read_csv(filename, header=None)
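Usage is the same for local paths and URLs, since pd.read_csv accepts either; the file name below is a placeholder:

# file without a header row: columns get default integer labels
prices = readcsv("prices.csv", header=False)  # placeholder file name
# file whose first row holds the column names
named = readcsv("prices.csv")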
import csv
from itertools import zip_longest

def csv_reader(stream, size=None, url=None, params=None):
    fieldnames = getattr(params, 'csv_fields', None)
    dialect = getattr(params, 'csv_dialect', 'excel')
    delimiter = getattr(params, 'delimiter', None)

    if delimiter:
        reader = csv.reader(stream, delimiter=delimiter)
    else:
        reader = csv.reader(stream, dialect=dialect)

    while True:
        try:
            line = next(reader)
            if not line:
                continue
            if not fieldnames:
                # fall back to positional field names: "0", "1", ...
                fieldnames = [str(x) for x in range(len(line))]
            # pair each field name with its value; unmatched extras get the key None
            parts = dict(zip_longest(fieldnames, line))
            if None in parts:
                # remove extra data values
                del parts[None]
            yield parts
        except StopIteration:
            break
        except Exception as ee:
            # just skip bad lines
            print('csv line error: %s' % ee)
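One way to exercise this generator is to feed it an in-memory stream and a simple params object carrying csv_fields; the field names and sample data below are illustrative assumptions:

import io
from types import SimpleNamespace

data = io.StringIO("AAPL,150.0\nGOOG,2800.0\n")
params = SimpleNamespace(csv_fields=["symbol", "price"])

for row in csv_reader(data, params=params):
    print(row)  # e.g. {'symbol': 'AAPL', 'price': '150.0'}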