To reduce memory costs, I specified the dtypes of my pandas DataFrame using astype(), like:

df['A'] = df['A'].astype('int8')

Then I used to_csv() to store it, but when I read it back with read_csv() and checked the dtypes, I found the column was stored as int64 again.

How can I preserve the dtypes while saving the DataFrame to local storage?
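For reference, a minimal reproduction of the problem (tmp.csv is just an example path): a CSV file carries no dtype metadata, so read_csv() falls back to inferring int64:

import pandas as pd

df = pd.DataFrame({'A': [1, 2, 3]})
df['A'] = df['A'].astype('int8')
df.to_csv('tmp.csv', index=False)      # the CSV stores values only, no dtypes
print(pd.read_csv('tmp.csv').dtypes)   # A    int64 -- the int8 is lost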
Here's a way to do it: write the dtypes as the first row of the CSV, then read that row back and pass it to read_csv() via the dtype argument:
import pandas as pd

# Create example data with explicit dtypes
df = pd.DataFrame({
    'words': ['foo', 'bar', 'spam', 'eggs'],
    'nums': [1, 2, 3, 4]
}).astype(dtype={
    'words': 'object',
    'nums': 'int8'
})
def to_csv(df, path):
    # Prepend dtypes to the top of df (from https://stackoverflow.com/a/43408736/7607701)
    df.loc[-1] = df.dtypes
    df.index = df.index + 1
    df.sort_index(inplace=True)
    # Then save it to a csv
    df.to_csv(path, index=False)
def read_csv(path):
    # Read the dtypes from the first line of the csv
    dtypes = pd.read_csv(path, nrows=1).iloc[0].to_dict()
    # Read the rest of the lines with the dtypes from above
    return pd.read_csv(path, dtype=dtypes, skiprows=[1])
print('Before: \n{}\n'.format(df.dtypes))
to_csv(df, 'tmp.csv')
df = read_csv('tmp.csv')
print('After: \n{}\n'.format(df.dtypes))
Output:
Before:
nums int8
words object
dtype: object
After:
nums int8 # still int8
words object
dtype: object
A modification of @Aaron N. Brock's answer that also supports parse_dates (and doesn't modify the original DataFrame):
def to_csv(df, path):
    # Prepend dtypes to the top of a copy, leaving the original df untouched
    df2 = df.copy()
    df2.loc[-1] = df2.dtypes
    df2.index = df2.index + 1
    df2.sort_index(inplace=True)
    # Then save it to a csv
    df2.to_csv(path, index=False)
def read_csv(path):
    # Read the dtypes from the first line of the csv
    stored_dtypes = pd.read_csv(path, nrows=1).iloc[0].to_dict()
    # Datetime columns must go through parse_dates rather than dtype
    dtypes = {key: value for (key, value) in stored_dtypes.items() if 'date' not in value}
    parse_dates = [key for (key, value) in stored_dtypes.items() if 'date' in value]
    # Read the rest of the lines with the dtypes from above
    return pd.read_csv(path, dtype=dtypes, parse_dates=parse_dates, skiprows=[1])
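A quick round trip to check it, as a minimal sketch (the column names and the tmp.csv path are just examples):

import pandas as pd

df = pd.DataFrame({
    'nums': [1, 2],
    'when': pd.to_datetime(['2021-01-01', '2021-06-15'])
}).astype({'nums': 'int8'})

to_csv(df, 'tmp.csv')
restored = read_csv('tmp.csv')
print(restored.dtypes)  # nums back to int8, when back to datetime64[ns]
print(df.dtypes)        # original df is unchanged, since to_csv works on a copy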