DEPR: removal of deprecated sql functions
Start removing some of the deprecated functions. Other functions
to add to this removal: `write_frame`, `tquery`, `uquery`.

Author: Joris Van den Bossche <jorisvandenbossche@gmail.com>

Closes #12205 from jorisvandenbossche/remove-depr-sql and squashes the following commits:

b097dba [Joris Van den Bossche] DEPR: removal of deprecated write_frame function
541e0e7 [Joris Van den Bossche] DEPR: removal of deprecated pd.io.sql.read_frame/frame_query functions
jorisvandenbossche authored and jreback committed Feb 8, 2016
1 parent 3d86e04 commit 1c51051
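
For anyone still calling the removed aliases, here is a minimal migration sketch against the public pandas API. The in-memory connection, table name, and DataFrame are made up for illustration; the `index=False` note reflects the behaviour difference documented in the removed `write_frame` docstring.

```python
import sqlite3

import pandas as pd

# Hypothetical connection and data, used only for illustration.
conn = sqlite3.connect(":memory:")
df = pd.DataFrame({"a": [1, 2, 3], "b": ["x", "y", "z"]})

# Previously: pd.io.sql.write_frame(df, "demo", conn)
# write_frame did not write the index by default, so pass index=False
# to keep the same behaviour with to_sql.
df.to_sql("demo", conn, index=False)

# Previously: pd.io.sql.read_frame("SELECT * FROM demo", conn)
# or pd.io.sql.frame_query(...); both were aliases for read_sql.
result = pd.read_sql("SELECT * FROM demo", conn)
```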
Showing 3 changed files with 52 additions and 130 deletions.
4 changes: 3 additions & 1 deletion doc/source/whatsnew/v0.18.0.txt
@@ -695,7 +695,9 @@ Removal of prior version deprecations/changes
- Removal of ``expanding_corr_pairwise`` in favor of ``.expanding().corr(pairwise=True)`` (:issue:`4950`)
- Removal of ``DataMatrix`` module. This was not imported into the pandas namespace in any event (:issue:`12111`)
- Removal of ``cols`` keyword in favor of ``subset`` in ``DataFrame.duplicated()`` and ``DataFrame.drop_duplicates()`` (:issue:`6680`)

- Removal of the ``read_frame`` and ``frame_query`` (both aliases for ``pd.read_sql``)
and ``write_frame`` (alias of ``to_sql``) functions in the ``pd.io.sql`` namespace,
deprecated since 0.14.0 (:issue:`6292`).

.. _whatsnew_0180.performance:

63 changes: 0 additions & 63 deletions pandas/io/sql.py
@@ -1704,66 +1704,3 @@ def get_schema(frame, name, flavor='sqlite', keys=None, con=None, dtype=None):

pandas_sql = pandasSQL_builder(con=con, flavor=flavor)
return pandas_sql._create_sql_schema(frame, name, keys=keys, dtype=dtype)


# legacy names, with deprecation warnings and copied docs

@Appender(read_sql.__doc__, join='\n')
def read_frame(*args, **kwargs):
"""DEPRECATED - use read_sql
"""
warnings.warn("read_frame is deprecated, use read_sql", FutureWarning,
stacklevel=2)
return read_sql(*args, **kwargs)


@Appender(read_sql.__doc__, join='\n')
def frame_query(*args, **kwargs):
"""DEPRECATED - use read_sql
"""
warnings.warn("frame_query is deprecated, use read_sql", FutureWarning,
stacklevel=2)
return read_sql(*args, **kwargs)


def write_frame(frame, name, con, flavor='sqlite', if_exists='fail', **kwargs):
"""DEPRECATED - use to_sql
Write records stored in a DataFrame to a SQL database.
Parameters
----------
frame : DataFrame
name : string
con : DBAPI2 connection
flavor : {'sqlite', 'mysql'}, default 'sqlite'
The flavor of SQL to use.
if_exists : {'fail', 'replace', 'append'}, default 'fail'
- fail: If table exists, do nothing.
- replace: If table exists, drop it, recreate it, and insert data.
- append: If table exists, insert data. Create if does not exist.
index : boolean, default False
Write DataFrame index as a column
Notes
-----
This function is deprecated in favor of ``to_sql``. There are however
two differences:
- With ``to_sql`` the index is written to the sql database by default. To
keep the behaviour of this function you need to specify ``index=False``.
- The new ``to_sql`` function supports sqlalchemy connectables to work
with different sql flavors.
See also
--------
pandas.DataFrame.to_sql
"""
warnings.warn("write_frame is deprecated, use to_sql", FutureWarning,
stacklevel=2)

# for backwards compatibility, set index=False when not specified
index = kwargs.pop('index', False)
return to_sql(frame, name, con, flavor=flavor, if_exists=if_exists,
index=index, **kwargs)
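
The block removed above followed a common deprecation-shim pattern: the old name is kept as a thin wrapper that emits a `FutureWarning` (with `stacklevel=2` so the warning points at the caller rather than the wrapper) and then forwards to the new function. A generic sketch of that pattern is below; `deprecate_alias`, `old_name`, and `new_func` are placeholder names for illustration, not pandas API.

```python
import functools
import warnings


def deprecate_alias(new_func, old_name):
    """Return a shim that warns and delegates to ``new_func``.

    Minimal sketch of the pattern used by the removed
    read_frame/frame_query/write_frame aliases; not pandas API.
    """
    @functools.wraps(new_func)  # copy the new function's name and docstring
    def wrapper(*args, **kwargs):
        # stacklevel=2 attributes the warning to the caller's line.
        warnings.warn("%s is deprecated, use %s" % (old_name, new_func.__name__),
                      FutureWarning, stacklevel=2)
        return new_func(*args, **kwargs)

    return wrapper


# Hypothetical usage: read_frame = deprecate_alias(pd.read_sql, "read_frame")
```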
115 changes: 49 additions & 66 deletions pandas/io/tests/test_sql.py
@@ -524,12 +524,6 @@ def test_read_sql_view(self):
"SELECT * FROM iris_view", self.conn)
self._check_iris_loaded_frame(iris_frame)

def test_legacy_read_frame(self):
with tm.assert_produces_warning(FutureWarning):
iris_frame = sql.read_frame(
"SELECT * FROM iris", self.conn)
self._check_iris_loaded_frame(iris_frame)

def test_to_sql(self):
sql.to_sql(self.test_frame1, 'test_frame1', self.conn, flavor='sqlite')
self.assertTrue(
@@ -598,17 +592,6 @@ def test_to_sql_panel(self):
self.assertRaises(NotImplementedError, sql.to_sql, panel,
'test_panel', self.conn, flavor='sqlite')

def test_legacy_write_frame(self):
# Assume that functionality is already tested above so just do
# quick check that it basically works
with tm.assert_produces_warning(FutureWarning):
sql.write_frame(self.test_frame1, 'test_frame_legacy', self.conn,
flavor='sqlite')

self.assertTrue(
sql.has_table('test_frame_legacy', self.conn, flavor='sqlite'),
'Table not written to DB')

def test_roundtrip(self):
sql.to_sql(self.test_frame1, 'test_frame_roundtrip',
con=self.conn, flavor='sqlite')
@@ -2239,7 +2222,7 @@ def test_write_row_by_row(self):

self.conn.commit()

result = sql.read_frame("select * from test", con=self.conn)
result = sql.read_sql("select * from test", con=self.conn)
result.index = frame.index
tm.assert_frame_equal(result, frame)

@@ -2254,7 +2237,7 @@ def test_execute(self):
sql.execute(ins, self.conn, params=tuple(row))
self.conn.commit()

result = sql.read_frame("select * from test", self.conn)
result = sql.read_sql("select * from test", self.conn)
result.index = frame.index[:1]
tm.assert_frame_equal(result, frame[:1])

@@ -2327,8 +2310,8 @@ def test_na_roundtrip(self):
pass

def _check_roundtrip(self, frame):
sql.write_frame(frame, name='test_table', con=self.conn)
result = sql.read_frame("select * from test_table", self.conn)
sql.to_sql(frame, name='test_table', con=self.conn, index=False)
result = sql.read_sql("select * from test_table", self.conn)

# HACK! Change this once indexes are handled properly.
result.index = frame.index
@@ -2339,8 +2322,8 @@ def _check_roundtrip(self, frame):
frame['txt'] = ['a'] * len(frame)
frame2 = frame.copy()
frame2['Idx'] = Index(lrange(len(frame2))) + 10
sql.write_frame(frame2, name='test_table2', con=self.conn)
result = sql.read_frame("select * from test_table2", self.conn,
sql.to_sql(frame2, name='test_table2', con=self.conn, index=False)
result = sql.read_sql("select * from test_table2", self.conn,
index_col='Idx')
expected = frame.copy()
expected.index = Index(lrange(len(frame2))) + 10
@@ -2349,7 +2332,7 @@ def _check_roundtrip(self, frame):

def test_tquery(self):
frame = tm.makeTimeDataFrame()
sql.write_frame(frame, name='test_table', con=self.conn)
sql.to_sql(frame, name='test_table', con=self.conn, index=False)
result = sql.tquery("select A from test_table", self.conn)
expected = Series(frame.A.values, frame.index) # not to have name
result = Series(result, frame.index)
@@ -2367,7 +2350,7 @@ def test_tquery(self):

def test_uquery(self):
frame = tm.makeTimeDataFrame()
sql.write_frame(frame, name='test_table', con=self.conn)
sql.to_sql(frame, name='test_table', con=self.conn, index=False)
stmt = 'INSERT INTO test_table VALUES(2.314, -123.1, 1.234, 2.3)'
self.assertEqual(sql.uquery(stmt, con=self.conn), 1)

@@ -2387,22 +2370,22 @@ def test_keyword_as_column_names(self):
'''
'''
df = DataFrame({'From': np.ones(5)})
sql.write_frame(df, con=self.conn, name='testkeywords')
sql.to_sql(df, con=self.conn, name='testkeywords', index=False)

def test_onecolumn_of_integer(self):
# GH 3628
# a column_of_integers dataframe should transfer well to sql

mono_df = DataFrame([1, 2], columns=['c0'])
sql.write_frame(mono_df, con=self.conn, name='mono_df')
sql.to_sql(mono_df, con=self.conn, name='mono_df', index=False)
# computing the sum via sql
con_x = self.conn
the_sum = sum([my_c0[0]
for my_c0 in con_x.execute("select * from mono_df")])
# it should not fail, and gives 3 ( Issue #3628 )
self.assertEqual(the_sum, 3)

result = sql.read_frame("select * from mono_df", con_x)
result = sql.read_sql("select * from mono_df", con_x)
tm.assert_frame_equal(result, mono_df)

def test_if_exists(self):
@@ -2421,7 +2404,7 @@ def clean_up(test_table_to_drop):

# test if invalid value for if_exists raises appropriate error
self.assertRaises(ValueError,
sql.write_frame,
sql.to_sql,
frame=df_if_exists_1,
con=self.conn,
name=table_name,
@@ -2430,34 +2413,34 @@ def clean_up(test_table_to_drop):
clean_up(table_name)

# test if_exists='fail'
sql.write_frame(frame=df_if_exists_1, con=self.conn, name=table_name,
flavor='sqlite', if_exists='fail')
sql.to_sql(frame=df_if_exists_1, con=self.conn, name=table_name,
flavor='sqlite', if_exists='fail')
self.assertRaises(ValueError,
sql.write_frame,
sql.to_sql,
frame=df_if_exists_1,
con=self.conn,
name=table_name,
flavor='sqlite',
if_exists='fail')

# test if_exists='replace'
sql.write_frame(frame=df_if_exists_1, con=self.conn, name=table_name,
flavor='sqlite', if_exists='replace')
sql.to_sql(frame=df_if_exists_1, con=self.conn, name=table_name,
flavor='sqlite', if_exists='replace', index=False)
self.assertEqual(sql.tquery(sql_select, con=self.conn),
[(1, 'A'), (2, 'B')])
sql.write_frame(frame=df_if_exists_2, con=self.conn, name=table_name,
flavor='sqlite', if_exists='replace')
sql.to_sql(frame=df_if_exists_2, con=self.conn, name=table_name,
flavor='sqlite', if_exists='replace', index=False)
self.assertEqual(sql.tquery(sql_select, con=self.conn),
[(3, 'C'), (4, 'D'), (5, 'E')])
clean_up(table_name)

# test if_exists='append'
sql.write_frame(frame=df_if_exists_1, con=self.conn, name=table_name,
flavor='sqlite', if_exists='fail')
sql.to_sql(frame=df_if_exists_1, con=self.conn, name=table_name,
flavor='sqlite', if_exists='fail', index=False)
self.assertEqual(sql.tquery(sql_select, con=self.conn),
[(1, 'A'), (2, 'B')])
sql.write_frame(frame=df_if_exists_2, con=self.conn, name=table_name,
flavor='sqlite', if_exists='append')
sql.to_sql(frame=df_if_exists_2, con=self.conn, name=table_name,
flavor='sqlite', if_exists='append', index=False)
self.assertEqual(sql.tquery(sql_select, con=self.conn),
[(1, 'A'), (2, 'B'), (3, 'C'), (4, 'D'), (5, 'E')])
clean_up(table_name)
@@ -2542,7 +2525,7 @@ def test_write_row_by_row(self):

self.conn.commit()

result = sql.read_frame("select * from test", con=self.conn)
result = sql.read_sql("select * from test", con=self.conn)
result.index = frame.index
tm.assert_frame_equal(result, frame)

@@ -2577,7 +2560,7 @@ def test_execute(self):
sql.execute(ins, self.conn, params=tuple(row))
self.conn.commit()

result = sql.read_frame("select * from test", self.conn)
result = sql.read_sql("select * from test", self.conn)
result.index = frame.index[:1]
tm.assert_frame_equal(result, frame[:1])

@@ -2666,9 +2649,9 @@ def _check_roundtrip(self, frame):
with warnings.catch_warnings():
warnings.filterwarnings("ignore", "Unknown table.*")
cur.execute(drop_sql)
sql.write_frame(frame, name='test_table',
con=self.conn, flavor='mysql')
result = sql.read_frame("select * from test_table", self.conn)
sql.to_sql(frame, name='test_table',
con=self.conn, flavor='mysql', index=False)
result = sql.read_sql("select * from test_table", self.conn)

# HACK! Change this once indexes are handled properly.
result.index = frame.index
@@ -2686,9 +2669,9 @@ def _check_roundtrip(self, frame):
with warnings.catch_warnings():
warnings.filterwarnings("ignore", "Unknown table.*")
cur.execute(drop_sql)
sql.write_frame(frame2, name='test_table2',
con=self.conn, flavor='mysql')
result = sql.read_frame("select * from test_table2", self.conn,
sql.to_sql(frame2, name='test_table2',
con=self.conn, flavor='mysql', index=False)
result = sql.read_sql("select * from test_table2", self.conn,
index_col='Idx')
expected = frame.copy()

@@ -2706,8 +2689,8 @@ def test_tquery(self):
drop_sql = "DROP TABLE IF EXISTS test_table"
cur = self.conn.cursor()
cur.execute(drop_sql)
sql.write_frame(frame, name='test_table',
con=self.conn, flavor='mysql')
sql.to_sql(frame, name='test_table',
con=self.conn, flavor='mysql', index=False)
result = sql.tquery("select A from test_table", self.conn)
expected = Series(frame.A.values, frame.index) # not to have name
result = Series(result, frame.index)
@@ -2732,8 +2715,8 @@ def test_uquery(self):
drop_sql = "DROP TABLE IF EXISTS test_table"
cur = self.conn.cursor()
cur.execute(drop_sql)
sql.write_frame(frame, name='test_table',
con=self.conn, flavor='mysql')
sql.to_sql(frame, name='test_table',
con=self.conn, flavor='mysql', index=False)
stmt = 'INSERT INTO test_table VALUES(2.314, -123.1, 1.234, 2.3)'
self.assertEqual(sql.uquery(stmt, con=self.conn), 1)

@@ -2754,8 +2737,8 @@ def test_keyword_as_column_names(self):
'''
_skip_if_no_pymysql()
df = DataFrame({'From': np.ones(5)})
sql.write_frame(df, con=self.conn, name='testkeywords',
if_exists='replace', flavor='mysql')
sql.to_sql(df, con=self.conn, name='testkeywords',
if_exists='replace', flavor='mysql', index=False)

def test_if_exists(self):
_skip_if_no_pymysql()
Expand All @@ -2774,7 +2757,7 @@ def clean_up(test_table_to_drop):

# test if invalid value for if_exists raises appropriate error
self.assertRaises(ValueError,
sql.write_frame,
sql.to_sql,
frame=df_if_exists_1,
con=self.conn,
name=table_name,
@@ -2783,34 +2766,34 @@ def clean_up(test_table_to_drop):
clean_up(table_name)

# test if_exists='fail'
sql.write_frame(frame=df_if_exists_1, con=self.conn, name=table_name,
flavor='mysql', if_exists='fail')
sql.to_sql(frame=df_if_exists_1, con=self.conn, name=table_name,
flavor='mysql', if_exists='fail', index=False)
self.assertRaises(ValueError,
sql.write_frame,
sql.to_sql,
frame=df_if_exists_1,
con=self.conn,
name=table_name,
flavor='mysql',
if_exists='fail')

# test if_exists='replace'
sql.write_frame(frame=df_if_exists_1, con=self.conn, name=table_name,
flavor='mysql', if_exists='replace')
sql.to_sql(frame=df_if_exists_1, con=self.conn, name=table_name,
flavor='mysql', if_exists='replace', index=False)
self.assertEqual(sql.tquery(sql_select, con=self.conn),
[(1, 'A'), (2, 'B')])
sql.write_frame(frame=df_if_exists_2, con=self.conn, name=table_name,
flavor='mysql', if_exists='replace')
sql.to_sql(frame=df_if_exists_2, con=self.conn, name=table_name,
flavor='mysql', if_exists='replace', index=False)
self.assertEqual(sql.tquery(sql_select, con=self.conn),
[(3, 'C'), (4, 'D'), (5, 'E')])
clean_up(table_name)

# test if_exists='append'
sql.write_frame(frame=df_if_exists_1, con=self.conn, name=table_name,
flavor='mysql', if_exists='fail')
sql.to_sql(frame=df_if_exists_1, con=self.conn, name=table_name,
flavor='mysql', if_exists='fail', index=False)
self.assertEqual(sql.tquery(sql_select, con=self.conn),
[(1, 'A'), (2, 'B')])
sql.write_frame(frame=df_if_exists_2, con=self.conn, name=table_name,
flavor='mysql', if_exists='append')
sql.to_sql(frame=df_if_exists_2, con=self.conn, name=table_name,
flavor='mysql', if_exists='append', index=False)
self.assertEqual(sql.tquery(sql_select, con=self.conn),
[(1, 'A'), (2, 'B'), (3, 'C'), (4, 'D'), (5, 'E')])
clean_up(table_name)
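
For reference, the three `if_exists` modes exercised by the updated `test_if_exists` cases behave as sketched below. The table name, frames, and in-memory sqlite3 connection are hypothetical, used only to illustrate the semantics.

```python
import sqlite3

import pandas as pd

conn = sqlite3.connect(":memory:")
df1 = pd.DataFrame({"col1": [1, 2], "col2": ["A", "B"]})
df2 = pd.DataFrame({"col1": [3, 4, 5], "col2": ["C", "D", "E"]})

# First write creates the table (if_exists defaults to 'fail').
df1.to_sql("table_if_exists", conn, index=False)

# 'fail': raises ValueError because the table already exists.
# df1.to_sql("table_if_exists", conn, if_exists="fail", index=False)

# 'replace': drops and recreates the table, then inserts df2's rows.
df2.to_sql("table_if_exists", conn, if_exists="replace", index=False)

# 'append': keeps the existing rows and inserts df2's rows after them.
df2.to_sql("table_if_exists", conn, if_exists="append", index=False)
```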
